From 925e6bd14c0f90e86ee8a85976e1ebdacb7c5371 Mon Sep 17 00:00:00 2001 From: CentOS Sources Date: Oct 31 2019 07:48:53 +0000 Subject: import python-2.7.5-88.el7 --- diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9ca0244 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +SOURCES/Python-2.7.5.tar.xz diff --git a/.python.metadata b/.python.metadata new file mode 100644 index 0000000..a8fe566 --- /dev/null +++ b/.python.metadata @@ -0,0 +1 @@ +b7389791f789625c2ba9d897aa324008ff482daf SOURCES/Python-2.7.5.tar.xz diff --git a/SOURCES/00001-pydocnogui.patch b/SOURCES/00001-pydocnogui.patch new file mode 100644 index 0000000..3b34842 --- /dev/null +++ b/SOURCES/00001-pydocnogui.patch @@ -0,0 +1,38 @@ +diff -up Python-2.7.3/Lib/pydoc.py.no_gui Python-2.7.3/Lib/pydoc.py +--- Python-2.7.3/Lib/pydoc.py.no_gui 2012-04-09 19:07:31.000000000 -0400 ++++ Python-2.7.3/Lib/pydoc.py 2013-02-19 13:48:44.480054515 -0500 +@@ -19,9 +19,6 @@ of all available modules. + Run "pydoc -p " to start an HTTP server on a given port on the + local machine to generate documentation web pages. + +-For platforms without a command line, "pydoc -g" starts the HTTP server +-and also pops up a little window for controlling it. +- + Run "pydoc -w " to write out the HTML documentation for a module + to a file named ".html". + +@@ -2290,9 +2287,6 @@ def cli(): + writing = 0 + + for opt, val in opts: +- if opt == '-g': +- gui() +- return + if opt == '-k': + apropos(val) + return +@@ -2346,13 +2340,10 @@ def cli(): + %s -p + Start an HTTP server on the given port on the local machine. + +-%s -g +- Pop up a graphical interface for finding and serving documentation. +- + %s -w ... + Write out the HTML documentation for a module to a file in the current + directory. If contains a '%s', it is treated as a filename; if + it names a directory, documentation is written for all the contents. +-""" % (cmd, os.sep, cmd, cmd, cmd, cmd, os.sep) ++""" % (cmd, os.sep, cmd, cmd, cmd, os.sep) + + if __name__ == '__main__': cli() diff --git a/SOURCES/00055-systemtap.patch b/SOURCES/00055-systemtap.patch new file mode 100644 index 0000000..67ec005 --- /dev/null +++ b/SOURCES/00055-systemtap.patch @@ -0,0 +1,198 @@ +diff -up Python-2.7rc1/configure.ac.systemtap Python-2.7rc1/configure.ac +--- Python-2.7rc1/configure.ac.systemtap 2010-06-06 10:53:15.514975012 -0400 ++++ Python-2.7rc1/configure.ac 2010-06-06 10:53:15.520974361 -0400 +@@ -2616,6 +2616,38 @@ if test "$with_valgrind" != no; then + ) + fi + ++# Check for dtrace support ++AC_MSG_CHECKING(for --with-dtrace) ++AC_ARG_WITH(dtrace, ++ AC_HELP_STRING(--with(out)-dtrace, disable/enable dtrace support)) ++ ++if test ! 
-z "$with_dtrace" ++then ++ if dtrace -G -o /dev/null -s $srcdir/Include/pydtrace.d 2>/dev/null ++ then ++ AC_DEFINE(WITH_DTRACE, 1, ++ [Define if you want to compile in Dtrace support]) ++ with_dtrace="Sun" ++ DTRACEOBJS="Python/dtrace.o" ++ DTRADEHDRS="" ++ elif dtrace -h -o /dev/null -s $srcdir/Include/pydtrace.d ++ then ++ AC_DEFINE(WITH_DTRACE, 1, ++ [Define if you want to compile in Dtrace support]) ++ with_dtrace="Apple" ++ DTRACEOBJS="" ++ DTRADEHDRS="pydtrace.h" ++ else ++ with_dtrace="no" ++ fi ++else ++ with_dtrace="no" ++fi ++ ++AC_MSG_RESULT($with_dtrace) ++AC_SUBST(DTRACEOBJS) ++AC_SUBST(DTRACEHDRS) ++ + # Check for --with-wctype-functions + AC_MSG_CHECKING(for --with-wctype-functions) + AC_ARG_WITH(wctype-functions, +diff -up Python-2.7rc1/Include/pydtrace.d.systemtap Python-2.7rc1/Include/pydtrace.d +--- Python-2.7rc1/Include/pydtrace.d.systemtap 2010-06-06 10:53:15.520974361 -0400 ++++ Python-2.7rc1/Include/pydtrace.d 2010-06-06 10:53:15.520974361 -0400 +@@ -0,0 +1,10 @@ ++provider python { ++ probe function__entry(const char *, const char *, int); ++ probe function__return(const char *, const char *, int); ++}; ++ ++#pragma D attributes Evolving/Evolving/Common provider python provider ++#pragma D attributes Private/Private/Common provider python module ++#pragma D attributes Private/Private/Common provider python function ++#pragma D attributes Evolving/Evolving/Common provider python name ++#pragma D attributes Evolving/Evolving/Common provider python args +diff -up Python-2.7rc1/Makefile.pre.in.systemtap Python-2.7rc1/Makefile.pre.in +--- Python-2.7rc1/Makefile.pre.in.systemtap 2010-06-06 10:53:15.488978775 -0400 ++++ Python-2.7rc1/Makefile.pre.in 2010-06-06 11:05:30.411100568 -0400 +@@ -298,6 +298,7 @@ PYTHON_OBJS= \ + Python/formatter_unicode.o \ + Python/formatter_string.o \ + Python/$(DYNLOADFILE) \ ++ @DTRACEOBJS@ \ + $(LIBOBJS) \ + $(MACHDEP_OBJS) \ + $(THREADOBJ) +@@ -599,6 +600,18 @@ Python/formatter_unicode.o: $(srcdir)/Py + Python/formatter_string.o: $(srcdir)/Python/formatter_string.c \ + $(STRINGLIB_HEADERS) + ++# Only needed with --with-dtrace ++buildinclude: ++ mkdir -p Include ++ ++Include/pydtrace.h: buildinclude $(srcdir)/Include/pydtrace.d ++ dtrace -o $@ $(DFLAGS) -C -h -s $(srcdir)/Include/pydtrace.d ++ ++Python/ceval.o: Include/pydtrace.h ++ ++Python/dtrace.o: buildinclude $(srcdir)/Include/pydtrace.d Python/ceval.o ++ dtrace -o $@ $(DFLAGS) -C -G -s $(srcdir)/Include/pydtrace.d Python/ceval.o ++ + ############################################################################ + # Header files + +@@ -1251,7 +1264,7 @@ Python/thread.o: @THREADHEADERS@ + .PHONY: frameworkinstall frameworkinstallframework frameworkinstallstructure + .PHONY: frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools + .PHONY: frameworkaltinstallunixtools recheck autoconf clean clobber distclean +-.PHONY: smelly funny patchcheck touch altmaninstall ++.PHONY: smelly funny patchcheck touch altmaninstall buildinclude + .PHONY: gdbhooks + + # IF YOU PUT ANYTHING HERE IT WILL GO AWAY +diff -up Python-2.7rc1/pyconfig.h.in.systemtap Python-2.7rc1/pyconfig.h.in +--- Python-2.7rc1/pyconfig.h.in.systemtap 2010-05-08 07:04:18.000000000 -0400 ++++ Python-2.7rc1/pyconfig.h.in 2010-06-06 10:53:15.521974070 -0400 +@@ -1074,6 +1074,9 @@ + /* Define if you want documentation strings in extension modules */ + #undef WITH_DOC_STRINGS + ++/* Define if you want to compile in Dtrace support */ ++#undef WITH_DTRACE ++ + /* Define if you want to use the new-style (Openstep, Rhapsody, 
MacOS) dynamic + linker (dyld) instead of the old-style (NextStep) dynamic linker (rld). + Dyld is necessary to support frameworks. */ +diff -up Python-2.7rc1/Python/ceval.c.systemtap Python-2.7rc1/Python/ceval.c +--- Python-2.7rc1/Python/ceval.c.systemtap 2010-05-09 10:46:46.000000000 -0400 ++++ Python-2.7rc1/Python/ceval.c 2010-06-06 11:08:40.683100500 -0400 +@@ -19,6 +19,10 @@ + + #include + ++#ifdef WITH_DTRACE ++#include "pydtrace.h" ++#endif ++ + #ifndef WITH_TSC + + #define READ_TIMESTAMP(var) +@@ -671,6 +675,55 @@ PyEval_EvalCode(PyCodeObject *co, PyObje + NULL); + } + ++#ifdef WITH_DTRACE ++static void ++dtrace_entry(PyFrameObject *f) ++{ ++ const char *filename; ++ const char *fname; ++ int lineno; ++ ++ filename = PyString_AsString(f->f_code->co_filename); ++ fname = PyString_AsString(f->f_code->co_name); ++ lineno = PyCode_Addr2Line(f->f_code, f->f_lasti); ++ ++ PYTHON_FUNCTION_ENTRY((char *)filename, (char *)fname, lineno); ++ ++ /* ++ * Currently a USDT tail-call will not receive the correct arguments. ++ * Disable the tail call here. ++ */ ++#if defined(__sparc) ++ asm("nop"); ++#endif ++} ++ ++static void ++dtrace_return(PyFrameObject *f) ++{ ++ const char *filename; ++ const char *fname; ++ int lineno; ++ ++ filename = PyString_AsString(f->f_code->co_filename); ++ fname = PyString_AsString(f->f_code->co_name); ++ lineno = PyCode_Addr2Line(f->f_code, f->f_lasti); ++ PYTHON_FUNCTION_RETURN((char *)filename, (char *)fname, lineno); ++ ++ /* ++ * Currently a USDT tail-call will not receive the correct arguments. ++ * Disable the tail call here. ++ */ ++#if defined(__sparc) ++ asm("nop"); ++#endif ++} ++#else ++#define PYTHON_FUNCTION_ENTRY_ENABLED() 0 ++#define PYTHON_FUNCTION_RETURN_ENABLED() 0 ++#define dtrace_entry(f) ++#define dtrace_return(f) ++#endif + + /* Interpreter main loop */ + +@@ -909,6 +962,9 @@ PyEval_EvalFrameEx(PyFrameObject *f, int + } + } + ++ if (PYTHON_FUNCTION_ENTRY_ENABLED()) ++ dtrace_entry(f); ++ + co = f->f_code; + names = co->co_names; + consts = co->co_consts; +@@ -3000,6 +3056,9 @@ fast_yield: + + /* pop frame */ + exit_eval_frame: ++ if (PYTHON_FUNCTION_RETURN_ENABLED()) ++ dtrace_return(f); ++ + Py_LeaveRecursiveCall(); + tstate->frame = f->f_back; + diff --git a/SOURCES/00104-lib64-fix-for-test_install.patch b/SOURCES/00104-lib64-fix-for-test_install.patch new file mode 100644 index 0000000..7852bf6 --- /dev/null +++ b/SOURCES/00104-lib64-fix-for-test_install.patch @@ -0,0 +1,13 @@ +--- Python-2.7.2/Lib/distutils/tests/test_install.py.lib64 2011-09-08 17:51:57.851405376 -0400 ++++ Python-2.7.2/Lib/distutils/tests/test_install.py 2011-09-08 18:40:46.754205096 -0400 +@@ -41,8 +41,9 @@ class InstallTestCase(support.TempdirMan + self.assertEqual(got, expected) + + libdir = os.path.join(destination, "lib", "python") ++ platlibdir = os.path.join(destination, "lib64", "python") + check_path(cmd.install_lib, libdir) +- check_path(cmd.install_platlib, libdir) ++ check_path(cmd.install_platlib, platlibdir) + check_path(cmd.install_purelib, libdir) + check_path(cmd.install_headers, + os.path.join(destination, "include", "python", "foopkg")) diff --git a/SOURCES/00111-no-static-lib.patch b/SOURCES/00111-no-static-lib.patch new file mode 100644 index 0000000..2f4fdd6 --- /dev/null +++ b/SOURCES/00111-no-static-lib.patch @@ -0,0 +1,50 @@ +diff -up Python-2.7.3/Makefile.pre.in.no-static-lib Python-2.7.3/Makefile.pre.in +--- Python-2.7.3/Makefile.pre.in.no-static-lib 2013-02-19 14:03:40.801993224 -0500 ++++ Python-2.7.3/Makefile.pre.in 2013-02-19 
14:04:44.070988898 -0500 +@@ -397,7 +397,7 @@ coverage: + + + # Build the interpreter +-$(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY) ++$(BUILDPYTHON): Modules/python.o $(LDLIBRARY) + $(LINKCC) $(CFLAGS) $(LDFLAGS) $(LINKFORSHARED) -o $@ \ + Modules/python.o \ + $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) +@@ -413,18 +413,6 @@ sharedmods: $(BUILDPYTHON) + $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + +-# Build static library +-# avoid long command lines, same as LIBRARY_OBJS +-$(LIBRARY): $(LIBRARY_OBJS) +- -rm -f $@ +- $(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o +- $(AR) $(ARFLAGS) $@ $(PARSER_OBJS) +- $(AR) $(ARFLAGS) $@ $(OBJECT_OBJS) +- $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS) +- $(AR) $(ARFLAGS) $@ $(MODULE_OBJS) $(SIGNAL_OBJS) +- $(AR) $(ARFLAGS) $@ $(MODOBJS) +- $(RANLIB) $@ +- + libpython$(VERSION).so: $(LIBRARY_OBJS) + if test $(INSTSONAME) != $(LDLIBRARY); then \ + $(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \ +@@ -1021,18 +1009,6 @@ libainstall: all python-config + else true; \ + fi; \ + done +- @if test -d $(LIBRARY); then :; else \ +- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ +- if test "$(SO)" = .dll; then \ +- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \ +- else \ +- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \ +- $(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \ +- fi; \ +- else \ +- echo Skip install of $(LIBRARY) - use make frameworkinstall; \ +- fi; \ +- fi + $(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c + $(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o + $(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in diff --git a/SOURCES/00113-more-configuration-flags.patch b/SOURCES/00113-more-configuration-flags.patch new file mode 100644 index 0000000..2d447b2 --- /dev/null +++ b/SOURCES/00113-more-configuration-flags.patch @@ -0,0 +1,50 @@ +diff -up Python-2.6.5/configure.ac.more-configuration-flags Python-2.6.5/configure.ac +--- Python-2.6.5/configure.ac.more-configuration-flags 2010-05-24 18:51:25.410111792 -0400 ++++ Python-2.6.5/configure.ac 2010-05-24 18:59:23.954986388 -0400 +@@ -2515,6 +2515,30 @@ else AC_MSG_RESULT(no) + fi], + [AC_MSG_RESULT(no)]) + ++AC_MSG_CHECKING(for --with-count-allocs) ++AC_ARG_WITH(count-allocs, ++[ --with(out)count-allocs enable/disable per-type instance accounting], [ ++if test "$withval" != no ++then ++ AC_DEFINE(COUNT_ALLOCS, 1, ++ [Define to keep records of the number of instances of each type]) ++ AC_MSG_RESULT(yes) ++else AC_MSG_RESULT(no) ++fi], ++[AC_MSG_RESULT(no)]) ++ ++AC_MSG_CHECKING(for --with-call-profile) ++AC_ARG_WITH(call-profile, ++[ --with(out)-call-profile enable/disable statistics on function call invocation], [ ++if test "$withval" != no ++then ++ AC_DEFINE(CALL_PROFILE, 1, ++ [Define to keep records on function call invocation]) ++ AC_MSG_RESULT(yes) ++else AC_MSG_RESULT(no) ++fi], ++[AC_MSG_RESULT(no)]) ++ + # Check for Python-specific malloc support + AC_MSG_CHECKING(for --with-pymalloc) + AC_ARG_WITH(pymalloc, +diff -up Python-2.6.5/pyconfig.h.in.more-configuration-flags Python-2.6.5/pyconfig.h.in +--- Python-2.6.5/pyconfig.h.in.more-configuration-flags 2010-05-24 18:51:45.677988086 -0400 ++++ Python-2.6.5/pyconfig.h.in 2010-05-24 19:00:44.163987730 -0400 +@@ -1019,6 +1019,12 @@ + /* Define to profile with the Pentium timestamp counter */ + #undef WITH_TSC + ++/* Define to keep 
records of the number of instances of each type */ ++#undef COUNT_ALLOCS ++ ++/* Define to keep records on function call invocation */ ++#undef CALL_PROFILE ++ + /* Define if you want pymalloc to be disabled when running under valgrind */ + #undef WITH_VALGRIND + diff --git a/SOURCES/00114-statvfs-f_flag-constants.patch b/SOURCES/00114-statvfs-f_flag-constants.patch new file mode 100644 index 0000000..83e7b59 --- /dev/null +++ b/SOURCES/00114-statvfs-f_flag-constants.patch @@ -0,0 +1,47 @@ +diff -up Python-2.7rc1/Modules/posixmodule.c.statvfs-f-flag-constants Python-2.7rc1/Modules/posixmodule.c +--- Python-2.7rc1/Modules/posixmodule.c.statvfs-f-flag-constants 2010-05-15 17:45:30.000000000 -0400 ++++ Python-2.7rc1/Modules/posixmodule.c 2010-06-07 22:54:16.162068624 -0400 +@@ -9174,6 +9174,43 @@ all_ins(PyObject *d) + #endif + #endif + ++ /* These came from statvfs.h */ ++#ifdef ST_RDONLY ++ if (ins(d, "ST_RDONLY", (long)ST_RDONLY)) return -1; ++#endif /* ST_RDONLY */ ++#ifdef ST_NOSUID ++ if (ins(d, "ST_NOSUID", (long)ST_NOSUID)) return -1; ++#endif /* ST_NOSUID */ ++ ++ /* GNU extensions */ ++#ifdef ST_NODEV ++ if (ins(d, "ST_NODEV", (long)ST_NODEV)) return -1; ++#endif /* ST_NODEV */ ++#ifdef ST_NOEXEC ++ if (ins(d, "ST_NOEXEC", (long)ST_NOEXEC)) return -1; ++#endif /* ST_NOEXEC */ ++#ifdef ST_SYNCHRONOUS ++ if (ins(d, "ST_SYNCHRONOUS", (long)ST_SYNCHRONOUS)) return -1; ++#endif /* ST_SYNCHRONOUS */ ++#ifdef ST_MANDLOCK ++ if (ins(d, "ST_MANDLOCK", (long)ST_MANDLOCK)) return -1; ++#endif /* ST_MANDLOCK */ ++#ifdef ST_WRITE ++ if (ins(d, "ST_WRITE", (long)ST_WRITE)) return -1; ++#endif /* ST_WRITE */ ++#ifdef ST_APPEND ++ if (ins(d, "ST_APPEND", (long)ST_APPEND)) return -1; ++#endif /* ST_APPEND */ ++#ifdef ST_NOATIME ++ if (ins(d, "ST_NOATIME", (long)ST_NOATIME)) return -1; ++#endif /* ST_NOATIME */ ++#ifdef ST_NODIRATIME ++ if (ins(d, "ST_NODIRATIME", (long)ST_NODIRATIME)) return -1; ++#endif /* ST_NODIRATIME */ ++#ifdef ST_RELATIME ++ if (ins(d, "ST_RELATIME", (long)ST_RELATIME)) return -1; ++#endif /* ST_RELATIME */ ++ + #if defined(PYOS_OS2) + if (insertvalues(d)) return -1; + #endif diff --git a/SOURCES/00121-add-Modules-to-build-path.patch b/SOURCES/00121-add-Modules-to-build-path.patch new file mode 100644 index 0000000..6e3294d --- /dev/null +++ b/SOURCES/00121-add-Modules-to-build-path.patch @@ -0,0 +1,13 @@ +--- Python-2.7.5/Lib/site.py.orig 2013-05-16 12:47:55.000000000 +0200 ++++ Python-2.7.5/Lib/site.py 2013-05-16 12:56:20.089058109 +0200 +@@ -529,6 +529,10 @@ def main(): + + abs__file__() + known_paths = removeduppaths() ++ from sysconfig import is_python_build ++ if is_python_build(): ++ from _sysconfigdata import build_time_vars ++ sys.path.append(os.path.join(build_time_vars['abs_builddir'], 'Modules')) + if ENABLE_USER_SITE is None: + ENABLE_USER_SITE = check_enableusersite() + known_paths = addusersitepackages(known_paths) diff --git a/SOURCES/00125-less-verbose-COUNT_ALLOCS.patch b/SOURCES/00125-less-verbose-COUNT_ALLOCS.patch new file mode 100644 index 0000000..8cef015 --- /dev/null +++ b/SOURCES/00125-less-verbose-COUNT_ALLOCS.patch @@ -0,0 +1,20 @@ +diff -up Python-2.7/Python/pythonrun.c.less-verbose-COUNT_ALLOCS Python-2.7/Python/pythonrun.c +--- Python-2.7/Python/pythonrun.c.less-verbose-COUNT_ALLOCS 2010-08-17 14:49:33.321913909 -0400 ++++ Python-2.7/Python/pythonrun.c 2010-08-17 14:54:48.750910403 -0400 +@@ -470,7 +470,15 @@ Py_Finalize(void) + + /* Debugging stuff */ + #ifdef COUNT_ALLOCS +- dump_counts(stdout); ++ /* This is a downstream Fedora modification. 
++ The upstream default with COUNT_ALLOCS is to always dump the counts to ++ stdout on exit. For our debug builds its useful to have the info from ++ COUNT_ALLOCS available, but the stdout info here gets in the way, so ++ we make it optional, wrapping it in an environment variable (modelled ++ on the other PYTHONDUMP* env variables): ++ */ ++ if (Py_GETENV("PYTHONDUMPCOUNTS")) ++ dump_counts(stdout); + #endif + + PRINT_TOTAL_REFS(); diff --git a/SOURCES/00131-disable-tests-in-test_io.patch b/SOURCES/00131-disable-tests-in-test_io.patch new file mode 100644 index 0000000..d81a2d0 --- /dev/null +++ b/SOURCES/00131-disable-tests-in-test_io.patch @@ -0,0 +1,11 @@ +diff -up Python-2.7.2/Lib/test/test_io.py.disable-tests-in-test_io Python-2.7.2/Lib/test/test_io.py +--- Python-2.7.2/Lib/test/test_io.py.disable-tests-in-test_io 2011-09-01 14:18:45.963304089 -0400 ++++ Python-2.7.2/Lib/test/test_io.py 2011-09-01 15:08:53.796098413 -0400 +@@ -2669,6 +2669,7 @@ class SignalsTest(unittest.TestCase): + self.check_interrupted_read_retry(lambda x: x, + mode="r") + ++ @unittest.skip('rhbz#732998') + @unittest.skipUnless(threading, 'Threading required for this test.') + def check_interrupted_write_retry(self, item, **fdopen_kwargs): + """Check that a buffered write, when it gets interrupted (either diff --git a/SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch b/SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch new file mode 100644 index 0000000..e63395f --- /dev/null +++ b/SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch @@ -0,0 +1,68 @@ +diff -up Python-2.7.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-2.7.2/Lib/unittest/case.py +--- Python-2.7.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-08 14:45:47.677169191 -0400 ++++ Python-2.7.2/Lib/unittest/case.py 2011-09-08 16:01:36.287858159 -0400 +@@ -1,6 +1,7 @@ + """Test case implementation""" + + import collections ++import os + import sys + import functools + import difflib +@@ -94,6 +95,43 @@ def expectedFailure(func): + return wrapper + + ++# Non-standard/downstream-only hooks for handling issues with specific test ++# cases: ++ ++def _skipInRpmBuild(reason): ++ """ ++ Non-standard/downstream-only decorator for marking a specific unit test ++ to be skipped when run within the %check of an rpmbuild. ++ ++ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within ++ the environment, and has no effect otherwise. ++ """ ++ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ: ++ return skip(reason) ++ else: ++ return _id ++ ++def _expectedFailureInRpmBuild(func): ++ """ ++ Non-standard/downstream-only decorator for marking a specific unit test ++ as expected to fail within the %check of an rpmbuild. ++ ++ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within ++ the environment, and has no effect otherwise. 
++ """ ++ @functools.wraps(func) ++ def wrapper(*args, **kwargs): ++ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ: ++ try: ++ func(*args, **kwargs) ++ except Exception: ++ raise _ExpectedFailure(sys.exc_info()) ++ raise _UnexpectedSuccess ++ else: ++ # Call directly: ++ func(*args, **kwargs) ++ return wrapper ++ + class _AssertRaisesContext(object): + """A context manager used to implement TestCase.assertRaises* methods.""" + +diff -up Python-2.7.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-2.7.2/Lib/unittest/__init__.py +--- Python-2.7.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-08 14:59:39.534112310 -0400 ++++ Python-2.7.2/Lib/unittest/__init__.py 2011-09-08 15:07:09.191081562 -0400 +@@ -57,7 +57,8 @@ __unittest = True + + from .result import TestResult + from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf, +- skipUnless, expectedFailure) ++ skipUnless, expectedFailure, ++ _skipInRpmBuild, _expectedFailureInRpmBuild) + from .suite import BaseTestSuite, TestSuite + from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames, + findTestCases) diff --git a/SOURCES/00133-skip-test_dl.patch b/SOURCES/00133-skip-test_dl.patch new file mode 100644 index 0000000..04ad05b --- /dev/null +++ b/SOURCES/00133-skip-test_dl.patch @@ -0,0 +1,13 @@ +diff -up Python-2.7.2/Lib/test/test_dl.py.skip-test_dl Python-2.7.2/Lib/test/test_dl.py +--- Python-2.7.2/Lib/test/test_dl.py.skip-test_dl 2011-09-08 15:18:40.529034289 -0400 ++++ Python-2.7.2/Lib/test/test_dl.py 2011-09-08 16:29:45.184742670 -0400 +@@ -13,6 +13,9 @@ sharedlibs = [ + ('/usr/lib/libc.dylib', 'getpid'), + ] + ++# (also, "dl" is deprecated in favor of ctypes) ++@unittest._skipInRpmBuild('fails on 64-bit builds: ' ++ 'module dl requires sizeof(int) == sizeof(long) == sizeof(char*)') + def test_main(): + for s, func in sharedlibs: + try: diff --git a/SOURCES/00134-fix-COUNT_ALLOCS-failure-in-test_sys.patch b/SOURCES/00134-fix-COUNT_ALLOCS-failure-in-test_sys.patch new file mode 100644 index 0000000..38381ef --- /dev/null +++ b/SOURCES/00134-fix-COUNT_ALLOCS-failure-in-test_sys.patch @@ -0,0 +1,14 @@ +--- Python-2.7.2/Lib/test/test_sys.py.mark-tests-that-fail-in-rpmbuild 2011-09-08 18:02:31.627362039 -0400 ++++ Python-2.7.2/Lib/test/test_sys.py 2011-09-08 18:15:29.450308851 -0400 +@@ -734,6 +734,11 @@ class SizeofTest(unittest.TestCase): + # (PyTypeObject + PyNumberMethods + PyMappingMethods + + # PySequenceMethods + PyBufferProcs) + s = vsize('P2P15Pl4PP9PP11PI') + struct.calcsize('41P 10P 3P 6P') ++ ++ # COUNT_ALLOCS adds further fields to the end of a PyTypeObject: ++ if hasattr(sys, 'getcounts'): ++ s += size('P') ++ + class newstyleclass(object): + pass + check(newstyleclass, s) diff --git a/SOURCES/00135-skip-test-within-test_weakref-in-debug-build.patch b/SOURCES/00135-skip-test-within-test_weakref-in-debug-build.patch new file mode 100644 index 0000000..e464aa9 --- /dev/null +++ b/SOURCES/00135-skip-test-within-test_weakref-in-debug-build.patch @@ -0,0 +1,18 @@ +diff -up Python-2.7.2/Lib/test/test_weakref.py.skip-test-within-test_weakref-in-debug-build Python-2.7.2/Lib/test/test_weakref.py +--- Python-2.7.2/Lib/test/test_weakref.py.skip-test-within-test_weakref-in-debug-build 2011-09-08 17:55:09.675392260 -0400 ++++ Python-2.7.2/Lib/test/test_weakref.py 2011-09-08 17:59:08.857375903 -0400 +@@ -550,6 +550,14 @@ class ReferencesTestCase(TestBase): + del c1, c2, C, D + gc.collect() + ++ # In a debug build, this fails with: ++ # AssertionError: Lists differ: [] != ['C 
went away'] ++ # Second list contains 1 additional elements. ++ # First extra element 0: ++ # C went away ++ # - [] ++ # + ['C went away'] ++ @unittest.skipIf(hasattr(sys, 'getobjects'), 'debug build') + def test_callback_in_cycle_resurrection(self): + import gc + diff --git a/SOURCES/00136-skip-tests-of-seeking-stdin-in-rpmbuild.patch b/SOURCES/00136-skip-tests-of-seeking-stdin-in-rpmbuild.patch new file mode 100644 index 0000000..845fb2a --- /dev/null +++ b/SOURCES/00136-skip-tests-of-seeking-stdin-in-rpmbuild.patch @@ -0,0 +1,22 @@ +diff -up Python-2.7.2/Lib/test/test_file2k.py.skip-tests-of-seeking-stdin-in-rpmbuild Python-2.7.2/Lib/test/test_file2k.py +--- Python-2.7.2/Lib/test/test_file2k.py.skip-tests-of-seeking-stdin-in-rpmbuild 2011-09-08 17:23:50.922520729 -0400 ++++ Python-2.7.2/Lib/test/test_file2k.py 2011-09-08 17:24:41.368517277 -0400 +@@ -213,6 +213,7 @@ class OtherFileTests(unittest.TestCase): + else: + f.close() + ++ @unittest._skipInRpmBuild('seems not to raise the exception when run in Koji') + def testStdin(self): + # This causes the interpreter to exit on OSF1 v5.1. + if sys.platform != 'osf1V5': +diff -up Python-2.7.2/Lib/test/test_file.py.skip-tests-of-seeking-stdin-in-rpmbuild Python-2.7.2/Lib/test/test_file.py +--- Python-2.7.2/Lib/test/test_file.py.skip-tests-of-seeking-stdin-in-rpmbuild 2011-09-08 17:20:31.146534389 -0400 ++++ Python-2.7.2/Lib/test/test_file.py 2011-09-08 17:24:45.016517030 -0400 +@@ -154,6 +154,7 @@ class OtherFileTests(unittest.TestCase): + f.close() + self.fail('%r is an invalid file mode' % mode) + ++ @unittest._skipInRpmBuild('seems not to raise the exception when run in Koji') + def testStdin(self): + # This causes the interpreter to exit on OSF1 v5.1. + if sys.platform != 'osf1V5': diff --git a/SOURCES/00137-skip-distutils-tests-that-fail-in-rpmbuild.patch b/SOURCES/00137-skip-distutils-tests-that-fail-in-rpmbuild.patch new file mode 100644 index 0000000..8653772 --- /dev/null +++ b/SOURCES/00137-skip-distutils-tests-that-fail-in-rpmbuild.patch @@ -0,0 +1,12 @@ +diff -up Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py.mark-tests-that-fail-in-rpmbuild Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py +--- Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py.mark-tests-that-fail-in-rpmbuild 2012-04-09 19:07:29.000000000 -0400 ++++ Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py 2012-04-13 00:20:08.223819263 -0400 +@@ -24,6 +24,7 @@ setup(name='foo', version='0.1', py_modu + + """ + ++@unittest._skipInRpmBuild("don't try to nest one rpm build inside another rpm build") + class BuildRpmTestCase(support.TempdirManager, + support.LoggingSilencer, + unittest.TestCase): +diff -up Python-2.7.3/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild Python-2.7.3/Lib/distutils/tests/test_build_ext.py diff --git a/SOURCES/00138-fix-distutils-tests-in-debug-build.patch b/SOURCES/00138-fix-distutils-tests-in-debug-build.patch new file mode 100644 index 0000000..0bfda90 --- /dev/null +++ b/SOURCES/00138-fix-distutils-tests-in-debug-build.patch @@ -0,0 +1,68 @@ +diff -up Python-2.7.2/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild Python-2.7.2/Lib/distutils/tests/test_build_ext.py +--- Python-2.7.2/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild 2011-09-08 16:07:25.033834312 -0400 ++++ Python-2.7.2/Lib/distutils/tests/test_build_ext.py 2011-09-08 17:43:15.656441082 -0400 +@@ -330,6 +332,7 @@ class BuildExtTestCase(support.TempdirMa + self.assertEqual(lastdir, 'bar') + + def 
test_ext_fullpath(self): ++ debug_ext = sysconfig.get_config_var("DEBUG_EXT") + ext = sysconfig.get_config_vars()['SO'] + dist = Distribution() + cmd = build_ext(dist) +@@ -337,14 +340,14 @@ class BuildExtTestCase(support.TempdirMa + cmd.distribution.package_dir = {'': 'src'} + cmd.distribution.packages = ['lxml', 'lxml.html'] + curdir = os.getcwd() +- wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext) ++ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + debug_ext + ext) + path = cmd.get_ext_fullpath('lxml.etree') + self.assertEqual(wanted, path) + + # building lxml.etree not inplace + cmd.inplace = 0 + cmd.build_lib = os.path.join(curdir, 'tmpdir') +- wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext) ++ wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + debug_ext + ext) + path = cmd.get_ext_fullpath('lxml.etree') + self.assertEqual(wanted, path) + +@@ -354,13 +357,13 @@ class BuildExtTestCase(support.TempdirMa + cmd.distribution.packages = ['twisted', 'twisted.runner.portmap'] + path = cmd.get_ext_fullpath('twisted.runner.portmap') + wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', +- 'portmap' + ext) ++ 'portmap' + debug_ext + ext) + self.assertEqual(wanted, path) + + # building twisted.runner.portmap inplace + cmd.inplace = 1 + path = cmd.get_ext_fullpath('twisted.runner.portmap') +- wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext) ++ wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + debug_ext + ext) + self.assertEqual(wanted, path) + + def test_build_ext_inplace(self): +@@ -373,8 +376,9 @@ class BuildExtTestCase(support.TempdirMa + cmd.distribution.package_dir = {'': 'src'} + cmd.distribution.packages = ['lxml', 'lxml.html'] + curdir = os.getcwd() ++ debug_ext = sysconfig.get_config_var("DEBUG_EXT") + ext = sysconfig.get_config_var("SO") +- wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext) ++ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + debug_ext + ext) + path = cmd.get_ext_fullpath('lxml.etree') + self.assertEqual(wanted, path) + +@@ -412,10 +416,11 @@ class BuildExtTestCase(support.TempdirMa + dist = Distribution({'name': 'UpdateManager'}) + cmd = build_ext(dist) + cmd.ensure_finalized() ++ debug_ext = sysconfig.get_config_var("DEBUG_EXT") + ext = sysconfig.get_config_var("SO") + ext_name = os.path.join('UpdateManager', 'fdsend') + ext_path = cmd.get_ext_fullpath(ext_name) +- wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + ext) ++ wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + debug_ext + ext) + self.assertEqual(ext_path, wanted) + + def test_build_ext_path_cross_platform(self): diff --git a/SOURCES/00139-skip-test_float-known-failure-on-arm.patch b/SOURCES/00139-skip-test_float-known-failure-on-arm.patch new file mode 100644 index 0000000..9d0bfad --- /dev/null +++ b/SOURCES/00139-skip-test_float-known-failure-on-arm.patch @@ -0,0 +1,11 @@ +diff -up Python-2.7.2/Lib/test/test_float.py.skip-test_float-known-failure-on-arm Python-2.7.2/Lib/test/test_float.py +--- Python-2.7.2/Lib/test/test_float.py.skip-test_float-known-failure-on-arm 2011-09-08 19:34:09.000986128 -0400 ++++ Python-2.7.2/Lib/test/test_float.py 2011-09-08 19:34:57.969982779 -0400 +@@ -1072,6 +1072,7 @@ class HexFloatTestCase(unittest.TestCase + self.identical(got, expected) + + ++ @unittest.skip('Known failure on ARM: http://bugs.python.org/issue8265') + def test_from_hex(self): + MIN = self.MIN; + MAX = self.MAX; diff --git a/SOURCES/00140-skip-test_ctypes-known-failure-on-sparc.patch 
b/SOURCES/00140-skip-test_ctypes-known-failure-on-sparc.patch new file mode 100644 index 0000000..95aa41e --- /dev/null +++ b/SOURCES/00140-skip-test_ctypes-known-failure-on-sparc.patch @@ -0,0 +1,11 @@ +diff -up Python-2.7.2/Lib/ctypes/test/test_callbacks.py.skip-test_ctypes-known-failure-on-sparc Python-2.7.2/Lib/ctypes/test/test_callbacks.py +--- Python-2.7.2/Lib/ctypes/test/test_callbacks.py.skip-test_ctypes-known-failure-on-sparc 2011-09-08 19:42:35.541951490 -0400 ++++ Python-2.7.2/Lib/ctypes/test/test_callbacks.py 2011-09-08 19:43:40.676947036 -0400 +@@ -67,6 +67,7 @@ class Callbacks(unittest.TestCase): + self.check_type(c_longlong, 42) + self.check_type(c_longlong, -42) + ++ @unittest.skip('Known failure on Sparc: http://bugs.python.org/issue8314') + def test_ulonglong(self): + # test some 64-bit values, with and without msb set. + self.check_type(c_ulonglong, 10955412242170339782) diff --git a/SOURCES/00141-fix-test_gc_with_COUNT_ALLOCS.patch b/SOURCES/00141-fix-test_gc_with_COUNT_ALLOCS.patch new file mode 100644 index 0000000..d5bf3c9 --- /dev/null +++ b/SOURCES/00141-fix-test_gc_with_COUNT_ALLOCS.patch @@ -0,0 +1,24 @@ +diff -up Python-2.7.2/Lib/test/test_gc.py.fix-test_gc_with_COUNT_ALLOCS Python-2.7.2/Lib/test/test_gc.py +--- Python-2.7.2/Lib/test/test_gc.py.fix-test_gc_with_COUNT_ALLOCS 2011-09-08 19:49:13.045924309 -0400 ++++ Python-2.7.2/Lib/test/test_gc.py 2011-09-08 19:50:07.035920617 -0400 +@@ -102,11 +102,17 @@ class GCTests(unittest.TestCase): + del a + self.assertNotEqual(gc.collect(), 0) + del B, C +- self.assertNotEqual(gc.collect(), 0) ++ if hasattr(sys, 'getcounts'): ++ self.assertEqual(gc.collect(), 0) ++ else: ++ self.assertNotEqual(gc.collect(), 0) + A.a = A() + del A +- self.assertNotEqual(gc.collect(), 0) +- self.assertEqual(gc.collect(), 0) ++ if hasattr(sys, 'getcounts'): ++ self.assertEqual(gc.collect(), 0) ++ else: ++ self.assertNotEqual(gc.collect(), 0) ++ self.assertEqual(gc.collect(), 0) + + def test_method(self): + # Tricky: self.__init__ is a bound method, it references the instance. 
diff --git a/SOURCES/00142-skip-failing-pty-tests-in-rpmbuild.patch b/SOURCES/00142-skip-failing-pty-tests-in-rpmbuild.patch new file mode 100644 index 0000000..414ffcd --- /dev/null +++ b/SOURCES/00142-skip-failing-pty-tests-in-rpmbuild.patch @@ -0,0 +1,22 @@ +diff -up Python-2.7.2/Lib/test/test_openpty.py.skip-failing-pty-tests-in-rpmbuild Python-2.7.2/Lib/test/test_openpty.py +--- Python-2.7.2/Lib/test/test_openpty.py.skip-failing-pty-tests-in-rpmbuild 2011-09-09 05:09:28.698920379 -0400 ++++ Python-2.7.2/Lib/test/test_openpty.py 2011-09-09 05:10:54.805914490 -0400 +@@ -8,6 +8,7 @@ if not hasattr(os, "openpty"): + + + class OpenptyTest(unittest.TestCase): ++ @unittest._skipInRpmBuild('sometimes fails in Koji, possibly due to a mock issue (rhbz#714627)') + def test(self): + master, slave = os.openpty() + if not os.isatty(slave): +diff -up Python-2.7.2/Lib/test/test_pty.py.skip-failing-pty-tests-in-rpmbuild Python-2.7.2/Lib/test/test_pty.py +--- Python-2.7.2/Lib/test/test_pty.py.skip-failing-pty-tests-in-rpmbuild 2011-09-09 05:09:36.781919825 -0400 ++++ Python-2.7.2/Lib/test/test_pty.py 2011-09-09 05:11:14.741913127 -0400 +@@ -109,6 +109,7 @@ class PtyTest(unittest.TestCase): + os.close(master_fd) + + ++ @unittest._skipInRpmBuild('sometimes fails in Koji, possibly due to a mock issue (rhbz#714627)') + def test_fork(self): + debug("calling pty.fork()") + pid, master_fd = pty.fork() diff --git a/SOURCES/00143-tsc-on-ppc.patch b/SOURCES/00143-tsc-on-ppc.patch new file mode 100644 index 0000000..447c6e3 --- /dev/null +++ b/SOURCES/00143-tsc-on-ppc.patch @@ -0,0 +1,58 @@ +diff -up Python-2.7.2/Python/ceval.c.tsc-on-ppc Python-2.7.2/Python/ceval.c +--- Python-2.7.2/Python/ceval.c.tsc-on-ppc 2011-08-23 14:59:48.051300849 -0400 ++++ Python-2.7.2/Python/ceval.c 2011-08-23 15:33:25.412162902 -0400 +@@ -37,24 +37,42 @@ typedef unsigned long long uint64; + */ + #if defined(__ppc__) || defined (__powerpc__) + +-#define READ_TIMESTAMP(var) ppc_getcounter(&var) ++#if defined( __powerpc64__) || defined(__LP64__) ++/* 64-bit PowerPC */ ++#define READ_TIMESTAMP(var) ppc64_getcounter(&var) ++static void ++ppc64_getcounter(uint64 *v) ++{ ++ /* On 64-bit PowerPC we can read the 64-bit timebase directly into a ++ 64-bit register */ ++ uint64 timebase; ++#ifdef _ARCH_PWR4 ++ asm volatile ("mfspr %0,268" : "=r" (timebase)); ++#else ++ asm volatile ("mftb %0" : "=r" (timebase)); ++#endif ++ *v = timebase; ++} ++ ++#else ++/* 32-bit PowerPC */ ++#define READ_TIMESTAMP(var) ppc32_getcounter(&var) + + static void +-ppc_getcounter(uint64 *v) ++ppc32_getcounter(uint64 *v) + { +- register unsigned long tbu, tb, tbu2; ++ union { long long ll; long ii[2]; } u; ++ long tmp; + + loop: +- asm volatile ("mftbu %0" : "=r" (tbu) ); +- asm volatile ("mftb %0" : "=r" (tb) ); +- asm volatile ("mftbu %0" : "=r" (tbu2)); +- if (__builtin_expect(tbu != tbu2, 0)) goto loop; +- +- /* The slightly peculiar way of writing the next lines is +- compiled better by GCC than any other way I tried. 
*/ +- ((long*)(v))[0] = tbu; +- ((long*)(v))[1] = tb; ++ asm volatile ("mftbu %0" : "=r" (u.ii[0]) ); ++ asm volatile ("mftb %0" : "=r" (u.ii[1]) ); ++ asm volatile ("mftbu %0" : "=r" (tmp)); ++ if (__builtin_expect(u.ii[0] != tmp, 0)) goto loop; ++ ++ *v = u.ll; + } ++#endif /* powerpc 32/64 bit */ + + #elif defined(__i386__) + diff --git a/SOURCES/00144-no-gdbm.patch b/SOURCES/00144-no-gdbm.patch new file mode 100644 index 0000000..0378d44 --- /dev/null +++ b/SOURCES/00144-no-gdbm.patch @@ -0,0 +1,12 @@ +diff -up Python-2.7.2/Modules/Setup.dist.no-gdbm Python-2.7.2/Modules/Setup.dist +--- Python-2.7.2/Modules/Setup.dist.no-gdbm 2011-09-13 14:25:43.496095926 -0400 ++++ Python-2.7.2/Modules/Setup.dist 2011-09-13 14:25:46.491095724 -0400 +@@ -396,7 +396,7 @@ dl dlmodule.c + # + # First, look at Setup.config; configure may have set this for you. + +-gdbm gdbmmodule.c -lgdbm ++# gdbm gdbmmodule.c -lgdbm + + + # Sleepycat Berkeley DB interface. diff --git a/SOURCES/00146-hashlib-fips.patch b/SOURCES/00146-hashlib-fips.patch new file mode 100644 index 0000000..c67eb3b --- /dev/null +++ b/SOURCES/00146-hashlib-fips.patch @@ -0,0 +1,729 @@ +diff -up Python-2.7.2/Lib/hashlib.py.hashlib-fips Python-2.7.2/Lib/hashlib.py +--- Python-2.7.2/Lib/hashlib.py.hashlib-fips 2011-06-11 11:46:24.000000000 -0400 ++++ Python-2.7.2/Lib/hashlib.py 2011-09-14 00:21:26.194252001 -0400 +@@ -6,9 +6,12 @@ + + __doc__ = """hashlib module - A common interface to many hash functions. + +-new(name, string='') - returns a new hash object implementing the +- given hash function; initializing the hash +- using the given string data. ++new(name, string='', usedforsecurity=True) ++ - returns a new hash object implementing the given hash function; ++ initializing the hash using the given string data. ++ ++ "usedforsecurity" is a non-standard extension for better supporting ++ FIPS-compliant environments (see below) + + Named constructor functions are also available, these are much faster + than using new(): +@@ -24,6 +27,20 @@ the zlib module. + Choose your hash function wisely. Some have known collision weaknesses. + sha384 and sha512 will be slow on 32 bit platforms. + ++Our implementation of hashlib uses OpenSSL. ++ ++OpenSSL has a "FIPS mode", which, if enabled, may restrict the available hashes ++to only those that are compliant with FIPS regulations. For example, it may ++deny the use of MD5, on the grounds that this is not secure for uses such as ++authentication, system integrity checking, or digital signatures. ++ ++If you need to use such a hash for non-security purposes (such as indexing into ++a data structure for speed), you can override the keyword argument ++"usedforsecurity" from True to False to signify that your code is not relying ++on the hash for security purposes, and this will allow the hash to be usable ++even in FIPS mode. This is not a standard feature of Python 2.7's hashlib, and ++is included here to better support FIPS mode. ++ + Hash objects have these methods: + - update(arg): Update the hash object with the string arg. 
Repeated calls + are equivalent to a single call with the concatenation of all +@@ -63,74 +80,39 @@ algorithms = __always_supported + __all__ = __always_supported + ('new', 'algorithms') + + +-def __get_builtin_constructor(name): +- try: +- if name in ('SHA1', 'sha1'): +- import _sha +- return _sha.new +- elif name in ('MD5', 'md5'): +- import _md5 +- return _md5.new +- elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'): +- import _sha256 +- bs = name[3:] +- if bs == '256': +- return _sha256.sha256 +- elif bs == '224': +- return _sha256.sha224 +- elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'): +- import _sha512 +- bs = name[3:] +- if bs == '512': +- return _sha512.sha512 +- elif bs == '384': +- return _sha512.sha384 +- except ImportError: +- pass # no extension module, this hash is unsupported. +- +- raise ValueError('unsupported hash type ' + name) +- +- + def __get_openssl_constructor(name): + try: + f = getattr(_hashlib, 'openssl_' + name) + # Allow the C module to raise ValueError. The function will be + # defined but the hash not actually available thanks to OpenSSL. +- f() ++ # ++ # We pass "usedforsecurity=False" to disable FIPS-based restrictions: ++ # at this stage we're merely seeing if the function is callable, ++ # rather than using it for actual work. ++ f(usedforsecurity=False) + # Use the C function directly (very fast) + return f + except (AttributeError, ValueError): +- return __get_builtin_constructor(name) ++ raise + +- +-def __py_new(name, string=''): +- """new(name, string='') - Return a new hashing object using the named algorithm; +- optionally initialized with a string. +- """ +- return __get_builtin_constructor(name)(string) +- +- +-def __hash_new(name, string=''): ++def __hash_new(name, string='', usedforsecurity=True): + """new(name, string='') - Return a new hashing object using the named algorithm; + optionally initialized with a string. ++ Override 'usedforsecurity' to False when using for non-security purposes in ++ a FIPS environment + """ + try: +- return _hashlib.new(name, string) ++ return _hashlib.new(name, string, usedforsecurity) + except ValueError: +- # If the _hashlib module (OpenSSL) doesn't support the named +- # hash, try using our builtin implementations. +- # This allows for SHA224/256 and SHA384/512 support even though +- # the OpenSSL library prior to 0.9.8 doesn't provide them. 
+- return __get_builtin_constructor(name)(string) +- ++ raise + + try: + import _hashlib + new = __hash_new + __get_hash = __get_openssl_constructor + except ImportError: +- new = __py_new +- __get_hash = __get_builtin_constructor ++ # We don't build the legacy modules ++ raise + + for __func_name in __always_supported: + # try them all, some may not work due to the OpenSSL +@@ -143,4 +125,4 @@ for __func_name in __always_supported: + + # Cleanup locals() + del __always_supported, __func_name, __get_hash +-del __py_new, __hash_new, __get_openssl_constructor ++del __hash_new, __get_openssl_constructor +diff -up Python-2.7.2/Lib/test/test_hashlib.py.hashlib-fips Python-2.7.2/Lib/test/test_hashlib.py +--- Python-2.7.2/Lib/test/test_hashlib.py.hashlib-fips 2011-06-11 11:46:25.000000000 -0400 ++++ Python-2.7.2/Lib/test/test_hashlib.py 2011-09-14 01:08:55.525254195 -0400 +@@ -32,6 +32,19 @@ def hexstr(s): + r = r + h[(i >> 4) & 0xF] + h[i & 0xF] + return r + ++def openssl_enforces_fips(): ++ # Use the "openssl" command (if present) to try to determine if the local ++ # OpenSSL is configured to enforce FIPS ++ from subprocess import Popen, PIPE ++ try: ++ p = Popen(['openssl', 'md5'], ++ stdin=PIPE, stdout=PIPE, stderr=PIPE) ++ except OSError: ++ # "openssl" command not found ++ return False ++ stdout, stderr = p.communicate(input=b'abc') ++ return b'unknown cipher' in stderr ++OPENSSL_ENFORCES_FIPS = openssl_enforces_fips() + + class HashLibTestCase(unittest.TestCase): + supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1', +@@ -61,10 +74,10 @@ class HashLibTestCase(unittest.TestCase) + # of hashlib.new given the algorithm name. + for algorithm, constructors in self.constructors_to_test.items(): + constructors.add(getattr(hashlib, algorithm)) +- def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm): ++ def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, usedforsecurity=True): + if data is None: +- return hashlib.new(_alg) +- return hashlib.new(_alg, data) ++ return hashlib.new(_alg, usedforsecurity=usedforsecurity) ++ return hashlib.new(_alg, data, usedforsecurity=usedforsecurity) + constructors.add(_test_algorithm_via_hashlib_new) + + _hashlib = self._conditional_import_module('_hashlib') +@@ -78,28 +91,13 @@ class HashLibTestCase(unittest.TestCase) + if constructor: + constructors.add(constructor) + +- _md5 = self._conditional_import_module('_md5') +- if _md5: +- self.constructors_to_test['md5'].add(_md5.new) +- _sha = self._conditional_import_module('_sha') +- if _sha: +- self.constructors_to_test['sha1'].add(_sha.new) +- _sha256 = self._conditional_import_module('_sha256') +- if _sha256: +- self.constructors_to_test['sha224'].add(_sha256.sha224) +- self.constructors_to_test['sha256'].add(_sha256.sha256) +- _sha512 = self._conditional_import_module('_sha512') +- if _sha512: +- self.constructors_to_test['sha384'].add(_sha512.sha384) +- self.constructors_to_test['sha512'].add(_sha512.sha512) +- + super(HashLibTestCase, self).__init__(*args, **kwargs) + + def test_hash_array(self): + a = array.array("b", range(10)) + constructors = self.constructors_to_test.itervalues() + for cons in itertools.chain.from_iterable(constructors): +- c = cons(a) ++ c = cons(a, usedforsecurity=False) + c.hexdigest() + + def test_algorithms_attribute(self): +@@ -115,28 +113,9 @@ class HashLibTestCase(unittest.TestCase) + self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam') + self.assertRaises(TypeError, hashlib.new, 1) + +- def test_get_builtin_constructor(self): +- 
get_builtin_constructor = hashlib.__dict__[ +- '__get_builtin_constructor'] +- self.assertRaises(ValueError, get_builtin_constructor, 'test') +- try: +- import _md5 +- except ImportError: +- pass +- # This forces an ImportError for "import _md5" statements +- sys.modules['_md5'] = None +- try: +- self.assertRaises(ValueError, get_builtin_constructor, 'md5') +- finally: +- if '_md5' in locals(): +- sys.modules['_md5'] = _md5 +- else: +- del sys.modules['_md5'] +- self.assertRaises(TypeError, get_builtin_constructor, 3) +- + def test_hexdigest(self): + for name in self.supported_hash_names: +- h = hashlib.new(name) ++ h = hashlib.new(name, usedforsecurity=False) + self.assertTrue(hexstr(h.digest()) == h.hexdigest()) + + def test_large_update(self): +@@ -145,16 +125,16 @@ class HashLibTestCase(unittest.TestCase) + abcs = aas + bees + cees + + for name in self.supported_hash_names: +- m1 = hashlib.new(name) ++ m1 = hashlib.new(name, usedforsecurity=False) + m1.update(aas) + m1.update(bees) + m1.update(cees) + +- m2 = hashlib.new(name) ++ m2 = hashlib.new(name, usedforsecurity=False) + m2.update(abcs) + self.assertEqual(m1.digest(), m2.digest(), name+' update problem.') + +- m3 = hashlib.new(name, abcs) ++ m3 = hashlib.new(name, abcs, usedforsecurity=False) + self.assertEqual(m1.digest(), m3.digest(), name+' new problem.') + + def check(self, name, data, digest): +@@ -162,7 +142,7 @@ class HashLibTestCase(unittest.TestCase) + # 2 is for hashlib.name(...) and hashlib.new(name, ...) + self.assertGreaterEqual(len(constructors), 2) + for hash_object_constructor in constructors: +- computed = hash_object_constructor(data).hexdigest() ++ computed = hash_object_constructor(data, usedforsecurity=False).hexdigest() + self.assertEqual( + computed, digest, + "Hash algorithm %s constructed using %s returned hexdigest" +@@ -172,7 +152,8 @@ class HashLibTestCase(unittest.TestCase) + + def check_unicode(self, algorithm_name): + # Unicode objects are not allowed as input. 
+- expected = hashlib.new(algorithm_name, str(u'spam')).hexdigest() ++ expected = hashlib.new(algorithm_name, str(u'spam'), ++ usedforsecurity=False).hexdigest() + self.check(algorithm_name, u'spam', expected) + + def test_unicode(self): +@@ -354,6 +335,70 @@ class HashLibTestCase(unittest.TestCase) + + self.assertEqual(expected_hash, hasher.hexdigest()) + ++ def test_issue9146(self): ++ # Ensure that various ways to use "MD5" from "hashlib" don't segfault: ++ m = hashlib.md5(usedforsecurity=False) ++ m.update(b'abc\n') ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = hashlib.new('md5', usedforsecurity=False) ++ m.update(b'abc\n') ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = hashlib.md5(b'abc\n', usedforsecurity=False) ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = hashlib.new('md5', b'abc\n', usedforsecurity=False) ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ def assertRaisesUnknownCipher(self, callable_obj=None, *args, **kwargs): ++ try: ++ callable_obj(*args, **kwargs) ++ except ValueError, e: ++ if not e.args[0].endswith('unknown cipher'): ++ self.fail('Incorrect exception raised') ++ else: ++ self.fail('Exception was not raised') ++ ++ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS, ++ 'FIPS enforcement required for this test.') ++ def test_hashlib_fips_mode(self): ++ # Ensure that we raise a ValueError on vanilla attempts to use MD5 ++ # in hashlib in a FIPS-enforced setting: ++ self.assertRaisesUnknownCipher(hashlib.md5) ++ self.assertRaisesUnknownCipher(hashlib.new, 'md5') ++ ++ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS, ++ 'FIPS enforcement required for this test.') ++ def test_hashopenssl_fips_mode(self): ++ # Verify the _hashlib module's handling of md5: ++ import _hashlib ++ ++ assert hasattr(_hashlib, 'openssl_md5') ++ ++ # Ensure that _hashlib raises a ValueError on vanilla attempts to ++ # use MD5 in a FIPS-enforced setting: ++ self.assertRaisesUnknownCipher(_hashlib.openssl_md5) ++ self.assertRaisesUnknownCipher(_hashlib.new, 'md5') ++ ++ # Ensure that in such a setting we can whitelist a callsite with ++ # usedforsecurity=False and have it succeed: ++ m = _hashlib.openssl_md5(usedforsecurity=False) ++ m.update('abc\n') ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = _hashlib.new('md5', usedforsecurity=False) ++ m.update('abc\n') ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = _hashlib.openssl_md5('abc\n', usedforsecurity=False) ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = _hashlib.new('md5', 'abc\n', usedforsecurity=False) ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ ++ + def test_main(): + test_support.run_unittest(HashLibTestCase) + +diff -up Python-2.7.2/Modules/_hashopenssl.c.hashlib-fips Python-2.7.2/Modules/_hashopenssl.c +--- Python-2.7.2/Modules/_hashopenssl.c.hashlib-fips 2011-06-11 11:46:26.000000000 -0400 ++++ Python-2.7.2/Modules/_hashopenssl.c 2011-09-14 00:21:26.199252001 -0400 +@@ -36,6 +36,8 @@ + #endif + + /* EVP is the preferred interface to hashing in OpenSSL */ ++#include ++#include + #include + + #define MUNCH_SIZE INT_MAX +@@ -65,11 +67,19 @@ typedef struct { + + static PyTypeObject EVPtype; + ++/* Struct to hold all the cached information we need on a specific algorithm. 
++ We have one of these per algorithm */ ++typedef struct { ++ PyObject *name_obj; ++ EVP_MD_CTX ctxs[2]; ++ /* ctx_ptrs will point to ctxs unless an error occurred, when it will ++ be NULL: */ ++ EVP_MD_CTX *ctx_ptrs[2]; ++ PyObject *error_msgs[2]; ++} EVPCachedInfo; + +-#define DEFINE_CONSTS_FOR_NEW(Name) \ +- static PyObject *CONST_ ## Name ## _name_obj = NULL; \ +- static EVP_MD_CTX CONST_new_ ## Name ## _ctx; \ +- static EVP_MD_CTX *CONST_new_ ## Name ## _ctx_p = NULL; ++#define DEFINE_CONSTS_FOR_NEW(Name) \ ++ static EVPCachedInfo cached_info_ ##Name; + + DEFINE_CONSTS_FOR_NEW(md5) + DEFINE_CONSTS_FOR_NEW(sha1) +@@ -115,6 +125,48 @@ EVP_hash(EVPobject *self, const void *vp + } + } + ++static void ++mc_ctx_init(EVP_MD_CTX *ctx, int usedforsecurity) ++{ ++ EVP_MD_CTX_init(ctx); ++ ++ /* ++ If the user has declared that this digest is being used in a ++ non-security role (e.g. indexing into a data structure), set ++ the exception flag for openssl to allow it ++ */ ++ if (!usedforsecurity) { ++#ifdef EVP_MD_CTX_FLAG_NON_FIPS_ALLOW ++ EVP_MD_CTX_set_flags(ctx, ++ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW); ++#endif ++ } ++} ++ ++/* Get an error msg for the last error as a PyObject */ ++static PyObject * ++error_msg_for_last_error(void) ++{ ++ char *errstr; ++ ++ errstr = ERR_error_string(ERR_peek_last_error(), NULL); ++ ERR_clear_error(); ++ ++ return PyString_FromString(errstr); /* Can be NULL */ ++} ++ ++static void ++set_evp_exception(void) ++{ ++ char *errstr; ++ ++ errstr = ERR_error_string(ERR_peek_last_error(), NULL); ++ ERR_clear_error(); ++ ++ PyErr_SetString(PyExc_ValueError, errstr); ++} ++ ++ + /* Internal methods for a hash object */ + + static void +@@ -313,14 +365,15 @@ EVP_repr(PyObject *self) + static int + EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds) + { +- static char *kwlist[] = {"name", "string", NULL}; ++ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL}; + PyObject *name_obj = NULL; ++ int usedforsecurity = 1; + Py_buffer view = { 0 }; + char *nameStr; + const EVP_MD *digest; + +- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|s*:HASH", kwlist, +- &name_obj, &view)) { ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|s*i:HASH", kwlist, ++ &name_obj, &view, &usedforsecurity)) { + return -1; + } + +@@ -336,7 +389,12 @@ EVP_tp_init(EVPobject *self, PyObject *a + PyBuffer_Release(&view); + return -1; + } +- EVP_DigestInit(&self->ctx, digest); ++ mc_ctx_init(&self->ctx, usedforsecurity); ++ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) { ++ set_evp_exception(); ++ PyBuffer_Release(&view); ++ return -1; ++ } + + self->name = name_obj; + Py_INCREF(self->name); +@@ -420,7 +478,8 @@ static PyTypeObject EVPtype = { + static PyObject * + EVPnew(PyObject *name_obj, + const EVP_MD *digest, const EVP_MD_CTX *initial_ctx, +- const unsigned char *cp, Py_ssize_t len) ++ const unsigned char *cp, Py_ssize_t len, ++ int usedforsecurity) + { + EVPobject *self; + +@@ -435,7 +494,12 @@ EVPnew(PyObject *name_obj, + if (initial_ctx) { + EVP_MD_CTX_copy(&self->ctx, initial_ctx); + } else { +- EVP_DigestInit(&self->ctx, digest); ++ mc_ctx_init(&self->ctx, usedforsecurity); ++ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) { ++ set_evp_exception(); ++ Py_DECREF(self); ++ return NULL; ++ } + } + + if (cp && len) { +@@ -459,20 +523,28 @@ PyDoc_STRVAR(EVP_new__doc__, + An optional string argument may be provided and will be\n\ + automatically hashed.\n\ + \n\ +-The MD5 and SHA1 algorithms are always supported.\n"); ++The MD5 and SHA1 algorithms are always 
supported.\n\ ++\n\ ++An optional \"usedforsecurity=True\" keyword argument is provided for use in\n\ ++environments that enforce FIPS-based restrictions. Some implementations of\n\ ++OpenSSL can be configured to prevent the usage of non-secure algorithms (such\n\ ++as MD5). If you have a non-security use for these algorithms (e.g. a hash\n\ ++table), you can override this argument by marking the callsite as\n\ ++\"usedforsecurity=False\"."); + + static PyObject * + EVP_new(PyObject *self, PyObject *args, PyObject *kwdict) + { +- static char *kwlist[] = {"name", "string", NULL}; ++ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL}; + PyObject *name_obj = NULL; + Py_buffer view = { 0 }; + PyObject *ret_obj; + char *name; + const EVP_MD *digest; ++ int usedforsecurity = 1; + +- if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|s*:new", kwlist, +- &name_obj, &view)) { ++ if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|s*i:new", kwlist, ++ &name_obj, &view, &usedforsecurity)) { + return NULL; + } + +@@ -484,58 +556,118 @@ EVP_new(PyObject *self, PyObject *args, + digest = EVP_get_digestbyname(name); + + ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, +- view.len); ++ view.len, usedforsecurity); + PyBuffer_Release(&view); + + return ret_obj; + } + + /* +- * This macro generates constructor function definitions for specific +- * hash algorithms. These constructors are much faster than calling +- * the generic one passing it a python string and are noticably +- * faster than calling a python new() wrapper. Thats important for ++ * This macro and function generates a family of constructor function ++ * definitions for specific hash algorithms. These constructors are much ++ * faster than calling the generic one passing it a python string and are ++ * noticably faster than calling a python new() wrapper. That's important for + * code that wants to make hashes of a bunch of small strings. + */ + #define GEN_CONSTRUCTOR(NAME) \ + static PyObject * \ +- EVP_new_ ## NAME (PyObject *self, PyObject *args) \ ++ EVP_new_ ## NAME (PyObject *self, PyObject *args, PyObject *kwdict) \ + { \ +- Py_buffer view = { 0 }; \ +- PyObject *ret_obj; \ +- \ +- if (!PyArg_ParseTuple(args, "|s*:" #NAME , &view)) { \ +- return NULL; \ +- } \ +- \ +- ret_obj = EVPnew( \ +- CONST_ ## NAME ## _name_obj, \ +- NULL, \ +- CONST_new_ ## NAME ## _ctx_p, \ +- (unsigned char*)view.buf, view.len); \ +- PyBuffer_Release(&view); \ +- return ret_obj; \ ++ return implement_specific_EVP_new(self, args, kwdict, \ ++ "|s*i:" #NAME, \ ++ &cached_info_ ## NAME ); \ + } + ++static PyObject * ++implement_specific_EVP_new(PyObject *self, PyObject *args, PyObject *kwdict, ++ const char *format, ++ EVPCachedInfo *cached_info) ++{ ++ static char *kwlist[] = {"string", "usedforsecurity", NULL}; ++ Py_buffer view = { 0 }; ++ int usedforsecurity = 1; ++ int idx; ++ PyObject *ret_obj = NULL; ++ ++ assert(cached_info); ++ ++ if (!PyArg_ParseTupleAndKeywords(args, kwdict, format, kwlist, ++ &view, &usedforsecurity)) { ++ return NULL; ++ } ++ ++ idx = usedforsecurity ? 
1 : 0; ++ ++ /* ++ * If an error occurred during creation of the global content, the ctx_ptr ++ * will be NULL, and the error_msg will hopefully be non-NULL: ++ */ ++ if (cached_info->ctx_ptrs[idx]) { ++ /* We successfully initialized this context; copy it: */ ++ ret_obj = EVPnew(cached_info->name_obj, ++ NULL, ++ cached_info->ctx_ptrs[idx], ++ (unsigned char*)view.buf, view.len, ++ usedforsecurity); ++ } else { ++ /* Some kind of error happened initializing the global context for ++ this (digest, usedforsecurity) pair. ++ Raise an exception with the saved error message: */ ++ if (cached_info->error_msgs[idx]) { ++ PyErr_SetObject(PyExc_ValueError, cached_info->error_msgs[idx]); ++ } else { ++ PyErr_SetString(PyExc_ValueError, "Error initializing hash"); ++ } ++ } ++ ++ PyBuffer_Release(&view); ++ ++ return ret_obj; ++} ++ + /* a PyMethodDef structure for the constructor */ + #define CONSTRUCTOR_METH_DEF(NAME) \ +- {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \ ++ {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, \ ++ METH_VARARGS |METH_KEYWORDS, \ + PyDoc_STR("Returns a " #NAME \ + " hash object; optionally initialized with a string") \ + } + +-/* used in the init function to setup a constructor: initialize OpenSSL +- constructor constants if they haven't been initialized already. */ +-#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \ +- if (CONST_ ## NAME ## _name_obj == NULL) { \ +- CONST_ ## NAME ## _name_obj = PyString_FromString(#NAME); \ +- if (EVP_get_digestbyname(#NAME)) { \ +- CONST_new_ ## NAME ## _ctx_p = &CONST_new_ ## NAME ## _ctx; \ +- EVP_DigestInit(CONST_new_ ## NAME ## _ctx_p, EVP_get_digestbyname(#NAME)); \ +- } \ +- } \ ++/* ++ Macro/function pair to set up the constructors. ++ ++ Try to initialize a context for each hash twice, once with ++ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW and once without. ++ ++ Any that have errors during initialization will end up wit a NULL ctx_ptrs ++ entry, and err_msgs will be set (unless we're very low on memory) ++*/ ++#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \ ++ init_constructor_constant(&cached_info_ ## NAME, #NAME); \ + } while (0); + ++static void ++init_constructor_constant(EVPCachedInfo *cached_info, const char *name) ++{ ++ assert(cached_info); ++ cached_info->name_obj = PyString_FromString(name); ++ if (EVP_get_digestbyname(name)) { ++ int i; ++ for (i=0; i<2; i++) { ++ mc_ctx_init(&cached_info->ctxs[i], i); ++ if (EVP_DigestInit_ex(&cached_info->ctxs[i], ++ EVP_get_digestbyname(name), NULL)) { ++ /* Success: */ ++ cached_info->ctx_ptrs[i] = &cached_info->ctxs[i]; ++ } else { ++ /* Failure: */ ++ cached_info->ctx_ptrs[i] = NULL; ++ cached_info->error_msgs[i] = error_msg_for_last_error(); ++ } ++ } ++ } ++} ++ + GEN_CONSTRUCTOR(md5) + GEN_CONSTRUCTOR(sha1) + #ifdef _OPENSSL_SUPPORTS_SHA2 +@@ -565,13 +700,10 @@ init_hashlib(void) + { + PyObject *m; + ++ SSL_load_error_strings(); ++ SSL_library_init(); + OpenSSL_add_all_digests(); + +- /* TODO build EVP_functions openssl_* entries dynamically based +- * on what hashes are supported rather than listing many +- * but having some be unsupported. Only init appropriate +- * constants. 
*/ +- + Py_TYPE(&EVPtype) = &PyType_Type; + if (PyType_Ready(&EVPtype) < 0) + return; +diff -up Python-2.7.2/Modules/Setup.dist.hashlib-fips Python-2.7.2/Modules/Setup.dist +--- Python-2.7.2/Modules/Setup.dist.hashlib-fips 2011-09-14 00:21:26.163252001 -0400 ++++ Python-2.7.2/Modules/Setup.dist 2011-09-14 00:21:26.201252001 -0400 +@@ -248,14 +248,14 @@ imageop imageop.c # Operations on images + # Message-Digest Algorithm, described in RFC 1321. The necessary files + # md5.c and md5.h are included here. + +-_md5 md5module.c md5.c ++#_md5 md5module.c md5.c + + + # The _sha module implements the SHA checksum algorithms. + # (NIST's Secure Hash Algorithms.) +-_sha shamodule.c +-_sha256 sha256module.c +-_sha512 sha512module.c ++#_sha shamodule.c ++#_sha256 sha256module.c ++#_sha512 sha512module.c + + + # SGI IRIX specific modules -- off by default. +diff -up Python-2.7.2/setup.py.hashlib-fips Python-2.7.2/setup.py +--- Python-2.7.2/setup.py.hashlib-fips 2011-09-14 00:21:25.722252001 -0400 ++++ Python-2.7.2/setup.py 2011-09-14 00:21:26.203252001 -0400 +@@ -768,21 +768,6 @@ class PyBuildExt(build_ext): + print ("warning: openssl 0x%08x is too old for _hashlib" % + openssl_ver) + missing.append('_hashlib') +- if COMPILED_WITH_PYDEBUG or not have_usable_openssl: +- # The _sha module implements the SHA1 hash algorithm. +- exts.append( Extension('_sha', ['shamodule.c']) ) +- # The _md5 module implements the RSA Data Security, Inc. MD5 +- # Message-Digest Algorithm, described in RFC 1321. The +- # necessary files md5.c and md5.h are included here. +- exts.append( Extension('_md5', +- sources = ['md5module.c', 'md5.c'], +- depends = ['md5.h']) ) +- +- min_sha2_openssl_ver = 0x00908000 +- if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: +- # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash +- exts.append( Extension('_sha256', ['sha256module.c']) ) +- exts.append( Extension('_sha512', ['sha512module.c']) ) + + # Modules that provide persistent dictionary-like semantics. You will + # probably want to arrange for at least one of them to be available on diff --git a/SOURCES/00147-add-debug-malloc-stats.patch b/SOURCES/00147-add-debug-malloc-stats.patch new file mode 100644 index 0000000..48952f0 --- /dev/null +++ b/SOURCES/00147-add-debug-malloc-stats.patch @@ -0,0 +1,762 @@ +diff --git a/Include/dictobject.h b/Include/dictobject.h +index ece01c6..acc1df0 100644 +--- a/Include/dictobject.h ++++ b/Include/dictobject.h +@@ -150,6 +150,8 @@ PyAPI_FUNC(PyObject *) PyDict_GetItemString(PyObject *dp, const char *key); + PyAPI_FUNC(int) PyDict_SetItemString(PyObject *dp, const char *key, PyObject *item); + PyAPI_FUNC(int) PyDict_DelItemString(PyObject *dp, const char *key); + ++PyAPI_FUNC(void) _PyDict_DebugMallocStats(FILE *out); ++ + #ifdef __cplusplus + } + #endif +diff --git a/Include/floatobject.h b/Include/floatobject.h +index 54e8825..33c6ac0 100644 +--- a/Include/floatobject.h ++++ b/Include/floatobject.h +@@ -132,6 +132,7 @@ PyAPI_FUNC(PyObject *) _PyFloat_FormatAdvanced(PyObject *obj, + failure. Used in builtin_round in bltinmodule.c. 
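
As a side effect of the Setup.dist and setup.py hunks above, the bundled _md5/_sha* C modules are no longer built, so every digest has to resolve through OpenSSL's _hashlib. A quick sanity check on such a build might look like this (a sketch; the exact ImportError behaviour depends on the final build configuration):

    import hashlib

    try:
        import _md5
    except ImportError:
        print "bundled _md5 is not built; digests come from _hashlib/OpenSSL"

    # The OpenSSL-backed constructors remain available through hashlib:
    print hashlib.sha256("abc").hexdigest()
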
*/ + PyAPI_FUNC(PyObject *) _Py_double_round(double x, int ndigits); + ++PyAPI_FUNC(void) _PyFloat_DebugMallocStats(FILE* out); + + + #ifdef __cplusplus +diff --git a/Include/frameobject.h b/Include/frameobject.h +index 17e7679..66d9d8b 100644 +--- a/Include/frameobject.h ++++ b/Include/frameobject.h +@@ -80,6 +80,8 @@ PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *); + + PyAPI_FUNC(int) PyFrame_ClearFreeList(void); + ++PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out); ++ + /* Return the line of code the frame is currently executing. */ + PyAPI_FUNC(int) PyFrame_GetLineNumber(PyFrameObject *); + +diff --git a/Include/intobject.h b/Include/intobject.h +index 252eea9..4003736 100644 +--- a/Include/intobject.h ++++ b/Include/intobject.h +@@ -75,6 +75,8 @@ PyAPI_FUNC(PyObject *) _PyInt_FormatAdvanced(PyObject *obj, + char *format_spec, + Py_ssize_t format_spec_len); + ++PyAPI_FUNC(void) _PyInt_DebugMallocStats(FILE *out); ++ + #ifdef __cplusplus + } + #endif +diff --git a/Include/listobject.h b/Include/listobject.h +index c445873..04664d7 100644 +--- a/Include/listobject.h ++++ b/Include/listobject.h +@@ -62,6 +62,8 @@ PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *); + #define PyList_SET_ITEM(op, i, v) (((PyListObject *)(op))->ob_item[i] = (v)) + #define PyList_GET_SIZE(op) Py_SIZE(op) + ++PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out); ++ + #ifdef __cplusplus + } + #endif +diff --git a/Include/methodobject.h b/Include/methodobject.h +index 6e160b6..1944517 100644 +--- a/Include/methodobject.h ++++ b/Include/methodobject.h +@@ -87,6 +87,10 @@ typedef struct { + + PyAPI_FUNC(int) PyCFunction_ClearFreeList(void); + ++PyAPI_FUNC(void) _PyCFunction_DebugMallocStats(FILE *out); ++PyAPI_FUNC(void) _PyMethod_DebugMallocStats(FILE *out); ++ ++ + #ifdef __cplusplus + } + #endif +diff --git a/Include/object.h b/Include/object.h +index afbc68d..ce5febf 100644 +--- a/Include/object.h ++++ b/Include/object.h +@@ -1005,6 +1005,13 @@ PyAPI_FUNC(void) _PyTrash_thread_destroy_chain(void); + _PyTrash_thread_deposit_object((PyObject*)op); \ + } while (0); + ++PyAPI_FUNC(void) ++_PyDebugAllocatorStats(FILE *out, const char *block_name, int num_blocks, ++ size_t sizeof_block); ++ ++PyAPI_FUNC(void) ++_PyObject_DebugTypeStats(FILE *out); ++ + #ifdef __cplusplus + } + #endif +diff --git a/Include/objimpl.h b/Include/objimpl.h +index 55e83ec..331b456 100644 +--- a/Include/objimpl.h ++++ b/Include/objimpl.h +@@ -101,13 +101,13 @@ PyAPI_FUNC(void) PyObject_Free(void *); + + /* Macros */ + #ifdef WITH_PYMALLOC ++PyAPI_FUNC(void) _PyObject_DebugMallocStats(FILE *out); + #ifdef PYMALLOC_DEBUG /* WITH_PYMALLOC && PYMALLOC_DEBUG */ + PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes); + PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes); + PyAPI_FUNC(void) _PyObject_DebugFree(void *p); + PyAPI_FUNC(void) _PyObject_DebugDumpAddress(const void *p); + PyAPI_FUNC(void) _PyObject_DebugCheckAddress(const void *p); +-PyAPI_FUNC(void) _PyObject_DebugMallocStats(void); + PyAPI_FUNC(void *) _PyObject_DebugMallocApi(char api, size_t nbytes); + PyAPI_FUNC(void *) _PyObject_DebugReallocApi(char api, void *p, size_t nbytes); + PyAPI_FUNC(void) _PyObject_DebugFreeApi(char api, void *p); +diff --git a/Include/setobject.h b/Include/setobject.h +index 52b07d5..143b175 100644 +--- a/Include/setobject.h ++++ b/Include/setobject.h +@@ -93,6 +93,7 @@ PyAPI_FUNC(int) _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key, + PyAPI_FUNC(PyObject *) PySet_Pop(PyObject *set); + 
PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable); + ++PyAPI_FUNC(void) _PySet_DebugMallocStats(FILE *out); + #ifdef __cplusplus + } + #endif +diff --git a/Include/stringobject.h b/Include/stringobject.h +index 18b5b41..de78d76 100644 +--- a/Include/stringobject.h ++++ b/Include/stringobject.h +@@ -204,6 +204,8 @@ PyAPI_FUNC(PyObject *) _PyBytes_FormatAdvanced(PyObject *obj, + char *format_spec, + Py_ssize_t format_spec_len); + ++PyAPI_FUNC(void) _PyString_DebugMallocStats(FILE *out); ++ + #ifdef __cplusplus + } + #endif +diff --git a/Include/tupleobject.h b/Include/tupleobject.h +index a5ab733..e233f47 100644 +--- a/Include/tupleobject.h ++++ b/Include/tupleobject.h +@@ -54,7 +54,7 @@ PyAPI_FUNC(void) _PyTuple_MaybeUntrack(PyObject *); + #define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v) + + PyAPI_FUNC(int) PyTuple_ClearFreeList(void); +- ++PyAPI_FUNC(void) _PyTuple_DebugMallocStats(FILE *out); + #ifdef __cplusplus + } + #endif +diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h +index 9ab724a..b91250a 100644 +--- a/Include/unicodeobject.h ++++ b/Include/unicodeobject.h +@@ -1406,6 +1406,8 @@ PyAPI_FUNC(int) _PyUnicode_IsAlpha( + Py_UNICODE ch /* Unicode character */ + ); + ++PyAPI_FUNC(void) _PyUnicode_DebugMallocStats(FILE *out); ++ + #ifdef __cplusplus + } + #endif +diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py +index 82243f3..8f1e1a0 100644 +--- a/Lib/test/test_sys.py ++++ b/Lib/test/test_sys.py +@@ -488,6 +488,32 @@ class SysModuleTest(unittest.TestCase): + p.wait() + self.assertIn(executable, ["''", repr(sys.executable)]) + ++ def test_debugmallocstats(self): ++ # Test sys._debugmallocstats() ++ ++ import subprocess ++ ++ # Verify the default of writing to stderr: ++ p = subprocess.Popen([sys.executable, ++ '-c', 'import sys; sys._debugmallocstats()'], ++ stderr=subprocess.PIPE) ++ out, err = p.communicate() ++ p.wait() ++ self.assertIn("arenas allocated current", err) ++ ++ # Verify that we can redirect the output to a file (not a file-like ++ # object, though): ++ with open('mallocstats.txt', 'w') as out: ++ sys._debugmallocstats(out) ++ result = open('mallocstats.txt').read() ++ self.assertIn("arenas allocated current", result) ++ os.unlink('mallocstats.txt') ++ ++ # Verify that the destination must be a file: ++ with self.assertRaises(TypeError): ++ sys._debugmallocstats(42) ++ ++ + class SizeofTest(unittest.TestCase): + + def setUp(self): +diff --git a/Objects/classobject.c b/Objects/classobject.c +index 2c9c216..2ba7077 100644 +--- a/Objects/classobject.c ++++ b/Objects/classobject.c +@@ -2694,3 +2694,12 @@ PyMethod_Fini(void) + { + (void)PyMethod_ClearFreeList(); + } ++ ++/* Print summary info about the state of the optimized allocator */ ++void ++_PyMethod_DebugMallocStats(FILE *out) ++{ ++ _PyDebugAllocatorStats(out, ++ "free PyMethodObject", ++ numfree, sizeof(PyMethodObject)); ++} +diff --git a/Objects/dictobject.c b/Objects/dictobject.c +index ba36b18..b8a5c7f 100644 +--- a/Objects/dictobject.c ++++ b/Objects/dictobject.c +@@ -225,6 +225,15 @@ show_track(void) + static PyDictObject *free_list[PyDict_MAXFREELIST]; + static int numfree = 0; + ++/* Print summary info about the state of the optimized allocator */ ++void ++_PyDict_DebugMallocStats(FILE *out) ++{ ++ _PyDebugAllocatorStats(out, ++ "free PyDictObject", numfree, sizeof(PyDictObject)); ++} ++ ++ + void + PyDict_Fini(void) + { +diff --git a/Objects/floatobject.c b/Objects/floatobject.c +index ba867ef..533511d 100644 +--- a/Objects/floatobject.c ++++ 
b/Objects/floatobject.c +@@ -35,6 +35,22 @@ typedef struct _floatblock PyFloatBlock; + static PyFloatBlock *block_list = NULL; + static PyFloatObject *free_list = NULL; + ++/* Print summary info about the state of the optimized allocator */ ++void ++_PyFloat_DebugMallocStats(FILE *out) ++{ ++ int num_blocks = 0; ++ PyFloatBlock *block; ++ ++ /* Walk the block list, counting */ ++ for (block = block_list; block ; block = block->next) { ++ num_blocks++; ++ } ++ ++ _PyDebugAllocatorStats(out, ++ "PyFloatBlock", num_blocks, sizeof(PyFloatBlock)); ++} ++ + static PyFloatObject * + fill_free_list(void) + { +diff --git a/Objects/frameobject.c b/Objects/frameobject.c +index f9e4a0e..337fc58 100644 +--- a/Objects/frameobject.c ++++ b/Objects/frameobject.c +@@ -982,3 +982,13 @@ PyFrame_Fini(void) + Py_XDECREF(builtin_object); + builtin_object = NULL; + } ++ ++/* Print summary info about the state of the optimized allocator */ ++void ++_PyFrame_DebugMallocStats(FILE *out) ++{ ++ _PyDebugAllocatorStats(out, ++ "free PyFrameObject", ++ numfree, sizeof(PyFrameObject)); ++} ++ +diff --git a/Objects/intobject.c b/Objects/intobject.c +index 28182f9..f442ea0 100644 +--- a/Objects/intobject.c ++++ b/Objects/intobject.c +@@ -44,6 +44,23 @@ typedef struct _intblock PyIntBlock; + static PyIntBlock *block_list = NULL; + static PyIntObject *free_list = NULL; + ++ ++/* Print summary info about the state of the optimized allocator */ ++void ++_PyInt_DebugMallocStats(FILE *out) ++{ ++ int num_blocks = 0; ++ PyIntBlock *block; ++ ++ /* Walk the block list, counting */ ++ for (block = block_list; block ; block = block->next) { ++ num_blocks++; ++ } ++ ++ _PyDebugAllocatorStats(out, ++ "PyIntBlock", num_blocks, sizeof(PyIntBlock)); ++} ++ + static PyIntObject * + fill_free_list(void) + { +diff --git a/Objects/listobject.c b/Objects/listobject.c +index f753643..e6fa17d 100644 +--- a/Objects/listobject.c ++++ b/Objects/listobject.c +@@ -109,6 +109,15 @@ PyList_Fini(void) + } + } + ++/* Print summary info about the state of the optimized allocator */ ++void ++_PyList_DebugMallocStats(FILE *out) ++{ ++ _PyDebugAllocatorStats(out, ++ "free PyListObject", ++ numfree, sizeof(PyListObject)); ++} ++ + PyObject * + PyList_New(Py_ssize_t size) + { +diff --git a/Objects/methodobject.c b/Objects/methodobject.c +index 0b60ca3..3193135 100644 +--- a/Objects/methodobject.c ++++ b/Objects/methodobject.c +@@ -412,6 +412,15 @@ PyCFunction_Fini(void) + (void)PyCFunction_ClearFreeList(); + } + ++/* Print summary info about the state of the optimized allocator */ ++void ++_PyCFunction_DebugMallocStats(FILE *out) ++{ ++ _PyDebugAllocatorStats(out, ++ "free PyCFunction", ++ numfree, sizeof(PyCFunction)); ++} ++ + /* PyCFunction_New() is now just a macro that calls PyCFunction_NewEx(), + but it's part of the API so we need to keep a function around that + existing C extensions can call. 
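
Taken together, the hunks above surface the allocator statistics through sys._debugmallocstats(), which the new test_sys test exercises. A minimal interactive sketch (assuming a build carrying this patch; 'mallocstats.txt' is just an illustrative filename):

    import sys

    # Default destination is sys.stderr:
    sys._debugmallocstats()

    # A real file object (not merely file-like) can be passed instead:
    with open('mallocstats.txt', 'w') as out:
        sys._debugmallocstats(out)

    for line in open('mallocstats.txt'):
        # Per-type freelist lines come from the new _Py*_DebugMallocStats hooks:
        if 'arenas allocated current' in line or 'free PyDictObject' in line:
            print line.rstrip()
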
+diff --git a/Objects/object.c b/Objects/object.c +index 14f4e9f..68aedcd 100644 +--- a/Objects/object.c ++++ b/Objects/object.c +@@ -2355,6 +2355,23 @@ PyMem_Free(void *p) + PyMem_FREE(p); + } + ++void ++_PyObject_DebugTypeStats(FILE *out) ++{ ++ _PyString_DebugMallocStats(out); ++ _PyCFunction_DebugMallocStats(out); ++ _PyDict_DebugMallocStats(out); ++ _PyFloat_DebugMallocStats(out); ++ _PyFrame_DebugMallocStats(out); ++ _PyInt_DebugMallocStats(out); ++ _PyList_DebugMallocStats(out); ++ _PyMethod_DebugMallocStats(out); ++ _PySet_DebugMallocStats(out); ++ _PyTuple_DebugMallocStats(out); ++#if Py_USING_UNICODE ++ _PyUnicode_DebugMallocStats(out); ++#endif ++} + + /* These methods are used to control infinite recursion in repr, str, print, + etc. Container objects that may recursively contain themselves, +diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c +index 38ebc37..2c05359 100644 +--- a/Objects/obmalloc.c ++++ b/Objects/obmalloc.c +@@ -508,12 +508,10 @@ static struct arena_object* usable_arenas = NULL; + /* Number of arenas allocated that haven't been free()'d. */ + static size_t narenas_currently_allocated = 0; + +-#ifdef PYMALLOC_DEBUG + /* Total number of times malloc() called to allocate an arena. */ + static size_t ntimes_arena_allocated = 0; + /* High water mark (max value ever seen) for narenas_currently_allocated. */ + static size_t narenas_highwater = 0; +-#endif + + /* Allocate a new arena. If we run out of memory, return NULL. Else + * allocate a new arena, and return the address of an arena_object +@@ -528,7 +526,7 @@ new_arena(void) + + #ifdef PYMALLOC_DEBUG + if (Py_GETENV("PYTHONMALLOCSTATS")) +- _PyObject_DebugMallocStats(); ++ _PyObject_DebugMallocStats(stderr); + #endif + if (unused_arena_objects == NULL) { + uint i; +@@ -588,11 +586,9 @@ new_arena(void) + } + + ++narenas_currently_allocated; +-#ifdef PYMALLOC_DEBUG + ++ntimes_arena_allocated; + if (narenas_currently_allocated > narenas_highwater) + narenas_highwater = narenas_currently_allocated; +-#endif + arenaobj->freepools = NULL; + /* pool_address <- first pool-aligned address in the arena + nfreepools <- number of whole pools that fit after alignment */ +@@ -1694,17 +1690,19 @@ _PyObject_DebugDumpAddress(const void *p) + } + } + ++#endif /* PYMALLOC_DEBUG */ ++ + static size_t +-printone(const char* msg, size_t value) ++printone(FILE *out, const char* msg, size_t value) + { + int i, k; + char buf[100]; + size_t origvalue = value; + +- fputs(msg, stderr); ++ fputs(msg, out); + for (i = (int)strlen(msg); i < 35; ++i) +- fputc(' ', stderr); +- fputc('=', stderr); ++ fputc(' ', out); ++ fputc('=', out); + + /* Write the value with commas. */ + i = 22; +@@ -1725,17 +1723,32 @@ printone(const char* msg, size_t value) + + while (i >= 0) + buf[i--] = ' '; +- fputs(buf, stderr); ++ fputs(buf, out); + + return origvalue; + } + +-/* Print summary info to stderr about the state of pymalloc's structures. ++void ++_PyDebugAllocatorStats(FILE *out, ++ const char *block_name, int num_blocks, size_t sizeof_block) ++{ ++ char buf1[128]; ++ char buf2[128]; ++ PyOS_snprintf(buf1, sizeof(buf1), ++ "%d %ss * %zd bytes each", ++ num_blocks, block_name, sizeof_block); ++ PyOS_snprintf(buf2, sizeof(buf2), ++ "%48s ", buf1); ++ (void)printone(out, buf2, num_blocks * sizeof_block); ++} ++ ++ ++/* Print summary info to "out" about the state of pymalloc's structures. + * In Py_DEBUG mode, also perform some expensive internal consistency + * checks. 
+ */ + void +-_PyObject_DebugMallocStats(void) ++_PyObject_DebugMallocStats(FILE *out) + { + uint i; + const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT; +@@ -1764,7 +1777,7 @@ _PyObject_DebugMallocStats(void) + size_t total; + char buf[128]; + +- fprintf(stderr, "Small block threshold = %d, in %u size classes.\n", ++ fprintf(out, "Small block threshold = %d, in %u size classes.\n", + SMALL_REQUEST_THRESHOLD, numclasses); + + for (i = 0; i < numclasses; ++i) +@@ -1818,10 +1831,10 @@ _PyObject_DebugMallocStats(void) + } + assert(narenas == narenas_currently_allocated); + +- fputc('\n', stderr); ++ fputc('\n', out); + fputs("class size num pools blocks in use avail blocks\n" + "----- ---- --------- ------------- ------------\n", +- stderr); ++ out); + + for (i = 0; i < numclasses; ++i) { + size_t p = numpools[i]; +@@ -1832,7 +1845,7 @@ _PyObject_DebugMallocStats(void) + assert(b == 0 && f == 0); + continue; + } +- fprintf(stderr, "%5u %6u " ++ fprintf(out, "%5u %6u " + "%11" PY_FORMAT_SIZE_T "u " + "%15" PY_FORMAT_SIZE_T "u " + "%13" PY_FORMAT_SIZE_T "u\n", +@@ -1842,36 +1855,35 @@ _PyObject_DebugMallocStats(void) + pool_header_bytes += p * POOL_OVERHEAD; + quantization += p * ((POOL_SIZE - POOL_OVERHEAD) % size); + } +- fputc('\n', stderr); +- (void)printone("# times object malloc called", serialno); +- +- (void)printone("# arenas allocated total", ntimes_arena_allocated); +- (void)printone("# arenas reclaimed", ntimes_arena_allocated - narenas); +- (void)printone("# arenas highwater mark", narenas_highwater); +- (void)printone("# arenas allocated current", narenas); ++ fputc('\n', out); ++#ifdef PYMALLOC_DEBUG ++ (void)printone(out, "# times object malloc called", serialno); ++#endif ++ (void)printone(out, "# arenas allocated total", ntimes_arena_allocated); ++ (void)printone(out, "# arenas reclaimed", ntimes_arena_allocated - narenas); ++ (void)printone(out, "# arenas highwater mark", narenas_highwater); ++ (void)printone(out, "# arenas allocated current", narenas); + + PyOS_snprintf(buf, sizeof(buf), + "%" PY_FORMAT_SIZE_T "u arenas * %d bytes/arena", + narenas, ARENA_SIZE); +- (void)printone(buf, narenas * ARENA_SIZE); ++ (void)printone(out, buf, narenas * ARENA_SIZE); + +- fputc('\n', stderr); ++ fputc('\n', out); + +- total = printone("# bytes in allocated blocks", allocated_bytes); +- total += printone("# bytes in available blocks", available_bytes); ++ total = printone(out, "# bytes in allocated blocks", allocated_bytes); ++ total += printone(out, "# bytes in available blocks", available_bytes); + + PyOS_snprintf(buf, sizeof(buf), + "%u unused pools * %d bytes", numfreepools, POOL_SIZE); +- total += printone(buf, (size_t)numfreepools * POOL_SIZE); ++ total += printone(out, buf, (size_t)numfreepools * POOL_SIZE); + +- total += printone("# bytes lost to pool headers", pool_header_bytes); +- total += printone("# bytes lost to quantization", quantization); +- total += printone("# bytes lost to arena alignment", arena_alignment); +- (void)printone("Total", total); ++ total += printone(out, "# bytes lost to pool headers", pool_header_bytes); ++ total += printone(out, "# bytes lost to quantization", quantization); ++ total += printone(out, "# bytes lost to arena alignment", arena_alignment); ++ (void)printone(out, "Total", total); + } + +-#endif /* PYMALLOC_DEBUG */ +- + #ifdef Py_USING_MEMORY_DEBUGGER + /* Make this function last so gcc won't inline it since the definition is + * after the reference. 
+diff --git a/Objects/setobject.c b/Objects/setobject.c +index af1ce16..3439b7c 100644 +--- a/Objects/setobject.c ++++ b/Objects/setobject.c +@@ -1088,6 +1088,16 @@ PySet_Fini(void) + Py_CLEAR(emptyfrozenset); + } + ++/* Print summary info about the state of the optimized allocator */ ++void ++_PySet_DebugMallocStats(FILE *out) ++{ ++ _PyDebugAllocatorStats(out, ++ "free PySetObject", ++ numfree, sizeof(PySetObject)); ++} ++ ++ + static PyObject * + set_new(PyTypeObject *type, PyObject *args, PyObject *kwds) + { +diff --git a/Objects/stringobject.c b/Objects/stringobject.c +index 1209197..b8646dd 100644 +--- a/Objects/stringobject.c ++++ b/Objects/stringobject.c +@@ -4843,3 +4843,43 @@ void _Py_ReleaseInternedStrings(void) + PyDict_Clear(interned); + Py_CLEAR(interned); + } ++ ++void _PyString_DebugMallocStats(FILE *out) ++{ ++ ssize_t i; ++ int num_immortal = 0, num_mortal = 0; ++ ssize_t immortal_size = 0, mortal_size = 0; ++ ++ if (interned == NULL || !PyDict_Check(interned)) ++ return; ++ ++ for (i = 0; i <= ((PyDictObject*)interned)->ma_mask; i++) { ++ PyDictEntry *ep = ((PyDictObject*)interned)->ma_table + i; ++ PyObject *pvalue = ep->me_value; ++ if (pvalue != NULL) { ++ PyStringObject *s = (PyStringObject *)ep->me_key; ++ ++ switch (s->ob_sstate) { ++ case SSTATE_NOT_INTERNED: ++ /* XXX Shouldn't happen */ ++ break; ++ case SSTATE_INTERNED_IMMORTAL: ++ num_immortal ++; ++ immortal_size += s->ob_size; ++ break; ++ case SSTATE_INTERNED_MORTAL: ++ num_mortal ++; ++ mortal_size += s->ob_size; ++ break; ++ default: ++ Py_FatalError("Inconsistent interned string state."); ++ } ++ } ++ } ++ ++ fprintf(out, "%d mortal interned strings\n", num_mortal); ++ fprintf(out, "%d immortal interned strings\n", num_immortal); ++ fprintf(out, "total size of all interned strings: " ++ "%zi/%zi " ++ "mortal/immortal\n", mortal_size, immortal_size); ++} +diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c +index 00f2e47..7682d81 100644 +--- a/Objects/tupleobject.c ++++ b/Objects/tupleobject.c +@@ -44,6 +44,22 @@ show_track(void) + } + #endif + ++/* Print summary info about the state of the optimized allocator */ ++void ++_PyTuple_DebugMallocStats(FILE *out) ++{ ++#if PyTuple_MAXSAVESIZE > 0 ++ int i; ++ char buf[128]; ++ for (i = 1; i < PyTuple_MAXSAVESIZE; i++) { ++ PyOS_snprintf(buf, sizeof(buf), ++ "free %d-sized PyTupleObject", i); ++ _PyDebugAllocatorStats(out, ++ buf, ++ numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i)); ++ } ++#endif ++} + + PyObject * + PyTuple_New(register Py_ssize_t size) +diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c +index 6bea370..ced9acf 100644 +--- a/Objects/unicodeobject.c ++++ b/Objects/unicodeobject.c +@@ -8920,6 +8920,12 @@ _PyUnicode_Fini(void) + (void)PyUnicode_ClearFreeList(); + } + ++void _PyUnicode_DebugMallocStats(FILE *out) ++{ ++ _PyDebugAllocatorStats(out, "free PyUnicodeObject", numfree, ++ sizeof(PyUnicodeObject)); ++} ++ + #ifdef __cplusplus + } + #endif +diff --git a/Python/pythonrun.c b/Python/pythonrun.c +index f0fbd74..0b73f3a 100644 +--- a/Python/pythonrun.c ++++ b/Python/pythonrun.c +@@ -557,7 +557,7 @@ Py_Finalize(void) + #endif /* Py_TRACE_REFS */ + #ifdef PYMALLOC_DEBUG + if (Py_GETENV("PYTHONMALLOCSTATS")) +- _PyObject_DebugMallocStats(); ++ _PyObject_DebugMallocStats(stderr); + #endif + + call_ll_exitfuncs(); +diff --git a/Python/sysmodule.c b/Python/sysmodule.c +index 2a7c207..fbb637b 100644 +--- a/Python/sysmodule.c ++++ b/Python/sysmodule.c +@@ -873,6 +873,57 @@ a 11-tuple where the entries in the tuple are counts 
of:\n\ + extern "C" { + #endif + ++static PyObject * ++sys_debugmallocstats(PyObject *self, PyObject *args) ++{ ++ PyObject *file = NULL; ++ FILE *fp; ++ ++ if (!PyArg_ParseTuple(args, "|O!", ++ &PyFile_Type, &file)) { ++ return NULL; ++ } ++ if (!file) { ++ /* Default to sys.stderr: */ ++ file = PySys_GetObject("stderr"); ++ if (!file) { ++ PyErr_SetString(PyExc_ValueError, "sys.stderr not set"); ++ return NULL; ++ } ++ if (!PyFile_Check(file)) { ++ PyErr_SetString(PyExc_TypeError, "sys.stderr is not a file"); ++ return NULL; ++ } ++ } ++ ++ Py_INCREF(file); ++ /* OK, we now own a ref on non-NULL "file" */ ++ ++ fp = PyFile_AsFile(file); ++ if (!fp) { ++ PyErr_SetString(PyExc_ValueError, "file is closed"); ++ Py_DECREF(file); ++ return NULL; ++ } ++ ++ _PyObject_DebugMallocStats(fp); ++ fputc('\n', fp); ++ _PyObject_DebugTypeStats(fp); ++ ++ Py_DECREF(file); ++ ++ Py_RETURN_NONE; ++} ++PyDoc_STRVAR(debugmallocstats_doc, ++"_debugmallocstats([file])\n\ ++\n\ ++Print summary info to the given file (or sys.stderr) about the state of\n\ ++pymalloc's structures.\n\ ++\n\ ++In Py_DEBUG mode, also perform some expensive internal consistency\n\ ++checks.\n\ ++"); ++ + #ifdef Py_TRACE_REFS + /* Defined in objects.c because it uses static globals if that file */ + extern PyObject *_Py_GetObjects(PyObject *, PyObject *); +@@ -971,6 +1022,8 @@ static PyMethodDef sys_methods[] = { + {"settrace", sys_settrace, METH_O, settrace_doc}, + {"gettrace", sys_gettrace, METH_NOARGS, gettrace_doc}, + {"call_tracing", sys_call_tracing, METH_VARARGS, call_tracing_doc}, ++ {"_debugmallocstats", sys_debugmallocstats, METH_VARARGS, ++ debugmallocstats_doc}, + {NULL, NULL} /* sentinel */ + }; + diff --git a/SOURCES/00153-fix-test_gdb-noise.patch b/SOURCES/00153-fix-test_gdb-noise.patch new file mode 100644 index 0000000..50a0917 --- /dev/null +++ b/SOURCES/00153-fix-test_gdb-noise.patch @@ -0,0 +1,31 @@ +--- Lib/test/test_gdb.py.old 2012-04-11 21:04:01.367073855 -0400 ++++ Lib/test/test_gdb.py 2012-04-12 08:52:58.320288761 -0400 +@@ -96,6 +96,15 @@ class DebuggerTests(unittest.TestCase): + # Generate a list of commands in gdb's language: + commands = ['set breakpoint pending yes', + 'break %s' % breakpoint, ++ ++ # GDB as of Fedora 17 onwards can distinguish between the ++ # value of a variable at entry vs current value: ++ # http://sourceware.org/gdb/onlinedocs/gdb/Variables.html ++ # which leads to the selftests failing with errors like this: ++ # AssertionError: 'v@entry=()' != '()' ++ # Disable this: ++ 'set print entry-values no', ++ + 'run'] + if cmds_after_breakpoint: + commands += cmds_after_breakpoint +--- Lib/test/test_gdb.py.old 2012-04-11 21:04:01.367073855 -0400 ++++ Lib/test/test_gdb.py 2012-04-12 08:52:58.320288761 -0400 +@@ -144,6 +153,10 @@ + 'Do you need "set solib-search-path" or ' + '"set sysroot"?', + ) ++ ignore_patterns += ('warning: Unable to open', ++ 'Missing separate debuginfo for', ++ 'Try: yum --disablerepo=', ++ 'Undefined set print command') + for line in errlines: + if not line.startswith(ignore_patterns): + unexpected_errlines.append(line) diff --git a/SOURCES/00155-avoid-ctypes-thunks.patch b/SOURCES/00155-avoid-ctypes-thunks.patch new file mode 100644 index 0000000..92dd668 --- /dev/null +++ b/SOURCES/00155-avoid-ctypes-thunks.patch @@ -0,0 +1,15 @@ +diff -up Python-2.7.3/Lib/ctypes/__init__.py.rhbz814391 Python-2.7.3/Lib/ctypes/__init__.py +--- Python-2.7.3/Lib/ctypes/__init__.py.rhbz814391 2012-04-20 14:51:19.390990244 -0400 ++++ Python-2.7.3/Lib/ctypes/__init__.py 2012-04-20 
14:51:45.141668316 -0400 +@@ -272,11 +272,6 @@ def _reset_cache(): + # _SimpleCData.c_char_p_from_param + POINTER(c_char).from_param = c_char_p.from_param + _pointer_type_cache[None] = c_void_p +- # XXX for whatever reasons, creating the first instance of a callback +- # function is needed for the unittests on Win64 to succeed. This MAY +- # be a compiler bug, since the problem occurs only when _ctypes is +- # compiled with the MS SDK compiler. Or an uninitialized variable? +- CFUNCTYPE(c_int)(lambda: None) + + try: + from _ctypes import set_conversion_mode diff --git a/SOURCES/00156-gdb-autoload-safepath.patch b/SOURCES/00156-gdb-autoload-safepath.patch new file mode 100644 index 0000000..54a5a6e --- /dev/null +++ b/SOURCES/00156-gdb-autoload-safepath.patch @@ -0,0 +1,52 @@ +diff -up Python-2.7.3/Lib/test/test_gdb.py.gdb-autoload-safepath Python-2.7.3/Lib/test/test_gdb.py +--- Python-2.7.3/Lib/test/test_gdb.py.gdb-autoload-safepath 2012-04-30 15:53:57.254045220 -0400 ++++ Python-2.7.3/Lib/test/test_gdb.py 2012-04-30 16:19:19.569941124 -0400 +@@ -54,6 +54,19 @@ def gdb_has_frame_select(): + + HAS_PYUP_PYDOWN = gdb_has_frame_select() + ++def gdb_has_autoload_safepath(): ++ # Recent GDBs will only auto-load scripts from certain safe ++ # locations, so we will need to turn off this protection. ++ # However, if the GDB doesn't have it, then the following ++ # command will generate noise on stderr (rhbz#817072): ++ cmd = "--eval-command=set auto-load safe-path /" ++ p = subprocess.Popen(["gdb", "--batch", cmd], ++ stderr=subprocess.PIPE) ++ _, stderr = p.communicate() ++ return '"on" or "off" expected.' not in stderr ++ ++HAS_AUTOLOAD_SAFEPATH = gdb_has_autoload_safepath() ++ + class DebuggerTests(unittest.TestCase): + + """Test that the debugger can debug Python.""" +@@ -112,15 +125,28 @@ class DebuggerTests(unittest.TestCase): + 'set print entry-values no', + + 'run'] ++ ++ if HAS_AUTOLOAD_SAFEPATH: ++ # Recent GDBs will only auto-load scripts from certain safe ++ # locations. 
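
For reference, the gdb command line that the patched helper ends up assembling is roughly the following (a sketch only: gdb must be installed, and the breakpoint, interpreter path and script passed to --args are illustrative placeholders, not values taken from the test):

    import subprocess

    args = ["gdb", "--batch",
            # only emitted when HAS_AUTOLOAD_SAFEPATH is true:
            "--init-eval-command=set auto-load safe-path /",
            "--eval-command=set breakpoint pending yes",
            "--eval-command=break PyObject_Print",
            "--eval-command=set print entry-values no",
            "--eval-command=run",
            "--eval-command=backtrace",
            "--args", "/usr/bin/python", "-c", "print 42"]
    p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    print out
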
++ # Where necessary, turn off this protection to ensure that ++ # our -gdb.py script can be loaded - but not on earlier gdb builds ++ # as this would generate noise on stderr (rhbz#817072): ++ init_commands = ['set auto-load safe-path /'] ++ else: ++ init_commands = [] ++ + if cmds_after_breakpoint: + commands += cmds_after_breakpoint + else: + commands += ['backtrace'] + ++ # print init_commands + # print commands + + # Use "commands" to generate the arguments with which to invoke "gdb": + args = ["gdb", "--batch"] ++ args += ['--init-eval-command=%s' % cmd for cmd in init_commands] + args += ['--eval-command=%s' % cmd for cmd in commands] + args += ["--args", + sys.executable] diff --git a/SOURCES/00157-uid-gid-overflows.patch b/SOURCES/00157-uid-gid-overflows.patch new file mode 100644 index 0000000..13546bb --- /dev/null +++ b/SOURCES/00157-uid-gid-overflows.patch @@ -0,0 +1,49 @@ +diff -up Python-2.7.3/Lib/test/test_os.py.uid-gid-overflows Python-2.7.3/Lib/test/test_os.py +--- Python-2.7.3/Lib/test/test_os.py.uid-gid-overflows 2012-04-09 19:07:32.000000000 -0400 ++++ Python-2.7.3/Lib/test/test_os.py 2012-06-26 14:51:36.000817929 -0400 +@@ -677,30 +677,36 @@ if sys.platform != 'win32': + def test_setuid(self): + if os.getuid() != 0: + self.assertRaises(os.error, os.setuid, 0) ++ self.assertRaises(TypeError, os.setuid, 'not an int') + self.assertRaises(OverflowError, os.setuid, 1<<32) + + if hasattr(os, 'setgid'): + def test_setgid(self): + if os.getuid() != 0: + self.assertRaises(os.error, os.setgid, 0) ++ self.assertRaises(TypeError, os.setgid, 'not an int') + self.assertRaises(OverflowError, os.setgid, 1<<32) + + if hasattr(os, 'seteuid'): + def test_seteuid(self): + if os.getuid() != 0: + self.assertRaises(os.error, os.seteuid, 0) ++ self.assertRaises(TypeError, os.seteuid, 'not an int') + self.assertRaises(OverflowError, os.seteuid, 1<<32) + + if hasattr(os, 'setegid'): + def test_setegid(self): + if os.getuid() != 0: + self.assertRaises(os.error, os.setegid, 0) ++ self.assertRaises(TypeError, os.setegid, 'not an int') + self.assertRaises(OverflowError, os.setegid, 1<<32) + + if hasattr(os, 'setreuid'): + def test_setreuid(self): + if os.getuid() != 0: + self.assertRaises(os.error, os.setreuid, 0, 0) ++ self.assertRaises(TypeError, os.setreuid, 'not an int', 0) ++ self.assertRaises(TypeError, os.setreuid, 0, 'not an int') + self.assertRaises(OverflowError, os.setreuid, 1<<32, 0) + self.assertRaises(OverflowError, os.setreuid, 0, 1<<32) + +@@ -715,6 +721,8 @@ if sys.platform != 'win32': + def test_setregid(self): + if os.getuid() != 0: + self.assertRaises(os.error, os.setregid, 0, 0) ++ self.assertRaises(TypeError, os.setregid, 'not an int', 0) ++ self.assertRaises(TypeError, os.setregid, 0, 'not an int') + self.assertRaises(OverflowError, os.setregid, 1<<32, 0) + self.assertRaises(OverflowError, os.setregid, 0, 1<<32) + diff --git a/SOURCES/00165-crypt-module-salt-backport.patch b/SOURCES/00165-crypt-module-salt-backport.patch new file mode 100644 index 0000000..4308b4c --- /dev/null +++ b/SOURCES/00165-crypt-module-salt-backport.patch @@ -0,0 +1,285 @@ +diff -up Python-2.7.3/Doc/library/crypt.rst.crypt-module-salt-backport Python-2.7.3/Doc/library/crypt.rst +--- Python-2.7.3/Doc/library/crypt.rst.crypt-module-salt-backport 2012-04-09 19:07:28.000000000 -0400 ++++ Python-2.7.3/Doc/library/crypt.rst 2013-02-19 16:44:20.465334062 -0500 +@@ -16,9 +16,9 @@ + + This module implements an interface to the :manpage:`crypt(3)` routine, which is + a one-way hash function based upon a 
modified DES algorithm; see the Unix man +-page for further details. Possible uses include allowing Python scripts to +-accept typed passwords from the user, or attempting to crack Unix passwords with +-a dictionary. ++page for further details. Possible uses include storing hashed passwords ++so you can check passwords without storing the actual password, or attempting ++to crack Unix passwords with a dictionary. + + .. index:: single: crypt(3) + +@@ -27,15 +27,81 @@ the :manpage:`crypt(3)` routine in the r + extensions available on the current implementation will also be available on + this module. + ++Hashing Methods ++--------------- + +-.. function:: crypt(word, salt) ++The :mod:`crypt` module defines the list of hashing methods (not all methods ++are available on all platforms): ++ ++.. data:: METHOD_SHA512 ++ ++ A Modular Crypt Format method with 16 character salt and 86 character ++ hash. This is the strongest method. ++ ++.. versionadded:: 3.3 ++ ++.. data:: METHOD_SHA256 ++ ++ Another Modular Crypt Format method with 16 character salt and 43 ++ character hash. ++ ++.. versionadded:: 3.3 ++ ++.. data:: METHOD_MD5 ++ ++ Another Modular Crypt Format method with 8 character salt and 22 ++ character hash. ++ ++.. versionadded:: 3.3 ++ ++.. data:: METHOD_CRYPT ++ ++ The traditional method with a 2 character salt and 13 characters of ++ hash. This is the weakest method. ++ ++.. versionadded:: 3.3 ++ ++ ++Module Attributes ++----------------- ++ ++ ++.. attribute:: methods ++ ++ A list of available password hashing algorithms, as ++ ``crypt.METHOD_*`` objects. This list is sorted from strongest to ++ weakest, and is guaranteed to have at least ``crypt.METHOD_CRYPT``. ++ ++.. versionadded:: 3.3 ++ ++ ++Module Functions ++---------------- ++ ++The :mod:`crypt` module defines the following functions: ++ ++.. function:: crypt(word, salt=None) + + *word* will usually be a user's password as typed at a prompt or in a graphical +- interface. *salt* is usually a random two-character string which will be used +- to perturb the DES algorithm in one of 4096 ways. The characters in *salt* must +- be in the set ``[./a-zA-Z0-9]``. Returns the hashed password as a string, which +- will be composed of characters from the same alphabet as the salt (the first two +- characters represent the salt itself). ++ interface. The optional *salt* is either a string as returned from ++ :func:`mksalt`, one of the ``crypt.METHOD_*`` values (though not all ++ may be available on all platforms), or a full encrypted password ++ including salt, as returned by this function. If *salt* is not ++ provided, the strongest method will be used (as returned by ++ :func:`methods`. ++ ++ Checking a password is usually done by passing the plain-text password ++ as *word* and the full results of a previous :func:`crypt` call, ++ which should be the same as the results of this call. ++ ++ *salt* (either a random 2 or 16 character string, possibly prefixed with ++ ``$digit$`` to indicate the method) which will be used to perturb the ++ encryption algorithm. The characters in *salt* must be in the set ++ ``[./a-zA-Z0-9]``, with the exception of Modular Crypt Format which ++ prefixes a ``$digit$``. ++ ++ Returns the hashed password as a string, which will be composed of ++ characters from the same alphabet as the salt. + + .. index:: single: crypt(3) + +@@ -43,6 +109,27 @@ this module. + different sizes in the *salt*, it is recommended to use the full crypted + password as salt when checking for a password. + ++.. 
versionchanged:: 3.3 ++ Before version 3.3, *salt* must be specified as a string and cannot ++ accept ``crypt.METHOD_*`` values (which don't exist anyway). ++ ++ ++.. function:: mksalt(method=None) ++ ++ Return a randomly generated salt of the specified method. If no ++ *method* is given, the strongest method available as returned by ++ :func:`methods` is used. ++ ++ The return value is a string either of 2 characters in length for ++ ``crypt.METHOD_CRYPT``, or 19 characters starting with ``$digit$`` and ++ 16 random characters from the set ``[./a-zA-Z0-9]``, suitable for ++ passing as the *salt* argument to :func:`crypt`. ++ ++.. versionadded:: 3.3 ++ ++Examples ++-------- ++ + A simple example illustrating typical use:: + + import crypt, getpass, pwd +@@ -59,3 +146,11 @@ A simple example illustrating typical us + else: + return 1 + ++To generate a hash of a password using the strongest available method and ++check it against the original:: ++ ++ import crypt ++ ++ hashed = crypt.crypt(plaintext) ++ if hashed != crypt.crypt(plaintext, hashed): ++ raise "Hashed version doesn't validate against original" +diff -up Python-2.7.3/Lib/crypt.py.crypt-module-salt-backport Python-2.7.3/Lib/crypt.py +--- Python-2.7.3/Lib/crypt.py.crypt-module-salt-backport 2013-02-19 16:44:20.465334062 -0500 ++++ Python-2.7.3/Lib/crypt.py 2013-02-19 16:49:56.425311089 -0500 +@@ -0,0 +1,71 @@ ++"""Wrapper to the POSIX crypt library call and associated functionality. ++ ++Note that the ``methods`` and ``METHOD_*`` attributes are non-standard ++extensions to Python 2.7, backported from 3.3""" ++ ++import _crypt ++import string as _string ++from random import SystemRandom as _SystemRandom ++from collections import namedtuple as _namedtuple ++ ++ ++_saltchars = _string.ascii_letters + _string.digits + './' ++_sr = _SystemRandom() ++ ++ ++class _Method(_namedtuple('_Method', 'name ident salt_chars total_size')): ++ ++ """Class representing a salt method per the Modular Crypt Format or the ++ legacy 2-character crypt method.""" ++ ++ def __repr__(self): ++ return '' % self.name ++ ++ ++def mksalt(method=None): ++ """Generate a salt for the specified method. ++ ++ If not specified, the strongest available method will be used. ++ ++ This is a non-standard extension to Python 2.7, backported from 3.3 ++ """ ++ if method is None: ++ method = methods[0] ++ s = '$%s$' % method.ident if method.ident else '' ++ s += ''.join(_sr.sample(_saltchars, method.salt_chars)) ++ return s ++ ++ ++def crypt(word, salt=None): ++ """Return a string representing the one-way hash of a password, with a salt ++ prepended. ++ ++ If ``salt`` is not specified or is ``None``, the strongest ++ available method will be selected and a salt generated. Otherwise, ++ ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as ++ returned by ``crypt.mksalt()``. ++ ++ Note that these are non-standard extensions to Python 2.7's crypt.crypt() ++ entrypoint, backported from 3.3: the standard Python 2.7 crypt.crypt() ++ entrypoint requires two strings as the parameters, and does not support ++ keyword arguments. 
++ """ ++ if salt is None or isinstance(salt, _Method): ++ salt = mksalt(salt) ++ return _crypt.crypt(word, salt) ++ ++ ++# available salting/crypto methods ++METHOD_CRYPT = _Method('CRYPT', None, 2, 13) ++METHOD_MD5 = _Method('MD5', '1', 8, 34) ++METHOD_SHA256 = _Method('SHA256', '5', 16, 63) ++METHOD_SHA512 = _Method('SHA512', '6', 16, 106) ++ ++methods = [] ++for _method in (METHOD_SHA512, METHOD_SHA256, METHOD_MD5): ++ _result = crypt('', _method) ++ if _result and len(_result) == _method.total_size: ++ methods.append(_method) ++methods.append(METHOD_CRYPT) ++del _result, _method ++ +diff -up Python-2.7.3/Lib/test/test_crypt.py.crypt-module-salt-backport Python-2.7.3/Lib/test/test_crypt.py +--- Python-2.7.3/Lib/test/test_crypt.py.crypt-module-salt-backport 2012-04-09 19:07:31.000000000 -0400 ++++ Python-2.7.3/Lib/test/test_crypt.py 2013-02-19 16:44:20.465334062 -0500 +@@ -10,6 +10,25 @@ class CryptTestCase(unittest.TestCase): + if test_support.verbose: + print 'Test encryption: ', c + ++ def test_salt(self): ++ self.assertEqual(len(crypt._saltchars), 64) ++ for method in crypt.methods: ++ salt = crypt.mksalt(method) ++ self.assertEqual(len(salt), ++ method.salt_chars + (3 if method.ident else 0)) ++ ++ def test_saltedcrypt(self): ++ for method in crypt.methods: ++ pw = crypt.crypt('assword', method) ++ self.assertEqual(len(pw), method.total_size) ++ pw = crypt.crypt('assword', crypt.mksalt(method)) ++ self.assertEqual(len(pw), method.total_size) ++ ++ def test_methods(self): ++ # Gurantee that METHOD_CRYPT is the last method in crypt.methods. ++ self.assertTrue(len(crypt.methods) >= 1) ++ self.assertEqual(crypt.METHOD_CRYPT, crypt.methods[-1]) ++ + def test_main(): + test_support.run_unittest(CryptTestCase) + +diff -up Python-2.7.3/Modules/cryptmodule.c.crypt-module-salt-backport Python-2.7.3/Modules/cryptmodule.c +--- Python-2.7.3/Modules/cryptmodule.c.crypt-module-salt-backport 2012-04-09 19:07:34.000000000 -0400 ++++ Python-2.7.3/Modules/cryptmodule.c 2013-02-19 16:44:20.466334063 -0500 +@@ -43,7 +43,7 @@ static PyMethodDef crypt_methods[] = { + }; + + PyMODINIT_FUNC +-initcrypt(void) ++init_crypt(void) + { +- Py_InitModule("crypt", crypt_methods); ++ Py_InitModule("_crypt", crypt_methods); + } +diff -up Python-2.7.3/Modules/Setup.dist.crypt-module-salt-backport Python-2.7.3/Modules/Setup.dist +--- Python-2.7.3/Modules/Setup.dist.crypt-module-salt-backport 2013-02-19 16:44:20.463334063 -0500 ++++ Python-2.7.3/Modules/Setup.dist 2013-02-19 16:44:20.466334063 -0500 +@@ -221,7 +221,7 @@ _ssl _ssl.c \ + # + # First, look at Setup.config; configure may have set this for you. 
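
The backported module above can be exercised directly. A short sketch of the intended workflow (assuming a glibc whose crypt(3) supports the SHA-512 method; 'correct horse' is an illustrative password):

    import crypt

    # Strongest method first; METHOD_CRYPT is guaranteed to be last:
    print crypt.methods

    salt = crypt.mksalt(crypt.METHOD_SHA512)    # '$6$' plus 16 salt characters
    hashed = crypt.crypt('correct horse', salt)

    # Verification passes the stored hash back in as the salt:
    if crypt.crypt('correct horse', hashed) != hashed:
        raise ValueError("password does not validate")
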
+ +-crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems ++_crypt _cryptmodule.c -lcrypt # crypt(3); needs -lcrypt on some systems + + + # Some more UNIX dependent modules -- off by default, since these +diff -up Python-2.7.3/setup.py.crypt-module-salt-backport Python-2.7.3/setup.py +--- Python-2.7.3/setup.py.crypt-module-salt-backport 2013-02-19 16:44:20.425334067 -0500 ++++ Python-2.7.3/setup.py 2013-02-19 16:44:20.466334063 -0500 +@@ -693,7 +693,7 @@ class PyBuildExt(build_ext): + libs = ['crypt'] + else: + libs = [] +- exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) ++ exts.append( Extension('_crypt', ['_cryptmodule.c'], libraries=libs) ) + + # CSV files + exts.append( Extension('_csv', ['_csv.c']) ) diff --git a/SOURCES/00166-fix-fake-repr-in-gdb-hooks.patch b/SOURCES/00166-fix-fake-repr-in-gdb-hooks.patch new file mode 100644 index 0000000..bfd2459 --- /dev/null +++ b/SOURCES/00166-fix-fake-repr-in-gdb-hooks.patch @@ -0,0 +1,125 @@ +diff -up Python-2.7.3/Tools/gdb/libpython.py.fix-fake-repr-in-gdb-hooks Python-2.7.3/Tools/gdb/libpython.py +--- Python-2.7.3/Tools/gdb/libpython.py.fix-fake-repr-in-gdb-hooks 2013-02-19 17:21:33.541181366 -0500 ++++ Python-2.7.3/Tools/gdb/libpython.py 2013-02-19 17:21:42.090180782 -0500 +@@ -105,6 +105,24 @@ class TruncatedStringIO(object): + def getvalue(self): + return self._val + ++class FakeProxy(object): ++ """ ++ Class representing a non-descript PyObject* value in the inferior ++ process for when we don't have a custom scraper, intended to have ++ a sane repr(). ++ """ ++ def __init__(self, tp_name, address): ++ self.tp_name = tp_name ++ self.address = address ++ ++ def __repr__(self): ++ # For the NULL pointer, we have no way of knowing a type, so ++ # special-case it as per ++ # http://bugs.python.org/issue8032#msg100882 ++ if self.address == 0: ++ return '0x0' ++ return '<%s at remote 0x%x>' % (self.tp_name, self.address) ++ + class PyObjectPtr(object): + """ + Class wrapping a gdb.Value that's a either a (PyObject*) within the +@@ -232,28 +250,8 @@ class PyObjectPtr(object): + visiting object graphs with loops). Analogous to Py_ReprEnter and + Py_ReprLeave + ''' +- +- class FakeRepr(object): +- """ +- Class representing a non-descript PyObject* value in the inferior +- process for when we don't have a custom scraper, intended to have +- a sane repr(). 
+- """ +- +- def __init__(self, tp_name, address): +- self.tp_name = tp_name +- self.address = address +- +- def __repr__(self): +- # For the NULL pointer, we have no way of knowing a type, so +- # special-case it as per +- # http://bugs.python.org/issue8032#msg100882 +- if self.address == 0: +- return '0x0' +- return '<%s at remote 0x%x>' % (self.tp_name, self.address) +- +- return FakeRepr(self.safe_tp_name(), +- long(self._gdbval)) ++ return FakeProxy(self.safe_tp_name(), ++ long(self._gdbval)) + + def write_repr(self, out, visited): + ''' +@@ -384,7 +382,7 @@ def _write_instance_repr(out, visited, n + if not first: + out.write(', ') + first = False +- out.write(pyop_arg.proxyval(visited)) ++ out.write(str(pyop_arg.proxyval(visited))) + out.write('=') + pyop_val.write_repr(out, visited) + out.write(')') +@@ -785,6 +783,8 @@ class PyNoneStructPtr(PyObjectPtr): + def proxyval(self, visited): + return None + ++class CantReadFilename(ValueError): ++ pass + + class PyFrameObjectPtr(PyObjectPtr): + _typename = 'PyFrameObject' +@@ -861,7 +861,10 @@ class PyFrameObjectPtr(PyObjectPtr): + '''Get the path of the current Python source file, as a string''' + if self.is_optimized_out(): + return '(frame information optimized out)' +- return self.co_filename.proxyval(set()) ++ value = self.co_filename.proxyval(set()) ++ if isinstance(value, FakeProxy): ++ raise CantReadFilename('unable to extract filename)') ++ return value + + def current_line_num(self): + '''Get current line number as an integer (1-based) +@@ -907,7 +910,7 @@ class PyFrameObjectPtr(PyObjectPtr): + out.write(', ') + first = False + +- out.write(pyop_name.proxyval(visited)) ++ out.write(str(pyop_name.proxyval(visited))) + out.write('=') + pyop_value.write_repr(out, visited) + +@@ -1252,8 +1255,11 @@ class Frame(object): + if pyop: + sys.stdout.write('#%i %s\n' % (self.get_index(), pyop.get_truncated_repr(MAX_OUTPUT_LEN))) + if not pyop.is_optimized_out(): +- line = pyop.current_line() +- sys.stdout.write(' %s\n' % line.strip()) ++ try: ++ line = pyop.current_line() ++ sys.stdout.write(' %s\n' % line.strip()) ++ except CantReadFilename: ++ sys.stdout.write(' %s\n' % '(unable to read filename)') + else: + sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index()) + else: +@@ -1303,7 +1309,11 @@ class PyList(gdb.Command): + print 'Unable to read information on python frame' + return + +- filename = pyop.filename() ++ try: ++ filename = pyop.filename() ++ except CantReadFilename: ++ print "Unable to extract filename from python frame" ++ return + lineno = pyop.current_line_num() + + if start is None: diff --git a/SOURCES/00167-disable-stack-navigation-tests-when-optimized-in-test_gdb.patch b/SOURCES/00167-disable-stack-navigation-tests-when-optimized-in-test_gdb.patch new file mode 100644 index 0000000..3fa94fb --- /dev/null +++ b/SOURCES/00167-disable-stack-navigation-tests-when-optimized-in-test_gdb.patch @@ -0,0 +1,43 @@ +diff -up Python-2.7.3/Lib/test/test_gdb.py.disable-stack-navigation-tests-when-optimized-in-test_gdb Python-2.7.3/Lib/test/test_gdb.py +--- Python-2.7.3/Lib/test/test_gdb.py.disable-stack-navigation-tests-when-optimized-in-test_gdb 2013-02-20 12:27:05.669526425 -0500 ++++ Python-2.7.3/Lib/test/test_gdb.py 2013-02-20 12:27:05.715526422 -0500 +@@ -653,10 +653,10 @@ class PyListTests(DebuggerTests): + ' 3 def foo(a, b, c):\n', + bt) + ++@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") ++@unittest.skipIf(python_is_optimized(), ++ "Python was compiled with 
optimizations") + class StackNavigationTests(DebuggerTests): +- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") +- @unittest.skipIf(python_is_optimized(), +- "Python was compiled with optimizations") + def test_pyup_command(self): + 'Verify that the "py-up" command works' + bt = self.get_stack_trace(script=self.get_sample_script(), +@@ -667,7 +667,6 @@ class StackNavigationTests(DebuggerTests + baz\(a, b, c\) + $''') + +- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") + def test_down_at_bottom(self): + 'Verify handling of "py-down" at the bottom of the stack' + bt = self.get_stack_trace(script=self.get_sample_script(), +@@ -675,7 +674,6 @@ $''') + self.assertEndsWith(bt, + 'Unable to find a newer python frame\n') + +- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") + def test_up_at_top(self): + 'Verify handling of "py-up" at the top of the stack' + bt = self.get_stack_trace(script=self.get_sample_script(), +@@ -683,9 +681,6 @@ $''') + self.assertEndsWith(bt, + 'Unable to find an older python frame\n') + +- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") +- @unittest.skipIf(python_is_optimized(), +- "Python was compiled with optimizations") + def test_up_then_down(self): + 'Verify "py-up" followed by "py-down"' + bt = self.get_stack_trace(script=self.get_sample_script(), diff --git a/SOURCES/00168-distutils-cflags.patch b/SOURCES/00168-distutils-cflags.patch new file mode 100644 index 0000000..0c4a8df --- /dev/null +++ b/SOURCES/00168-distutils-cflags.patch @@ -0,0 +1,12 @@ +diff -up Python-2.6.6/Lib/distutils/sysconfig.py.distutils-cflags Python-2.6.6/Lib/distutils/sysconfig.py +--- Python-2.6.6/Lib/distutils/sysconfig.py.distutils-cflags 2011-08-12 17:18:17.833091153 -0400 ++++ Python-2.6.6/Lib/distutils/sysconfig.py 2011-08-12 17:18:27.449106938 -0400 +@@ -187,7 +187,7 @@ def customize_compiler(compiler): + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: +- cflags = opt + ' ' + os.environ['CFLAGS'] ++ cflags = cflags + ' ' + os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] diff --git a/SOURCES/00169-avoid-implicit-usage-of-md5-in-multiprocessing.patch b/SOURCES/00169-avoid-implicit-usage-of-md5-in-multiprocessing.patch new file mode 100644 index 0000000..debf92f --- /dev/null +++ b/SOURCES/00169-avoid-implicit-usage-of-md5-in-multiprocessing.patch @@ -0,0 +1,41 @@ +diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py +--- a/Lib/multiprocessing/connection.py ++++ b/Lib/multiprocessing/connection.py +@@ -41,6 +41,10 @@ + # A very generous timeout when it comes to local connections... + CONNECTION_TIMEOUT = 20. + ++# The hmac module implicitly defaults to using MD5. 
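
The comment above introduces the change made just below: the multiprocessing challenge/response now hands hmac.new() an explicit digest instead of relying on its MD5 default. In isolation the pattern looks like this (a sketch using the stock hmac, hashlib and os modules; the key and message here merely stand in for the connection authkey and random challenge):

    import hmac, hashlib, os

    key = os.urandom(32)        # stands in for the connection authkey
    message = os.urandom(20)    # stands in for the random challenge bytes

    # Without a digestmod, hmac.new() silently falls back to MD5; name one
    # explicitly, as deliver_challenge()/answer_challenge() now do:
    digest = hmac.new(key, message, hashlib.sha256).digest()
    print len(digest) == 32
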
++# Support using a stronger algorithm for the challenge/response code: ++HMAC_DIGEST_NAME='sha256' ++ + _mmap_counter = itertools.count() + + default_family = 'AF_INET' +@@ -700,12 +704,16 @@ + WELCOME = b'#WELCOME#' + FAILURE = b'#FAILURE#' + ++def get_digestmod_for_hmac(): ++ import hashlib ++ return getattr(hashlib, HMAC_DIGEST_NAME) ++ + def deliver_challenge(connection, authkey): + import hmac + assert isinstance(authkey, bytes) + message = os.urandom(MESSAGE_LENGTH) + connection.send_bytes(CHALLENGE + message) +- digest = hmac.new(authkey, message).digest() ++ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest() + response = connection.recv_bytes(256) # reject large message + if response == digest: + connection.send_bytes(WELCOME) +@@ -719,7 +727,7 @@ + message = connection.recv_bytes(256) # reject large message + assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message + message = message[len(CHALLENGE):] +- digest = hmac.new(authkey, message).digest() ++ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest() + connection.send_bytes(digest) + response = connection.recv_bytes(256) # reject large message + if response != WELCOME: diff --git a/SOURCES/00170-gc-assertions.patch b/SOURCES/00170-gc-assertions.patch new file mode 100644 index 0000000..3fb37ff --- /dev/null +++ b/SOURCES/00170-gc-assertions.patch @@ -0,0 +1,276 @@ +diff -up Python-2.7.3/Lib/test/test_gc.py.gc-assertions Python-2.7.3/Lib/test/test_gc.py +--- Python-2.7.3/Lib/test/test_gc.py.gc-assertions 2013-02-20 16:28:20.890536607 -0500 ++++ Python-2.7.3/Lib/test/test_gc.py 2013-02-20 16:39:52.720489297 -0500 +@@ -1,6 +1,7 @@ + import unittest +-from test.test_support import verbose, run_unittest ++from test.test_support import verbose, run_unittest, import_module + import sys ++import sysconfig + import time + import gc + import weakref +@@ -32,6 +33,8 @@ class GC_Detector(object): + self.wr = weakref.ref(C1055820(666), it_happened) + + ++BUILT_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS']) ++ + ### Tests + ############################################################################### + +@@ -476,6 +479,49 @@ class GCTests(unittest.TestCase): + # would be damaged, with an empty __dict__. 
+ self.assertEqual(x, None) + ++ @unittest.skipIf(BUILT_WITH_NDEBUG, ++ 'built with -NDEBUG') ++ def test_refcount_errors(self): ++ # Verify the "handling" of objects with broken refcounts ++ ++ import_module("ctypes") #skip if not supported ++ ++ import subprocess ++ code = '''if 1: ++ a = [] ++ b = [a] ++ ++ # Simulate the refcount of "a" being too low (compared to the ++ # references held on it by live data), but keeping it above zero ++ # (to avoid deallocating it): ++ import ctypes ++ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a)) ++ ++ # The garbage collector should now have a fatal error when it reaches ++ # the broken object: ++ import gc ++ gc.collect() ++ ''' ++ p = subprocess.Popen([sys.executable, "-c", code], ++ stdout=subprocess.PIPE, ++ stderr=subprocess.PIPE) ++ stdout, stderr = p.communicate() ++ p.stdout.close() ++ p.stderr.close() ++ # Verify that stderr has a useful error message: ++ self.assertRegexpMatches(stderr, ++ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "gc->gc.gc_refs != 0" failed.') ++ self.assertRegexpMatches(stderr, ++ b'refcount was too small') ++ self.assertRegexpMatches(stderr, ++ b'object : \[\]') ++ self.assertRegexpMatches(stderr, ++ b'type : list') ++ self.assertRegexpMatches(stderr, ++ b'refcount: 1') ++ self.assertRegexpMatches(stderr, ++ b'address : 0x[0-9a-f]+') ++ + class GCTogglingTests(unittest.TestCase): + def setUp(self): + gc.enable() +diff -up Python-2.7.3/Modules/gcmodule.c.gc-assertions Python-2.7.3/Modules/gcmodule.c +--- Python-2.7.3/Modules/gcmodule.c.gc-assertions 2012-04-09 19:07:34.000000000 -0400 ++++ Python-2.7.3/Modules/gcmodule.c 2013-02-20 16:28:21.029536600 -0500 +@@ -21,6 +21,73 @@ + #include "Python.h" + #include "frameobject.h" /* for PyFrame_ClearFreeList */ + ++/* ++ Define a pair of assertion macros. ++ ++ These work like the regular C assert(), in that they will abort the ++ process with a message on stderr if the given condition fails to hold, ++ but compile away to nothing if NDEBUG is defined. ++ ++ However, before aborting, Python will also try to call _PyObject_Dump() on ++ the given object. This may be of use when investigating bugs in which a ++ particular object is corrupt (e.g. buggy a tp_visit method in an extension ++ module breaking the garbage collector), to help locate the broken objects. ++ ++ The WITH_MSG variant allows you to supply an additional message that Python ++ will attempt to print to stderr, after the object dump. ++*/ ++#ifdef NDEBUG ++/* No debugging: compile away the assertions: */ ++#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0) ++#else ++/* With debugging: generate checks: */ ++#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \ ++ ((expr) \ ++ ? 
(void)(0) \ ++ : _PyObject_AssertFailed((obj), \ ++ (msg), \ ++ (__STRING(expr)), \ ++ (__FILE__), \ ++ (__LINE__), \ ++ (__PRETTY_FUNCTION__))) ++#endif ++ ++#define PyObject_ASSERT(obj, expr) \ ++ PyObject_ASSERT_WITH_MSG(obj, expr, NULL) ++ ++static void _PyObject_AssertFailed(PyObject *, const char *, ++ const char *, const char *, int, ++ const char *); ++ ++static void ++_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr, ++ const char *file, int line, const char *function) ++{ ++ fprintf(stderr, ++ "%s:%d: %s: Assertion \"%s\" failed.\n", ++ file, line, function, expr); ++ if (msg) { ++ fprintf(stderr, "%s\n", msg); ++ } ++ ++ fflush(stderr); ++ ++ if (obj) { ++ /* This might succeed or fail, but we're about to abort, so at least ++ try to provide any extra info we can: */ ++ _PyObject_Dump(obj); ++ } ++ else { ++ fprintf(stderr, "NULL object\n"); ++ } ++ ++ fflush(stdout); ++ fflush(stderr); ++ ++ /* Terminate the process: */ ++ abort(); ++} ++ + /* Get an object's GC head */ + #define AS_GC(o) ((PyGC_Head *)(o)-1) + +@@ -288,7 +355,8 @@ update_refs(PyGC_Head *containers) + { + PyGC_Head *gc = containers->gc.gc_next; + for (; gc != containers; gc = gc->gc.gc_next) { +- assert(gc->gc.gc_refs == GC_REACHABLE); ++ PyObject_ASSERT(FROM_GC(gc), ++ gc->gc.gc_refs == GC_REACHABLE); + gc->gc.gc_refs = Py_REFCNT(FROM_GC(gc)); + /* Python's cyclic gc should never see an incoming refcount + * of 0: if something decref'ed to 0, it should have been +@@ -308,7 +376,8 @@ update_refs(PyGC_Head *containers) + * so serious that maybe this should be a release-build + * check instead of an assert? + */ +- assert(gc->gc.gc_refs != 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ gc->gc.gc_refs != 0); + } + } + +@@ -323,7 +392,9 @@ visit_decref(PyObject *op, void *data) + * generation being collected, which can be recognized + * because only they have positive gc_refs. + */ +- assert(gc->gc.gc_refs != 0); /* else refcount was too small */ ++ PyObject_ASSERT_WITH_MSG(FROM_GC(gc), ++ gc->gc.gc_refs != 0, ++ "refcount was too small"); + if (gc->gc.gc_refs > 0) + gc->gc.gc_refs--; + } +@@ -383,9 +454,10 @@ visit_reachable(PyObject *op, PyGC_Head + * If gc_refs == GC_UNTRACKED, it must be ignored. + */ + else { +- assert(gc_refs > 0 +- || gc_refs == GC_REACHABLE +- || gc_refs == GC_UNTRACKED); ++ PyObject_ASSERT(FROM_GC(gc), ++ gc_refs > 0 ++ || gc_refs == GC_REACHABLE ++ || gc_refs == GC_UNTRACKED); + } + } + return 0; +@@ -427,7 +499,7 @@ move_unreachable(PyGC_Head *young, PyGC_ + */ + PyObject *op = FROM_GC(gc); + traverseproc traverse = Py_TYPE(op)->tp_traverse; +- assert(gc->gc.gc_refs > 0); ++ PyObject_ASSERT(op, gc->gc.gc_refs > 0); + gc->gc.gc_refs = GC_REACHABLE; + (void) traverse(op, + (visitproc)visit_reachable, +@@ -494,7 +566,8 @@ move_finalizers(PyGC_Head *unreachable, + for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) { + PyObject *op = FROM_GC(gc); + +- assert(IS_TENTATIVELY_UNREACHABLE(op)); ++ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op)); ++ + next = gc->gc.gc_next; + + if (has_finalizer(op)) { +@@ -570,7 +643,7 @@ handle_weakrefs(PyGC_Head *unreachable, + PyWeakReference **wrlist; + + op = FROM_GC(gc); +- assert(IS_TENTATIVELY_UNREACHABLE(op)); ++ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op)); + next = gc->gc.gc_next; + + if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op))) +@@ -591,9 +664,9 @@ handle_weakrefs(PyGC_Head *unreachable, + * the callback pointer intact. Obscure: it also + * changes *wrlist. 
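
A condensed version of how the new assertions can be tripped from Python code, mirroring the test_gc test added above (run it in a throwaway subprocess: on a build compiled without NDEBUG the child interpreter aborts with the "refcount was too small" diagnostic):

    import subprocess, sys

    code = '''if 1:
        import ctypes, gc
        a = []
        b = [a]
        # Make the refcount of "a" too low relative to the references
        # actually held on it, without dropping it to zero:
        ctypes.pythonapi.Py_DecRef(ctypes.py_object(a))
        gc.collect()   # visit_decref() hits the "refcount was too small" assert
        '''
    p = subprocess.Popen([sys.executable, '-c', code],
                         stderr=subprocess.PIPE)
    _, err = p.communicate()
    print err
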
+ */ +- assert(wr->wr_object == op); ++ PyObject_ASSERT(wr->wr_object, wr->wr_object == op); + _PyWeakref_ClearRef(wr); +- assert(wr->wr_object == Py_None); ++ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None); + if (wr->wr_callback == NULL) + continue; /* no callback */ + +@@ -627,7 +700,7 @@ handle_weakrefs(PyGC_Head *unreachable, + */ + if (IS_TENTATIVELY_UNREACHABLE(wr)) + continue; +- assert(IS_REACHABLE(wr)); ++ PyObject_ASSERT(op, IS_REACHABLE(wr)); + + /* Create a new reference so that wr can't go away + * before we can process it again. +@@ -636,7 +709,8 @@ handle_weakrefs(PyGC_Head *unreachable, + + /* Move wr to wrcb_to_call, for the next pass. */ + wrasgc = AS_GC(wr); +- assert(wrasgc != next); /* wrasgc is reachable, but ++ PyObject_ASSERT(op, wrasgc != next); ++ /* wrasgc is reachable, but + next isn't, so they can't + be the same */ + gc_list_move(wrasgc, &wrcb_to_call); +@@ -652,11 +726,11 @@ handle_weakrefs(PyGC_Head *unreachable, + + gc = wrcb_to_call.gc.gc_next; + op = FROM_GC(gc); +- assert(IS_REACHABLE(op)); +- assert(PyWeakref_Check(op)); ++ PyObject_ASSERT(op, IS_REACHABLE(op)); ++ PyObject_ASSERT(op, PyWeakref_Check(op)); + wr = (PyWeakReference *)op; + callback = wr->wr_callback; +- assert(callback != NULL); ++ PyObject_ASSERT(op, callback != NULL); + + /* copy-paste of weakrefobject.c's handle_callback() */ + temp = PyObject_CallFunctionObjArgs(callback, wr, NULL); +@@ -759,7 +833,7 @@ delete_garbage(PyGC_Head *collectable, P + PyGC_Head *gc = collectable->gc.gc_next; + PyObject *op = FROM_GC(gc); + +- assert(IS_TENTATIVELY_UNREACHABLE(op)); ++ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op)); + if (debug & DEBUG_SAVEALL) { + PyList_Append(garbage, op); + } diff --git a/SOURCES/00173-workaround-ENOPROTOOPT-in-bind_port.patch b/SOURCES/00173-workaround-ENOPROTOOPT-in-bind_port.patch new file mode 100644 index 0000000..eb34610 --- /dev/null +++ b/SOURCES/00173-workaround-ENOPROTOOPT-in-bind_port.patch @@ -0,0 +1,13 @@ +diff -up Python-2.7.3/Lib/test/test_support.py.rhbz913732 Python-2.7.3/Lib/test/test_support.py +--- Python-2.7.3/Lib/test/test_support.py.rhbz913732 2013-03-04 16:11:53.757315921 -0500 ++++ Python-2.7.3/Lib/test/test_support.py 2013-03-04 16:12:11.331314722 -0500 +@@ -304,7 +304,8 @@ def bind_port(sock, host=HOST): + if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: + raise TestFailed("tests should never set the SO_REUSEADDR " \ + "socket option on TCP/IP sockets!") +- if hasattr(socket, 'SO_REUSEPORT'): ++ if hasattr(socket, 'SO_REUSEPORT') \ ++ and 'WITHIN_PYTHON_RPM_BUILD' not in os.environ: # rhbz#913732 + if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: + raise TestFailed("tests should never set the SO_REUSEPORT " \ + "socket option on TCP/IP sockets!") diff --git a/SOURCES/00174-fix-for-usr-move.patch b/SOURCES/00174-fix-for-usr-move.patch new file mode 100644 index 0000000..b48dc5c --- /dev/null +++ b/SOURCES/00174-fix-for-usr-move.patch @@ -0,0 +1,28 @@ +diff -up Python-2.7.3/Modules/getpath.c.fix-for-usr-move Python-2.7.3/Modules/getpath.c +--- Python-2.7.3/Modules/getpath.c.fix-for-usr-move 2013-03-06 14:25:32.801828698 -0500 ++++ Python-2.7.3/Modules/getpath.c 2013-03-06 15:59:30.872443168 -0500 +@@ -510,6 +510,24 @@ calculate_path(void) + MAXPATHLEN bytes long. 
+ */ + ++ /* ++ Workaround for rhbz#817554, where an empty argv0_path erroneously ++ locates "prefix" as "/lib[64]/python2.7" due to it finding ++ "/lib[64]/python2.7/os.py" via the /lib -> /usr/lib symlink for ++ https://fedoraproject.org/wiki/Features/UsrMove ++ */ ++ if (argv0_path[0] == '\0' && 0 == strcmp(prog, "cmpi_swig")) { ++ /* ++ We have an empty argv0_path, presumably because prog aka ++ Py_GetProgramName() was not found on $PATH. ++ ++ Set argv0_path to "/usr/" so that search_for_prefix() and ++ search_for_exec_prefix() don't erroneously pick up ++ on /lib/ via the UsrMove symlink: ++ */ ++ strcpy(argv0_path, "/usr/"); ++ } ++ + if (!(pfound = search_for_prefix(argv0_path, home))) { + if (!Py_FrozenFlag) + fprintf(stderr, diff --git a/SOURCES/00180-python-add-support-for-ppc64p7.patch b/SOURCES/00180-python-add-support-for-ppc64p7.patch new file mode 100644 index 0000000..022944a --- /dev/null +++ b/SOURCES/00180-python-add-support-for-ppc64p7.patch @@ -0,0 +1,12 @@ +diff -r de35eae9048a config.sub +--- a/config.sub Wed Apr 24 23:33:20 2013 +0200 ++++ b/config.sub Thu Apr 25 08:51:00 2013 +0200 +@@ -1008,7 +1008,7 @@ + ;; + ppc64) basic_machine=powerpc64-unknown + ;; +- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppc64le | powerpc64little | ppc64-le | powerpc64-little) + basic_machine=powerpc64le-unknown diff --git a/SOURCES/00181-allow-arbitrary-timeout-in-condition-wait.patch b/SOURCES/00181-allow-arbitrary-timeout-in-condition-wait.patch new file mode 100644 index 0000000..665965d --- /dev/null +++ b/SOURCES/00181-allow-arbitrary-timeout-in-condition-wait.patch @@ -0,0 +1,70 @@ +diff --git a/Lib/threading.py b/Lib/threading.py +index cb49c4a..c9795a5 100644 +--- a/Lib/threading.py ++++ b/Lib/threading.py +@@ -305,7 +305,7 @@ class _Condition(_Verbose): + else: + return True + +- def wait(self, timeout=None): ++ def wait(self, timeout=None, balancing=True): + """Wait until notified or until a timeout occurs. + + If the calling thread has not acquired the lock when this method is +@@ -354,7 +354,10 @@ class _Condition(_Verbose): + remaining = endtime - _time() + if remaining <= 0: + break +- delay = min(delay * 2, remaining, .05) ++ if balancing: ++ delay = min(delay * 2, remaining, 0.05) ++ else: ++ delay = remaining + _sleep(delay) + if not gotit: + if __debug__: +@@ -599,7 +602,7 @@ class _Event(_Verbose): + finally: + self.__cond.release() + +- def wait(self, timeout=None): ++ def wait(self, timeout=None, balancing=True): + """Block until the internal flag is true. + + If the internal flag is true on entry, return immediately. Otherwise, +@@ -617,7 +620,7 @@ class _Event(_Verbose): + self.__cond.acquire() + try: + if not self.__flag: +- self.__cond.wait(timeout) ++ self.__cond.wait(timeout, balancing) + return self.__flag + finally: + self.__cond.release() +@@ -908,7 +911,7 @@ class Thread(_Verbose): + if 'dummy_threading' not in _sys.modules: + raise + +- def join(self, timeout=None): ++ def join(self, timeout=None, balancing=True): + """Wait until the thread terminates. 
+ + This blocks the calling thread until the thread whose join() method is +@@ -957,7 +960,7 @@ class Thread(_Verbose): + if __debug__: + self._note("%s.join(): timed out", self) + break +- self.__block.wait(delay) ++ self.__block.wait(delay, balancing) + else: + if __debug__: + self._note("%s.join(): thread stopped", self) +@@ -1143,7 +1146,7 @@ class _DummyThread(Thread): + def _set_daemon(self): + return True + +- def join(self, timeout=None): ++ def join(self, timeout=None, balancing=True): + assert False, "cannot join a dummy thread" + + diff --git a/SOURCES/00184-ctypes-should-build-with-libffi-multilib-wrapper.patch b/SOURCES/00184-ctypes-should-build-with-libffi-multilib-wrapper.patch new file mode 100644 index 0000000..8230986 --- /dev/null +++ b/SOURCES/00184-ctypes-should-build-with-libffi-multilib-wrapper.patch @@ -0,0 +1,13 @@ +diff -up Python-2.7.5/setup.py.orig Python-2.7.5/setup.py +--- Python-2.7.5/setup.py.orig 2013-07-17 15:20:12.086820082 +0200 ++++ Python-2.7.5/setup.py 2013-07-17 15:21:28.490023903 +0200 +@@ -2050,7 +2050,8 @@ class PyBuildExt(build_ext): + if not line: + ffi_inc = None + break +- if line.startswith('#define LIBFFI_H'): ++ if line.startswith('#define LIBFFI_H') or \ ++ line.startswith('#define ffi_wrapper_h'): + break + ffi_lib = None + if ffi_inc is not None: diff --git a/SOURCES/00185-urllib2-honors-noproxy-for-ftp.patch b/SOURCES/00185-urllib2-honors-noproxy-for-ftp.patch new file mode 100644 index 0000000..b26c4d4 --- /dev/null +++ b/SOURCES/00185-urllib2-honors-noproxy-for-ftp.patch @@ -0,0 +1,12 @@ +diff -up Python-2.7.5/Lib/urllib2.py.orig Python-2.7.5/Lib/urllib2.py +--- Python-2.7.5/Lib/urllib2.py.orig 2013-07-17 12:22:58.595525622 +0200 ++++ Python-2.7.5/Lib/urllib2.py 2013-07-17 12:19:59.875898030 +0200 +@@ -728,6 +728,8 @@ class ProxyHandler(BaseHandler): + if proxy_type is None: + proxy_type = orig_type + ++ req.get_host() ++ + if req.host and proxy_bypass(req.host): + return None + diff --git a/SOURCES/00186-memory-leak-marshalc.patch b/SOURCES/00186-memory-leak-marshalc.patch new file mode 100644 index 0000000..19fb175 --- /dev/null +++ b/SOURCES/00186-memory-leak-marshalc.patch @@ -0,0 +1,57 @@ +--- Python-2.7.5/Python/marshal.c 2013-05-12 05:32:53.000000000 +0200 ++++ /home/rkuska/hg/cpython/Python/marshal.c 2013-07-18 10:33:26.392486235 +0200 +@@ -88,7 +88,7 @@ + } + + static void +-w_string(char *s, Py_ssize_t n, WFILE *p) ++w_string(const char *s, Py_ssize_t n, WFILE *p) + { + if (p->fp != NULL) { + fwrite(s, 1, n, p->fp); +@@ -141,6 +141,13 @@ + # define W_SIZE w_long + #endif + ++static void ++w_pstring(const char *s, Py_ssize_t n, WFILE *p) ++{ ++ W_SIZE(n, p); ++ w_string(s, n, p); ++} ++ + /* We assume that Python longs are stored internally in base some power of + 2**15; for the sake of portability we'll always read and write them in base + exactly 2**15. 
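The balancing flag threaded through wait() and join() above is a downstream-only extension; it does not exist in upstream threading.py, so portable code cannot rely on it. A minimal sketch of what it changes, assuming the patched module is the one being imported:

    import threading

    done = threading.Event()

    # Stock behaviour: wait() polls, doubling its sleep up to a 50 ms cap,
    # trading a little CPU for a quick wakeup once the event is set.
    done.wait(timeout=2.0)

    # Downstream-only keyword: with balancing=False the patched wait() sleeps
    # for the whole remaining timeout in one step instead of polling.
    done.wait(timeout=2.0, balancing=False)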
*/ +@@ -338,9 +345,7 @@ + else { + w_byte(TYPE_STRING, p); + } +- n = PyString_GET_SIZE(v); +- W_SIZE(n, p); +- w_string(PyString_AS_STRING(v), n, p); ++ w_pstring(PyBytes_AS_STRING(v), PyString_GET_SIZE(v), p); + } + #ifdef Py_USING_UNICODE + else if (PyUnicode_CheckExact(v)) { +@@ -352,9 +357,7 @@ + return; + } + w_byte(TYPE_UNICODE, p); +- n = PyString_GET_SIZE(utf8); +- W_SIZE(n, p); +- w_string(PyString_AS_STRING(utf8), n, p); ++ w_pstring(PyString_AS_STRING(utf8), PyString_GET_SIZE(utf8), p); + Py_DECREF(utf8); + } + #endif +@@ -441,8 +444,7 @@ + PyBufferProcs *pb = v->ob_type->tp_as_buffer; + w_byte(TYPE_STRING, p); + n = (*pb->bf_getreadbuffer)(v, 0, (void **)&s); +- W_SIZE(n, p); +- w_string(s, n, p); ++ w_pstring(s, n, p); + } + else { + w_byte(TYPE_UNKNOWN, p); diff --git a/SOURCES/00187-add-RPATH-to-pyexpat.patch b/SOURCES/00187-add-RPATH-to-pyexpat.patch new file mode 100644 index 0000000..0ac5227 --- /dev/null +++ b/SOURCES/00187-add-RPATH-to-pyexpat.patch @@ -0,0 +1,25 @@ +diff -r e8b8279ca118 setup.py +--- a/setup.py Sun Jul 21 21:57:52 2013 -0400 ++++ b/setup.py Tue Aug 20 09:45:31 2013 +0200 +@@ -1480,12 +1480,21 @@ + 'expat/xmltok_impl.h' + ] + ++ # Add an explicit RPATH to pyexpat.so pointing at the directory ++ # containing the system expat (which has the extra XML_SetHashSalt ++ # symbol), to avoid an ImportError with a link error if there's an ++ # LD_LIBRARY_PATH containing a "vanilla" build of expat (without the ++ # symbol) (rhbz#833271): ++ EXPAT_RPATH = '/usr/lib64' if sys.maxint == 0x7fffffffffffffff else '/usr/lib' ++ ++ + exts.append(Extension('pyexpat', + define_macros = define_macros, + include_dirs = expat_inc, + libraries = expat_lib, + sources = ['pyexpat.c'] + expat_sources, + depends = expat_depends, ++ extra_link_args = ['-Wl,-rpath,%s' % EXPAT_RPATH] + )) + + # Fredrik Lundh's cElementTree module. 
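One way to check at run time which expat the interpreter actually loaded, and therefore whether the RPATH added above wins over a stray copy on LD_LIBRARY_PATH, is to ask pyexpat directly; a small diagnostic sketch:

    import pyexpat

    # Version reported by the expat library that actually got linked/loaded,
    # e.g. "expat_2.1.0" and (2, 1, 0):
    print pyexpat.EXPAT_VERSION
    print pyexpat.version_info

    # Location of the extension module that carries the -Wl,-rpath flag:
    print pyexpat.__file__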
Note that this also diff --git a/SOURCES/00188-CVE-2013-4238-hostname-check-bypass-in-SSL-module.patch b/SOURCES/00188-CVE-2013-4238-hostname-check-bypass-in-SSL-module.patch new file mode 100644 index 0000000..e215589 --- /dev/null +++ b/SOURCES/00188-CVE-2013-4238-hostname-check-bypass-in-SSL-module.patch @@ -0,0 +1,247 @@ +diff -r 9ddc63c039ba Lib/test/nullbytecert.pem +--- /dev/null Thu Jan 01 00:00:00 1970 +0000 ++++ b/Lib/test/nullbytecert.pem Sun Aug 11 18:13:17 2013 +0200 +@@ -0,0 +1,90 @@ ++Certificate: ++ Data: ++ Version: 3 (0x2) ++ Serial Number: 0 (0x0) ++ Signature Algorithm: sha1WithRSAEncryption ++ Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org ++ Validity ++ Not Before: Aug 7 13:11:52 2013 GMT ++ Not After : Aug 7 13:12:52 2013 GMT ++ Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org ++ Subject Public Key Info: ++ Public Key Algorithm: rsaEncryption ++ Public-Key: (2048 bit) ++ Modulus: ++ 00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3: ++ 03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97: ++ 16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2: ++ 88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1: ++ 32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4: ++ 56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8: ++ a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02: ++ 45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75: ++ ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91: ++ 90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d: ++ 7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30: ++ 68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7: ++ f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12: ++ f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5: ++ ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb: ++ d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f: ++ 9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da: ++ 2f:85 ++ Exponent: 65537 (0x10001) ++ X509v3 extensions: ++ X509v3 Basic Constraints: critical ++ CA:FALSE ++ X509v3 Subject Key Identifier: ++ 88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C ++ X509v3 Key Usage: ++ Digital Signature, Non Repudiation, Key Encipherment ++ X509v3 Subject Alternative Name: ++ ************************************************************* ++ WARNING: The values for DNS, email and URI are WRONG. OpenSSL ++ doesn't print the text after a NULL byte. 
++ ************************************************************* ++ DNS:altnull.python.org, email:null@python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1 ++ Signature Algorithm: sha1WithRSAEncryption ++ ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5: ++ a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44: ++ 3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37: ++ 86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3: ++ 56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86: ++ de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac: ++ 3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4: ++ 60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60: ++ d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5: ++ 25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60: ++ 1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6: ++ 4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d: ++ 0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e: ++ 5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6: ++ c1:ca:a9:94 ++-----BEGIN CERTIFICATE----- ++MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx ++DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ ++eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg ++RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y ++ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw ++NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI ++DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv ++ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt ++ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq ++hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB ++BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j ++pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P ++vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv ++KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA ++oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL ++08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV ++HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E ++BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu ++Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 ++bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA ++AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 ++i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j ++HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk ++kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx ++VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW ++RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= ++-----END CERTIFICATE----- +diff -r 9ddc63c039ba Lib/test/test_ssl.py +--- a/Lib/test/test_ssl.py Sun Aug 11 13:04:50 2013 +0300 ++++ b/Lib/test/test_ssl.py Sun Aug 11 18:13:17 2013 +0200 +@@ -25,6 +25,7 @@ + HOST = test_support.HOST + CERTFILE = None + SVN_PYTHON_ORG_ROOT_CERT = None ++NULLBYTECERT = None + + def handle_error(prefix): + exc_format = ' '.join(traceback.format_exception(*sys.exc_info())) +@@ -123,6 +124,27 @@ + ('DNS', 'projects.forum.nokia.com')) + ) + ++ def test_parse_cert_CVE_2013_4073(self): ++ p = ssl._ssl._test_decode_cert(NULLBYTECERT) ++ if test_support.verbose: ++ sys.stdout.write("\n" + pprint.pformat(p) + "\n") ++ subject = ((('countryName', 'US'),), ++ (('stateOrProvinceName', 
'Oregon'),), ++ (('localityName', 'Beaverton'),), ++ (('organizationName', 'Python Software Foundation'),), ++ (('organizationalUnitName', 'Python Core Development'),), ++ (('commonName', 'null.python.org\x00example.org'),), ++ (('emailAddress', 'python-dev@python.org'),)) ++ self.assertEqual(p['subject'], subject) ++ self.assertEqual(p['issuer'], subject) ++ self.assertEqual(p['subjectAltName'], ++ (('DNS', 'altnull.python.org\x00example.com'), ++ ('email', 'null@python.org\x00user@example.org'), ++ ('URI', 'http://null.python.org\x00http://example.org'), ++ ('IP Address', '192.0.2.1'), ++ ('IP Address', '2001:DB8:0:0:0:0:0:1\n')) ++ ) ++ + def test_DER_to_PEM(self): + with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: + pem = f.read() +@@ -1360,7 +1382,7 @@ + + + def test_main(verbose=False): +- global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, NOKIACERT ++ global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, NOKIACERT, NULLBYTECERT + CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, + "keycert.pem") + SVN_PYTHON_ORG_ROOT_CERT = os.path.join( +@@ -1368,10 +1390,13 @@ + "https_svn_python_org_root.pem") + NOKIACERT = os.path.join(os.path.dirname(__file__) or os.curdir, + "nokia.pem") ++ NULLBYTECERT = os.path.join(os.path.dirname(__file__) or os.curdir, ++ "nullbytecert.pem") + + if (not os.path.exists(CERTFILE) or + not os.path.exists(SVN_PYTHON_ORG_ROOT_CERT) or +- not os.path.exists(NOKIACERT)): ++ not os.path.exists(NOKIACERT) or ++ not os.path.exists(NULLBYTECERT)): + raise test_support.TestFailed("Can't read certificate files!") + + tests = [BasicTests, BasicSocketTests] +diff -r 9ddc63c039ba Modules/_ssl.c +--- a/Modules/_ssl.c Sun Aug 11 13:04:50 2013 +0300 ++++ b/Modules/_ssl.c Sun Aug 11 18:13:17 2013 +0200 +@@ -741,8 +741,13 @@ + + /* get a rendering of each name in the set of names */ + ++ int gntype; ++ ASN1_STRING *as = NULL; ++ + name = sk_GENERAL_NAME_value(names, j); +- if (name->type == GEN_DIRNAME) { ++ gntype = name-> type; ++ switch (gntype) { ++ case GEN_DIRNAME: + + /* we special-case DirName as a tuple of tuples of attributes */ + +@@ -764,11 +769,61 @@ + goto fail; + } + PyTuple_SET_ITEM(t, 1, v); ++ break; + +- } else { ++ case GEN_EMAIL: ++ case GEN_DNS: ++ case GEN_URI: ++ /* GENERAL_NAME_print() doesn't handle NUL bytes in ASN1_string ++ correctly. 
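The new test above drives this through ssl._ssl._test_decode_cert(), a private helper, so the same check can be made by hand against the bundled certificate; the path below is only a placeholder for wherever Lib/test/nullbytecert.pem is installed:

    import pprint
    import ssl

    # Private test helper, the same one used by test_parse_cert_CVE_2013_4073
    # above; the certificate path is a placeholder.
    cert = ssl._ssl._test_decode_cert('/path/to/Lib/test/nullbytecert.pem')
    pprint.pprint(cert['subjectAltName'])
    # With this _ssl.c change the embedded NUL bytes are preserved, e.g.
    # ('DNS', 'altnull.python.org\x00example.com'), instead of being truncated
    # at the NUL by GENERAL_NAME_print().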
*/ ++ t = PyTuple_New(2); ++ if (t == NULL) ++ goto fail; ++ switch (gntype) { ++ case GEN_EMAIL: ++ v = PyUnicode_FromString("email"); ++ as = name->d.rfc822Name; ++ break; ++ case GEN_DNS: ++ v = PyUnicode_FromString("DNS"); ++ as = name->d.dNSName; ++ break; ++ case GEN_URI: ++ v = PyUnicode_FromString("URI"); ++ as = name->d.uniformResourceIdentifier; ++ break; ++ } ++ if (v == NULL) { ++ Py_DECREF(t); ++ goto fail; ++ } ++ PyTuple_SET_ITEM(t, 0, v); ++ v = PyString_FromStringAndSize((char *)ASN1_STRING_data(as), ++ ASN1_STRING_length(as)); ++ if (v == NULL) { ++ Py_DECREF(t); ++ goto fail; ++ } ++ PyTuple_SET_ITEM(t, 1, v); ++ break; + ++ default: + /* for everything else, we use the OpenSSL print form */ +- ++ switch (gntype) { ++ /* check for new general name type */ ++ case GEN_OTHERNAME: ++ case GEN_X400: ++ case GEN_EDIPARTY: ++ case GEN_IPADD: ++ case GEN_RID: ++ break; ++ default: ++ if (PyErr_Warn(PyExc_RuntimeWarning, ++ "Unknown general name type") == -1) { ++ goto fail; ++ } ++ break; ++ } + (void) BIO_reset(biobuf); + GENERAL_NAME_print(biobuf, name); + len = BIO_gets(biobuf, buf, sizeof(buf)-1); +@@ -794,6 +849,7 @@ + goto fail; + } + PyTuple_SET_ITEM(t, 1, v); ++ break; + } + + /* and add that rendering to the list */ diff --git a/SOURCES/00189-gdb-py-bt-dont-raise-exception-from-eval.patch b/SOURCES/00189-gdb-py-bt-dont-raise-exception-from-eval.patch new file mode 100644 index 0000000..4e82859 --- /dev/null +++ b/SOURCES/00189-gdb-py-bt-dont-raise-exception-from-eval.patch @@ -0,0 +1,11 @@ +--- Python-2.7.5-orig/Tools/gdb/libpython.py 2013-05-12 03:32:54.000000000 +0000 ++++ Python-2.7.5-orig/Tools/gdb/libpython.py 2013-09-15 09:56:25.494000000 +0000 +@@ -887,6 +887,8 @@ + newline character''' + if self.is_optimized_out(): + return '(frame information optimized out)' ++ if self.filename() == '': ++ return '(in an eval block)' + with open(self.filename(), 'r') as f: + all_lines = f.readlines() + # Convert from 1-based current_line_num to 0-based list offset: diff --git a/SOURCES/00190-gdb-fix-ppc64-failures.patch b/SOURCES/00190-gdb-fix-ppc64-failures.patch new file mode 100644 index 0000000..9bb723f --- /dev/null +++ b/SOURCES/00190-gdb-fix-ppc64-failures.patch @@ -0,0 +1,207 @@ +--- Tools/gdb/libpython.py.orig 2013-10-09 10:54:59.894701668 +0200 ++++ Tools/gdb/libpython.py 2013-10-09 11:09:30.278703290 +0200 +@@ -1194,39 +1194,113 @@ + iter_frame = iter_frame.newer() + return index + ++ # We divide frames into: ++ # - "python frames": ++ # - "bytecode frames" i.e. PyEval_EvalFrameEx ++ # - "other python frames": things that are of interest from a python ++ # POV, but aren't bytecode (e.g. GC, GIL) ++ # - everything else ++ ++ def is_python_frame(self): ++ '''Is this a PyEval_EvalFrameEx frame, or some other important ++ frame? 
(see is_other_python_frame for what "important" means in this ++ context)''' ++ if self.is_evalframeex(): ++ return True ++ if self.is_other_python_frame(): ++ return True ++ return False ++ + def is_evalframeex(self): +- '''Is this a PyEval_EvalFrameEx frame?''' +- if self._gdbframe.name() == 'PyEval_EvalFrameEx': +- ''' +- I believe we also need to filter on the inline +- struct frame_id.inline_depth, only regarding frames with +- an inline depth of 0 as actually being this function +- +- So we reject those with type gdb.INLINE_FRAME +- ''' +- if self._gdbframe.type() == gdb.NORMAL_FRAME: +- # We have a PyEval_EvalFrameEx frame: +- return True ++ if self._gdbframe.function(): ++ if self._gdbframe.function().name == 'PyEval_EvalFrameEx': ++ ''' ++ I believe we also need to filter on the inline ++ struct frame_id.inline_depth, only regarding frames with ++ an inline depth of 0 as actually being this function ++ ++ So we reject those with type gdb.INLINE_FRAME ++ ''' ++ if self._gdbframe.type() == gdb.NORMAL_FRAME: ++ # We have a PyEval_EvalFrameEx frame: ++ return True ++ ++ return False ++ ++ def is_other_python_frame(self): ++ '''Is this frame worth displaying in python backtraces? ++ Examples: ++ - waiting on the GIL ++ - garbage-collecting ++ - within a CFunction ++ If it is, return a descriptive string ++ For other frames, return False ++ ''' ++ if self.is_waiting_for_gil(): ++ return 'Waiting for a lock (e.g. GIL)' ++ elif self.is_gc_collect(): ++ return 'Garbage-collecting' ++ else: ++ # Detect invocations of PyCFunction instances: ++ if self._gdbframe.name() == 'PyCFunction_Call': ++ try: ++ func = self._gdbframe.read_var('func') ++ # Use the prettyprinter for the func: ++ return str(func) ++ except RuntimeError: ++ return 'PyCFunction invocation (unable to read "func")' ++ older = self.older() ++ if older and older._gdbframe.name() == 'call_function': ++ # Within that frame: ++ # 'call_function' contains, amongst other things, a ++ # hand-inlined copy of PyCFunction_Call. ++ # "func" is the local containing the PyObject* of the ++ # callable instance ++ # Report it, but only if it's a PyCFunction (since otherwise ++ # we'd be reporting an implementation detail of every other ++ # function invocation) ++ try: ++ func = older._gdbframe.read_var('func') ++ funcobj = PyObjectPtr.from_pyobject_ptr(func) ++ if isinstance(funcobj, PyCFunctionObjectPtr): ++ # Use the prettyprinter for the func: ++ return str(func) ++ except RuntimeError: ++ return False + ++ # This frame isn't worth reporting: + return False + ++ def is_waiting_for_gil(self): ++ '''Is this frame waiting for a lock?''' ++ framename = self._gdbframe.name() ++ if framename: ++ return 'pthread_cond_timedwait' in framename or \ ++ 'PyThread_acquire_lock' in framename ++ ++ def is_gc_collect(self): ++ '''Is this frame "collect" within the the garbage-collector?''' ++ return self._gdbframe.name() == 'collect' ++ + def get_pyop(self): + try: + f = self._gdbframe.read_var('f') +- frame = PyFrameObjectPtr.from_pyobject_ptr(f) +- if not frame.is_optimized_out(): +- return frame +- # gdb is unable to get the "f" argument of PyEval_EvalFrameEx() +- # because it was "optimized out". Try to get "f" from the frame +- # of the caller, PyEval_EvalCodeEx(). 
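A quick way to exercise the new "Waiting for a lock (e.g. GIL)" reporting by hand is to attach gdb to a process whose background thread is blocked on a lock and run py-bt there; the script below is only a demo target for that experiment (the gdb session itself is not shown):

    import threading
    import time

    lock = threading.Lock()
    lock.acquire()

    # This thread blocks inside PyThread_acquire_lock(), which the patched
    # Frame.is_waiting_for_gil() should report as "Waiting for a lock (e.g. GIL)".
    t = threading.Thread(target=lock.acquire)
    t.start()

    # Keep the process alive so gdb can attach ("gdb -p <pid>", then "py-bt"):
    while True:
        time.sleep(60)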
+- orig_frame = frame +- caller = self._gdbframe.older() +- if caller: +- f = caller.read_var('f') +- frame = PyFrameObjectPtr.from_pyobject_ptr(f) +- if not frame.is_optimized_out(): +- return frame +- return orig_frame ++ obj = PyFrameObjectPtr.from_pyobject_ptr(f) ++ if isinstance(obj, PyFrameObjectPtr): ++ return obj ++ else: ++ return None ++ except ValueError: ++ return None ++ ++ def get_py_co(self): ++ try: ++ co = self._gdbframe.read_var('co') ++ obj = PyCodeObjectPtr.from_pyobject_ptr(co) ++ if isinstance(obj, PyCodeObjectPtr): ++ return obj ++ else: ++ return None + except ValueError: + return None + +@@ -1239,8 +1313,22 @@ + + @classmethod + def get_selected_python_frame(cls): +- '''Try to obtain the Frame for the python code in the selected frame, +- or None''' ++ '''Try to obtain the Frame for the python-related code in the selected ++ frame, or None''' ++ frame = cls.get_selected_frame() ++ ++ while frame: ++ if frame.is_python_frame(): ++ return frame ++ frame = frame.older() ++ ++ # Not found: ++ return None ++ ++ @classmethod ++ def get_selected_bytecode_frame(cls): ++ '''Try to obtain the Frame for the python bytecode interpreter in the ++ selected GDB frame, or None''' + frame = cls.get_selected_frame() + + while frame: +@@ -1265,7 +1353,11 @@ + else: + sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index()) + else: +- sys.stdout.write('#%i\n' % self.get_index()) ++ info = self.is_other_python_frame() ++ if info: ++ sys.stdout.write('#%i %s\n' % (self.get_index(), info)) ++ else: ++ sys.stdout.write('#%i\n' % self.get_index()) + + class PyList(gdb.Command): + '''List the current Python source code, if any +@@ -1301,7 +1393,7 @@ + if m: + start, end = map(int, m.groups()) + +- frame = Frame.get_selected_python_frame() ++ frame = Frame.get_selected_bytecode_frame() + if not frame: + print 'Unable to locate python frame' + return +@@ -1353,7 +1445,7 @@ + if not iter_frame: + break + +- if iter_frame.is_evalframeex(): ++ if iter_frame.is_python_frame(): + # Result: + if iter_frame.select(): + iter_frame.print_summary() +@@ -1407,7 +1499,7 @@ + def invoke(self, args, from_tty): + frame = Frame.get_selected_python_frame() + while frame: +- if frame.is_evalframeex(): ++ if frame.is_python_frame(): + frame.print_summary() + frame = frame.older() + diff --git a/SOURCES/00191-add-RPATH-to-elementtree.patch b/SOURCES/00191-add-RPATH-to-elementtree.patch new file mode 100644 index 0000000..10cd585 --- /dev/null +++ b/SOURCES/00191-add-RPATH-to-elementtree.patch @@ -0,0 +1,21 @@ +diff -up Python-2.7.5/setup.py.orig Python-2.7.5/setup.py +--- Python-2.7.5/setup.py.orig 2013-11-07 01:36:18.853604232 +0100 ++++ Python-2.7.5/setup.py 2013-11-07 01:39:22.163305821 +0100 +@@ -1483,6 +1483,9 @@ class PyBuildExt(build_ext): + # Fredrik Lundh's cElementTree module. Note that this also + # uses expat (via the CAPI hook in pyexpat). 
+ ++ # Add an explicit RPATH to _elementtree.so (rhbz#1019345) ++ EXPAT_RPATH = '/usr/lib64' if sys.maxint == 0x7fffffffffffffff else '/usr/lib' ++ + if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): + define_macros.append(('USE_PYEXPAT_CAPI', None)) + exts.append(Extension('_elementtree', +@@ -1492,6 +1495,7 @@ class PyBuildExt(build_ext): + sources = ['_elementtree.c'], + depends = ['pyexpat.c'] + expat_sources + + expat_depends, ++ extra_link_args = ['-Wl,-rpath,%s' % EXPAT_RPATH] + )) + else: + missing.append('_elementtree') diff --git a/SOURCES/00192-Fix-missing-documentation-for-some-keywords.patch b/SOURCES/00192-Fix-missing-documentation-for-some-keywords.patch new file mode 100644 index 0000000..d40f0b0 --- /dev/null +++ b/SOURCES/00192-Fix-missing-documentation-for-some-keywords.patch @@ -0,0 +1,42 @@ +diff --git a/Doc/tools/sphinxext/pyspecific.py b/Doc/tools/sphinxext/pyspecific.py +--- a/Doc/tools/sphinxext/pyspecific.py ++++ b/Doc/tools/sphinxext/pyspecific.py +@@ -184,11 +184,11 @@ + 'bltin-null-object', 'bltin-type-objects', 'booleans', + 'break', 'callable-types', 'calls', 'class', 'comparisons', 'compound', + 'context-managers', 'continue', 'conversions', 'customization', 'debugger', +- 'del', 'dict', 'dynamic-features', 'else', 'exceptions', 'execmodel', ++ 'del', 'dict', 'dynamic-features', 'else', 'exceptions', 'exec', 'execmodel', + 'exprlists', 'floating', 'for', 'formatstrings', 'function', 'global', + 'id-classes', 'identifiers', 'if', 'imaginary', 'import', 'in', 'integers', + 'lambda', 'lists', 'naming', 'numbers', 'numeric-types', +- 'objects', 'operator-summary', 'pass', 'power', 'raise', 'return', ++ 'objects', 'operator-summary', 'pass', 'power', 'print', 'raise', 'return', + 'sequence-types', 'shifting', 'slicings', 'specialattrs', 'specialnames', + 'string-methods', 'strings', 'subscriptions', 'truth', 'try', 'types', + 'typesfunctions', 'typesmapping', 'typesmethods', 'typesmodules', +diff -up Python-2.7.5/Lib/pydoc_data/topics.py.orig Python-2.7.5/Lib/pydoc_data/topics.py +--- Python-2.7.5/Lib/pydoc_data/topics.py.orig 2014-01-14 12:29:32.511756259 +0100 ++++ Python-2.7.5/Lib/pydoc_data/topics.py 2014-01-14 12:29:40.396795516 +0100 +@@ -29,11 +29,12 @@ topics = {'assert': '\nThe ``assert`` st + 'dynamic-features': '\nInteraction with dynamic features\n*********************************\n\nThere are several cases where Python statements are illegal when used\nin conjunction with nested scopes that contain free variables.\n\nIf a variable is referenced in an enclosing scope, it is illegal to\ndelete the name. An error will be reported at compile time.\n\nIf the wild card form of import --- ``import *`` --- is used in a\nfunction and the function contains or is a nested block with free\nvariables, the compiler will raise a ``SyntaxError``.\n\nIf ``exec`` is used in a function and the function contains or is a\nnested block with free variables, the compiler will raise a\n``SyntaxError`` unless the exec explicitly specifies the local\nnamespace for the ``exec``. (In other words, ``exec obj`` would be\nillegal, but ``exec obj in ns`` would be legal.)\n\nThe ``eval()``, ``execfile()``, and ``input()`` functions and the\n``exec`` statement do not have access to the full environment for\nresolving names. Names may be resolved in the local and global\nnamespaces of the caller. Free variables are not resolved in the\nnearest enclosing namespace, but in the global namespace. 
[1] The\n``exec`` statement and the ``eval()`` and ``execfile()`` functions\nhave optional arguments to override the global and local namespace.\nIf only one namespace is specified, it is used for both.\n', + 'else': '\nThe ``if`` statement\n********************\n\nThe ``if`` statement is used for conditional execution:\n\n if_stmt ::= "if" expression ":" suite\n ( "elif" expression ":" suite )*\n ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the ``if`` statement is executed or evaluated).\nIf all expressions are false, the suite of the ``else`` clause, if\npresent, is executed.\n', + 'exceptions': '\nExceptions\n**********\n\nExceptions are a means of breaking out of the normal flow of control\nof a code block in order to handle errors or other exceptional\nconditions. An exception is *raised* at the point where the error is\ndetected; it may be *handled* by the surrounding code block or by any\ncode block that directly or indirectly invoked the code block where\nthe error occurred.\n\nThe Python interpreter raises an exception when it detects a run-time\nerror (such as division by zero). A Python program can also\nexplicitly raise an exception with the ``raise`` statement. Exception\nhandlers are specified with the ``try`` ... ``except`` statement. The\n``finally`` clause of such a statement can be used to specify cleanup\ncode which does not handle the exception, but is executed whether an\nexception occurred or not in the preceding code.\n\nPython uses the "termination" model of error handling: an exception\nhandler can find out what happened and continue execution at an outer\nlevel, but it cannot repair the cause of the error and retry the\nfailing operation (except by re-entering the offending piece of code\nfrom the top).\n\nWhen an exception is not handled at all, the interpreter terminates\nexecution of the program, or returns to its interactive main loop. In\neither case, it prints a stack backtrace, except when the exception is\n``SystemExit``.\n\nExceptions are identified by class instances. The ``except`` clause\nis selected depending on the class of the instance: it must reference\nthe class of the instance or a base class thereof. The instance can\nbe received by the handler and can carry additional information about\nthe exceptional condition.\n\nExceptions can also be identified by strings, in which case the\n``except`` clause is selected by object identity. An arbitrary value\ncan be raised along with the identifying string which can be passed to\nthe handler.\n\nNote: Messages to exceptions are not part of the Python API. Their\n contents may change from one version of Python to the next without\n warning and should not be relied on by code which will run under\n multiple versions of the interpreter.\n\nSee also the description of the ``try`` statement in section *The try\nstatement* and ``raise`` statement in section *The raise statement*.\n\n-[ Footnotes ]-\n\n[1] This limitation occurs because the code that is executed by these\n operations is not available at the time the module is compiled.\n', ++'exec': '\nThe ``exec`` statement\n**********************\n\n exec_stmt ::= "exec" or_expr ["in" expression ["," expression]]\n\nThis statement supports dynamic execution of Python code. 
The first\nexpression should evaluate to either a Unicode string, a *Latin-1*\nencoded string, an open file object, a code object, or a tuple. If it\nis a string, the string is parsed as a suite of Python statements\nwhich is then executed (unless a syntax error occurs). [1] If it is an\nopen file, the file is parsed until EOF and executed. If it is a code\nobject, it is simply executed. For the interpretation of a tuple, see\nbelow. In all cases, the code that\'s executed is expected to be valid\nas file input (see section *File input*). Be aware that the\n``return`` and ``yield`` statements may not be used outside of\nfunction definitions even within the context of code passed to the\n``exec`` statement.\n\nIn all cases, if the optional parts are omitted, the code is executed\nin the current scope. If only the first expression after ``in`` is\nspecified, it should be a dictionary, which will be used for both the\nglobal and the local variables. If two expressions are given, they\nare used for the global and local variables, respectively. If\nprovided, *locals* can be any mapping object. Remember that at module\nlevel, globals and locals are the same dictionary. If two separate\nobjects are given as *globals* and *locals*, the code will be executed\nas if it were embedded in a class definition.\n\nThe first expression may also be a tuple of length 2 or 3. In this\ncase, the optional parts must be omitted. The form ``exec(expr,\nglobals)`` is equivalent to ``exec expr in globals``, while the form\n``exec(expr, globals, locals)`` is equivalent to ``exec expr in\nglobals, locals``. The tuple form of ``exec`` provides compatibility\nwith Python 3, where ``exec`` is a function rather than a statement.\n\nChanged in version 2.4: Formerly, *locals* was required to be a\ndictionary.\n\nAs a side effect, an implementation may insert additional keys into\nthe dictionaries given besides those corresponding to variable names\nset by the executed code. For example, the current implementation may\nadd a reference to the dictionary of the built-in module\n``__builtin__`` under the key ``__builtins__`` (!).\n\n**Programmer\'s hints:** dynamic evaluation of expressions is supported\nby the built-in function ``eval()``. The built-in functions\n``globals()`` and ``locals()`` return the current global and local\ndictionary, respectively, which may be useful to pass around for use\nby ``exec``.\n\n-[ Footnotes ]-\n\n[1] Note that the parser only accepts the Unix-style end of line\n convention. If you are reading the code from a file, make sure to\n use *universal newlines* mode to convert Windows or Mac-style\n newlines.\n', + 'execmodel': '\nExecution model\n***************\n\n\nNaming and binding\n==================\n\n*Names* refer to objects. Names are introduced by name binding\noperations. Each occurrence of a name in the program text refers to\nthe *binding* of that name established in the innermost function block\ncontaining the use.\n\nA *block* is a piece of Python program text that is executed as a\nunit. The following are blocks: a module, a function body, and a class\ndefinition. Each command typed interactively is a block. A script\nfile (a file given as standard input to the interpreter or specified\non the interpreter command line the first argument) is a code block.\nA script command (a command specified on the interpreter command line\nwith the \'**-c**\' option) is a code block. The file read by the\nbuilt-in function ``execfile()`` is a code block. 
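The practical effect of the new 'exec' (and 'print') entries is that the interactive help system can resolve those keywords again; a short illustration of both the lookup and the namespace form described in the topic text above:

    # With the 'exec' topic present, this shows the documentation above instead
    # of reporting that no documentation was found:
    help('exec')

    # The "exec ... in" form the topic describes, using a private namespace:
    ns = {}
    exec "x = 6 * 7" in ns
    print ns['x']        # -> 42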
The string argument\npassed to the built-in function ``eval()`` and to the ``exec``\nstatement is a code block. The expression read and evaluated by the\nbuilt-in function ``input()`` is a code block.\n\nA code block is executed in an *execution frame*. A frame contains\nsome administrative information (used for debugging) and determines\nwhere and how execution continues after the code block\'s execution has\ncompleted.\n\nA *scope* defines the visibility of a name within a block. If a local\nvariable is defined in a block, its scope includes that block. If the\ndefinition occurs in a function block, the scope extends to any blocks\ncontained within the defining one, unless a contained block introduces\na different binding for the name. The scope of names defined in a\nclass block is limited to the class block; it does not extend to the\ncode blocks of methods -- this includes generator expressions since\nthey are implemented using a function scope. This means that the\nfollowing will fail:\n\n class A:\n a = 42\n b = list(a + i for i in range(10))\n\nWhen a name is used in a code block, it is resolved using the nearest\nenclosing scope. The set of all such scopes visible to a code block\nis called the block\'s *environment*.\n\nIf a name is bound in a block, it is a local variable of that block.\nIf a name is bound at the module level, it is a global variable. (The\nvariables of the module code block are local and global.) If a\nvariable is used in a code block but not defined there, it is a *free\nvariable*.\n\nWhen a name is not found at all, a ``NameError`` exception is raised.\nIf the name refers to a local variable that has not been bound, a\n``UnboundLocalError`` exception is raised. ``UnboundLocalError`` is a\nsubclass of ``NameError``.\n\nThe following constructs bind names: formal parameters to functions,\n``import`` statements, class and function definitions (these bind the\nclass or function name in the defining block), and targets that are\nidentifiers if occurring in an assignment, ``for`` loop header, in the\nsecond position of an ``except`` clause header or after ``as`` in a\n``with`` statement. The ``import`` statement of the form ``from ...\nimport *`` binds all names defined in the imported module, except\nthose beginning with an underscore. This form may only be used at the\nmodule level.\n\nA target occurring in a ``del`` statement is also considered bound for\nthis purpose (though the actual semantics are to unbind the name). It\nis illegal to unbind a name that is referenced by an enclosing scope;\nthe compiler will report a ``SyntaxError``.\n\nEach assignment or import statement occurs within a block defined by a\nclass or function definition or at the module level (the top-level\ncode block).\n\nIf a name binding operation occurs anywhere within a code block, all\nuses of the name within the block are treated as references to the\ncurrent block. This can lead to errors when a name is used within a\nblock before it is bound. This rule is subtle. Python lacks\ndeclarations and allows name binding operations to occur anywhere\nwithin a code block. The local variables of a code block can be\ndetermined by scanning the entire text of the block for name binding\noperations.\n\nIf the global statement occurs within a block, all uses of the name\nspecified in the statement refer to the binding of that name in the\ntop-level namespace. Names are resolved in the top-level namespace by\nsearching the global namespace, i.e. 
the namespace of the module\ncontaining the code block, and the builtins namespace, the namespace\nof the module ``__builtin__``. The global namespace is searched\nfirst. If the name is not found there, the builtins namespace is\nsearched. The global statement must precede all uses of the name.\n\nThe builtins namespace associated with the execution of a code block\nis actually found by looking up the name ``__builtins__`` in its\nglobal namespace; this should be a dictionary or a module (in the\nlatter case the module\'s dictionary is used). By default, when in the\n``__main__`` module, ``__builtins__`` is the built-in module\n``__builtin__`` (note: no \'s\'); when in any other module,\n``__builtins__`` is an alias for the dictionary of the ``__builtin__``\nmodule itself. ``__builtins__`` can be set to a user-created\ndictionary to create a weak form of restricted execution.\n\n**CPython implementation detail:** Users should not touch\n``__builtins__``; it is strictly an implementation detail. Users\nwanting to override values in the builtins namespace should ``import``\nthe ``__builtin__`` (no \'s\') module and modify its attributes\nappropriately.\n\nThe namespace for a module is automatically created the first time a\nmodule is imported. The main module for a script is always called\n``__main__``.\n\nThe ``global`` statement has the same scope as a name binding\noperation in the same block. If the nearest enclosing scope for a\nfree variable contains a global statement, the free variable is\ntreated as a global.\n\nA class definition is an executable statement that may use and define\nnames. These references follow the normal rules for name resolution.\nThe namespace of the class definition becomes the attribute dictionary\nof the class. Names defined at the class scope are not visible in\nmethods.\n\n\nInteraction with dynamic features\n---------------------------------\n\nThere are several cases where Python statements are illegal when used\nin conjunction with nested scopes that contain free variables.\n\nIf a variable is referenced in an enclosing scope, it is illegal to\ndelete the name. An error will be reported at compile time.\n\nIf the wild card form of import --- ``import *`` --- is used in a\nfunction and the function contains or is a nested block with free\nvariables, the compiler will raise a ``SyntaxError``.\n\nIf ``exec`` is used in a function and the function contains or is a\nnested block with free variables, the compiler will raise a\n``SyntaxError`` unless the exec explicitly specifies the local\nnamespace for the ``exec``. (In other words, ``exec obj`` would be\nillegal, but ``exec obj in ns`` would be legal.)\n\nThe ``eval()``, ``execfile()``, and ``input()`` functions and the\n``exec`` statement do not have access to the full environment for\nresolving names. Names may be resolved in the local and global\nnamespaces of the caller. Free variables are not resolved in the\nnearest enclosing namespace, but in the global namespace. [1] The\n``exec`` statement and the ``eval()`` and ``execfile()`` functions\nhave optional arguments to override the global and local namespace.\nIf only one namespace is specified, it is used for both.\n\n\nExceptions\n==========\n\nExceptions are a means of breaking out of the normal flow of control\nof a code block in order to handle errors or other exceptional\nconditions. 
An exception is *raised* at the point where the error is\ndetected; it may be *handled* by the surrounding code block or by any\ncode block that directly or indirectly invoked the code block where\nthe error occurred.\n\nThe Python interpreter raises an exception when it detects a run-time\nerror (such as division by zero). A Python program can also\nexplicitly raise an exception with the ``raise`` statement. Exception\nhandlers are specified with the ``try`` ... ``except`` statement. The\n``finally`` clause of such a statement can be used to specify cleanup\ncode which does not handle the exception, but is executed whether an\nexception occurred or not in the preceding code.\n\nPython uses the "termination" model of error handling: an exception\nhandler can find out what happened and continue execution at an outer\nlevel, but it cannot repair the cause of the error and retry the\nfailing operation (except by re-entering the offending piece of code\nfrom the top).\n\nWhen an exception is not handled at all, the interpreter terminates\nexecution of the program, or returns to its interactive main loop. In\neither case, it prints a stack backtrace, except when the exception is\n``SystemExit``.\n\nExceptions are identified by class instances. The ``except`` clause\nis selected depending on the class of the instance: it must reference\nthe class of the instance or a base class thereof. The instance can\nbe received by the handler and can carry additional information about\nthe exceptional condition.\n\nExceptions can also be identified by strings, in which case the\n``except`` clause is selected by object identity. An arbitrary value\ncan be raised along with the identifying string which can be passed to\nthe handler.\n\nNote: Messages to exceptions are not part of the Python API. Their\n contents may change from one version of Python to the next without\n warning and should not be relied on by code which will run under\n multiple versions of the interpreter.\n\nSee also the description of the ``try`` statement in section *The try\nstatement* and ``raise`` statement in section *The raise statement*.\n\n-[ Footnotes ]-\n\n[1] This limitation occurs because the code that is executed by these\n operations is not available at the time the module is compiled.\n', + 'exprlists': '\nExpression lists\n****************\n\n expression_list ::= expression ( "," expression )* [","]\n\nAn expression list containing at least one comma yields a tuple. The\nlength of the tuple is the number of expressions in the list. The\nexpressions are evaluated from left to right.\n\nThe trailing comma is required only to create a single tuple (a.k.a. a\n*singleton*); it is optional in all other cases. A single expression\nwithout a trailing comma doesn\'t create a tuple, but rather yields the\nvalue of that expression. (To create an empty tuple, use an empty pair\nof parentheses: ``()``.)\n', + 'floating': '\nFloating point literals\n***********************\n\nFloating point literals are described by the following lexical\ndefinitions:\n\n floatnumber ::= pointfloat | exponentfloat\n pointfloat ::= [intpart] fraction | intpart "."\n exponentfloat ::= (intpart | pointfloat) exponent\n intpart ::= digit+\n fraction ::= "." digit+\n exponent ::= ("e" | "E") ["+" | "-"] digit+\n\nNote that the integer and exponent parts of floating point numbers can\nlook like octal integers, but are interpreted using radix 10. For\nexample, ``077e010`` is legal, and denotes the same number as\n``77e10``. 
The allowed range of floating point literals is\nimplementation-dependent. Some examples of floating point literals:\n\n 3.14 10. .001 1e100 3.14e-10 0e0\n\nNote that numeric literals do not include a sign; a phrase like ``-1``\nis actually an expression composed of the unary operator ``-`` and the\nliteral ``1``.\n', + 'for': '\nThe ``for`` statement\n*********************\n\nThe ``for`` statement is used to iterate over the elements of a\nsequence (such as a string, tuple or list) or other iterable object:\n\n for_stmt ::= "for" target_list "in" expression_list ":" suite\n ["else" ":" suite]\n\nThe expression list is evaluated once; it should yield an iterable\nobject. An iterator is created for the result of the\n``expression_list``. The suite is then executed once for each item\nprovided by the iterator, in the order of ascending indices. Each\nitem in turn is assigned to the target list using the standard rules\nfor assignments, and then the suite is executed. When the items are\nexhausted (which is immediately when the sequence is empty), the suite\nin the ``else`` clause, if present, is executed, and the loop\nterminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ncontinues with the next item, or with the ``else`` clause if there was\nno next item.\n\nThe suite may assign to the variable(s) in the target list; this does\nnot affect the next item assigned to it.\n\nThe target list is not deleted when the loop is finished, but if the\nsequence is empty, it will not have been assigned to at all by the\nloop. Hint: the built-in function ``range()`` returns a sequence of\nintegers suitable to emulate the effect of Pascal\'s ``for i := a to b\ndo``; e.g., ``range(3)`` returns the list ``[0, 1, 2]``.\n\nNote: There is a subtlety when the sequence is being modified by the loop\n (this can only occur for mutable sequences, i.e. lists). An internal\n counter is used to keep track of which item is used next, and this\n is incremented on each iteration. When this counter has reached the\n length of the sequence the loop terminates. This means that if the\n suite deletes the current (or a previous) item from the sequence,\n the next item will be skipped (since it gets the index of the\n current item which has already been treated). Likewise, if the\n suite inserts an item in the sequence before the current item, the\n current item will be treated again the next time through the loop.\n This can lead to nasty bugs that can be avoided by making a\n temporary copy using a slice of the whole sequence, e.g.,\n\n for x in a[:]:\n if x < 0: a.remove(x)\n', +- 'formatstrings': '\nFormat String Syntax\n********************\n\nThe ``str.format()`` method and the ``Formatter`` class share the same\nsyntax for format strings (although in the case of ``Formatter``,\nsubclasses can define their own format string syntax).\n\nFormat strings contain "replacement fields" surrounded by curly braces\n``{}``. Anything that is not contained in braces is considered literal\ntext, which is copied unchanged to the output. If you need to include\na brace character in the literal text, it can be escaped by doubling:\n``{{`` and ``}}``.\n\nThe grammar for a replacement field is as follows:\n\n replacement_field ::= "{" [field_name] ["!" conversion] [":" format_spec] "}"\n field_name ::= arg_name ("." 
attribute_name | "[" element_index "]")*\n arg_name ::= [identifier | integer]\n attribute_name ::= identifier\n element_index ::= integer | index_string\n index_string ::= +\n conversion ::= "r" | "s"\n format_spec ::= \n\nIn less formal terms, the replacement field can start with a\n*field_name* that specifies the object whose value is to be formatted\nand inserted into the output instead of the replacement field. The\n*field_name* is optionally followed by a *conversion* field, which is\npreceded by an exclamation point ``\'!\'``, and a *format_spec*, which\nis preceded by a colon ``\':\'``. These specify a non-default format\nfor the replacement value.\n\nSee also the *Format Specification Mini-Language* section.\n\nThe *field_name* itself begins with an *arg_name* that is either a\nnumber or a keyword. If it\'s a number, it refers to a positional\nargument, and if it\'s a keyword, it refers to a named keyword\nargument. If the numerical arg_names in a format string are 0, 1, 2,\n... in sequence, they can all be omitted (not just some) and the\nnumbers 0, 1, 2, ... will be automatically inserted in that order.\nBecause *arg_name* is not quote-delimited, it is not possible to\nspecify arbitrary dictionary keys (e.g., the strings ``\'10\'`` or\n``\':-]\'``) within a format string. The *arg_name* can be followed by\nany number of index or attribute expressions. An expression of the\nform ``\'.name\'`` selects the named attribute using ``getattr()``,\nwhile an expression of the form ``\'[index]\'`` does an index lookup\nusing ``__getitem__()``.\n\nChanged in version 2.7: The positional argument specifiers can be\nomitted, so ``\'{} {}\'`` is equivalent to ``\'{0} {1}\'``.\n\nSome simple format string examples:\n\n "First, thou shalt count to {0}" # References first positional argument\n "Bring me a {}" # Implicitly references the first positional argument\n "From {} to {}" # Same as "From {0} to {1}"\n "My quest is {name}" # References keyword argument \'name\'\n "Weight in tons {0.weight}" # \'weight\' attribute of first positional arg\n "Units destroyed: {players[0]}" # First element of keyword argument \'players\'.\n\nThe *conversion* field causes a type coercion before formatting.\nNormally, the job of formatting a value is done by the\n``__format__()`` method of the value itself. However, in some cases\nit is desirable to force a type to be formatted as a string,\noverriding its own definition of formatting. By converting the value\nto a string before calling ``__format__()``, the normal formatting\nlogic is bypassed.\n\nTwo conversion flags are currently supported: ``\'!s\'`` which calls\n``str()`` on the value, and ``\'!r\'`` which calls ``repr()``.\n\nSome examples:\n\n "Harold\'s a clever {0!s}" # Calls str() on the argument first\n "Bring out the holy {name!r}" # Calls repr() on the argument first\n\nThe *format_spec* field contains a specification of how the value\nshould be presented, including such details as field width, alignment,\npadding, decimal precision and so on. Each value type can define its\nown "formatting mini-language" or interpretation of the *format_spec*.\n\nMost built-in types support a common formatting mini-language, which\nis described in the next section.\n\nA *format_spec* field can also include nested replacement fields\nwithin it. These nested replacement fields can contain only a field\nname; conversion flags and format specifications are not allowed. 
The\nreplacement fields within the format_spec are substituted before the\n*format_spec* string is interpreted. This allows the formatting of a\nvalue to be dynamically specified.\n\nSee the *Format examples* section for some examples.\n\n\nFormat Specification Mini-Language\n==================================\n\n"Format specifications" are used within replacement fields contained\nwithin a format string to define how individual values are presented\n(see *Format String Syntax*). They can also be passed directly to the\nbuilt-in ``format()`` function. Each formattable type may define how\nthe format specification is to be interpreted.\n\nMost built-in types implement the following options for format\nspecifications, although some of the formatting options are only\nsupported by the numeric types.\n\nA general convention is that an empty format string (``""``) produces\nthe same result as if you had called ``str()`` on the value. A non-\nempty format string typically modifies the result.\n\nThe general form of a *standard format specifier* is:\n\n format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]\n fill ::= \n align ::= "<" | ">" | "=" | "^"\n sign ::= "+" | "-" | " "\n width ::= integer\n precision ::= integer\n type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n\nThe *fill* character can be any character other than \'{\' or \'}\'. The\npresence of a fill character is signaled by the character following\nit, which must be one of the alignment options. If the second\ncharacter of *format_spec* is not a valid alignment option, then it is\nassumed that both the fill character and the alignment option are\nabsent.\n\nThe meaning of the various alignment options is as follows:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'<\'`` | Forces the field to be left-aligned within the available |\n | | space (this is the default for most objects). |\n +-----------+------------------------------------------------------------+\n | ``\'>\'`` | Forces the field to be right-aligned within the available |\n | | space (this is the default for numbers). |\n +-----------+------------------------------------------------------------+\n | ``\'=\'`` | Forces the padding to be placed after the sign (if any) |\n | | but before the digits. This is used for printing fields |\n | | in the form \'+000000120\'. This alignment option is only |\n | | valid for numeric types. |\n +-----------+------------------------------------------------------------+\n | ``\'^\'`` | Forces the field to be centered within the available |\n | | space. |\n +-----------+------------------------------------------------------------+\n\nNote that unless a minimum field width is defined, the field width\nwill always be the same size as the data to fill it, so that the\nalignment option has no meaning in this case.\n\nThe *sign* option is only valid for number types, and can be one of\nthe following:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'+\'`` | indicates that a sign should be used for both positive as |\n | | well as negative numbers. 
|\n +-----------+------------------------------------------------------------+\n | ``\'-\'`` | indicates that a sign should be used only for negative |\n | | numbers (this is the default behavior). |\n +-----------+------------------------------------------------------------+\n | space | indicates that a leading space should be used on positive |\n | | numbers, and a minus sign on negative numbers. |\n +-----------+------------------------------------------------------------+\n\nThe ``\'#\'`` option is only valid for integers, and only for binary,\noctal, or hexadecimal output. If present, it specifies that the\noutput will be prefixed by ``\'0b\'``, ``\'0o\'``, or ``\'0x\'``,\nrespectively.\n\nThe ``\',\'`` option signals the use of a comma for a thousands\nseparator. For a locale aware separator, use the ``\'n\'`` integer\npresentation type instead.\n\nChanged in version 2.7: Added the ``\',\'`` option (see also **PEP\n378**).\n\n*width* is a decimal integer defining the minimum field width. If not\nspecified, then the field width will be determined by the content.\n\nPreceding the *width* field by a zero (``\'0\'``) character enables\nsign-aware zero-padding for numeric types. This is equivalent to a\n*fill* character of ``\'0\'`` with an *alignment* type of ``\'=\'``.\n\nThe *precision* is a decimal number indicating how many digits should\nbe displayed after the decimal point for a floating point value\nformatted with ``\'f\'`` and ``\'F\'``, or before and after the decimal\npoint for a floating point value formatted with ``\'g\'`` or ``\'G\'``.\nFor non-number types the field indicates the maximum field size - in\nother words, how many characters will be used from the field content.\nThe *precision* is not allowed for integer values.\n\nFinally, the *type* determines how the data should be presented.\n\nThe available string presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'s\'`` | String format. This is the default type for strings and |\n | | may be omitted. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'s\'``. |\n +-----------+------------------------------------------------------------+\n\nThe available integer presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'b\'`` | Binary format. Outputs the number in base 2. |\n +-----------+------------------------------------------------------------+\n | ``\'c\'`` | Character. Converts the integer to the corresponding |\n | | unicode character before printing. |\n +-----------+------------------------------------------------------------+\n | ``\'d\'`` | Decimal Integer. Outputs the number in base 10. |\n +-----------+------------------------------------------------------------+\n | ``\'o\'`` | Octal format. Outputs the number in base 8. |\n +-----------+------------------------------------------------------------+\n | ``\'x\'`` | Hex format. Outputs the number in base 16, using lower- |\n | | case letters for the digits above 9. |\n +-----------+------------------------------------------------------------+\n | ``\'X\'`` | Hex format. Outputs the number in base 16, using upper- |\n | | case letters for the digits above 9. 
|\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'d\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'d\'``. |\n +-----------+------------------------------------------------------------+\n\nIn addition to the above presentation types, integers can be formatted\nwith the floating point presentation types listed below (except\n``\'n\'`` and None). When doing so, ``float()`` is used to convert the\ninteger to a floating point number before formatting.\n\nThe available presentation types for floating point and decimal values\nare:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'e\'`` | Exponent notation. Prints the number in scientific |\n | | notation using the letter \'e\' to indicate the exponent. |\n | | The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'E\'`` | Exponent notation. Same as ``\'e\'`` except it uses an upper |\n | | case \'E\' as the separator character. |\n +-----------+------------------------------------------------------------+\n | ``\'f\'`` | Fixed point. Displays the number as a fixed-point number. |\n | | The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'F\'`` | Fixed point. Same as ``\'f\'``. |\n +-----------+------------------------------------------------------------+\n | ``\'g\'`` | General format. For a given precision ``p >= 1``, this |\n | | rounds the number to ``p`` significant digits and then |\n | | formats the result in either fixed-point format or in |\n | | scientific notation, depending on its magnitude. The |\n | | precise rules are as follows: suppose that the result |\n | | formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1`` would have exponent ``exp``. Then if ``-4 <= exp |\n | | < p``, the number is formatted with presentation type |\n | | ``\'f\'`` and precision ``p-1-exp``. Otherwise, the number |\n | | is formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1``. In both cases insignificant trailing zeros are |\n | | removed from the significand, and the decimal point is |\n | | also removed if there are no remaining digits following |\n | | it. Positive and negative infinity, positive and negative |\n | | zero, and nans, are formatted as ``inf``, ``-inf``, ``0``, |\n | | ``-0`` and ``nan`` respectively, regardless of the |\n | | precision. A precision of ``0`` is treated as equivalent |\n | | to a precision of ``1``. The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'G\'`` | General format. Same as ``\'g\'`` except switches to ``\'E\'`` |\n | | if the number gets too large. The representations of |\n | | infinity and NaN are uppercased, too. |\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'g\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | ``\'%\'`` | Percentage. 
Multiplies the number by 100 and displays in |\n | | fixed (``\'f\'``) format, followed by a percent sign. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'g\'``. |\n +-----------+------------------------------------------------------------+\n\n\nFormat examples\n===============\n\nThis section contains examples of the new format syntax and comparison\nwith the old ``%``-formatting.\n\nIn most of the cases the syntax is similar to the old\n``%``-formatting, with the addition of the ``{}`` and with ``:`` used\ninstead of ``%``. For example, ``\'%03.2f\'`` can be translated to\n``\'{:03.2f}\'``.\n\nThe new format syntax also supports new and different options, shown\nin the follow examples.\n\nAccessing arguments by position:\n\n >>> \'{0}, {1}, {2}\'.format(\'a\', \'b\', \'c\')\n \'a, b, c\'\n >>> \'{}, {}, {}\'.format(\'a\', \'b\', \'c\') # 2.7+ only\n \'a, b, c\'\n >>> \'{2}, {1}, {0}\'.format(\'a\', \'b\', \'c\')\n \'c, b, a\'\n >>> \'{2}, {1}, {0}\'.format(*\'abc\') # unpacking argument sequence\n \'c, b, a\'\n >>> \'{0}{1}{0}\'.format(\'abra\', \'cad\') # arguments\' indices can be repeated\n \'abracadabra\'\n\nAccessing arguments by name:\n\n >>> \'Coordinates: {latitude}, {longitude}\'.format(latitude=\'37.24N\', longitude=\'-115.81W\')\n \'Coordinates: 37.24N, -115.81W\'\n >>> coord = {\'latitude\': \'37.24N\', \'longitude\': \'-115.81W\'}\n >>> \'Coordinates: {latitude}, {longitude}\'.format(**coord)\n \'Coordinates: 37.24N, -115.81W\'\n\nAccessing arguments\' attributes:\n\n >>> c = 3-5j\n >>> (\'The complex number {0} is formed from the real part {0.real} \'\n ... \'and the imaginary part {0.imag}.\').format(c)\n \'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.\'\n >>> class Point(object):\n ... def __init__(self, x, y):\n ... self.x, self.y = x, y\n ... def __str__(self):\n ... 
return \'Point({self.x}, {self.y})\'.format(self=self)\n ...\n >>> str(Point(4, 2))\n \'Point(4, 2)\'\n\nAccessing arguments\' items:\n\n >>> coord = (3, 5)\n >>> \'X: {0[0]}; Y: {0[1]}\'.format(coord)\n \'X: 3; Y: 5\'\n\nReplacing ``%s`` and ``%r``:\n\n >>> "repr() shows quotes: {!r}; str() doesn\'t: {!s}".format(\'test1\', \'test2\')\n "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n\nAligning the text and specifying a width:\n\n >>> \'{:<30}\'.format(\'left aligned\')\n \'left aligned \'\n >>> \'{:>30}\'.format(\'right aligned\')\n \' right aligned\'\n >>> \'{:^30}\'.format(\'centered\')\n \' centered \'\n >>> \'{:*^30}\'.format(\'centered\') # use \'*\' as a fill char\n \'***********centered***********\'\n\nReplacing ``%+f``, ``%-f``, and ``% f`` and specifying a sign:\n\n >>> \'{:+f}; {:+f}\'.format(3.14, -3.14) # show it always\n \'+3.140000; -3.140000\'\n >>> \'{: f}; {: f}\'.format(3.14, -3.14) # show a space for positive numbers\n \' 3.140000; -3.140000\'\n >>> \'{:-f}; {:-f}\'.format(3.14, -3.14) # show only the minus -- same as \'{:f}; {:f}\'\n \'3.140000; -3.140000\'\n\nReplacing ``%x`` and ``%o`` and converting the value to different\nbases:\n\n >>> # format also supports binary numbers\n >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(42)\n \'int: 42; hex: 2a; oct: 52; bin: 101010\'\n >>> # with 0x, 0o, or 0b as prefix:\n >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(42)\n \'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010\'\n\nUsing the comma as a thousands separator:\n\n >>> \'{:,}\'.format(1234567890)\n \'1,234,567,890\'\n\nExpressing a percentage:\n\n >>> points = 19.5\n >>> total = 22\n >>> \'Correct answers: {:.2%}\'.format(points/total)\n \'Correct answers: 88.64%\'\n\nUsing type-specific formatting:\n\n >>> import datetime\n >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n >>> \'{:%Y-%m-%d %H:%M:%S}\'.format(d)\n \'2010-07-04 12:15:58\'\n\nNesting arguments and more complex examples:\n\n >>> for align, text in zip(\'<^>\', [\'left\', \'center\', \'right\']):\n ... \'{0:{fill}{align}16}\'.format(text, fill=align, align=align)\n ...\n \'left<<<<<<<<<<<<\'\n \'^^^^^center^^^^^\'\n \'>>>>>>>>>>>right\'\n >>>\n >>> octets = [192, 168, 0, 1]\n >>> \'{:02X}{:02X}{:02X}{:02X}\'.format(*octets)\n \'C0A80001\'\n >>> int(_, 16)\n 3232235521\n >>>\n >>> width = 5\n >>> for num in range(5,12):\n ... for base in \'dXob\':\n ... print \'{0:{width}{base}}\'.format(num, base=base, width=width),\n ... print\n ...\n 5 5 5 101\n 6 6 6 110\n 7 7 7 111\n 8 8 10 1000\n 9 9 11 1001\n 10 A 12 1010\n 11 B 13 1011\n', ++ 'formatstrings': '\nFormat String Syntax\n********************\n\nThe ``str.format()`` method and the ``Formatter`` class share the same\nsyntax for format strings (although in the case of ``Formatter``,\nsubclasses can define their own format string syntax).\n\nFormat strings contain "replacement fields" surrounded by curly braces\n``{}``. Anything that is not contained in braces is considered literal\ntext, which is copied unchanged to the output. If you need to include\na brace character in the literal text, it can be escaped by doubling:\n``{{`` and ``}}``.\n\nThe grammar for a replacement field is as follows:\n\n replacement_field ::= "{" [field_name] ["!" conversion] [":" format_spec] "}"\n field_name ::= arg_name ("." 
attribute_name | "[" element_index "]")*\n arg_name ::= [identifier | integer]\n attribute_name ::= identifier\n element_index ::= integer | index_string\n index_string ::= +\n conversion ::= "r" | "s"\n format_spec ::= \n\nIn less formal terms, the replacement field can start with a\n*field_name* that specifies the object whose value is to be formatted\nand inserted into the output instead of the replacement field. The\n*field_name* is optionally followed by a *conversion* field, which is\npreceded by an exclamation point ``\'!\'``, and a *format_spec*, which\nis preceded by a colon ``\':\'``. These specify a non-default format\nfor the replacement value.\n\nSee also the *Format Specification Mini-Language* section.\n\nThe *field_name* itself begins with an *arg_name* that is either a\nnumber or a keyword. If it\'s a number, it refers to a positional\nargument, and if it\'s a keyword, it refers to a named keyword\nargument. If the numerical arg_names in a format string are 0, 1, 2,\n... in sequence, they can all be omitted (not just some) and the\nnumbers 0, 1, 2, ... will be automatically inserted in that order.\nBecause *arg_name* is not quote-delimited, it is not possible to\nspecify arbitrary dictionary keys (e.g., the strings ``\'10\'`` or\n``\':-]\'``) within a format string. The *arg_name* can be followed by\nany number of index or attribute expressions. An expression of the\nform ``\'.name\'`` selects the named attribute using ``getattr()``,\nwhile an expression of the form ``\'[index]\'`` does an index lookup\nusing ``__getitem__()``.\n\nChanged in version 2.7: The positional argument specifiers can be\nomitted, so ``\'{} {}\'`` is equivalent to ``\'{0} {1}\'``.\n\nSome simple format string examples:\n\n "First, thou shalt count to {0}" # References first positional argument\n "Bring me a {}" # Implicitly references the first positional argument\n "From {} to {}" # Same as "From {0} to {1}"\n "My quest is {name}" # References keyword argument \'name\'\n "Weight in tons {0.weight}" # \'weight\' attribute of first positional arg\n "Units destroyed: {players[0]}" # First element of keyword argument \'players\'.\n\nThe *conversion* field causes a type coercion before formatting.\nNormally, the job of formatting a value is done by the\n``__format__()`` method of the value itself. However, in some cases\nit is desirable to force a type to be formatted as a string,\noverriding its own definition of formatting. By converting the value\nto a string before calling ``__format__()``, the normal formatting\nlogic is bypassed.\n\nTwo conversion flags are currently supported: ``\'!s\'`` which calls\n``str()`` on the value, and ``\'!r\'`` which calls ``repr()``.\n\nSome examples:\n\n "Harold\'s a clever {0!s}" # Calls str() on the argument first\n "Bring out the holy {name!r}" # Calls repr() on the argument first\n\nThe *format_spec* field contains a specification of how the value\nshould be presented, including such details as field width, alignment,\npadding, decimal precision and so on. Each value type can define its\nown "formatting mini-language" or interpretation of the *format_spec*.\n\nMost built-in types support a common formatting mini-language, which\nis described in the next section.\n\nA *format_spec* field can also include nested replacement fields\nwithin it. These nested replacement fields can contain only a field\nname; conversion flags and format specifications are not allowed. 
The\nreplacement fields within the format_spec are substituted before the\n*format_spec* string is interpreted. This allows the formatting of a\nvalue to be dynamically specified.\n\nSee the *Format examples* section for some examples.\n\n\nFormat Specification Mini-Language\n==================================\n\n"Format specifications" are used within replacement fields contained\nwithin a format string to define how individual values are presented\n(see *Format String Syntax*). They can also be passed directly to the\nbuilt-in ``format()`` function. Each formattable type may define how\nthe format specification is to be interpreted.\n\nMost built-in types implement the following options for format\nspecifications, although some of the formatting options are only\nsupported by the numeric types.\n\nA general convention is that an empty format string (``""``) produces\nthe same result as if you had called ``str()`` on the value. A non-\nempty format string typically modifies the result.\n\nThe general form of a *standard format specifier* is:\n\n format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]\n fill ::= \n align ::= "<" | ">" | "=" | "^"\n sign ::= "+" | "-" | " "\n width ::= integer\n precision ::= integer\n type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n\nIf a valid *align* value is specified, it can be preceded by a *fill*\ncharacter that can be any character and defaults to a space if\nomitted. Note that it is not possible to use ``{`` and ``}`` as *fill*\nchar while using the ``str.format()`` method; this limitation however\ndoesn\'t affect the ``format()`` function.\n\nThe meaning of the various alignment options is as follows:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'<\'`` | Forces the field to be left-aligned within the available |\n | | space (this is the default for most objects). |\n +-----------+------------------------------------------------------------+\n | ``\'>\'`` | Forces the field to be right-aligned within the available |\n | | space (this is the default for numbers). |\n +-----------+------------------------------------------------------------+\n | ``\'=\'`` | Forces the padding to be placed after the sign (if any) |\n | | but before the digits. This is used for printing fields |\n | | in the form \'+000000120\'. This alignment option is only |\n | | valid for numeric types. |\n +-----------+------------------------------------------------------------+\n | ``\'^\'`` | Forces the field to be centered within the available |\n | | space. |\n +-----------+------------------------------------------------------------+\n\nNote that unless a minimum field width is defined, the field width\nwill always be the same size as the data to fill it, so that the\nalignment option has no meaning in this case.\n\nThe *sign* option is only valid for number types, and can be one of\nthe following:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'+\'`` | indicates that a sign should be used for both positive as |\n | | well as negative numbers. |\n +-----------+------------------------------------------------------------+\n | ``\'-\'`` | indicates that a sign should be used only for negative |\n | | numbers (this is the default behavior). 
|\n +-----------+------------------------------------------------------------+\n | space | indicates that a leading space should be used on positive |\n | | numbers, and a minus sign on negative numbers. |\n +-----------+------------------------------------------------------------+\n\nThe ``\'#\'`` option is only valid for integers, and only for binary,\noctal, or hexadecimal output. If present, it specifies that the\noutput will be prefixed by ``\'0b\'``, ``\'0o\'``, or ``\'0x\'``,\nrespectively.\n\nThe ``\',\'`` option signals the use of a comma for a thousands\nseparator. For a locale aware separator, use the ``\'n\'`` integer\npresentation type instead.\n\nChanged in version 2.7: Added the ``\',\'`` option (see also **PEP\n378**).\n\n*width* is a decimal integer defining the minimum field width. If not\nspecified, then the field width will be determined by the content.\n\nPreceding the *width* field by a zero (``\'0\'``) character enables\nsign-aware zero-padding for numeric types. This is equivalent to a\n*fill* character of ``\'0\'`` with an *alignment* type of ``\'=\'``.\n\nThe *precision* is a decimal number indicating how many digits should\nbe displayed after the decimal point for a floating point value\nformatted with ``\'f\'`` and ``\'F\'``, or before and after the decimal\npoint for a floating point value formatted with ``\'g\'`` or ``\'G\'``.\nFor non-number types the field indicates the maximum field size - in\nother words, how many characters will be used from the field content.\nThe *precision* is not allowed for integer values.\n\nFinally, the *type* determines how the data should be presented.\n\nThe available string presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'s\'`` | String format. This is the default type for strings and |\n | | may be omitted. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'s\'``. |\n +-----------+------------------------------------------------------------+\n\nThe available integer presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'b\'`` | Binary format. Outputs the number in base 2. |\n +-----------+------------------------------------------------------------+\n | ``\'c\'`` | Character. Converts the integer to the corresponding |\n | | unicode character before printing. |\n +-----------+------------------------------------------------------------+\n | ``\'d\'`` | Decimal Integer. Outputs the number in base 10. |\n +-----------+------------------------------------------------------------+\n | ``\'o\'`` | Octal format. Outputs the number in base 8. |\n +-----------+------------------------------------------------------------+\n | ``\'x\'`` | Hex format. Outputs the number in base 16, using lower- |\n | | case letters for the digits above 9. |\n +-----------+------------------------------------------------------------+\n | ``\'X\'`` | Hex format. Outputs the number in base 16, using upper- |\n | | case letters for the digits above 9. |\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. 
This is the same as ``\'d\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'d\'``. |\n +-----------+------------------------------------------------------------+\n\nIn addition to the above presentation types, integers can be formatted\nwith the floating point presentation types listed below (except\n``\'n\'`` and None). When doing so, ``float()`` is used to convert the\ninteger to a floating point number before formatting.\n\nThe available presentation types for floating point and decimal values\nare:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'e\'`` | Exponent notation. Prints the number in scientific |\n | | notation using the letter \'e\' to indicate the exponent. |\n | | The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'E\'`` | Exponent notation. Same as ``\'e\'`` except it uses an upper |\n | | case \'E\' as the separator character. |\n +-----------+------------------------------------------------------------+\n | ``\'f\'`` | Fixed point. Displays the number as a fixed-point number. |\n | | The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'F\'`` | Fixed point. Same as ``\'f\'``. |\n +-----------+------------------------------------------------------------+\n | ``\'g\'`` | General format. For a given precision ``p >= 1``, this |\n | | rounds the number to ``p`` significant digits and then |\n | | formats the result in either fixed-point format or in |\n | | scientific notation, depending on its magnitude. The |\n | | precise rules are as follows: suppose that the result |\n | | formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1`` would have exponent ``exp``. Then if ``-4 <= exp |\n | | < p``, the number is formatted with presentation type |\n | | ``\'f\'`` and precision ``p-1-exp``. Otherwise, the number |\n | | is formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1``. In both cases insignificant trailing zeros are |\n | | removed from the significand, and the decimal point is |\n | | also removed if there are no remaining digits following |\n | | it. Positive and negative infinity, positive and negative |\n | | zero, and nans, are formatted as ``inf``, ``-inf``, ``0``, |\n | | ``-0`` and ``nan`` respectively, regardless of the |\n | | precision. A precision of ``0`` is treated as equivalent |\n | | to a precision of ``1``. The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'G\'`` | General format. Same as ``\'g\'`` except switches to ``\'E\'`` |\n | | if the number gets too large. The representations of |\n | | infinity and NaN are uppercased, too. |\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'g\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | ``\'%\'`` | Percentage. Multiplies the number by 100 and displays in |\n | | fixed (``\'f\'``) format, followed by a percent sign. 
|\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'g\'``. |\n +-----------+------------------------------------------------------------+\n\n\nFormat examples\n===============\n\nThis section contains examples of the new format syntax and comparison\nwith the old ``%``-formatting.\n\nIn most of the cases the syntax is similar to the old\n``%``-formatting, with the addition of the ``{}`` and with ``:`` used\ninstead of ``%``. For example, ``\'%03.2f\'`` can be translated to\n``\'{:03.2f}\'``.\n\nThe new format syntax also supports new and different options, shown\nin the follow examples.\n\nAccessing arguments by position:\n\n >>> \'{0}, {1}, {2}\'.format(\'a\', \'b\', \'c\')\n \'a, b, c\'\n >>> \'{}, {}, {}\'.format(\'a\', \'b\', \'c\') # 2.7+ only\n \'a, b, c\'\n >>> \'{2}, {1}, {0}\'.format(\'a\', \'b\', \'c\')\n \'c, b, a\'\n >>> \'{2}, {1}, {0}\'.format(*\'abc\') # unpacking argument sequence\n \'c, b, a\'\n >>> \'{0}{1}{0}\'.format(\'abra\', \'cad\') # arguments\' indices can be repeated\n \'abracadabra\'\n\nAccessing arguments by name:\n\n >>> \'Coordinates: {latitude}, {longitude}\'.format(latitude=\'37.24N\', longitude=\'-115.81W\')\n \'Coordinates: 37.24N, -115.81W\'\n >>> coord = {\'latitude\': \'37.24N\', \'longitude\': \'-115.81W\'}\n >>> \'Coordinates: {latitude}, {longitude}\'.format(**coord)\n \'Coordinates: 37.24N, -115.81W\'\n\nAccessing arguments\' attributes:\n\n >>> c = 3-5j\n >>> (\'The complex number {0} is formed from the real part {0.real} \'\n ... \'and the imaginary part {0.imag}.\').format(c)\n \'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.\'\n >>> class Point(object):\n ... def __init__(self, x, y):\n ... self.x, self.y = x, y\n ... def __str__(self):\n ... 
return \'Point({self.x}, {self.y})\'.format(self=self)\n ...\n >>> str(Point(4, 2))\n \'Point(4, 2)\'\n\nAccessing arguments\' items:\n\n >>> coord = (3, 5)\n >>> \'X: {0[0]}; Y: {0[1]}\'.format(coord)\n \'X: 3; Y: 5\'\n\nReplacing ``%s`` and ``%r``:\n\n >>> "repr() shows quotes: {!r}; str() doesn\'t: {!s}".format(\'test1\', \'test2\')\n "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n\nAligning the text and specifying a width:\n\n >>> \'{:<30}\'.format(\'left aligned\')\n \'left aligned \'\n >>> \'{:>30}\'.format(\'right aligned\')\n \' right aligned\'\n >>> \'{:^30}\'.format(\'centered\')\n \' centered \'\n >>> \'{:*^30}\'.format(\'centered\') # use \'*\' as a fill char\n \'***********centered***********\'\n\nReplacing ``%+f``, ``%-f``, and ``% f`` and specifying a sign:\n\n >>> \'{:+f}; {:+f}\'.format(3.14, -3.14) # show it always\n \'+3.140000; -3.140000\'\n >>> \'{: f}; {: f}\'.format(3.14, -3.14) # show a space for positive numbers\n \' 3.140000; -3.140000\'\n >>> \'{:-f}; {:-f}\'.format(3.14, -3.14) # show only the minus -- same as \'{:f}; {:f}\'\n \'3.140000; -3.140000\'\n\nReplacing ``%x`` and ``%o`` and converting the value to different\nbases:\n\n >>> # format also supports binary numbers\n >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(42)\n \'int: 42; hex: 2a; oct: 52; bin: 101010\'\n >>> # with 0x, 0o, or 0b as prefix:\n >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(42)\n \'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010\'\n\nUsing the comma as a thousands separator:\n\n >>> \'{:,}\'.format(1234567890)\n \'1,234,567,890\'\n\nExpressing a percentage:\n\n >>> points = 19.5\n >>> total = 22\n >>> \'Correct answers: {:.2%}\'.format(points/total)\n \'Correct answers: 88.64%\'\n\nUsing type-specific formatting:\n\n >>> import datetime\n >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n >>> \'{:%Y-%m-%d %H:%M:%S}\'.format(d)\n \'2010-07-04 12:15:58\'\n\nNesting arguments and more complex examples:\n\n >>> for align, text in zip(\'<^>\', [\'left\', \'center\', \'right\']):\n ... \'{0:{fill}{align}16}\'.format(text, fill=align, align=align)\n ...\n \'left<<<<<<<<<<<<\'\n \'^^^^^center^^^^^\'\n \'>>>>>>>>>>>right\'\n >>>\n >>> octets = [192, 168, 0, 1]\n >>> \'{:02X}{:02X}{:02X}{:02X}\'.format(*octets)\n \'C0A80001\'\n >>> int(_, 16)\n 3232235521\n >>>\n >>> width = 5\n >>> for num in range(5,12):\n ... for base in \'dXob\':\n ... print \'{0:{width}{base}}\'.format(num, base=base, width=width),\n ... print\n ...\n 5 5 5 101\n 6 6 6 110\n 7 7 7 111\n 8 8 10 1000\n 9 9 11 1001\n 10 A 12 1010\n 11 B 13 1011\n', + 'function': '\nFunction definitions\n********************\n\nA function definition defines a user-defined function object (see\nsection *The standard type hierarchy*):\n\n decorated ::= decorators (classdef | funcdef)\n decorators ::= decorator+\n decorator ::= "@" dotted_name ["(" [argument_list [","]] ")"] NEWLINE\n funcdef ::= "def" funcname "(" [parameter_list] ")" ":" suite\n dotted_name ::= identifier ("." identifier)*\n parameter_list ::= (defparameter ",")*\n ( "*" identifier ["," "**" identifier]\n | "**" identifier\n | defparameter [","] )\n defparameter ::= parameter ["=" expression]\n sublist ::= parameter ("," parameter)* [","]\n parameter ::= identifier | "(" sublist ")"\n funcname ::= identifier\n\nA function definition is an executable statement. Its execution binds\nthe function name in the current local namespace to a function object\n(a wrapper around the executable code for the function). 
This\nfunction object contains a reference to the current global namespace\nas the global namespace to be used when the function is called.\n\nThe function definition does not execute the function body; this gets\nexecuted only when the function is called. [3]\n\nA function definition may be wrapped by one or more *decorator*\nexpressions. Decorator expressions are evaluated when the function is\ndefined, in the scope that contains the function definition. The\nresult must be a callable, which is invoked with the function object\nas the only argument. The returned value is bound to the function name\ninstead of the function object. Multiple decorators are applied in\nnested fashion. For example, the following code:\n\n @f1(arg)\n @f2\n def func(): pass\n\nis equivalent to:\n\n def func(): pass\n func = f1(arg)(f2(func))\n\nWhen one or more top-level *parameters* have the form *parameter*\n``=`` *expression*, the function is said to have "default parameter\nvalues." For a parameter with a default value, the corresponding\n*argument* may be omitted from a call, in which case the parameter\'s\ndefault value is substituted. If a parameter has a default value, all\nfollowing parameters must also have a default value --- this is a\nsyntactic restriction that is not expressed by the grammar.\n\n**Default parameter values are evaluated when the function definition\nis executed.** This means that the expression is evaluated once, when\nthe function is defined, and that the same "pre-computed" value is\nused for each call. This is especially important to understand when a\ndefault parameter is a mutable object, such as a list or a dictionary:\nif the function modifies the object (e.g. by appending an item to a\nlist), the default value is in effect modified. This is generally not\nwhat was intended. A way around this is to use ``None`` as the\ndefault, and explicitly test for it in the body of the function, e.g.:\n\n def whats_on_the_telly(penguin=None):\n if penguin is None:\n penguin = []\n penguin.append("property of the zoo")\n return penguin\n\nFunction call semantics are described in more detail in section\n*Calls*. A function call always assigns values to all parameters\nmentioned in the parameter list, either from position arguments, from\nkeyword arguments, or from default values. If the form\n"``*identifier``" is present, it is initialized to a tuple receiving\nany excess positional parameters, defaulting to the empty tuple. If\nthe form "``**identifier``" is present, it is initialized to a new\ndictionary receiving any excess keyword arguments, defaulting to a new\nempty dictionary.\n\nIt is also possible to create anonymous functions (functions not bound\nto a name), for immediate use in expressions. This uses lambda forms,\ndescribed in section *Lambdas*. Note that the lambda form is merely a\nshorthand for a simplified function definition; a function defined in\na "``def``" statement can be passed around or assigned to another name\njust like a function defined by a lambda form. The "``def``" form is\nactually more powerful since it allows the execution of multiple\nstatements.\n\n**Programmer\'s note:** Functions are first-class objects. A "``def``"\nform executed inside a function definition defines a local function\nthat can be returned or passed around. Free variables used in the\nnested function can access the local variables of the function\ncontaining the def. 
See section *Naming and binding* for details.\n', + 'global': '\nThe ``global`` statement\n************************\n\n global_stmt ::= "global" identifier ("," identifier)*\n\nThe ``global`` statement is a declaration which holds for the entire\ncurrent code block. It means that the listed identifiers are to be\ninterpreted as globals. It would be impossible to assign to a global\nvariable without ``global``, although free variables may refer to\nglobals without being declared global.\n\nNames listed in a ``global`` statement must not be used in the same\ncode block textually preceding that ``global`` statement.\n\nNames listed in a ``global`` statement must not be defined as formal\nparameters or in a ``for`` loop control target, ``class`` definition,\nfunction definition, or ``import`` statement.\n\n**CPython implementation detail:** The current implementation does not\nenforce the latter two restrictions, but programs should not abuse\nthis freedom, as future implementations may enforce them or silently\nchange the meaning of the program.\n\n**Programmer\'s note:** the ``global`` is a directive to the parser.\nIt applies only to code parsed at the same time as the ``global``\nstatement. In particular, a ``global`` statement contained in an\n``exec`` statement does not affect the code block *containing* the\n``exec`` statement, and code contained in an ``exec`` statement is\nunaffected by ``global`` statements in the code containing the\n``exec`` statement. The same applies to the ``eval()``,\n``execfile()`` and ``compile()`` functions.\n', + 'id-classes': '\nReserved classes of identifiers\n*******************************\n\nCertain classes of identifiers (besides keywords) have special\nmeanings. These classes are identified by the patterns of leading and\ntrailing underscore characters:\n\n``_*``\n Not imported by ``from module import *``. The special identifier\n ``_`` is used in the interactive interpreter to store the result of\n the last evaluation; it is stored in the ``__builtin__`` module.\n When not in interactive mode, ``_`` has no special meaning and is\n not defined. See section *The import statement*.\n\n Note: The name ``_`` is often used in conjunction with\n internationalization; refer to the documentation for the\n ``gettext`` module for more information on this convention.\n\n``__*__``\n System-defined names. These names are defined by the interpreter\n and its implementation (including the standard library). Current\n system names are discussed in the *Special method names* section\n and elsewhere. More will likely be defined in future versions of\n Python. *Any* use of ``__*__`` names, in any context, that does\n not follow explicitly documented use, is subject to breakage\n without warning.\n\n``__*``\n Class-private names. Names in this category, when used within the\n context of a class definition, are re-written to use a mangled form\n to help avoid name clashes between "private" attributes of base and\n derived classes. See section *Identifiers (Names)*.\n', +@@ -52,6 +53,7 @@ topics = {'assert': '\nThe ``assert`` st + 'operator-summary': '\nOperator precedence\n*******************\n\nThe following table summarizes the operator precedences in Python,\nfrom lowest precedence (least binding) to highest precedence (most\nbinding). Operators in the same box have the same precedence. Unless\nthe syntax is explicitly given, operators are binary. 
Operators in\nthe same box group left to right (except for comparisons, including\ntests, which all have the same precedence and chain from left to right\n--- see section *Comparisons* --- and exponentiation, which groups\nfrom right to left).\n\n+-------------------------------------------------+---------------------------------------+\n| Operator | Description |\n+=================================================+=======================================+\n| ``lambda`` | Lambda expression |\n+-------------------------------------------------+---------------------------------------+\n| ``if`` -- ``else`` | Conditional expression |\n+-------------------------------------------------+---------------------------------------+\n| ``or`` | Boolean OR |\n+-------------------------------------------------+---------------------------------------+\n| ``and`` | Boolean AND |\n+-------------------------------------------------+---------------------------------------+\n| ``not`` ``x`` | Boolean NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``in``, ``not in``, ``is``, ``is not``, ``<``, | Comparisons, including membership |\n| ``<=``, ``>``, ``>=``, ``<>``, ``!=``, ``==`` | tests and identity tests |\n+-------------------------------------------------+---------------------------------------+\n| ``|`` | Bitwise OR |\n+-------------------------------------------------+---------------------------------------+\n| ``^`` | Bitwise XOR |\n+-------------------------------------------------+---------------------------------------+\n| ``&`` | Bitwise AND |\n+-------------------------------------------------+---------------------------------------+\n| ``<<``, ``>>`` | Shifts |\n+-------------------------------------------------+---------------------------------------+\n| ``+``, ``-`` | Addition and subtraction |\n+-------------------------------------------------+---------------------------------------+\n| ``*``, ``/``, ``//``, ``%`` | Multiplication, division, remainder |\n| | [8] |\n+-------------------------------------------------+---------------------------------------+\n| ``+x``, ``-x``, ``~x`` | Positive, negative, bitwise NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``**`` | Exponentiation [9] |\n+-------------------------------------------------+---------------------------------------+\n| ``x[index]``, ``x[index:index]``, | Subscription, slicing, call, |\n| ``x(arguments...)``, ``x.attribute`` | attribute reference |\n+-------------------------------------------------+---------------------------------------+\n| ``(expressions...)``, ``[expressions...]``, | Binding or tuple display, list |\n| ``{key: value...}``, ```expressions...``` | display, dictionary display, string |\n| | conversion |\n+-------------------------------------------------+---------------------------------------+\n\n-[ Footnotes ]-\n\n[1] In Python 2.3 and later releases, a list comprehension "leaks" the\n control variables of each ``for`` it contains into the containing\n scope. However, this behavior is deprecated, and relying on it\n will not work in Python 3.\n\n[2] While ``abs(x%y) < abs(y)`` is true mathematically, for floats it\n may not be true numerically due to roundoff. 
For example, and\n assuming a platform on which a Python float is an IEEE 754 double-\n precision number, in order that ``-1e-100 % 1e100`` have the same\n sign as ``1e100``, the computed result is ``-1e-100 + 1e100``,\n which is numerically exactly equal to ``1e100``. The function\n ``math.fmod()`` returns a result whose sign matches the sign of\n the first argument instead, and so returns ``-1e-100`` in this\n case. Which approach is more appropriate depends on the\n application.\n\n[3] If x is very close to an exact integer multiple of y, it\'s\n possible for ``floor(x/y)`` to be one larger than ``(x-x%y)/y``\n due to rounding. In such cases, Python returns the latter result,\n in order to preserve that ``divmod(x,y)[0] * y + x % y`` be very\n close to ``x``.\n\n[4] While comparisons between unicode strings make sense at the byte\n level, they may be counter-intuitive to users. For example, the\n strings ``u"\\u00C7"`` and ``u"\\u0043\\u0327"`` compare differently,\n even though they both represent the same unicode character (LATIN\n CAPITAL LETTER C WITH CEDILLA). To compare strings in a human\n recognizable way, compare using ``unicodedata.normalize()``.\n\n[5] The implementation computes this efficiently, without constructing\n lists or sorting.\n\n[6] Earlier versions of Python used lexicographic comparison of the\n sorted (key, value) lists, but this was very expensive for the\n common case of comparing for equality. An even earlier version of\n Python compared dictionaries by identity only, but this caused\n surprises because people expected to be able to test a dictionary\n for emptiness by comparing it to ``{}``.\n\n[7] Due to automatic garbage-collection, free lists, and the dynamic\n nature of descriptors, you may notice seemingly unusual behaviour\n in certain uses of the ``is`` operator, like those involving\n comparisons between instance methods, or constants. Check their\n documentation for more info.\n\n[8] The ``%`` operator is also used for string formatting; the same\n precedence applies.\n\n[9] The power operator ``**`` binds less tightly than an arithmetic or\n bitwise unary operator on its right, that is, ``2**-1`` is\n ``0.5``.\n', + 'pass': '\nThe ``pass`` statement\n**********************\n\n pass_stmt ::= "pass"\n\n``pass`` is a null operation --- when it is executed, nothing happens.\nIt is useful as a placeholder when a statement is required\nsyntactically, but no code needs to be executed, for example:\n\n def f(arg): pass # a function that does nothing (yet)\n\n class C: pass # a class with no methods (yet)\n', + 'power': '\nThe power operator\n******************\n\nThe power operator binds more tightly than unary operators on its\nleft; it binds less tightly than unary operators on its right. The\nsyntax is:\n\n power ::= primary ["**" u_expr]\n\nThus, in an unparenthesized sequence of power and unary operators, the\noperators are evaluated from right to left (this does not constrain\nthe evaluation order for the operands): ``-1**2`` results in ``-1``.\n\nThe power operator has the same semantics as the built-in ``pow()``\nfunction, when called with two arguments: it yields its left argument\nraised to the power of its right argument. The numeric arguments are\nfirst converted to a common type. The result type is that of the\narguments after coercion.\n\nWith mixed operand types, the coercion rules for binary arithmetic\noperators apply. 
For int and long int operands, the result has the\nsame type as the operands (after coercion) unless the second argument\nis negative; in that case, all arguments are converted to float and a\nfloat result is delivered. For example, ``10**2`` returns ``100``, but\n``10**-2`` returns ``0.01``. (This last feature was added in Python\n2.2. In Python 2.1 and before, if both arguments were of integer types\nand the second argument was negative, an exception was raised).\n\nRaising ``0.0`` to a negative power results in a\n``ZeroDivisionError``. Raising a negative number to a fractional power\nresults in a ``ValueError``.\n', ++ 'print': '\nThe ``print`` statement\n***********************\n\n print_stmt ::= "print" ([expression ("," expression)* [","]]\n | ">>" expression [("," expression)+ [","]])\n\n``print`` evaluates each expression in turn and writes the resulting\nobject to standard output (see below). If an object is not a string,\nit is first converted to a string using the rules for string\nconversions. The (resulting or original) string is then written. A\nspace is written before each object is (converted and) written, unless\nthe output system believes it is positioned at the beginning of a\nline. This is the case (1) when no characters have yet been written\nto standard output, (2) when the last character written to standard\noutput is a whitespace character except ``\' \'``, or (3) when the last\nwrite operation on standard output was not a ``print`` statement. (In\nsome cases it may be functional to write an empty string to standard\noutput for this reason.)\n\nNote: Objects which act like file objects but which are not the built-in\n file objects often do not properly emulate this aspect of the file\n object\'s behavior, so it is best not to rely on this.\n\nA ``\'\\n\'`` character is written at the end, unless the ``print``\nstatement ends with a comma. This is the only action if the statement\ncontains just the keyword ``print``.\n\nStandard output is defined as the file object named ``stdout`` in the\nbuilt-in module ``sys``. If no such object exists, or if it does not\nhave a ``write()`` method, a ``RuntimeError`` exception is raised.\n\n``print`` also has an extended form, defined by the second portion of\nthe syntax described above. This form is sometimes referred to as\n"``print`` chevron." In this form, the first expression after the\n``>>`` must evaluate to a "file-like" object, specifically an object\nthat has a ``write()`` method as described above. With this extended\nform, the subsequent expressions are printed to this file object. If\nthe first expression evaluates to ``None``, then ``sys.stdout`` is\nused as the file for output.\n', + 'raise': '\nThe ``raise`` statement\n***********************\n\n raise_stmt ::= "raise" [expression ["," expression ["," expression]]]\n\nIf no expressions are present, ``raise`` re-raises the last exception\nthat was active in the current scope. If no exception is active in\nthe current scope, a ``TypeError`` exception is raised indicating that\nthis is an error (if running under IDLE, a ``Queue.Empty`` exception\nis raised instead).\n\nOtherwise, ``raise`` evaluates the expressions to get three objects,\nusing ``None`` as the value of omitted expressions. 
The first two\nobjects are used to determine the *type* and *value* of the exception.\n\nIf the first object is an instance, the type of the exception is the\nclass of the instance, the instance itself is the value, and the\nsecond object must be ``None``.\n\nIf the first object is a class, it becomes the type of the exception.\nThe second object is used to determine the exception value: If it is\nan instance of the class, the instance becomes the exception value. If\nthe second object is a tuple, it is used as the argument list for the\nclass constructor; if it is ``None``, an empty argument list is used,\nand any other object is treated as a single argument to the\nconstructor. The instance so created by calling the constructor is\nused as the exception value.\n\nIf a third object is present and not ``None``, it must be a traceback\nobject (see section *The standard type hierarchy*), and it is\nsubstituted instead of the current location as the place where the\nexception occurred. If the third object is present and not a\ntraceback object or ``None``, a ``TypeError`` exception is raised.\nThe three-expression form of ``raise`` is useful to re-raise an\nexception transparently in an except clause, but ``raise`` with no\nexpressions should be preferred if the exception to be re-raised was\nthe most recently active exception in the current scope.\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information about handling exceptions is in section\n*The try statement*.\n', + 'return': '\nThe ``return`` statement\n************************\n\n return_stmt ::= "return" [expression_list]\n\n``return`` may only occur syntactically nested in a function\ndefinition, not within a nested class definition.\n\nIf an expression list is present, it is evaluated, else ``None`` is\nsubstituted.\n\n``return`` leaves the current function call with the expression list\n(or ``None``) as return value.\n\nWhen ``return`` passes control out of a ``try`` statement with a\n``finally`` clause, that ``finally`` clause is executed before really\nleaving the function.\n\nIn a generator function, the ``return`` statement is not allowed to\ninclude an ``expression_list``. In that context, a bare ``return``\nindicates that the generator is done and will cause ``StopIteration``\nto be raised.\n', + 'sequence-types': "\nEmulating container types\n*************************\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which ``0 <= k < N``\nwhere *N* is the length of the sequence, or slice objects, which\ndefine a range of items. (For backwards compatibility, the method\n``__getslice__()`` (see below) can also be defined to handle simple,\nbut not extended slices.) It is also recommended that mappings provide\nthe methods ``keys()``, ``values()``, ``items()``, ``has_key()``,\n``get()``, ``clear()``, ``setdefault()``, ``iterkeys()``,\n``itervalues()``, ``iteritems()``, ``pop()``, ``popitem()``,\n``copy()``, and ``update()`` behaving similar to those for Python's\nstandard dictionary objects. 
The ``UserDict`` module provides a\n``DictMixin`` class to help create those methods from a base set of\n``__getitem__()``, ``__setitem__()``, ``__delitem__()``, and\n``keys()``. Mutable sequences should provide methods ``append()``,\n``count()``, ``index()``, ``extend()``, ``insert()``, ``pop()``,\n``remove()``, ``reverse()`` and ``sort()``, like Python standard list\nobjects. Finally, sequence types should implement addition (meaning\nconcatenation) and multiplication (meaning repetition) by defining the\nmethods ``__add__()``, ``__radd__()``, ``__iadd__()``, ``__mul__()``,\n``__rmul__()`` and ``__imul__()`` described below; they should not\ndefine ``__coerce__()`` or other numerical operators. It is\nrecommended that both mappings and sequences implement the\n``__contains__()`` method to allow efficient use of the ``in``\noperator; for mappings, ``in`` should be equivalent of ``has_key()``;\nfor sequences, it should search through the values. It is further\nrecommended that both mappings and sequences implement the\n``__iter__()`` method to allow efficient iteration through the\ncontainer; for mappings, ``__iter__()`` should be the same as\n``iterkeys()``; for sequences, it should iterate through the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function ``len()``. Should return\n the length of the object, an integer ``>=`` 0. Also, an object\n that doesn't define a ``__nonzero__()`` method and whose\n ``__len__()`` method returns zero is considered to be false in a\n Boolean context.\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of ``self[key]``. For sequence\n types, the accepted keys should be integers and slice objects.\n Note that the special interpretation of negative indexes (if the\n class wishes to emulate a sequence type) is up to the\n ``__getitem__()`` method. If *key* is of an inappropriate type,\n ``TypeError`` may be raised; if of a value outside the set of\n indexes for the sequence (after any special interpretation of\n negative values), ``IndexError`` should be raised. For mapping\n types, if *key* is missing (not in the container), ``KeyError``\n should be raised.\n\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the ``__getitem__()`` method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the ``__getitem__()``\n method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method ``iterkeys()``.\n\n Iterator objects also need to implement this method; they are\n required to return themselves. 
For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the ``reversed()`` built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the ``__reversed__()`` method is not provided, the\n ``reversed()`` built-in will fall back to using the sequence\n protocol (``__len__()`` and ``__getitem__()``). Objects that\n support the sequence protocol should only provide\n ``__reversed__()`` if they can provide an implementation that is\n more efficient than the one provided by ``reversed()``.\n\n New in version 2.6.\n\nThe membership test operators (``in`` and ``not in``) are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don't define ``__contains__()``, the membership\n test first tries iteration via ``__iter__()``, then the old\n sequence iteration protocol via ``__getitem__()``, see *this\n section in the language reference*.\n", diff --git a/SOURCES/00193-buffer-overflow.patch b/SOURCES/00193-buffer-overflow.patch new file mode 100644 index 0000000..164b462 --- /dev/null +++ b/SOURCES/00193-buffer-overflow.patch @@ -0,0 +1,43 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1389671978 18000 +# Node ID 87673659d8f7ba1623cd4914f09ad3d2ade034e9 +# Parent 2631d33ee7fbd5f0288931ef37872218d511d2e8 +complain when nbytes > buflen to fix possible buffer overflow (closes #20246) + +diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py +--- a/Lib/test/test_socket.py ++++ b/Lib/test/test_socket.py +@@ -1620,6 +1620,16 @@ class BufferIOTest(SocketConnectedTest): + + _testRecvFromIntoMemoryview = _testRecvFromIntoArray + ++ def testRecvFromIntoSmallBuffer(self): ++ # See issue #20246. 
++ buf = bytearray(8) ++ self.assertRaises(ValueError, self.cli_conn.recvfrom_into, buf, 1024) ++ ++ def _testRecvFromIntoSmallBuffer(self): ++ with test_support.check_py3k_warnings(): ++ buf = buffer(MSG*2048) ++ self.serv_conn.send(buf) ++ + + TIPC_STYPE = 2000 + TIPC_LOWER = 200 +diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c +--- a/Modules/socketmodule.c ++++ b/Modules/socketmodule.c +@@ -2742,6 +2742,10 @@ sock_recvfrom_into(PySocketSockObject *s + if (recvlen == 0) { + /* If nbytes was not specified, use the buffer's length */ + recvlen = buflen; ++ } else if (recvlen > buflen) { ++ PyErr_SetString(PyExc_ValueError, ++ "nbytes is greater than the length of the buffer"); ++ goto error; + } + + readlen = sock_recvfrom_guts(s, buf.buf, recvlen, flags, &addr); + diff --git a/SOURCES/00194-gdb-dont-fail-on-frame-with-address.patch b/SOURCES/00194-gdb-dont-fail-on-frame-with-address.patch new file mode 100644 index 0000000..501b5db --- /dev/null +++ b/SOURCES/00194-gdb-dont-fail-on-frame-with-address.patch @@ -0,0 +1,19 @@ +--- Lib/test/test_gdb.py.orig 2014-08-01 14:30:43.397473152 +0200 ++++ Lib/test/test_gdb.py 2014-08-01 14:34:50.907325691 +0200 +@@ -135,6 +135,16 @@ + # Disable this: + 'set print entry-values no', + ++ # The tests assume that the first frame of printed ++ # backtrace will not contain program counter, ++ # that is however not guaranteed by gdb (rhbz#1125657) ++ # therefore we need to use 'set print address off' to ++ # make sure the counter is not there. For example: ++ # #0 in PyObject_Print ... ++ # is assumed, but sometimes this can be e.g. ++ # #0 0x00003fffb7dd1798 in PyObject_Print ... ++ 'set print address off', ++ + 'run'] + + if HAS_AUTOLOAD_SAFEPATH: diff --git a/SOURCES/00195-make-multiproc-ignore-EINTR.patch b/SOURCES/00195-make-multiproc-ignore-EINTR.patch new file mode 100644 index 0000000..050320a --- /dev/null +++ b/SOURCES/00195-make-multiproc-ignore-EINTR.patch @@ -0,0 +1,216 @@ + +# HG changeset patch +# User Richard Oudkerk +# Date 1372700728 -3600 +# Node ID bc34fe4a0d58a047509798acb0b4b2a21ce1e375 +# Parent 26ef5d5d5c3ea76ab411f2984d507aadce0ce8d7 +Issue #17097: Make multiprocessing ignore EINTR. 
+ +diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py +--- a/Lib/multiprocessing/connection.py ++++ b/Lib/multiprocessing/connection.py +@@ -270,7 +270,14 @@ class SocketListener(object): + self._unlink = None + + def accept(self): +- s, self._last_accepted = self._socket.accept() ++ while True: ++ try: ++ s, self._last_accepted = self._socket.accept() ++ except socket.error as e: ++ if e.args[0] != errno.EINTR: ++ raise ++ else: ++ break + s.setblocking(True) + fd = duplicate(s.fileno()) + conn = _multiprocessing.Connection(fd) +diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py +--- a/Lib/test/test_multiprocessing.py ++++ b/Lib/test/test_multiprocessing.py +@@ -2461,12 +2461,80 @@ class TestForkAwareThreadLock(unittest.T + self.assertLessEqual(new_size, old_size) + + # ++# Issue #17097: EINTR should be ignored by recv(), send(), accept() etc ++# ++ ++class TestIgnoreEINTR(unittest.TestCase): ++ ++ @classmethod ++ def _test_ignore(cls, conn): ++ def handler(signum, frame): ++ pass ++ signal.signal(signal.SIGUSR1, handler) ++ conn.send('ready') ++ x = conn.recv() ++ conn.send(x) ++ conn.send_bytes(b'x'*(1024*1024)) # sending 1 MB should block ++ ++ @unittest.skipUnless(hasattr(signal, 'SIGUSR1'), 'requires SIGUSR1') ++ def test_ignore(self): ++ conn, child_conn = multiprocessing.Pipe() ++ try: ++ p = multiprocessing.Process(target=self._test_ignore, ++ args=(child_conn,)) ++ p.daemon = True ++ p.start() ++ child_conn.close() ++ self.assertEqual(conn.recv(), 'ready') ++ time.sleep(0.1) ++ os.kill(p.pid, signal.SIGUSR1) ++ time.sleep(0.1) ++ conn.send(1234) ++ self.assertEqual(conn.recv(), 1234) ++ time.sleep(0.1) ++ os.kill(p.pid, signal.SIGUSR1) ++ self.assertEqual(conn.recv_bytes(), b'x'*(1024*1024)) ++ time.sleep(0.1) ++ p.join() ++ finally: ++ conn.close() ++ ++ @classmethod ++ def _test_ignore_listener(cls, conn): ++ def handler(signum, frame): ++ pass ++ signal.signal(signal.SIGUSR1, handler) ++ l = multiprocessing.connection.Listener() ++ conn.send(l.address) ++ a = l.accept() ++ a.send('welcome') ++ ++ @unittest.skipUnless(hasattr(signal, 'SIGUSR1'), 'requires SIGUSR1') ++ def test_ignore_listener(self): ++ conn, child_conn = multiprocessing.Pipe() ++ try: ++ p = multiprocessing.Process(target=self._test_ignore_listener, ++ args=(child_conn,)) ++ p.daemon = True ++ p.start() ++ child_conn.close() ++ address = conn.recv() ++ time.sleep(0.1) ++ os.kill(p.pid, signal.SIGUSR1) ++ time.sleep(0.1) ++ client = multiprocessing.connection.Client(address) ++ self.assertEqual(client.recv(), 'welcome') ++ p.join() ++ finally: ++ conn.close() ++ ++# + # + # + + testcases_other = [OtherTest, TestInvalidHandle, TestInitializers, + TestStdinBadfiledescriptor, TestTimeouts, TestNoForkBomb, +- TestFlags, TestForkAwareThreadLock] ++ TestFlags, TestForkAwareThreadLock, TestIgnoreEINTR] + + # + # +diff --git a/Modules/_multiprocessing/socket_connection.c b/Modules/_multiprocessing/socket_connection.c +--- a/Modules/_multiprocessing/socket_connection.c ++++ b/Modules/_multiprocessing/socket_connection.c +@@ -23,6 +23,21 @@ + #endif + + /* ++ * Wrapper for PyErr_CheckSignals() which can be called without the GIL ++ */ ++ ++static int ++check_signals(void) ++{ ++ PyGILState_STATE state; ++ int res; ++ state = PyGILState_Ensure(); ++ res = PyErr_CheckSignals(); ++ PyGILState_Release(state); ++ return res; ++} ++ ++/* + * Send string to file descriptor + */ + +@@ -34,8 +49,14 @@ static Py_ssize_t + + while (length > 0) { + res = WRITE(h, p, 
length); +- if (res < 0) ++ if (res < 0) { ++ if (errno == EINTR) { ++ if (check_signals() < 0) ++ return MP_EXCEPTION_HAS_BEEN_SET; ++ continue; ++ } + return MP_SOCKET_ERROR; ++ } + length -= res; + p += res; + } +@@ -56,12 +77,16 @@ static Py_ssize_t + + while (remaining > 0) { + temp = READ(h, p, remaining); +- if (temp <= 0) { +- if (temp == 0) +- return remaining == length ? +- MP_END_OF_FILE : MP_EARLY_END_OF_FILE; +- else +- return temp; ++ if (temp < 0) { ++ if (errno == EINTR) { ++ if (check_signals() < 0) ++ return MP_EXCEPTION_HAS_BEEN_SET; ++ continue; ++ } ++ return temp; ++ } ++ else if (temp == 0) { ++ return remaining == length ? MP_END_OF_FILE : MP_EARLY_END_OF_FILE; + } + remaining -= temp; + p += temp; +@@ -171,9 +196,16 @@ conn_poll(ConnectionObject *conn, double + p.revents = 0; + + if (timeout < 0) { +- res = poll(&p, 1, -1); ++ do { ++ res = poll(&p, 1, -1); ++ } while (res < 0 && errno == EINTR); + } else { + res = poll(&p, 1, (int)(timeout * 1000 + 0.5)); ++ if (res < 0 && errno == EINTR) { ++ /* We were interrupted by a signal. Just indicate a ++ timeout even though we are early. */ ++ return FALSE; ++ } + } + + if (res < 0) { +@@ -209,12 +241,19 @@ conn_poll(ConnectionObject *conn, double + FD_SET((SOCKET)conn->handle, &rfds); + + if (timeout < 0.0) { +- res = select((int)conn->handle+1, &rfds, NULL, NULL, NULL); ++ do { ++ res = select((int)conn->handle+1, &rfds, NULL, NULL, NULL); ++ } while (res < 0 && errno == EINTR); + } else { + struct timeval tv; + tv.tv_sec = (long)timeout; + tv.tv_usec = (long)((timeout - tv.tv_sec) * 1e6 + 0.5); + res = select((int)conn->handle+1, &rfds, NULL, NULL, &tv); ++ if (res < 0 && errno == EINTR) { ++ /* We were interrupted by a signal. Just indicate a ++ timeout even though we are early. */ ++ return FALSE; ++ } + } + + if (res < 0) { + diff --git a/SOURCES/00196-avoid-double-close-of-pipes.patch b/SOURCES/00196-avoid-double-close-of-pipes.patch new file mode 100644 index 0000000..bd53bbc --- /dev/null +++ b/SOURCES/00196-avoid-double-close-of-pipes.patch @@ -0,0 +1,288 @@ + +# HG changeset patch +# User Antoine Pitrou +# Date 1377898693 -7200 +# Node ID 43749cb6bdbd0fdab70f76cd171c3c02a3f600dd +# Parent ba54011aa295004ad87438211fe3bb1568dd69ab +Issue #18851: Avoid a double close of subprocess pipes when the child process fails starting. + +diff --git a/Lib/subprocess.py b/Lib/subprocess.py +--- a/Lib/subprocess.py ++++ b/Lib/subprocess.py +@@ -698,12 +698,12 @@ class Popen(object): + + (p2cread, p2cwrite, + c2pread, c2pwrite, +- errread, errwrite) = self._get_handles(stdin, stdout, stderr) ++ errread, errwrite), to_close = self._get_handles(stdin, stdout, stderr) + + try: + self._execute_child(args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, +- startupinfo, creationflags, shell, ++ startupinfo, creationflags, shell, to_close, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) +@@ -711,18 +711,12 @@ class Popen(object): + # Preserve original exception in case os.close raises. + exc_type, exc_value, exc_trace = sys.exc_info() + +- to_close = [] +- # Only close the pipes we created. 
+- if stdin == PIPE: +- to_close.extend((p2cread, p2cwrite)) +- if stdout == PIPE: +- to_close.extend((c2pread, c2pwrite)) +- if stderr == PIPE: +- to_close.extend((errread, errwrite)) +- + for fd in to_close: + try: +- os.close(fd) ++ if mswindows: ++ fd.Close() ++ else: ++ os.close(fd) + except EnvironmentError: + pass + +@@ -816,8 +810,9 @@ class Popen(object): + """Construct and return tuple with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ ++ to_close = set() + if stdin is None and stdout is None and stderr is None: +- return (None, None, None, None, None, None) ++ return (None, None, None, None, None, None), to_close + + p2cread, p2cwrite = None, None + c2pread, c2pwrite = None, None +@@ -835,6 +830,10 @@ class Popen(object): + # Assuming file-like object + p2cread = msvcrt.get_osfhandle(stdin.fileno()) + p2cread = self._make_inheritable(p2cread) ++ # We just duplicated the handle, it has to be closed at the end ++ to_close.add(p2cread) ++ if stdin == PIPE: ++ to_close.add(p2cwrite) + + if stdout is None: + c2pwrite = _subprocess.GetStdHandle(_subprocess.STD_OUTPUT_HANDLE) +@@ -848,6 +847,10 @@ class Popen(object): + # Assuming file-like object + c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) + c2pwrite = self._make_inheritable(c2pwrite) ++ # We just duplicated the handle, it has to be closed at the end ++ to_close.add(c2pwrite) ++ if stdout == PIPE: ++ to_close.add(c2pread) + + if stderr is None: + errwrite = _subprocess.GetStdHandle(_subprocess.STD_ERROR_HANDLE) +@@ -863,10 +866,14 @@ class Popen(object): + # Assuming file-like object + errwrite = msvcrt.get_osfhandle(stderr.fileno()) + errwrite = self._make_inheritable(errwrite) ++ # We just duplicated the handle, it has to be closed at the end ++ to_close.add(errwrite) ++ if stderr == PIPE: ++ to_close.add(errread) + + return (p2cread, p2cwrite, + c2pread, c2pwrite, +- errread, errwrite) ++ errread, errwrite), to_close + + + def _make_inheritable(self, handle): +@@ -895,7 +902,7 @@ class Popen(object): + + def _execute_child(self, args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, +- startupinfo, creationflags, shell, ++ startupinfo, creationflags, shell, to_close, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite): +@@ -934,6 +941,10 @@ class Popen(object): + # kill children. + creationflags |= _subprocess.CREATE_NEW_CONSOLE + ++ def _close_in_parent(fd): ++ fd.Close() ++ to_close.remove(fd) ++ + # Start the process + try: + hp, ht, pid, tid = _subprocess.CreateProcess(executable, args, +@@ -958,11 +969,11 @@ class Popen(object): + # pipe will not close when the child process exits and the + # ReadFile will hang. 
+ if p2cread is not None: +- p2cread.Close() ++ _close_in_parent(p2cread) + if c2pwrite is not None: +- c2pwrite.Close() ++ _close_in_parent(c2pwrite) + if errwrite is not None: +- errwrite.Close() ++ _close_in_parent(errwrite) + + # Retain the process handle, but close the thread handle + self._child_created = True +@@ -1088,6 +1099,7 @@ class Popen(object): + """Construct and return tuple with IO objects: + p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite + """ ++ to_close = set() + p2cread, p2cwrite = None, None + c2pread, c2pwrite = None, None + errread, errwrite = None, None +@@ -1096,6 +1108,7 @@ class Popen(object): + pass + elif stdin == PIPE: + p2cread, p2cwrite = self.pipe_cloexec() ++ to_close.update((p2cread, p2cwrite)) + elif isinstance(stdin, int): + p2cread = stdin + else: +@@ -1106,6 +1119,7 @@ class Popen(object): + pass + elif stdout == PIPE: + c2pread, c2pwrite = self.pipe_cloexec() ++ to_close.update((c2pread, c2pwrite)) + elif isinstance(stdout, int): + c2pwrite = stdout + else: +@@ -1116,6 +1130,7 @@ class Popen(object): + pass + elif stderr == PIPE: + errread, errwrite = self.pipe_cloexec() ++ to_close.update((errread, errwrite)) + elif stderr == STDOUT: + errwrite = c2pwrite + elif isinstance(stderr, int): +@@ -1126,7 +1141,7 @@ class Popen(object): + + return (p2cread, p2cwrite, + c2pread, c2pwrite, +- errread, errwrite) ++ errread, errwrite), to_close + + + def _set_cloexec_flag(self, fd, cloexec=True): +@@ -1170,7 +1185,7 @@ class Popen(object): + + def _execute_child(self, args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, +- startupinfo, creationflags, shell, ++ startupinfo, creationflags, shell, to_close, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite): +@@ -1189,6 +1204,10 @@ class Popen(object): + if executable is None: + executable = args[0] + ++ def _close_in_parent(fd): ++ os.close(fd) ++ to_close.remove(fd) ++ + # For transferring possible exec failure from child to parent + # The first char specifies the exception type: 0 means + # OSError, 1 means some other error. 
+@@ -1283,17 +1302,17 @@ class Popen(object): + # be sure the FD is closed no matter what + os.close(errpipe_write) + +- if p2cread is not None and p2cwrite is not None: +- os.close(p2cread) +- if c2pwrite is not None and c2pread is not None: +- os.close(c2pwrite) +- if errwrite is not None and errread is not None: +- os.close(errwrite) +- + # Wait for exec to fail or succeed; possibly raising exception + # Exception limited to 1M + data = _eintr_retry_call(os.read, errpipe_read, 1048576) + finally: ++ if p2cread is not None and p2cwrite is not None: ++ _close_in_parent(p2cread) ++ if c2pwrite is not None and c2pread is not None: ++ _close_in_parent(c2pwrite) ++ if errwrite is not None and errread is not None: ++ _close_in_parent(errwrite) ++ + # be sure the FD is closed no matter what + os.close(errpipe_read) + +diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py +--- a/Lib/test/test_subprocess.py ++++ b/Lib/test/test_subprocess.py +@@ -14,6 +14,10 @@ try: + import resource + except ImportError: + resource = None ++try: ++ import threading ++except ImportError: ++ threading = None + + mswindows = (sys.platform == "win32") + +@@ -629,6 +633,36 @@ class ProcessTestCase(BaseTestCase): + if c.exception.errno not in (errno.ENOENT, errno.EACCES): + raise c.exception + ++ @unittest.skipIf(threading is None, "threading required") ++ def test_double_close_on_error(self): ++ # Issue #18851 ++ fds = [] ++ def open_fds(): ++ for i in range(20): ++ fds.extend(os.pipe()) ++ time.sleep(0.001) ++ t = threading.Thread(target=open_fds) ++ t.start() ++ try: ++ with self.assertRaises(EnvironmentError): ++ subprocess.Popen(['nonexisting_i_hope'], ++ stdin=subprocess.PIPE, ++ stdout=subprocess.PIPE, ++ stderr=subprocess.PIPE) ++ finally: ++ t.join() ++ exc = None ++ for fd in fds: ++ # If a double close occurred, some of those fds will ++ # already have been closed by mistake, and os.close() ++ # here will raise. 
++ try: ++ os.close(fd) ++ except OSError as e: ++ exc = e ++ if exc is not None: ++ raise exc ++ + def test_handles_closed_on_exception(self): + # If CreateProcess exits with an error, ensure the + # duplicate output handles are released +@@ -783,7 +817,7 @@ class POSIXProcessTestCase(BaseTestCase) + + def _execute_child( + self, args, executable, preexec_fn, close_fds, cwd, env, +- universal_newlines, startupinfo, creationflags, shell, ++ universal_newlines, startupinfo, creationflags, shell, to_close, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite): +@@ -791,7 +825,7 @@ class POSIXProcessTestCase(BaseTestCase) + subprocess.Popen._execute_child( + self, args, executable, preexec_fn, close_fds, + cwd, env, universal_newlines, +- startupinfo, creationflags, shell, ++ startupinfo, creationflags, shell, to_close, + p2cread, p2cwrite, + c2pread, c2pwrite, + errread, errwrite) diff --git a/SOURCES/00197-add-missing-import-in-bdist_rpm.patch b/SOURCES/00197-add-missing-import-in-bdist_rpm.patch new file mode 100644 index 0000000..79ad5f4 --- /dev/null +++ b/SOURCES/00197-add-missing-import-in-bdist_rpm.patch @@ -0,0 +1,20 @@ + +# HG changeset patch +# User Éric Araujo +# Date 1394614885 14400 +# Node ID 677327810121891704491bafa6209af5b60ebc91 +# Parent 0f1237b61f58a77a159ab6e452782a8924ff2966 +Fix missing import in bdist_rpm (#18045) + +diff --git a/Lib/distutils/command/bdist_rpm.py b/Lib/distutils/command/bdist_rpm.py +--- a/Lib/distutils/command/bdist_rpm.py ++++ b/Lib/distutils/command/bdist_rpm.py +@@ -12,6 +12,7 @@ import string + from distutils.core import Command + from distutils.debug import DEBUG + from distutils.file_util import write_file ++from distutils.sysconfig import get_python_version + from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, + DistutilsFileError, DistutilsExecError) + from distutils import log + diff --git a/SOURCES/00198-fix-readline-erroneous-output.patch b/SOURCES/00198-fix-readline-erroneous-output.patch new file mode 100644 index 0000000..6322446 --- /dev/null +++ b/SOURCES/00198-fix-readline-erroneous-output.patch @@ -0,0 +1,166 @@ + +# HG changeset patch +# User Victor Stinner +# Date 1406197344 -7200 +# Node ID 0177d8a4e82a613de0c64e747656c1d0b63e49b3 +# Parent e70ab72286b470b7209b91d3aa8a21953aafb78f +Issue #19884: readline: Disable the meta modifier key if stdout is not a +terminal to not write the ANSI sequence "\033[1034h" into stdout. This sequence +is used on some terminal (ex: TERM=xterm-256color") to enable support of 8 bit +characters. + +diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py +--- a/Lib/test/test_readline.py ++++ b/Lib/test/test_readline.py +@@ -1,17 +1,19 @@ + """ + Very minimal unittests for parts of the readline module. +- +-These tests were added to check that the libedit emulation on OSX and +-the "real" readline have the same interface for history manipulation. That's +-why the tests cover only a small subset of the interface. + """ ++import os + import unittest + from test.test_support import run_unittest, import_module ++from test.script_helper import assert_python_ok + + # Skip tests if there is no readline module + readline = import_module('readline') + + class TestHistoryManipulation (unittest.TestCase): ++ """These tests were added to check that the libedit emulation on OSX and ++ the "real" readline have the same interface for history manipulation. ++ That's why the tests cover only a small subset of the interface. 
++ """ + + @unittest.skipIf(not hasattr(readline, 'clear_history'), + "The history update test cannot be run because the " +@@ -40,8 +42,18 @@ class TestHistoryManipulation (unittest. + self.assertEqual(readline.get_current_history_length(), 1) + + ++class TestReadline(unittest.TestCase): ++ def test_init(self): ++ # Issue #19884: Ensure that the ANSI sequence "\033[1034h" is not ++ # written into stdout when the readline module is imported and stdout ++ # is redirected to a pipe. ++ rc, stdout, stderr = assert_python_ok('-c', 'import readline', ++ TERM='xterm-256color') ++ self.assertEqual(stdout, b'') ++ ++ + def test_main(): +- run_unittest(TestHistoryManipulation) ++ run_unittest(TestHistoryManipulation, TestReadline) + + if __name__ == "__main__": + test_main() +diff --git a/Modules/readline.c b/Modules/readline.c +--- a/Modules/readline.c ++++ b/Modules/readline.c +@@ -887,7 +887,7 @@ setup_readline(void) + #endif + + #ifdef __APPLE__ +- /* the libedit readline emulation resets key bindings etc ++ /* the libedit readline emulation resets key bindings etc + * when calling rl_initialize. So call it upfront + */ + if (using_libedit_emulation) +@@ -932,6 +932,17 @@ setup_readline(void) + + begidx = PyInt_FromLong(0L); + endidx = PyInt_FromLong(0L); ++ ++ if (!isatty(STDOUT_FILENO)) { ++ /* Issue #19884: stdout is no a terminal. Disable meta modifier ++ keys to not write the ANSI sequence "\033[1034h" into stdout. On ++ terminals supporting 8 bit characters like TERM=xterm-256color ++ (which is now the default Fedora since Fedora 18), the meta key is ++ used to enable support of 8 bit characters (ANSI sequence ++ "\033[1034h"). */ ++ rl_variable_bind ("enable-meta-key", "off"); ++ } ++ + /* Initialize (allows .inputrc to override) + * + * XXX: A bug in the readline-2.2 library causes a memory leak +@@ -943,7 +954,7 @@ setup_readline(void) + else + #endif /* __APPLE__ */ + rl_initialize(); +- ++ + RESTORE_LOCALE(saved_locale) + } + + + +# HG changeset patch +# User Victor Stinner +# Date 1406232681 -7200 +# Node ID f0ab6f9f06036dfacff09f22f86464840b50eb0a +# Parent d422062d7d366386acdb81851b0f2ec3a6f6750c +Issue #19884, readline: calling rl_variable_bind ("enable-meta-key", "off") +does crash on Mac OS X which uses libedit instead of readline. + +diff --git a/Modules/readline.c b/Modules/readline.c +--- a/Modules/readline.c ++++ b/Modules/readline.c +@@ -933,15 +933,19 @@ setup_readline(void) + begidx = PyInt_FromLong(0L); + endidx = PyInt_FromLong(0L); + ++#ifndef __APPLE__ + if (!isatty(STDOUT_FILENO)) { + /* Issue #19884: stdout is no a terminal. Disable meta modifier + keys to not write the ANSI sequence "\033[1034h" into stdout. On + terminals supporting 8 bit characters like TERM=xterm-256color + (which is now the default Fedora since Fedora 18), the meta key is + used to enable support of 8 bit characters (ANSI sequence +- "\033[1034h"). */ ++ "\033[1034h"). ++ ++ With libedit, this call makes readline() crash. */ + rl_variable_bind ("enable-meta-key", "off"); + } ++#endif + + /* Initialize (allows .inputrc to override) + * + + +# HG changeset patch +# User Antoine Pitrou +# Date 1415109130 -3600 +# Node ID eba6e68e818c694e499dfc4b22dde095d2557ab1 +# Parent e54d0b197c8245bd29ea09f421e2f1da47370f41 +Issue #22773: fix failing test with old readline versions due to issue #19884. + +diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py +--- a/Lib/test/test_readline.py ++++ b/Lib/test/test_readline.py +@@ -43,6 +43,10 @@ class TestHistoryManipulation (unittest. 
+ + + class TestReadline(unittest.TestCase): ++ ++ @unittest.skipIf(readline._READLINE_VERSION < 0x0600 ++ and "libedit" not in readline.__doc__, ++ "not supported in this library version") + def test_init(self): + # Issue #19884: Ensure that the ANSI sequence "\033[1034h" is not + # written into stdout when the readline module is imported and stdout +diff --git a/Modules/readline.c b/Modules/readline.c +--- a/Modules/readline.c ++++ b/Modules/readline.c +@@ -1184,4 +1184,7 @@ initreadline(void) + + PyOS_ReadlineFunctionPointer = call_readline; + setup_readline(); ++ ++ PyModule_AddIntConstant(m, "_READLINE_VERSION", RL_READLINE_VERSION); ++ PyModule_AddIntConstant(m, "_READLINE_RUNTIME_VERSION", rl_readline_version); + } + diff --git a/SOURCES/00199-CVE-2013-1753.patch b/SOURCES/00199-CVE-2013-1753.patch new file mode 100644 index 0000000..a838c1f --- /dev/null +++ b/SOURCES/00199-CVE-2013-1753.patch @@ -0,0 +1,88 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1417828515 18000 +# Node ID d50096708b2d701937e78f525446d729fc28db88 +# Parent 923aac88a3cc76a95d5a04d9d3ece245147a8064 +add a default limit for the amount of data xmlrpclib.gzip_decode will return (closes #16043) + +diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py +--- a/Lib/test/test_xmlrpc.py ++++ b/Lib/test/test_xmlrpc.py +@@ -737,7 +737,7 @@ class GzipServerTestCase(BaseServerTestC + with cm: + p.pow(6, 8) + +- def test_gsip_response(self): ++ def test_gzip_response(self): + t = self.Transport() + p = xmlrpclib.ServerProxy(URL, transport=t) + old = self.requestHandler.encode_threshold +@@ -750,6 +750,23 @@ class GzipServerTestCase(BaseServerTestC + self.requestHandler.encode_threshold = old + self.assertTrue(a>b) + ++ def test_gzip_decode_limit(self): ++ max_gzip_decode = 20 * 1024 * 1024 ++ data = '\0' * max_gzip_decode ++ encoded = xmlrpclib.gzip_encode(data) ++ decoded = xmlrpclib.gzip_decode(encoded) ++ self.assertEqual(len(decoded), max_gzip_decode) ++ ++ data = '\0' * (max_gzip_decode + 1) ++ encoded = xmlrpclib.gzip_encode(data) ++ ++ with self.assertRaisesRegexp(ValueError, ++ "max gzipped payload length exceeded"): ++ xmlrpclib.gzip_decode(encoded) ++ ++ xmlrpclib.gzip_decode(encoded, max_decode=-1) ++ ++ + #Test special attributes of the ServerProxy object + class ServerProxyTestCase(unittest.TestCase): + def setUp(self): +diff --git a/Lib/xmlrpclib.py b/Lib/xmlrpclib.py +--- a/Lib/xmlrpclib.py ++++ b/Lib/xmlrpclib.py +@@ -49,6 +49,7 @@ + # 2003-07-12 gp Correct marshalling of Faults + # 2003-10-31 mvl Add multicall support + # 2004-08-20 mvl Bump minimum supported Python version to 2.1 ++# 2014-12-02 ch/doko Add workaround for gzip bomb vulnerability + # + # Copyright (c) 1999-2002 by Secret Labs AB. + # Copyright (c) 1999-2002 by Fredrik Lundh. +@@ -1165,10 +1166,13 @@ def gzip_encode(data): + # in the HTTP header, as described in RFC 1952 + # + # @param data The encoded data ++# @keyparam max_decode Maximum bytes to decode (20MB default), use negative ++# values for unlimited decoding + # @return the unencoded data + # @raises ValueError if data is not correctly coded. 
++# @raises ValueError if max gzipped payload length exceeded + +-def gzip_decode(data): ++def gzip_decode(data, max_decode=20971520): + """gzip encoded data -> unencoded data + + Decode data using the gzip content encoding as described in RFC 1952 +@@ -1178,11 +1182,16 @@ def gzip_decode(data): + f = StringIO.StringIO(data) + gzf = gzip.GzipFile(mode="rb", fileobj=f) + try: +- decoded = gzf.read() ++ if max_decode < 0: # no limit ++ decoded = gzf.read() ++ else: ++ decoded = gzf.read(max_decode + 1) + except IOError: + raise ValueError("invalid data") + f.close() + gzf.close() ++ if max_decode >= 0 and len(decoded) > max_decode: ++ raise ValueError("max gzipped payload length exceeded") + return decoded + + ## diff --git a/SOURCES/00200-CVE-2014-4616.patch b/SOURCES/00200-CVE-2014-4616.patch new file mode 100644 index 0000000..c60831e --- /dev/null +++ b/SOURCES/00200-CVE-2014-4616.patch @@ -0,0 +1,52 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1397441438 14400 +# Node ID 50c07ed1743da9cd4540d83de0c30bd17aeb41b0 +# Parent 218e28a935ab4494d05215c243e2129625a71893 +in scan_once, prevent the reading of arbitrary memory when passed a negative index + +Bug reported by Guido Vranken. + +diff --git a/Lib/json/tests/test_decode.py b/Lib/json/tests/test_decode.py +--- a/Lib/json/tests/test_decode.py ++++ b/Lib/json/tests/test_decode.py +@@ -60,5 +60,10 @@ class TestDecode(object): + msg = 'escape' + self.assertRaisesRegexp(ValueError, msg, self.loads, s) + ++ def test_negative_index(self): ++ d = self.json.JSONDecoder() ++ self.assertRaises(ValueError, d.raw_decode, 'a'*42, -50000) ++ self.assertRaises(ValueError, d.raw_decode, u'a'*42, -50000) ++ + class TestPyDecode(TestDecode, PyTest): pass + class TestCDecode(TestDecode, CTest): pass +diff --git a/Modules/_json.c b/Modules/_json.c +--- a/Modules/_json.c ++++ b/Modules/_json.c +@@ -1468,7 +1468,10 @@ scan_once_str(PyScannerObject *s, PyObje + PyObject *res; + char *str = PyString_AS_STRING(pystr); + Py_ssize_t length = PyString_GET_SIZE(pystr); +- if (idx >= length) { ++ if (idx < 0) ++ /* Compatibility with the Python version. */ ++ idx += length; ++ if (idx < 0 || idx >= length) { + PyErr_SetNone(PyExc_StopIteration); + return NULL; + } +@@ -1555,7 +1558,10 @@ scan_once_unicode(PyScannerObject *s, Py + PyObject *res; + Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr); + Py_ssize_t length = PyUnicode_GET_SIZE(pystr); +- if (idx >= length) { ++ if (idx < 0) ++ /* Compatibility with Python version. */ ++ idx += length; ++ if (idx < 0 || idx >= length) { + PyErr_SetNone(PyExc_StopIteration); + return NULL; + } + diff --git a/SOURCES/00201-CVE-2014-4650.patch b/SOURCES/00201-CVE-2014-4650.patch new file mode 100644 index 0000000..031c859 --- /dev/null +++ b/SOURCES/00201-CVE-2014-4650.patch @@ -0,0 +1,35 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1402796189 25200 +# Node ID b4bab078876811c7d95231d08aa6fa7142fdda66 +# Parent bb8b0c7fefd0c5ed99b3f336178a4f9554a1d0ef +url unquote the path before checking if it refers to a CGI script (closes #21766) + +diff --git a/Lib/CGIHTTPServer.py b/Lib/CGIHTTPServer.py +--- a/Lib/CGIHTTPServer.py ++++ b/Lib/CGIHTTPServer.py +@@ -84,7 +84,7 @@ class CGIHTTPRequestHandler(SimpleHTTPSe + path begins with one of the strings in self.cgi_directories + (and the next character is a '/' or the end of the string). 
+ """ +- collapsed_path = _url_collapse_path(self.path) ++ collapsed_path = _url_collapse_path(urllib.unquote(self.path)) + dir_sep = collapsed_path.find('/', 1) + head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] + if head in self.cgi_directories: +diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py +--- a/Lib/test/test_httpservers.py ++++ b/Lib/test/test_httpservers.py +@@ -510,6 +510,11 @@ class CGIHTTPServerTestCase(BaseTestCase + (res.read(), res.getheader('Content-type'), res.status)) + self.assertEqual(os.environ['SERVER_SOFTWARE'], signature) + ++ def test_urlquote_decoding_in_cgi_check(self): ++ res = self.request('/cgi-bin%2ffile1.py') ++ self.assertEqual((b'Hello World\n', 'text/html', 200), ++ (res.read(), res.getheader('Content-type'), res.status)) ++ + + class SimpleHTTPRequestHandlerTestCase(unittest.TestCase): + """ Test url parsing """ diff --git a/SOURCES/00202-CVE-2014-7185.patch b/SOURCES/00202-CVE-2014-7185.patch new file mode 100644 index 0000000..8ddb798 --- /dev/null +++ b/SOURCES/00202-CVE-2014-7185.patch @@ -0,0 +1,51 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1403579547 25200 +# Node ID 8d963c7db507be561e26bbbb852e3a2be3327c3f +# Parent 8e0b7393e921fb5e05c40265f9272dec90512ef6 +avoid overflow with large buffer sizes and/or offsets (closes #21831) + +diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py +--- a/Lib/test/test_buffer.py ++++ b/Lib/test/test_buffer.py +@@ -4,6 +4,7 @@ For now, tests just new or changed funct + + """ + ++import sys + import unittest + from test import test_support + +@@ -29,6 +30,11 @@ class BufferTests(unittest.TestCase): + m = memoryview(b) # Should not raise an exception + self.assertEqual(m.tobytes(), s) + ++ def test_large_buffer_size_and_offset(self): ++ data = bytearray('hola mundo') ++ buf = buffer(data, sys.maxsize, sys.maxsize) ++ self.assertEqual(buf[:4096], "") ++ + + def test_main(): + with test_support.check_py3k_warnings(("buffer.. not supported", +diff --git a/Objects/bufferobject.c b/Objects/bufferobject.c +--- a/Objects/bufferobject.c ++++ b/Objects/bufferobject.c +@@ -88,7 +88,7 @@ get_buf(PyBufferObject *self, void **ptr + *size = count; + else + *size = self->b_size; +- if (offset + *size > count) ++ if (*size > count - offset) + *size = count - offset; + } + return 1; +@@ -875,4 +875,4 @@ PyTypeObject PyBuffer_Type = { + 0, /* tp_init */ + 0, /* tp_alloc */ + buffer_new, /* tp_new */ +-}; +\ No newline at end of file ++}; + diff --git a/SOURCES/00203-CVE-2013-1752-nntplib.patch b/SOURCES/00203-CVE-2013-1752-nntplib.patch new file mode 100644 index 0000000..37c6f12 --- /dev/null +++ b/SOURCES/00203-CVE-2013-1752-nntplib.patch @@ -0,0 +1,108 @@ + +# HG changeset patch +# User Barry Warsaw +# Date 1380582569 14400 +# Node ID 36680a7c0e22686df9c338a9ca3cdb2c60e05b27 +# Parent 0f5611bca5a284c0b5f978e83a05818f0907bda8# Parent 731abf7834c43efb321231e65e7dd76ad9e8e661 +- Issue #16040: CVE-2013-1752: nntplib: Limit maximum line lengths to 2048 to + prevent readline() calls from consuming too much memory. Patch by Jyrki + Pulliainen. + +diff --git a/Lib/nntplib.py b/Lib/nntplib.py +--- a/Lib/nntplib.py ++++ b/Lib/nntplib.py +@@ -37,6 +37,13 @@ import socket + "error_reply","error_temp","error_perm","error_proto", + "error_data",] + ++# maximal line length when calling readline(). This is to prevent ++# reading arbitrary lenght lines. RFC 3977 limits NNTP line length to ++# 512 characters, including CRLF. We have selected 2048 just to be on ++# the safe side. 
++_MAXLINE = 2048 ++ ++ + # Exceptions raised when an error or invalid response is received + class NNTPError(Exception): + """Base class for all nntplib exceptions""" +@@ -200,7 +207,9 @@ class NNTP: + def getline(self): + """Internal: return one line from the server, stripping CRLF. + Raise EOFError if the connection is closed.""" +- line = self.file.readline() ++ line = self.file.readline(_MAXLINE + 1) ++ if len(line) > _MAXLINE: ++ raise NNTPDataError('line too long') + if self.debugging > 1: + print '*get*', repr(line) + if not line: raise EOFError +diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py +new file mode 100644 +--- /dev/null ++++ b/Lib/test/test_nntplib.py +@@ -0,0 +1,65 @@ ++import socket ++import threading ++import nntplib ++import time ++ ++from unittest import TestCase ++from test import test_support ++ ++HOST = test_support.HOST ++ ++ ++def server(evt, serv, evil=False): ++ serv.listen(5) ++ try: ++ conn, addr = serv.accept() ++ except socket.timeout: ++ pass ++ else: ++ if evil: ++ conn.send("1 I'm too long response" * 3000 + "\n") ++ else: ++ conn.send("1 I'm OK response\n") ++ conn.close() ++ finally: ++ serv.close() ++ evt.set() ++ ++ ++class BaseServerTest(TestCase): ++ def setUp(self): ++ self.evt = threading.Event() ++ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) ++ self.sock.settimeout(3) ++ self.port = test_support.bind_port(self.sock) ++ threading.Thread( ++ target=server, ++ args=(self.evt, self.sock, self.evil)).start() ++ time.sleep(.1) ++ ++ def tearDown(self): ++ self.evt.wait() ++ ++ ++class ServerTests(BaseServerTest): ++ evil = False ++ ++ def test_basic_connect(self): ++ nntp = nntplib.NNTP('localhost', self.port) ++ nntp.sock.close() ++ ++ ++class EvilServerTests(BaseServerTest): ++ evil = True ++ ++ def test_too_long_line(self): ++ self.assertRaises(nntplib.NNTPDataError, ++ nntplib.NNTP, 'localhost', self.port) ++ ++ ++def test_main(verbose=None): ++ test_support.run_unittest(EvilServerTests) ++ test_support.run_unittest(ServerTests) ++ ++if __name__ == '__main__': ++ test_main() diff --git a/SOURCES/00204-CVE-2013-1752-ftplib.patch b/SOURCES/00204-CVE-2013-1752-ftplib.patch new file mode 100644 index 0000000..97c890e --- /dev/null +++ b/SOURCES/00204-CVE-2013-1752-ftplib.patch @@ -0,0 +1,149 @@ + +# HG changeset patch +# User Serhiy Storchaka +# Date 1382277427 -10800 +# Node ID 44ac81e6d584758ee56a865a7c18d82505be0643 +# Parent 625ece68d79a27d376889579c414ed4b2d8a2649 +Issue #16038: CVE-2013-1752: ftplib: Limit amount of data read by +limiting the call to readline(). Original patch by Michał +Jastrzębski and Giampaolo Rodola. + +diff --git a/Lib/ftplib.py b/Lib/ftplib.py +--- a/Lib/ftplib.py ++++ b/Lib/ftplib.py +@@ -55,6 +55,8 @@ MSG_OOB = 0x1 + + # The standard FTP server control port + FTP_PORT = 21 ++# The sizehint parameter passed to readline() calls ++MAXLINE = 8192 + + + # Exception raised when an error or invalid response is received +@@ -101,6 +103,7 @@ class FTP: + debugging = 0 + host = '' + port = FTP_PORT ++ maxline = MAXLINE + sock = None + file = None + welcome = None +@@ -180,7 +183,9 @@ class FTP: + # Internal: return one line from the server, stripping CRLF. 
+ # Raise EOFError if the connection is closed + def getline(self): +- line = self.file.readline() ++ line = self.file.readline(self.maxline + 1) ++ if len(line) > self.maxline: ++ raise Error("got more than %d bytes" % self.maxline) + if self.debugging > 1: + print '*get*', self.sanitize(line) + if not line: raise EOFError +@@ -432,7 +437,9 @@ class FTP: + conn = self.transfercmd(cmd) + fp = conn.makefile('rb') + while 1: +- line = fp.readline() ++ line = fp.readline(self.maxline + 1) ++ if len(line) > self.maxline: ++ raise Error("got more than %d bytes" % self.maxline) + if self.debugging > 2: print '*retr*', repr(line) + if not line: + break +@@ -485,7 +492,9 @@ class FTP: + self.voidcmd('TYPE A') + conn = self.transfercmd(cmd) + while 1: +- buf = fp.readline() ++ buf = fp.readline(self.maxline + 1) ++ if len(buf) > self.maxline: ++ raise Error("got more than %d bytes" % self.maxline) + if not buf: break + if buf[-2:] != CRLF: + if buf[-1] in CRLF: buf = buf[:-1] +@@ -710,7 +719,9 @@ else: + fp = conn.makefile('rb') + try: + while 1: +- line = fp.readline() ++ line = fp.readline(self.maxline + 1) ++ if len(line) > self.maxline: ++ raise Error("got more than %d bytes" % self.maxline) + if self.debugging > 2: print '*retr*', repr(line) + if not line: + break +@@ -748,7 +759,9 @@ else: + conn = self.transfercmd(cmd) + try: + while 1: +- buf = fp.readline() ++ buf = fp.readline(self.maxline + 1) ++ if len(buf) > self.maxline: ++ raise Error("got more than %d bytes" % self.maxline) + if not buf: break + if buf[-2:] != CRLF: + if buf[-1] in CRLF: buf = buf[:-1] +@@ -905,7 +918,9 @@ class Netrc: + fp = open(filename, "r") + in_macro = 0 + while 1: +- line = fp.readline() ++ line = fp.readline(self.maxline + 1) ++ if len(line) > self.maxline: ++ raise Error("got more than %d bytes" % self.maxline) + if not line: break + if in_macro and line.strip(): + macro_lines.append(line) +diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py +--- a/Lib/test/test_ftplib.py ++++ b/Lib/test/test_ftplib.py +@@ -65,6 +65,7 @@ class DummyFTPHandler(asynchat.async_cha + self.last_received_data = '' + self.next_response = '' + self.rest = None ++ self.next_retr_data = RETR_DATA + self.push('220 welcome') + + def collect_incoming_data(self, data): +@@ -189,7 +190,7 @@ class DummyFTPHandler(asynchat.async_cha + offset = int(self.rest) + else: + offset = 0 +- self.dtp.push(RETR_DATA[offset:]) ++ self.dtp.push(self.next_retr_data[offset:]) + self.dtp.close_when_done() + self.rest = None + +@@ -203,6 +204,11 @@ class DummyFTPHandler(asynchat.async_cha + self.dtp.push(NLST_DATA) + self.dtp.close_when_done() + ++ def cmd_setlongretr(self, arg): ++ # For testing. Next RETR will return long line. 
++ self.next_retr_data = 'x' * int(arg) ++ self.push('125 setlongretr ok') ++ + + class DummyFTPServer(asyncore.dispatcher, threading.Thread): + +@@ -558,6 +564,20 @@ class TestFTPClass(TestCase): + # IPv4 is in use, just make sure send_epsv has not been used + self.assertEqual(self.server.handler.last_received_cmd, 'pasv') + ++ def test_line_too_long(self): ++ self.assertRaises(ftplib.Error, self.client.sendcmd, ++ 'x' * self.client.maxline * 2) ++ ++ def test_retrlines_too_long(self): ++ self.client.sendcmd('SETLONGRETR %d' % (self.client.maxline * 2)) ++ received = [] ++ self.assertRaises(ftplib.Error, ++ self.client.retrlines, 'retr', received.append) ++ ++ def test_storlines_too_long(self): ++ f = StringIO.StringIO('x' * self.client.maxline * 2) ++ self.assertRaises(ftplib.Error, self.client.storlines, 'stor', f) ++ + + class TestIPv6Environment(TestCase): + diff --git a/SOURCES/00205-CVE-2013-1752-httplib-headers.patch b/SOURCES/00205-CVE-2013-1752-httplib-headers.patch new file mode 100644 index 0000000..b6e0147 --- /dev/null +++ b/SOURCES/00205-CVE-2013-1752-httplib-headers.patch @@ -0,0 +1,51 @@ + +# HG changeset patch +# User Berker Peksag +# Date 1407212157 -10800 +# Node ID 5e310c6a8520603bca8bc4b40eaf4f074db47c0d +# Parent 46c7a724b487295257423a69478392cb01ce74e6 +Issue #16037: HTTPMessage.readheaders() raises an HTTPException when more +than 100 headers are read. + +Patch by Jyrki Pulliainen and Daniel Eriksson. + +diff --git a/Lib/httplib.py b/Lib/httplib.py +--- a/Lib/httplib.py ++++ b/Lib/httplib.py +@@ -215,6 +215,10 @@ MAXAMOUNT = 1048576 + # maximal line length when calling readline(). + _MAXLINE = 65536 + ++# maximum amount of headers accepted ++_MAXHEADERS = 100 ++ ++ + class HTTPMessage(mimetools.Message): + + def addheader(self, key, value): +@@ -271,6 +275,8 @@ class HTTPMessage(mimetools.Message): + elif self.seekable: + tell = self.fp.tell + while True: ++ if len(hlist) > _MAXHEADERS: ++ raise HTTPException("got more than %d headers" % _MAXHEADERS) + if tell: + try: + startofline = tell() +diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py +--- a/Lib/test/test_httplib.py ++++ b/Lib/test/test_httplib.py +@@ -262,6 +262,13 @@ class BasicTest(TestCase): + if resp.read() != "": + self.fail("Did not expect response from HEAD request") + ++ def test_too_many_headers(self): ++ headers = '\r\n'.join('Header%d: foo' % i for i in xrange(200)) + '\r\n' ++ text = ('HTTP/1.1 200 OK\r\n' + headers) ++ s = FakeSocket(text) ++ r = httplib.HTTPResponse(s) ++ self.assertRaises(httplib.HTTPException, r.begin) ++ + def test_send_file(self): + expected = 'GET /foo HTTP/1.1\r\nHost: example.com\r\n' \ + 'Accept-Encoding: identity\r\nContent-Length:' diff --git a/SOURCES/00206-CVE-2013-1752-poplib.patch b/SOURCES/00206-CVE-2013-1752-poplib.patch new file mode 100644 index 0000000..f20c111 --- /dev/null +++ b/SOURCES/00206-CVE-2013-1752-poplib.patch @@ -0,0 +1,60 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1417827758 18000 +# Node ID 339f877cca115c1901f5dd93d7bc066031d2a669 +# Parent 54af094087953f4997a4ead63e949d845c4b4412 +in poplib, limit maximum line length that we read from the network (closes #16041) + +Patch from Berker Peksag. + +diff --git a/Lib/poplib.py b/Lib/poplib.py +--- a/Lib/poplib.py ++++ b/Lib/poplib.py +@@ -32,6 +32,12 @@ CR = '\r' + LF = '\n' + CRLF = CR+LF + ++# maximal line length when calling readline(). This is to prevent ++# reading arbitrary length lines. RFC 1939 limits POP3 line length to ++# 512 characters, including CRLF. 
We have selected 2048 just to be on ++# the safe side. ++_MAXLINE = 2048 ++ + + class POP3: + +@@ -103,7 +109,9 @@ class POP3: + # Raise error_proto('-ERR EOF') if the connection is closed. + + def _getline(self): +- line = self.file.readline() ++ line = self.file.readline(_MAXLINE + 1) ++ if len(line) > _MAXLINE: ++ raise error_proto('line too long') + if self._debugging > 1: print '*get*', repr(line) + if not line: raise error_proto('-ERR EOF') + octets = len(line) +@@ -365,6 +373,8 @@ else: + match = renewline.match(self.buffer) + while not match: + self._fillBuffer() ++ if len(self.buffer) > _MAXLINE: ++ raise error_proto('line too long') + match = renewline.match(self.buffer) + line = match.group(0) + self.buffer = renewline.sub('' ,self.buffer, 1) +diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py +--- a/Lib/test/test_poplib.py ++++ b/Lib/test/test_poplib.py +@@ -198,6 +198,10 @@ class TestPOP3Class(TestCase): + 113) + self.assertEqual(self.client.retr('foo'), expected) + ++ def test_too_long_lines(self): ++ self.assertRaises(poplib.error_proto, self.client._shortcmd, ++ 'echo +%s' % ((poplib._MAXLINE + 10) * 'a')) ++ + def test_dele(self): + self.assertOK(self.client.dele('foo')) + diff --git a/SOURCES/00207-CVE-2013-1752-smtplib.patch b/SOURCES/00207-CVE-2013-1752-smtplib.patch new file mode 100644 index 0000000..11b0819 --- /dev/null +++ b/SOURCES/00207-CVE-2013-1752-smtplib.patch @@ -0,0 +1,100 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1417827918 18000 +# Node ID 923aac88a3cc76a95d5a04d9d3ece245147a8064 +# Parent 339f877cca115c1901f5dd93d7bc066031d2a669 +smtplib: limit amount read from the network (closes #16042) + +diff --git a/Lib/smtplib.py b/Lib/smtplib.py +--- a/Lib/smtplib.py ++++ b/Lib/smtplib.py +@@ -57,6 +57,7 @@ from sys import stderr + SMTP_PORT = 25 + SMTP_SSL_PORT = 465 + CRLF = "\r\n" ++_MAXLINE = 8192 # more than 8 times larger than RFC 821, 4.5.3 + + OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I) + +@@ -179,10 +180,14 @@ else: + def __init__(self, sslobj): + self.sslobj = sslobj + +- def readline(self): ++ def readline(self, size=-1): ++ if size < 0: ++ size = None + str = "" + chr = None + while chr != "\n": ++ if size is not None and len(str) >= size: ++ break + chr = self.sslobj.read(1) + if not chr: + break +@@ -353,7 +358,7 @@ class SMTP: + self.file = self.sock.makefile('rb') + while 1: + try: +- line = self.file.readline() ++ line = self.file.readline(_MAXLINE + 1) + except socket.error as e: + self.close() + raise SMTPServerDisconnected("Connection unexpectedly closed: " +@@ -363,6 +368,8 @@ class SMTP: + raise SMTPServerDisconnected("Connection unexpectedly closed") + if self.debuglevel > 0: + print>>stderr, 'reply:', repr(line) ++ if len(line) > _MAXLINE: ++ raise SMTPResponseException(500, "Line too long.") + resp.append(line[4:].strip()) + code = line[:3] + # Check that the error code is syntactically correct. +diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py +--- a/Lib/test/test_smtplib.py ++++ b/Lib/test/test_smtplib.py +@@ -292,6 +292,33 @@ class BadHELOServerTests(unittest.TestCa + HOST, self.port, 'localhost', 3) + + ++@unittest.skipUnless(threading, 'Threading required for this test.') ++class TooLongLineTests(unittest.TestCase): ++ respdata = '250 OK' + ('.' 
* smtplib._MAXLINE * 2) + '\n' ++ ++ def setUp(self): ++ self.old_stdout = sys.stdout ++ self.output = StringIO.StringIO() ++ sys.stdout = self.output ++ ++ self.evt = threading.Event() ++ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) ++ self.sock.settimeout(15) ++ self.port = test_support.bind_port(self.sock) ++ servargs = (self.evt, self.respdata, self.sock) ++ threading.Thread(target=server, args=servargs).start() ++ self.evt.wait() ++ self.evt.clear() ++ ++ def tearDown(self): ++ self.evt.wait() ++ sys.stdout = self.old_stdout ++ ++ def testLineTooLong(self): ++ self.assertRaises(smtplib.SMTPResponseException, smtplib.SMTP, ++ HOST, self.port, 'localhost', 3) ++ ++ + sim_users = {'Mr.A@somewhere.com':'John A', + 'Ms.B@somewhere.com':'Sally B', + 'Mrs.C@somewhereesle.com':'Ruth C', +@@ -526,7 +553,8 @@ class SMTPSimTests(unittest.TestCase): + def test_main(verbose=None): + test_support.run_unittest(GeneralTests, DebuggingServerTests, + NonConnectingTests, +- BadHELOServerTests, SMTPSimTests) ++ BadHELOServerTests, SMTPSimTests, ++ TooLongLineTests) + + if __name__ == '__main__': + test_main() diff --git a/SOURCES/00208-CVE-2013-1752-imaplib.patch b/SOURCES/00208-CVE-2013-1752-imaplib.patch new file mode 100644 index 0000000..91395d6 --- /dev/null +++ b/SOURCES/00208-CVE-2013-1752-imaplib.patch @@ -0,0 +1,59 @@ + +# HG changeset patch +# User R David Murray +# Date 1388775562 18000 +# Node ID dd906f4ab9237020a7a275c2d361fa288e553481 +# Parent 69b5f692455306c98aa27ecea17e6290787ebd3f +closes 16039: CVE-2013-1752: limit line length in imaplib readline calls. + +diff --git a/Lib/imaplib.py b/Lib/imaplib.py +--- a/Lib/imaplib.py ++++ b/Lib/imaplib.py +@@ -35,6 +35,15 @@ IMAP4_PORT = 143 + IMAP4_SSL_PORT = 993 + AllowedVersions = ('IMAP4REV1', 'IMAP4') # Most recent first + ++# Maximal line length when calling readline(). This is to prevent ++# reading arbitrary length lines. RFC 3501 and 2060 (IMAP 4rev1) ++# don't specify a line length. RFC 2683 however suggests limiting client ++# command lines to 1000 octets and server command lines to 8000 octets. ++# We have selected 10000 for some extra margin and since that is supposedly ++# also what UW and Panda IMAP does. 
++_MAXLINE = 10000 ++ ++ + # Commands + + Commands = { +@@ -237,7 +246,10 @@ class IMAP4: + + def readline(self): + """Read line from remote.""" +- return self.file.readline() ++ line = self.file.readline(_MAXLINE + 1) ++ if len(line) > _MAXLINE: ++ raise self.error("got more than %d bytes" % _MAXLINE) ++ return line + + + def send(self, data): +diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py +--- a/Lib/test/test_imaplib.py ++++ b/Lib/test/test_imaplib.py +@@ -165,6 +165,16 @@ class BaseThreadedNetworkedTests(unittes + self.imap_class, *server.server_address) + + ++ def test_linetoolong(self): ++ class TooLongHandler(SimpleIMAPHandler): ++ def handle(self): ++ # Send a very long response line ++ self.wfile.write('* OK ' + imaplib._MAXLINE*'x' + '\r\n') ++ ++ with self.reaped_server(TooLongHandler) as server: ++ self.assertRaises(imaplib.IMAP4.error, ++ self.imap_class, *server.server_address) ++ + class ThreadedNetworkedTests(BaseThreadedNetworkedTests): + + server_class = SocketServer.TCPServer diff --git a/SOURCES/00209-pep466-backport-hmac.compare_digest.patch b/SOURCES/00209-pep466-backport-hmac.compare_digest.patch new file mode 100644 index 0000000..cca8afe --- /dev/null +++ b/SOURCES/00209-pep466-backport-hmac.compare_digest.patch @@ -0,0 +1,353 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1399849904 25200 +# Node ID b40f1a00b13460cc089450028280c4e52dd24a64 +# Parent 951775c68b1b7782750c213b0fce1f61d46b2f51 +backport hmac.compare_digest to partially implement PEP 466 (closes #21306) + +Backport from Alex Gaynor. + +diff --git a/Doc/library/hmac.rst b/Doc/library/hmac.rst +--- a/Doc/library/hmac.rst ++++ b/Doc/library/hmac.rst +@@ -38,6 +38,13 @@ An HMAC object has the following methods + This string will be the same length as the *digest_size* of the digest given to + the constructor. It may contain non-ASCII characters, including NUL bytes. + ++ .. warning:: ++ ++ When comparing the output of :meth:`digest` to an externally-supplied ++ digest during a verification routine, it is recommended to use the ++ :func:`compare_digest` function instead of the ``==`` operator ++ to reduce the vulnerability to timing attacks. ++ + + .. method:: HMAC.hexdigest() + +@@ -45,6 +52,13 @@ An HMAC object has the following methods + containing only hexadecimal digits. This may be used to exchange the value + safely in email or other non-binary environments. + ++ .. warning:: ++ ++ When comparing the output of :meth:`hexdigest` to an externally-supplied ++ digest during a verification routine, it is recommended to use the ++ :func:`compare_digest` function instead of the ``==`` operator ++ to reduce the vulnerability to timing attacks. ++ + + .. method:: HMAC.copy() + +@@ -52,6 +66,25 @@ An HMAC object has the following methods + compute the digests of strings that share a common initial substring. + + ++This module also provides the following helper function: ++ ++.. function:: compare_digest(a, b) ++ ++ Return ``a == b``. This function uses an approach designed to prevent ++ timing analysis by avoiding content-based short circuiting behaviour, ++ making it appropriate for cryptography. *a* and *b* must both be of the ++ same type: either :class:`unicode` or a :term:`bytes-like object`. ++ ++ .. note:: ++ ++ If *a* and *b* are of different lengths, or if an error occurs, ++ a timing attack could theoretically reveal information about the ++ types and lengths of *a* and *b*--but not their values. ++ ++ ++ .. versionadded:: 2.7.7 ++ ++ + .. 
seealso:: + + Module :mod:`hashlib` +diff --git a/Lib/hmac.py b/Lib/hmac.py +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -5,6 +5,9 @@ Implements the HMAC algorithm as describ + + import warnings as _warnings + ++from operator import _compare_digest as compare_digest ++ ++ + trans_5C = "".join ([chr (x ^ 0x5C) for x in xrange(256)]) + trans_36 = "".join ([chr (x ^ 0x36) for x in xrange(256)]) + +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -302,12 +302,122 @@ class CopyTestCase(unittest.TestCase): + self.assertTrue(h1.hexdigest() == h2.hexdigest(), + "Hexdigest of copy doesn't match original hexdigest.") + ++ ++class CompareDigestTestCase(unittest.TestCase): ++ ++ def test_compare_digest(self): ++ # Testing input type exception handling ++ a, b = 100, 200 ++ self.assertRaises(TypeError, hmac.compare_digest, a, b) ++ a, b = 100, b"foobar" ++ self.assertRaises(TypeError, hmac.compare_digest, a, b) ++ a, b = b"foobar", 200 ++ self.assertRaises(TypeError, hmac.compare_digest, a, b) ++ a, b = u"foobar", b"foobar" ++ self.assertRaises(TypeError, hmac.compare_digest, a, b) ++ a, b = b"foobar", u"foobar" ++ self.assertRaises(TypeError, hmac.compare_digest, a, b) ++ ++ # Testing bytes of different lengths ++ a, b = b"foobar", b"foo" ++ self.assertFalse(hmac.compare_digest(a, b)) ++ a, b = b"\xde\xad\xbe\xef", b"\xde\xad" ++ self.assertFalse(hmac.compare_digest(a, b)) ++ ++ # Testing bytes of same lengths, different values ++ a, b = b"foobar", b"foobaz" ++ self.assertFalse(hmac.compare_digest(a, b)) ++ a, b = b"\xde\xad\xbe\xef", b"\xab\xad\x1d\xea" ++ self.assertFalse(hmac.compare_digest(a, b)) ++ ++ # Testing bytes of same lengths, same values ++ a, b = b"foobar", b"foobar" ++ self.assertTrue(hmac.compare_digest(a, b)) ++ a, b = b"\xde\xad\xbe\xef", b"\xde\xad\xbe\xef" ++ self.assertTrue(hmac.compare_digest(a, b)) ++ ++ # Testing bytearrays of same lengths, same values ++ a, b = bytearray(b"foobar"), bytearray(b"foobar") ++ self.assertTrue(hmac.compare_digest(a, b)) ++ ++ # Testing bytearrays of diffeent lengths ++ a, b = bytearray(b"foobar"), bytearray(b"foo") ++ self.assertFalse(hmac.compare_digest(a, b)) ++ ++ # Testing bytearrays of same lengths, different values ++ a, b = bytearray(b"foobar"), bytearray(b"foobaz") ++ self.assertFalse(hmac.compare_digest(a, b)) ++ ++ # Testing byte and bytearray of same lengths, same values ++ a, b = bytearray(b"foobar"), b"foobar" ++ self.assertTrue(hmac.compare_digest(a, b)) ++ self.assertTrue(hmac.compare_digest(b, a)) ++ ++ # Testing byte bytearray of diffeent lengths ++ a, b = bytearray(b"foobar"), b"foo" ++ self.assertFalse(hmac.compare_digest(a, b)) ++ self.assertFalse(hmac.compare_digest(b, a)) ++ ++ # Testing byte and bytearray of same lengths, different values ++ a, b = bytearray(b"foobar"), b"foobaz" ++ self.assertFalse(hmac.compare_digest(a, b)) ++ self.assertFalse(hmac.compare_digest(b, a)) ++ ++ # Testing str of same lengths ++ a, b = "foobar", "foobar" ++ self.assertTrue(hmac.compare_digest(a, b)) ++ ++ # Testing str of diffeent lengths ++ a, b = "foo", "foobar" ++ self.assertFalse(hmac.compare_digest(a, b)) ++ ++ # Testing bytes of same lengths, different values ++ a, b = "foobar", "foobaz" ++ self.assertFalse(hmac.compare_digest(a, b)) ++ ++ # Testing error cases ++ a, b = u"foobar", b"foobar" ++ self.assertRaises(TypeError, hmac.compare_digest, a, b) ++ a, b = b"foobar", u"foobar" ++ self.assertRaises(TypeError, hmac.compare_digest, a, b) ++ a, b = b"foobar", 1 ++ 
self.assertRaises(TypeError, hmac.compare_digest, a, b) ++ a, b = 100, 200 ++ self.assertRaises(TypeError, hmac.compare_digest, a, b) ++ a, b = "fooä", "fooä" ++ self.assertTrue(hmac.compare_digest(a, b)) ++ ++ # subclasses are supported by ignore __eq__ ++ class mystr(str): ++ def __eq__(self, other): ++ return False ++ ++ a, b = mystr("foobar"), mystr("foobar") ++ self.assertTrue(hmac.compare_digest(a, b)) ++ a, b = mystr("foobar"), "foobar" ++ self.assertTrue(hmac.compare_digest(a, b)) ++ a, b = mystr("foobar"), mystr("foobaz") ++ self.assertFalse(hmac.compare_digest(a, b)) ++ ++ class mybytes(bytes): ++ def __eq__(self, other): ++ return False ++ ++ a, b = mybytes(b"foobar"), mybytes(b"foobar") ++ self.assertTrue(hmac.compare_digest(a, b)) ++ a, b = mybytes(b"foobar"), b"foobar" ++ self.assertTrue(hmac.compare_digest(a, b)) ++ a, b = mybytes(b"foobar"), mybytes(b"foobaz") ++ self.assertFalse(hmac.compare_digest(a, b)) ++ ++ + def test_main(): + test_support.run_unittest( + TestVectorsTestCase, + ConstructorTestCase, + SanityTestCase, +- CopyTestCase ++ CopyTestCase, ++ CompareDigestTestCase, + ) + + if __name__ == "__main__": +diff --git a/Modules/operator.c b/Modules/operator.c +--- a/Modules/operator.c ++++ b/Modules/operator.c +@@ -235,6 +235,132 @@ op_delslice(PyObject *s, PyObject *a) + #define spam2o(OP,ALTOP,DOC) {#OP, op_##OP, METH_O, PyDoc_STR(DOC)}, \ + {#ALTOP, op_##OP, METH_O, PyDoc_STR(DOC)}, + ++ ++ ++/* compare_digest **********************************************************/ ++ ++/* ++ * timing safe compare ++ * ++ * Returns 1 of the strings are equal. ++ * In case of len(a) != len(b) the function tries to keep the timing ++ * dependent on the length of b. CPU cache locally may still alter timing ++ * a bit. ++ */ ++static int ++_tscmp(const unsigned char *a, const unsigned char *b, ++ Py_ssize_t len_a, Py_ssize_t len_b) ++{ ++ /* The volatile type declarations make sure that the compiler has no ++ * chance to optimize and fold the code in any way that may change ++ * the timing. ++ */ ++ volatile Py_ssize_t length; ++ volatile const unsigned char *left; ++ volatile const unsigned char *right; ++ Py_ssize_t i; ++ unsigned char result; ++ ++ /* loop count depends on length of b */ ++ length = len_b; ++ left = NULL; ++ right = b; ++ ++ /* don't use else here to keep the amount of CPU instructions constant, ++ * volatile forces re-evaluation ++ * */ ++ if (len_a == length) { ++ left = *((volatile const unsigned char**)&a); ++ result = 0; ++ } ++ if (len_a != length) { ++ left = b; ++ result = 1; ++ } ++ ++ for (i=0; i < length; i++) { ++ result |= *left++ ^ *right++; ++ } ++ ++ return (result == 0); ++} ++ ++PyDoc_STRVAR(compare_digest__doc__, ++"compare_digest(a, b) -> bool\n" ++"\n" ++"Return 'a == b'. This function uses an approach designed to prevent\n" ++"timing analysis, making it appropriate for cryptography.\n" ++"a and b must both be of the same type: either str (ASCII only),\n" ++"or any type that supports the buffer protocol (e.g. 
bytes).\n" ++"\n" ++"Note: If a and b are of different lengths, or if an error occurs,\n" ++"a timing attack could theoretically reveal information about the\n" ++"types and lengths of a and b--but not their values.\n"); ++ ++static PyObject* ++compare_digest(PyObject *self, PyObject *args) ++{ ++ PyObject *a, *b; ++ int rc; ++ ++ if (!PyArg_ParseTuple(args, "OO:compare_digest", &a, &b)) { ++ return NULL; ++ } ++ ++ /* Unicode string */ ++ if (PyUnicode_Check(a) && PyUnicode_Check(b)) { ++ rc = _tscmp(PyUnicode_AS_DATA(a), ++ PyUnicode_AS_DATA(b), ++ PyUnicode_GET_DATA_SIZE(a), ++ PyUnicode_GET_DATA_SIZE(b)); ++ } ++ /* fallback to buffer interface for bytes, bytesarray and other */ ++ else { ++ Py_buffer view_a; ++ Py_buffer view_b; ++ ++ if ((PyObject_CheckBuffer(a) == 0) & (PyObject_CheckBuffer(b) == 0)) { ++ PyErr_Format(PyExc_TypeError, ++ "unsupported operand types(s) or combination of types: " ++ "'%.100s' and '%.100s'", ++ Py_TYPE(a)->tp_name, Py_TYPE(b)->tp_name); ++ return NULL; ++ } ++ ++ if (PyObject_GetBuffer(a, &view_a, PyBUF_SIMPLE) == -1) { ++ return NULL; ++ } ++ if (view_a.ndim > 1) { ++ PyErr_SetString(PyExc_BufferError, ++ "Buffer must be single dimension"); ++ PyBuffer_Release(&view_a); ++ return NULL; ++ } ++ ++ if (PyObject_GetBuffer(b, &view_b, PyBUF_SIMPLE) == -1) { ++ PyBuffer_Release(&view_a); ++ return NULL; ++ } ++ if (view_b.ndim > 1) { ++ PyErr_SetString(PyExc_BufferError, ++ "Buffer must be single dimension"); ++ PyBuffer_Release(&view_a); ++ PyBuffer_Release(&view_b); ++ return NULL; ++ } ++ ++ rc = _tscmp((const unsigned char*)view_a.buf, ++ (const unsigned char*)view_b.buf, ++ view_a.len, ++ view_b.len); ++ ++ PyBuffer_Release(&view_a); ++ PyBuffer_Release(&view_b); ++ } ++ ++ return PyBool_FromLong(rc); ++} ++ + static struct PyMethodDef operator_methods[] = { + + spam1o(isCallable, +@@ -318,6 +444,8 @@ spam2(ne,__ne__, "ne(a, b) -- Same as a! + spam2(gt,__gt__, "gt(a, b) -- Same as a>b.") + spam2(ge,__ge__, "ge(a, b) -- Same as a>=b.") + ++ {"_compare_digest", (PyCFunction)compare_digest, METH_VARARGS, ++ compare_digest__doc__}, + {NULL, NULL} /* sentinel */ + + }; + diff --git a/SOURCES/00210-pep466-backport-hashlib.pbkdf2_hmac.patch b/SOURCES/00210-pep466-backport-hashlib.pbkdf2_hmac.patch new file mode 100644 index 0000000..1ef8934 --- /dev/null +++ b/SOURCES/00210-pep466-backport-hashlib.pbkdf2_hmac.patch @@ -0,0 +1,489 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1401567982 25200 +# Node ID e4da3ba9dcac4374ca0ccc46a48c32be6f951038 +# Parent 8fa8c290c165dccd613632b69a816623b51e801e +backport hashlib.pbkdf2_hmac per PEP 466 (closes #21304) + +Backport by Alex Gaynor. + +diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst +--- a/Doc/library/hashlib.rst ++++ b/Doc/library/hashlib.rst +@@ -135,6 +135,46 @@ A hash object has the following methods: + compute the digests of strings that share a common initial substring. + + ++Key Derivation Function ++----------------------- ++ ++Key derivation and key stretching algorithms are designed for secure password ++hashing. Naive algorithms such as ``sha1(password)`` are not resistant against ++brute-force attacks. A good password hashing function must be tunable, slow, and ++include a `salt `_. ++ ++ ++.. function:: pbkdf2_hmac(name, password, salt, rounds, dklen=None) ++ ++ The function provides PKCS#5 password-based key derivation function 2. It ++ uses HMAC as pseudorandom function. ++ ++ The string *name* is the desired name of the hash digest algorithm for ++ HMAC, e.g. 
'sha1' or 'sha256'. *password* and *salt* are interpreted as ++ buffers of bytes. Applications and libraries should limit *password* to ++ a sensible value (e.g. 1024). *salt* should be about 16 or more bytes from ++ a proper source, e.g. :func:`os.urandom`. ++ ++ The number of *rounds* should be chosen based on the hash algorithm and ++ computing power. As of 2013, at least 100,000 rounds of SHA-256 is suggested. ++ ++ *dklen* is the length of the derived key. If *dklen* is ``None`` then the ++ digest size of the hash algorithm *name* is used, e.g. 64 for SHA-512. ++ ++ >>> import hashlib, binascii ++ >>> dk = hashlib.pbkdf2_hmac('sha256', b'password', b'salt', 100000) ++ >>> binascii.hexlify(dk) ++ b'0394a2ede332c9a13eb82e9b24631604c31df978b4e2f0fbd2c549944f9d79a5' ++ ++ .. versionadded:: 2.7.8 ++ ++ .. note:: ++ ++ A fast implementation of *pbkdf2_hmac* is available with OpenSSL. The ++ Python implementation uses an inline version of :mod:`hmac`. It is about ++ three times slower and doesn't release the GIL. ++ ++ + .. seealso:: + + Module :mod:`hmac` +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -77,7 +77,7 @@ __always_supported = ('md5', 'sha1', 'sh + + algorithms = __always_supported + +-__all__ = __always_supported + ('new', 'algorithms') ++__all__ = __always_supported + ('new', 'algorithms', 'pbkdf2_hmac') + + + def __get_openssl_constructor(name): +@@ -123,6 +123,72 @@ for __func_name in __always_supported: + import logging + logging.exception('code for hash %s was not found.', __func_name) + ++try: ++ # OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA ++ from _hashlib import pbkdf2_hmac ++except ImportError: ++ import binascii ++ import struct ++ ++ _trans_5C = b"".join(chr(x ^ 0x5C) for x in range(256)) ++ _trans_36 = b"".join(chr(x ^ 0x36) for x in range(256)) ++ ++ def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None): ++ """Password based key derivation function 2 (PKCS #5 v2.0) ++ ++ This Python implementations based on the hmac module about as fast ++ as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much faster ++ for long passwords. ++ """ ++ if not isinstance(hash_name, str): ++ raise TypeError(hash_name) ++ ++ if not isinstance(password, (bytes, bytearray)): ++ password = bytes(buffer(password)) ++ if not isinstance(salt, (bytes, bytearray)): ++ salt = bytes(buffer(salt)) ++ ++ # Fast inline HMAC implementation ++ inner = new(hash_name) ++ outer = new(hash_name) ++ blocksize = getattr(inner, 'block_size', 64) ++ if len(password) > blocksize: ++ password = new(hash_name, password).digest() ++ password = password + b'\x00' * (blocksize - len(password)) ++ inner.update(password.translate(_trans_36)) ++ outer.update(password.translate(_trans_5C)) ++ ++ def prf(msg, inner=inner, outer=outer): ++ # PBKDF2_HMAC uses the password as key. We can re-use the same ++ # digest objects and and just update copies to skip initialization. 
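Editor's note: a short sketch of the password-hashing pattern the documentation above describes, on a patched build that also provides hmac.compare_digest (salt size and iteration count follow the guidance in the docs; tune the count to your own hardware):

    import hashlib
    import hmac
    import os

    salt = os.urandom(16)
    rounds = 100000

    stored_key = hashlib.pbkdf2_hmac('sha256', b'correct horse', salt, rounds)

    def check_password(candidate):
        derived = hashlib.pbkdf2_hmac('sha256', candidate, salt, rounds)
        # compare derived keys in constant time
        return hmac.compare_digest(stored_key, derived)

    assert check_password(b'correct horse')
    assert not check_password(b'battery staple')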
++ icpy = inner.copy() ++ ocpy = outer.copy() ++ icpy.update(msg) ++ ocpy.update(icpy.digest()) ++ return ocpy.digest() ++ ++ if iterations < 1: ++ raise ValueError(iterations) ++ if dklen is None: ++ dklen = outer.digest_size ++ if dklen < 1: ++ raise ValueError(dklen) ++ ++ hex_format_string = "%%0%ix" % (new(hash_name).digest_size * 2) ++ ++ dkey = b'' ++ loop = 1 ++ while len(dkey) < dklen: ++ prev = prf(salt + struct.pack(b'>I', loop)) ++ rkey = int(binascii.hexlify(prev), 16) ++ for i in xrange(iterations - 1): ++ prev = prf(prev) ++ rkey ^= int(binascii.hexlify(prev), 16) ++ loop += 1 ++ dkey += binascii.unhexlify(hex_format_string % rkey) ++ ++ return dkey[:dklen] ++ + # Cleanup locals() + del __always_supported, __func_name, __get_hash + del __hash_new, __get_openssl_constructor +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -16,6 +16,8 @@ except ImportError: + threading = None + import unittest + import warnings ++from binascii import unhexlify ++ + from test import test_support + from test.test_support import _4G, precisionbigmemtest + +@@ -436,8 +438,72 @@ class HashLibTestCase(unittest.TestCase) + + + ++class KDFTests(unittest.TestCase): ++ pbkdf2_test_vectors = [ ++ (b'password', b'salt', 1, None), ++ (b'password', b'salt', 2, None), ++ (b'password', b'salt', 4096, None), ++ # too slow, it takes over a minute on a fast CPU. ++ #(b'password', b'salt', 16777216, None), ++ (b'passwordPASSWORDpassword', b'saltSALTsaltSALTsaltSALTsaltSALTsalt', ++ 4096, -1), ++ (b'pass\0word', b'sa\0lt', 4096, 16), ++ ] ++ ++ pbkdf2_results = { ++ "sha1": [ ++ # offical test vectors from RFC 6070 ++ (unhexlify('0c60c80f961f0e71f3a9b524af6012062fe037a6'), None), ++ (unhexlify('ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957'), None), ++ (unhexlify('4b007901b765489abead49d926f721d065a429c1'), None), ++ #(unhexlify('eefe3d61cd4da4e4e9945b3d6ba2158c2634e984'), None), ++ (unhexlify('3d2eec4fe41c849b80c8d83662c0e44a8b291a964c' ++ 'f2f07038'), 25), ++ (unhexlify('56fa6aa75548099dcc37d7f03425e0c3'), None),], ++ "sha256": [ ++ (unhexlify('120fb6cffcf8b32c43e7225256c4f837' ++ 'a86548c92ccc35480805987cb70be17b'), None), ++ (unhexlify('ae4d0c95af6b46d32d0adff928f06dd0' ++ '2a303f8ef3c251dfd6e2d85a95474c43'), None), ++ (unhexlify('c5e478d59288c841aa530db6845c4c8d' ++ '962893a001ce4e11a4963873aa98134a'), None), ++ #(unhexlify('cf81c66fe8cfc04d1f31ecb65dab4089' ++ # 'f7f179e89b3b0bcb17ad10e3ac6eba46'), None), ++ (unhexlify('348c89dbcbd32b2f32d814b8116e84cf2b17' ++ '347ebc1800181c4e2a1fb8dd53e1c635518c7dac47e9'), 40), ++ (unhexlify('89b69d0516f829893c696226650a8687'), None),], ++ "sha512": [ ++ (unhexlify('867f70cf1ade02cff3752599a3a53dc4af34c7a669815ae5' ++ 'd513554e1c8cf252c02d470a285a0501bad999bfe943c08f' ++ '050235d7d68b1da55e63f73b60a57fce'), None), ++ (unhexlify('e1d9c16aa681708a45f5c7c4e215ceb66e011a2e9f004071' ++ '3f18aefdb866d53cf76cab2868a39b9f7840edce4fef5a82' ++ 'be67335c77a6068e04112754f27ccf4e'), None), ++ (unhexlify('d197b1b33db0143e018b12f3d1d1479e6cdebdcc97c5c0f8' ++ '7f6902e072f457b5143f30602641b3d55cd335988cb36b84' ++ '376060ecd532e039b742a239434af2d5'), None), ++ (unhexlify('8c0511f4c6e597c6ac6315d8f0362e225f3c501495ba23b8' ++ '68c005174dc4ee71115b59f9e60cd9532fa33e0f75aefe30' ++ '225c583a186cd82bd4daea9724a3d3b8'), 64), ++ (unhexlify('9d9e9c4cd21fe4be24d5b8244c759665'), None),], ++ } ++ ++ def test_pbkdf2_hmac(self): ++ for digest_name, results in self.pbkdf2_results.items(): ++ for i, vector in 
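Editor's note: the pure-Python fallback above follows RFC 2898 but XORs the iteration results as big integers (via hexlify) because that is far cheaper in pure Python than a byte-by-byte loop. For clarity, a byte-level rendering of one output block, assuming a prf(msg) helper like the inner/outer construction defined above (illustrative only, not part of the patch):

    import struct

    def pbkdf2_block(prf, salt, iterations, block_index):
        # T_i = U_1 xor U_2 xor ... xor U_c, where
        # U_1 = PRF(salt || INT_32_BE(i)) and U_j = PRF(U_{j-1});
        # the password is already folded into prf().
        u = prf(salt + struct.pack('>I', block_index))
        t = bytearray(u)
        for _ in xrange(iterations - 1):
            u = prf(u)
            for k, byte in enumerate(bytearray(u)):
                t[k] ^= byte
        return bytes(t)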
enumerate(self.pbkdf2_test_vectors): ++ password, salt, rounds, dklen = vector ++ expected, overwrite_dklen = results[i] ++ if overwrite_dklen: ++ dklen = overwrite_dklen ++ out = hashlib.pbkdf2_hmac( ++ digest_name, password, salt, rounds, dklen) ++ self.assertEqual(out, expected, ++ (digest_name, password, salt, rounds, dklen)) ++ ++ ++ + def test_main(): +- test_support.run_unittest(HashLibTestCase) ++ test_support.run_unittest(HashLibTestCase, KDFTests) + + if __name__ == "__main__": + test_main() +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -39,6 +39,7 @@ + #include + #include + #include ++#include + + #define MUNCH_SIZE INT_MAX + +@@ -563,6 +564,226 @@ EVP_new(PyObject *self, PyObject *args, + return ret_obj; + } + ++ ++ ++#if (OPENSSL_VERSION_NUMBER >= 0x10000000 && !defined(OPENSSL_NO_HMAC) \ ++ && !defined(OPENSSL_NO_SHA)) ++ ++#define PY_PBKDF2_HMAC 1 ++ ++/* Improved implementation of PKCS5_PBKDF2_HMAC() ++ * ++ * PKCS5_PBKDF2_HMAC_fast() hashes the password exactly one time instead of ++ * `iter` times. Today (2013) the iteration count is typically 100,000 or ++ * more. The improved algorithm is not subject to a Denial-of-Service ++ * vulnerability with overly large passwords. ++ * ++ * Also OpenSSL < 1.0 don't provide PKCS5_PBKDF2_HMAC(), only ++ * PKCS5_PBKDF2_SHA1. ++ */ ++static int ++PKCS5_PBKDF2_HMAC_fast(const char *pass, int passlen, ++ const unsigned char *salt, int saltlen, ++ int iter, const EVP_MD *digest, ++ int keylen, unsigned char *out) ++{ ++ unsigned char digtmp[EVP_MAX_MD_SIZE], *p, itmp[4]; ++ int cplen, j, k, tkeylen, mdlen; ++ unsigned long i = 1; ++ HMAC_CTX hctx_tpl, hctx; ++ ++ mdlen = EVP_MD_size(digest); ++ if (mdlen < 0) ++ return 0; ++ ++ HMAC_CTX_init(&hctx_tpl); ++ HMAC_CTX_init(&hctx); ++ p = out; ++ tkeylen = keylen; ++ if (!HMAC_Init_ex(&hctx_tpl, pass, passlen, digest, NULL)) { ++ HMAC_CTX_cleanup(&hctx_tpl); ++ return 0; ++ } ++ while(tkeylen) { ++ if(tkeylen > mdlen) ++ cplen = mdlen; ++ else ++ cplen = tkeylen; ++ /* We are unlikely to ever use more than 256 blocks (5120 bits!) ++ * but just in case... 
++ */ ++ itmp[0] = (unsigned char)((i >> 24) & 0xff); ++ itmp[1] = (unsigned char)((i >> 16) & 0xff); ++ itmp[2] = (unsigned char)((i >> 8) & 0xff); ++ itmp[3] = (unsigned char)(i & 0xff); ++ if (!HMAC_CTX_copy(&hctx, &hctx_tpl)) { ++ HMAC_CTX_cleanup(&hctx_tpl); ++ return 0; ++ } ++ if (!HMAC_Update(&hctx, salt, saltlen) ++ || !HMAC_Update(&hctx, itmp, 4) ++ || !HMAC_Final(&hctx, digtmp, NULL)) { ++ HMAC_CTX_cleanup(&hctx_tpl); ++ HMAC_CTX_cleanup(&hctx); ++ return 0; ++ } ++ HMAC_CTX_cleanup(&hctx); ++ memcpy(p, digtmp, cplen); ++ for (j = 1; j < iter; j++) { ++ if (!HMAC_CTX_copy(&hctx, &hctx_tpl)) { ++ HMAC_CTX_cleanup(&hctx_tpl); ++ return 0; ++ } ++ if (!HMAC_Update(&hctx, digtmp, mdlen) ++ || !HMAC_Final(&hctx, digtmp, NULL)) { ++ HMAC_CTX_cleanup(&hctx_tpl); ++ HMAC_CTX_cleanup(&hctx); ++ return 0; ++ } ++ HMAC_CTX_cleanup(&hctx); ++ for (k = 0; k < cplen; k++) { ++ p[k] ^= digtmp[k]; ++ } ++ } ++ tkeylen-= cplen; ++ i++; ++ p+= cplen; ++ } ++ HMAC_CTX_cleanup(&hctx_tpl); ++ return 1; ++} ++ ++/* LCOV_EXCL_START */ ++static PyObject * ++_setException(PyObject *exc) ++{ ++ unsigned long errcode; ++ const char *lib, *func, *reason; ++ ++ errcode = ERR_peek_last_error(); ++ if (!errcode) { ++ PyErr_SetString(exc, "unknown reasons"); ++ return NULL; ++ } ++ ERR_clear_error(); ++ ++ lib = ERR_lib_error_string(errcode); ++ func = ERR_func_error_string(errcode); ++ reason = ERR_reason_error_string(errcode); ++ ++ if (lib && func) { ++ PyErr_Format(exc, "[%s: %s] %s", lib, func, reason); ++ } ++ else if (lib) { ++ PyErr_Format(exc, "[%s] %s", lib, reason); ++ } ++ else { ++ PyErr_SetString(exc, reason); ++ } ++ return NULL; ++} ++/* LCOV_EXCL_STOP */ ++ ++PyDoc_STRVAR(pbkdf2_hmac__doc__, ++"pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None) -> key\n\ ++\n\ ++Password based key derivation function 2 (PKCS #5 v2.0) with HMAC as\n\ ++pseudorandom function."); ++ ++static PyObject * ++pbkdf2_hmac(PyObject *self, PyObject *args, PyObject *kwdict) ++{ ++ static char *kwlist[] = {"hash_name", "password", "salt", "iterations", ++ "dklen", NULL}; ++ PyObject *key_obj = NULL, *dklen_obj = Py_None; ++ char *name, *key; ++ Py_buffer password, salt; ++ long iterations, dklen; ++ int retval; ++ const EVP_MD *digest; ++ ++ if (!PyArg_ParseTupleAndKeywords(args, kwdict, "ss*s*l|O:pbkdf2_hmac", ++ kwlist, &name, &password, &salt, ++ &iterations, &dklen_obj)) { ++ return NULL; ++ } ++ ++ digest = EVP_get_digestbyname(name); ++ if (digest == NULL) { ++ PyErr_SetString(PyExc_ValueError, "unsupported hash type"); ++ goto end; ++ } ++ ++ if (password.len > INT_MAX) { ++ PyErr_SetString(PyExc_OverflowError, ++ "password is too long."); ++ goto end; ++ } ++ ++ if (salt.len > INT_MAX) { ++ PyErr_SetString(PyExc_OverflowError, ++ "salt is too long."); ++ goto end; ++ } ++ ++ if (iterations < 1) { ++ PyErr_SetString(PyExc_ValueError, ++ "iteration value must be greater than 0."); ++ goto end; ++ } ++ if (iterations > INT_MAX) { ++ PyErr_SetString(PyExc_OverflowError, ++ "iteration value is too great."); ++ goto end; ++ } ++ ++ if (dklen_obj == Py_None) { ++ dklen = EVP_MD_size(digest); ++ } else { ++ dklen = PyLong_AsLong(dklen_obj); ++ if ((dklen == -1) && PyErr_Occurred()) { ++ goto end; ++ } ++ } ++ if (dklen < 1) { ++ PyErr_SetString(PyExc_ValueError, ++ "key length must be greater than 0."); ++ goto end; ++ } ++ if (dklen > INT_MAX) { ++ /* INT_MAX is always smaller than dkLen max (2^32 - 1) * hLen */ ++ PyErr_SetString(PyExc_OverflowError, ++ "key length is too great."); ++ goto end; ++ } ++ ++ 
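Editor's note: the argument checks above translate into ordinary Python exceptions; a quick illustration (exception types only; the exact messages in the C code apply to the OpenSSL-backed build, the pure-Python fallback raises the same types):

    import hashlib

    hashlib.pbkdf2_hmac('sha256', b'pw', b'salt', 1)         # smallest legal call
    try:
        hashlib.pbkdf2_hmac('sha256', b'pw', b'salt', 0)     # iterations must be >= 1
    except ValueError:
        pass
    try:
        hashlib.pbkdf2_hmac('sha256', b'pw', b'salt', 1, 0)  # dklen must be >= 1
    except ValueError:
        pass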
key_obj = PyBytes_FromStringAndSize(NULL, dklen); ++ if (key_obj == NULL) { ++ goto end; ++ } ++ key = PyBytes_AS_STRING(key_obj); ++ ++ Py_BEGIN_ALLOW_THREADS ++ retval = PKCS5_PBKDF2_HMAC_fast((char*)password.buf, (int)password.len, ++ (unsigned char *)salt.buf, (int)salt.len, ++ iterations, digest, dklen, ++ (unsigned char *)key); ++ Py_END_ALLOW_THREADS ++ ++ if (!retval) { ++ Py_CLEAR(key_obj); ++ _setException(PyExc_ValueError); ++ goto end; ++ } ++ ++ end: ++ PyBuffer_Release(&password); ++ PyBuffer_Release(&salt); ++ return key_obj; ++} ++ ++#endif ++ + /* + * This macro and function generates a family of constructor function + * definitions for specific hash algorithms. These constructors are much +@@ -690,6 +911,10 @@ static struct PyMethodDef EVP_functions[ + CONSTRUCTOR_METH_DEF(sha384), + CONSTRUCTOR_METH_DEF(sha512), + #endif ++#ifdef PY_PBKDF2_HMAC ++ {"pbkdf2_hmac", (PyCFunction)pbkdf2_hmac, METH_VARARGS|METH_KEYWORDS, ++ pbkdf2_hmac__doc__}, ++#endif + {NULL, NULL} /* Sentinel */ + }; + +diff -up Python-2.7.5/Lib/test/test_hmac.py.cod Python-2.7.5/Lib/test/test_hmac.py +--- Python-2.7.5/Lib/test/test_hmac.py.cod 2015-02-23 10:37:13.448594606 +0100 ++++ Python-2.7.5/Lib/test/test_hmac.py 2015-02-23 10:37:27.581717509 +0100 +@@ -1,3 +1,5 @@ ++# coding: utf-8 ++ + import hmac + import hashlib + import unittest diff --git a/SOURCES/00211-pep466-UTF-7-decoder-fix-illegal-unicode.patch b/SOURCES/00211-pep466-UTF-7-decoder-fix-illegal-unicode.patch new file mode 100644 index 0000000..c6ea78e --- /dev/null +++ b/SOURCES/00211-pep466-UTF-7-decoder-fix-illegal-unicode.patch @@ -0,0 +1,67 @@ + +# HG changeset patch +# User Serhiy Storchaka +# Date 1382204269 -10800 +# Node ID 214c0aac7540947d88a38ff0061734547ef86710 +# Parent c207ac413457a1b834e4b7dcf1a6836cd6e036e3 +Issue #19279: UTF-7 decoder no more produces illegal unicode strings. 
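Editor's note: in behavioural terms (values taken from the test vectors added below), a malformed or truncated UTF-7 shift sequence now decodes to U+FFFD under 'replace' and raises under 'strict', instead of producing an ill-formed unicode string:

    'a+IK-b'.decode('utf-7', 'replace')    # -> u'a\ufffdb'
    'a+IKx-b'.decode('utf-7', 'replace')   # -> u'a\u20ac\ufffdb'
    'a+IK-b'.decode('utf-7')               # strict mode: raises UnicodeDecodeError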
+ +diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py +--- a/Lib/test/test_codecs.py ++++ b/Lib/test/test_codecs.py +@@ -611,6 +611,35 @@ class UTF7Test(ReadTest): + ] + ) + ++ def test_errors(self): ++ tests = [ ++ ('a\xffb', u'a\ufffdb'), ++ ('a+IK', u'a\ufffd'), ++ ('a+IK-b', u'a\ufffdb'), ++ ('a+IK,b', u'a\ufffdb'), ++ ('a+IKx', u'a\u20ac\ufffd'), ++ ('a+IKx-b', u'a\u20ac\ufffdb'), ++ ('a+IKwgr', u'a\u20ac\ufffd'), ++ ('a+IKwgr-b', u'a\u20ac\ufffdb'), ++ ('a+IKwgr,', u'a\u20ac\ufffd'), ++ ('a+IKwgr,-b', u'a\u20ac\ufffd-b'), ++ ('a+IKwgrB', u'a\u20ac\u20ac\ufffd'), ++ ('a+IKwgrB-b', u'a\u20ac\u20ac\ufffdb'), ++ ('a+/,+IKw-b', u'a\ufffd\u20acb'), ++ ('a+//,+IKw-b', u'a\ufffd\u20acb'), ++ ('a+///,+IKw-b', u'a\uffff\ufffd\u20acb'), ++ ('a+////,+IKw-b', u'a\uffff\ufffd\u20acb'), ++ ] ++ for raw, expected in tests: ++ self.assertRaises(UnicodeDecodeError, codecs.utf_7_decode, ++ raw, 'strict', True) ++ self.assertEqual(raw.decode('utf-7', 'replace'), expected) ++ ++ def test_nonbmp(self): ++ self.assertEqual(u'\U000104A0'.encode(self.encoding), '+2AHcoA-') ++ self.assertEqual(u'\ud801\udca0'.encode(self.encoding), '+2AHcoA-') ++ self.assertEqual('+2AHcoA-'.decode(self.encoding), u'\U000104A0') ++ + class UTF16ExTest(unittest.TestCase): + + def test_errors(self): +diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c +--- a/Objects/unicodeobject.c ++++ b/Objects/unicodeobject.c +@@ -1671,6 +1671,7 @@ PyObject *PyUnicode_DecodeUTF7Stateful(c + (base64buffer >> (base64bits-16)); + base64bits -= 16; + base64buffer &= (1 << base64bits) - 1; /* clear high bits */ ++ assert(outCh <= 0xffff); + if (surrogate) { + /* expecting a second surrogate */ + if (outCh >= 0xDC00 && outCh <= 0xDFFF) { +@@ -1737,6 +1738,7 @@ PyObject *PyUnicode_DecodeUTF7Stateful(c + inShift = 1; + shiftOutStart = p; + base64bits = 0; ++ base64buffer = 0; + } + } + else if (DECODE_DIRECT(ch)) { /* character decodes as itself */ + diff --git a/SOURCES/00212-pep466-pyunicode_fromformat-raise-overflow.patch b/SOURCES/00212-pep466-pyunicode_fromformat-raise-overflow.patch new file mode 100644 index 0000000..a05dd00 --- /dev/null +++ b/SOURCES/00212-pep466-pyunicode_fromformat-raise-overflow.patch @@ -0,0 +1,40 @@ + +# HG changeset patch +# User Serhiy Storchaka +# Date 1372008129 -10800 +# Node ID 2f1e8b7fa534b147280fdc9b92e44a7c7305338a +# Parent 8f0adcb66633ee97e4f7bdeee2104268113b86c3 +Issue #18184: PyUnicode_FromFormat() and PyUnicode_FromFormatV() now raise +OverflowError when an argument of %c format is out of range. + +diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c +--- a/Objects/unicodeobject.c ++++ b/Objects/unicodeobject.c +@@ -740,8 +740,25 @@ PyUnicode_FromFormatV(const char *format + + switch (*f) { + case 'c': +- (void)va_arg(count, int); ++ { ++ int ordinal = va_arg(count, int); ++#ifdef Py_UNICODE_WIDE ++ if (ordinal < 0 || ordinal > 0x10ffff) { ++ PyErr_SetString(PyExc_OverflowError, ++ "%c arg not in range(0x110000) " ++ "(wide Python build)"); ++ goto fail; ++ } ++#else ++ if (ordinal < 0 || ordinal > 0xffff) { ++ PyErr_SetString(PyExc_OverflowError, ++ "%c arg not in range(0x10000) " ++ "(narrow Python build)"); ++ goto fail; ++ } ++#endif + /* fall through... 
*/ ++ } + case '%': + n++; + break; + diff --git a/SOURCES/00213-pep466-pyunicode_fromformat-fix-formats.patch b/SOURCES/00213-pep466-pyunicode_fromformat-fix-formats.patch new file mode 100644 index 0000000..b326260 --- /dev/null +++ b/SOURCES/00213-pep466-pyunicode_fromformat-fix-formats.patch @@ -0,0 +1,176 @@ + +# HG changeset patch +# User Victor Stinner +# Date 1406673545 -7200 +# Node ID 263701e0b77e3160bc6a835087f838bd6b24092a +# Parent 6c47c6d2033e20e9b35f1d22e0e797961d6e680f +Issue #22023: Fix %S, %R and %V formats of PyUnicode_FromFormat(). + +diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c +--- a/Objects/unicodeobject.c ++++ b/Objects/unicodeobject.c +@@ -690,7 +690,12 @@ makefmt(char *fmt, int longflag, int siz + *fmt = '\0'; + } + +-#define appendstring(string) {for (copy = string;*copy;) *s++ = *copy++;} ++#define appendstring(string) \ ++ do { \ ++ for (copy = string;*copy; copy++) { \ ++ *s++ = (unsigned char)*copy; \ ++ } \ ++ } while (0) + + PyObject * + PyUnicode_FromFormatV(const char *format, va_list vargs) +@@ -845,7 +850,7 @@ PyUnicode_FromFormatV(const char *format + str = PyObject_Str(obj); + if (!str) + goto fail; +- n += PyUnicode_GET_SIZE(str); ++ n += PyString_GET_SIZE(str); + /* Remember the str and switch to the next slot */ + *callresult++ = str; + break; +@@ -1006,15 +1011,10 @@ PyUnicode_FromFormatV(const char *format + case 'S': + case 'R': + { +- Py_UNICODE *ucopy; +- Py_ssize_t usize; +- Py_ssize_t upos; ++ const char *str = PyString_AS_STRING(*callresult); + /* unused, since we already have the result */ + (void) va_arg(vargs, PyObject *); +- ucopy = PyUnicode_AS_UNICODE(*callresult); +- usize = PyUnicode_GET_SIZE(*callresult); +- for (upos = 0; upos forget it */ + Py_DECREF(*callresult); + /* switch to next unicode()/repr() result */ + +diff -up Python-2.7.5/Lib/test/test_unicode.py.uni Python-2.7.5/Lib/test/test_unicode.py +--- Python-2.7.5/Lib/test/test_unicode.py.uni 2015-02-24 13:37:01.704739438 +0100 ++++ Python-2.7.5/Lib/test/test_unicode.py 2015-02-24 13:38:38.439482167 +0100 +@@ -1633,6 +1633,119 @@ class UnicodeTest( + self.assertEqual("%s" % u, u'__unicode__ overridden') + self.assertEqual("{}".format(u), '__unicode__ overridden') + ++ # Test PyUnicode_FromFormat() ++ def test_from_format(self): ++ test_support.import_module('ctypes') ++ from ctypes import ( ++ pythonapi, py_object, sizeof, ++ c_int, c_long, c_longlong, c_ssize_t, ++ c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p) ++ if sys.maxunicode == 0xffff: ++ name = "PyUnicodeUCS2_FromFormat" ++ else: ++ name = "PyUnicodeUCS4_FromFormat" ++ _PyUnicode_FromFormat = getattr(pythonapi, name) ++ _PyUnicode_FromFormat.restype = py_object ++ ++ def PyUnicode_FromFormat(format, *args): ++ cargs = tuple( ++ py_object(arg) if isinstance(arg, unicode) else arg ++ for arg in args) ++ return _PyUnicode_FromFormat(format, *cargs) ++ ++ def check_format(expected, format, *args): ++ text = PyUnicode_FromFormat(format, *args) ++ self.assertEqual(expected, text) ++ ++ # ascii format, non-ascii argument ++ check_format(u'ascii\x7f=unicode\xe9', ++ b'ascii\x7f=%U', u'unicode\xe9') ++ ++ # non-ascii format, ascii argument: ensure that PyUnicode_FromFormatV() ++ # raises an error ++ #self.assertRaisesRegex(ValueError, ++ # '^PyUnicode_FromFormatV\(\) expects an ASCII-encoded format ' ++ # 'string, got a non-ASCII byte: 0xe9$', ++ # PyUnicode_FromFormat, b'unicode\xe9=%s', u'ascii') ++ ++ # test "%c" ++ check_format(u'\uabcd', ++ b'%c', c_int(0xabcd)) ++ if sys.maxunicode > 0xffff: ++ 
check_format(u'\U0010ffff', ++ b'%c', c_int(0x10ffff)) ++ with self.assertRaises(OverflowError): ++ PyUnicode_FromFormat(b'%c', c_int(0x110000)) ++ # Issue #18183 ++ if sys.maxunicode > 0xffff: ++ check_format(u'\U00010000\U00100000', ++ b'%c%c', c_int(0x10000), c_int(0x100000)) ++ ++ # test "%" ++ check_format(u'%', ++ b'%') ++ check_format(u'%', ++ b'%%') ++ check_format(u'%s', ++ b'%%s') ++ check_format(u'[%]', ++ b'[%%]') ++ check_format(u'%abc', ++ b'%%%s', b'abc') ++ ++ # test %S ++ check_format(u"repr=abc", ++ b'repr=%S', u'abc') ++ ++ # test %R ++ check_format(u"repr=u'abc'", ++ b'repr=%R', u'abc') ++ ++ # test integer formats (%i, %d, %u) ++ check_format(u'010', ++ b'%03i', c_int(10)) ++ check_format(u'0010', ++ b'%0.4i', c_int(10)) ++ check_format(u'-123', ++ b'%i', c_int(-123)) ++ ++ check_format(u'-123', ++ b'%d', c_int(-123)) ++ check_format(u'-123', ++ b'%ld', c_long(-123)) ++ check_format(u'-123', ++ b'%zd', c_ssize_t(-123)) ++ ++ check_format(u'123', ++ b'%u', c_uint(123)) ++ check_format(u'123', ++ b'%lu', c_ulong(123)) ++ check_format(u'123', ++ b'%zu', c_size_t(123)) ++ ++ # test long output ++ PyUnicode_FromFormat(b'%p', c_void_p(-1)) ++ ++ # test %V ++ check_format(u'repr=abc', ++ b'repr=%V', u'abc', b'xyz') ++ check_format(u'repr=\xe4\xba\xba\xe6\xb0\x91', ++ b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91') ++ check_format(u'repr=abc\xff', ++ b'repr=%V', None, b'abc\xff') ++ ++ # not supported: copy the raw format string. these tests are just here ++ # to check for crashs and should not be considered as specifications ++ check_format(u'%s', ++ b'%1%s', b'abc') ++ check_format(u'%1abc', ++ b'%1abc') ++ check_format(u'%+i', ++ b'%+i', c_int(10)) ++ check_format(u'%s', ++ b'%.%s', b'abc') ++ ++ + def test_encode_decimal(self): + from _testcapi import unicode_encodedecimal + self.assertEqual(unicode_encodedecimal(u'123'), diff --git a/SOURCES/00214-pep466-backport-py3-ssl-changes.patch b/SOURCES/00214-pep466-backport-py3-ssl-changes.patch new file mode 100644 index 0000000..960eb0d --- /dev/null +++ b/SOURCES/00214-pep466-backport-py3-ssl-changes.patch @@ -0,0 +1,13082 @@ +# HG changeset patch +# User Benjamin Peterson +# Date 1408562090 18000 +# Node ID 221a1f9155e2a8b4d12015261b83a2ce3a0c62a2 +# Parent c1edc4e43eb103254c5d96f1708542623fe08f17 +backport many ssl features from Python 3 (closes #21308) + +A contribution of Alex Gaynor and David Reid with the generous support of +Rackspace. May God have mercy on their souls. + +diff -up Python-2.7.5/Lib/ssl.py.rev Python-2.7.5/Lib/ssl.py +--- Python-2.7.5/Lib/ssl.py.rev 2015-03-03 11:11:56.743921122 +0100 ++++ Python-2.7.5/Lib/ssl.py 2015-03-03 11:18:11.829704572 +0100 +@@ -89,6 +89,7 @@ else: + + from socket import socket, _fileobject, _delegate_methods, error as socket_error + from socket import getnameinfo as _getnameinfo ++from socket import SOL_SOCKET, SO_TYPE, SOCK_STREAM + import base64 # for DER-to-PEM translation + import errno + +@@ -108,6 +109,10 @@ class SSLSocket(socket): + ssl_version=PROTOCOL_SSLv23, ca_certs=None, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, ciphers=None): ++ # Can't use sock.type as other flags (such as SOCK_NONBLOCK) get ++ # mixed in. ++ if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM: ++ raise NotImplementedError("only stream sockets are supported") + socket.__init__(self, _sock=sock._sock) + # The initializer for socket overrides the methods send(), recv(), etc. 
+ # in the instancce, which we don't need -- but we want to provide the +diff -up Python-2.7.5/Lib/test/test_ssl.py.rev Python-2.7.5/Lib/test/test_ssl.py +diff -up Python-2.7.5/Modules/_ssl.c.rev Python-2.7.5/Modules/_ssl.c +--- Python-2.7.5/Modules/_ssl.c.rev 2015-03-03 10:37:10.331849242 +0100 ++++ Python-2.7.5/Modules/_ssl.c 2015-03-03 11:11:09.324442807 +0100 +@@ -281,6 +281,7 @@ newPySSLObject(PySocketSockObject *Sock, + self->ssl = NULL; + self->ctx = NULL; + self->Socket = NULL; ++ self->shutdown_seen_zero = 0; + + /* Make sure the SSL error state is initialized */ + (void) ERR_get_state(); +@@ -686,7 +687,7 @@ _get_peer_alt_names (X509 *certificate) + + int i, j; + PyObject *peer_alt_names = Py_None; +- PyObject *v, *t; ++ PyObject *v = NULL, *t; + X509_EXTENSION *ext = NULL; + GENERAL_NAMES *names = NULL; + GENERAL_NAME *name; +@@ -745,7 +746,7 @@ _get_peer_alt_names (X509 *certificate) + ASN1_STRING *as = NULL; + + name = sk_GENERAL_NAME_value(names, j); +- gntype = name-> type; ++ gntype = name->type; + switch (gntype) { + case GEN_DIRNAME: + +@@ -781,15 +782,15 @@ _get_peer_alt_names (X509 *certificate) + goto fail; + switch (gntype) { + case GEN_EMAIL: +- v = PyUnicode_FromString("email"); ++ v = PyString_FromString("email"); + as = name->d.rfc822Name; + break; + case GEN_DNS: +- v = PyUnicode_FromString("DNS"); ++ v = PyString_FromString("DNS"); + as = name->d.dNSName; + break; + case GEN_URI: +- v = PyUnicode_FromString("URI"); ++ v = PyString_FromString("URI"); + as = name->d.uniformResourceIdentifier; + break; + } +@@ -819,7 +820,7 @@ _get_peer_alt_names (X509 *certificate) + break; + default: + if (PyErr_Warn(PyExc_RuntimeWarning, +- "Unknown general name type") == -1) { ++ "Unknown general name type") == -1) { + goto fail; + } + break; +@@ -849,7 +850,7 @@ _get_peer_alt_names (X509 *certificate) + goto fail; + } + PyTuple_SET_ITEM(t, 1, v); +- break; ++ break; + } + + /* and add that rendering to the list */ +@@ -1248,6 +1249,12 @@ static PyObject *PySSL_SSLwrite(PySSLObj + if (!PyArg_ParseTuple(args, "s*:write", &buf)) + return NULL; + ++ if (buf.len > INT_MAX) { ++ PyErr_Format(PyExc_OverflowError, ++ "string longer than %d bytes", INT_MAX); ++ goto error; ++ } ++ + /* just in case the blocking state of the socket has been changed */ + nonblocking = (self->Socket->sock_timeout >= 0.0); + BIO_set_nbio(SSL_get_rbio(self->ssl), nonblocking); +@@ -1269,7 +1276,7 @@ static PyObject *PySSL_SSLwrite(PySSLObj + } + do { + PySSL_BEGIN_ALLOW_THREADS +- len = SSL_write(self->ssl, buf.buf, buf.len); ++ len = SSL_write(self->ssl, buf.buf, (int)buf.len); + err = SSL_get_error(self->ssl, len); + PySSL_END_ALLOW_THREADS + if (PyErr_CheckSignals()) { +@@ -1451,7 +1458,7 @@ static PyObject *PySSL_SSLshutdown(PySSL + * Otherwise OpenSSL might read in too much data, + * eating clear text data that happens to be + * transmitted after the SSL shutdown. +- * Should be safe to call repeatedly everytime this ++ * Should be safe to call repeatedly every time this + * function is used and the shutdown_seen_zero != 0 + * condition is met. + */ +@@ -1615,9 +1622,9 @@ PyDoc_STRVAR(PySSL_RAND_egd_doc, + \n\ + Queries the entropy gather daemon (EGD) on the socket named by 'path'.\n\ + Returns number of bytes read. Raises SSLError if connection to EGD\n\ +-fails or if it does provide enough data to seed PRNG."); ++fails or if it does not provide enough data to seed PRNG."); + +-#endif ++#endif /* HAVE_OPENSSL_RAND */ + + /* List of functions exported by this module. 
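Editor's note: the SOCK_STREAM check added to Lib/ssl.py above makes wrapping a datagram socket fail immediately instead of misbehaving later. A minimal sketch:

    import socket
    import ssl

    udp = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        ssl.wrap_socket(udp)
    except NotImplementedError:
        pass    # "only stream sockets are supported"
    finally:
        udp.close()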
*/ + +diff -up Python-2.7.5/Modules/_ssl.c.ssl2 Python-2.7.5/Modules/_ssl.c +--- Python-2.7.5/Modules/_ssl.c.ssl2 2015-03-03 15:48:56.112889298 +0100 ++++ Python-2.7.5/Modules/_ssl.c 2015-03-03 15:47:21.598870012 +0100 +@@ -14,20 +14,26 @@ + http://bugs.python.org/issue8108#msg102867 ? + */ + ++#define PY_SSIZE_T_CLEAN + #include "Python.h" + + #ifdef WITH_THREAD + #include "pythread.h" ++#define PySSL_BEGIN_ALLOW_THREADS_S(save) \ ++ do { if (_ssl_locks_count>0) { (save) = PyEval_SaveThread(); } } while (0) ++#define PySSL_END_ALLOW_THREADS_S(save) \ ++ do { if (_ssl_locks_count>0) { PyEval_RestoreThread(save); } } while (0) + #define PySSL_BEGIN_ALLOW_THREADS { \ + PyThreadState *_save = NULL; \ +- if (_ssl_locks_count>0) {_save = PyEval_SaveThread();} +-#define PySSL_BLOCK_THREADS if (_ssl_locks_count>0){PyEval_RestoreThread(_save)}; +-#define PySSL_UNBLOCK_THREADS if (_ssl_locks_count>0){_save = PyEval_SaveThread()}; +-#define PySSL_END_ALLOW_THREADS if (_ssl_locks_count>0){PyEval_RestoreThread(_save);} \ +- } ++ PySSL_BEGIN_ALLOW_THREADS_S(_save); ++#define PySSL_BLOCK_THREADS PySSL_END_ALLOW_THREADS_S(_save); ++#define PySSL_UNBLOCK_THREADS PySSL_BEGIN_ALLOW_THREADS_S(_save); ++#define PySSL_END_ALLOW_THREADS PySSL_END_ALLOW_THREADS_S(_save); } + + #else /* no WITH_THREAD */ + ++#define PySSL_BEGIN_ALLOW_THREADS_S(save) ++#define PySSL_END_ALLOW_THREADS_S(save) + #define PySSL_BEGIN_ALLOW_THREADS + #define PySSL_BLOCK_THREADS + #define PySSL_UNBLOCK_THREADS +@@ -35,6 +41,68 @@ + + #endif + ++/* Include symbols from _socket module */ ++#include "socketmodule.h" ++ ++#if defined(HAVE_POLL_H) ++#include ++#elif defined(HAVE_SYS_POLL_H) ++#include ++#endif ++ ++/* Include OpenSSL header files */ ++#include "openssl/rsa.h" ++#include "openssl/crypto.h" ++#include "openssl/x509.h" ++#include "openssl/x509v3.h" ++#include "openssl/pem.h" ++#include "openssl/ssl.h" ++#include "openssl/err.h" ++#include "openssl/rand.h" ++ ++/* SSL error object */ ++static PyObject *PySSLErrorObject; ++static PyObject *PySSLZeroReturnErrorObject; ++static PyObject *PySSLWantReadErrorObject; ++static PyObject *PySSLWantWriteErrorObject; ++static PyObject *PySSLSyscallErrorObject; ++static PyObject *PySSLEOFErrorObject; ++ ++/* Error mappings */ ++static PyObject *err_codes_to_names; ++static PyObject *err_names_to_codes; ++static PyObject *lib_codes_to_names; ++ ++struct py_ssl_error_code { ++ const char *mnemonic; ++ int library, reason; ++}; ++struct py_ssl_library_code { ++ const char *library; ++ int code; ++}; ++ ++/* Include generated data (error codes) */ ++#include "_ssl_data.h" ++ ++/* Openssl comes with TLSv1.1 and TLSv1.2 between 1.0.0h and 1.0.1 ++ http://www.openssl.org/news/changelog.html ++ */ ++#if OPENSSL_VERSION_NUMBER >= 0x10001000L ++# define HAVE_TLSv1_2 1 ++#else ++# define HAVE_TLSv1_2 0 ++#endif ++ ++/* SNI support (client- and server-side) appeared in OpenSSL 1.0.0 and 0.9.8f ++ * This includes the SSL_set_SSL_CTX() function. 
++ */ ++#ifdef SSL_CTRL_SET_TLSEXT_HOSTNAME ++# define HAVE_SNI 1 ++#else ++# define HAVE_SNI 0 ++#endif ++ + enum py_ssl_error { + /* these mirror ssl.h */ + PY_SSL_ERROR_NONE, +@@ -47,6 +115,7 @@ enum py_ssl_error { + PY_SSL_ERROR_WANT_CONNECT, + /* start of non ssl.h errorcodes */ + PY_SSL_ERROR_EOF, /* special case of SSL_ERROR_SYSCALL */ ++ PY_SSL_ERROR_NO_SOCKET, /* socket has been GC'd */ + PY_SSL_ERROR_INVALID_ERROR_CODE + }; + +@@ -62,35 +131,17 @@ enum py_ssl_cert_requirements { + }; + + enum py_ssl_version { +-#ifndef OPENSSL_NO_SSL2 + PY_SSL_VERSION_SSL2, +-#endif + PY_SSL_VERSION_SSL3=1, + PY_SSL_VERSION_SSL23, ++#if HAVE_TLSv1_2 ++ PY_SSL_VERSION_TLS1, ++ PY_SSL_VERSION_TLS1_1, ++ PY_SSL_VERSION_TLS1_2 ++#else + PY_SSL_VERSION_TLS1 +-}; +- +-/* Include symbols from _socket module */ +-#include "socketmodule.h" +- +-#if defined(HAVE_POLL_H) +-#include +-#elif defined(HAVE_SYS_POLL_H) +-#include + #endif +- +-/* Include OpenSSL header files */ +-#include "openssl/rsa.h" +-#include "openssl/crypto.h" +-#include "openssl/x509.h" +-#include "openssl/x509v3.h" +-#include "openssl/pem.h" +-#include "openssl/ssl.h" +-#include "openssl/err.h" +-#include "openssl/rand.h" +- +-/* SSL error object */ +-static PyObject *PySSLErrorObject; ++}; + + #ifdef WITH_THREAD + +@@ -112,27 +163,79 @@ static unsigned int _ssl_locks_count = 0 + # undef HAVE_OPENSSL_RAND + #endif + ++/* SSL_CTX_clear_options() and SSL_clear_options() were first added in ++ * OpenSSL 0.9.8m but do not appear in some 0.9.9-dev versions such the ++ * 0.9.9 from "May 2008" that NetBSD 5.0 uses. */ ++#if OPENSSL_VERSION_NUMBER >= 0x009080dfL && OPENSSL_VERSION_NUMBER != 0x00909000L ++# define HAVE_SSL_CTX_CLEAR_OPTIONS ++#else ++# undef HAVE_SSL_CTX_CLEAR_OPTIONS ++#endif ++ ++/* In case of 'tls-unique' it will be 12 bytes for TLS, 36 bytes for ++ * older SSL, but let's be safe */ ++#define PySSL_CB_MAXLEN 128 ++ ++/* SSL_get_finished got added to OpenSSL in 0.9.5 */ ++#if OPENSSL_VERSION_NUMBER >= 0x0090500fL ++# define HAVE_OPENSSL_FINISHED 1 ++#else ++# define HAVE_OPENSSL_FINISHED 0 ++#endif ++ ++/* ECDH support got added to OpenSSL in 0.9.8 */ ++#if OPENSSL_VERSION_NUMBER < 0x0090800fL && !defined(OPENSSL_NO_ECDH) ++# define OPENSSL_NO_ECDH ++#endif ++ ++/* compression support got added to OpenSSL in 0.9.8 */ ++#if OPENSSL_VERSION_NUMBER < 0x0090800fL && !defined(OPENSSL_NO_COMP) ++# define OPENSSL_NO_COMP ++#endif ++ ++/* X509_VERIFY_PARAM got added to OpenSSL in 0.9.8 */ ++#if OPENSSL_VERSION_NUMBER >= 0x0090800fL ++# define HAVE_OPENSSL_VERIFY_PARAM ++#endif ++ ++ ++typedef struct { ++ PyObject_HEAD ++ SSL_CTX *ctx; ++#ifdef OPENSSL_NPN_NEGOTIATED ++ char *npn_protocols; ++ int npn_protocols_len; ++#endif ++#ifndef OPENSSL_NO_TLSEXT ++ PyObject *set_hostname; ++#endif ++ int check_hostname; ++} PySSLContext; ++ + typedef struct { + PyObject_HEAD +- PySocketSockObject *Socket; /* Socket on which we're layered */ +- SSL_CTX* ctx; +- SSL* ssl; +- X509* peer_cert; +- char server[X509_NAME_MAXLEN]; +- char issuer[X509_NAME_MAXLEN]; +- int shutdown_seen_zero; +- +-} PySSLObject; +- +-static PyTypeObject PySSL_Type; +-static PyObject *PySSL_SSLwrite(PySSLObject *self, PyObject *args); +-static PyObject *PySSL_SSLread(PySSLObject *self, PyObject *args); ++ PySocketSockObject *Socket; ++ PyObject *ssl_sock; ++ SSL *ssl; ++ PySSLContext *ctx; /* weakref to SSL context */ ++ X509 *peer_cert; ++ char shutdown_seen_zero; ++ char handshake_done; ++ enum py_ssl_server_or_client socket_type; ++} PySSLSocket; ++ ++static 
PyTypeObject PySSLContext_Type; ++static PyTypeObject PySSLSocket_Type; ++ ++static PyObject *PySSL_SSLwrite(PySSLSocket *self, PyObject *args); ++static PyObject *PySSL_SSLread(PySSLSocket *self, PyObject *args); + static int check_socket_and_wait_for_timeout(PySocketSockObject *s, + int writing); +-static PyObject *PySSL_peercert(PySSLObject *self, PyObject *args); +-static PyObject *PySSL_cipher(PySSLObject *self); ++static PyObject *PySSL_peercert(PySSLSocket *self, PyObject *args); ++static PyObject *PySSL_cipher(PySSLSocket *self); + +-#define PySSLObject_Check(v) (Py_TYPE(v) == &PySSL_Type) ++#define PySSLContext_Check(v) (Py_TYPE(v) == &PySSLContext_Type) ++#define PySSLSocket_Check(v) (Py_TYPE(v) == &PySSLSocket_Type) + + typedef enum { + SOCKET_IS_NONBLOCKING, +@@ -149,36 +252,140 @@ typedef enum { + #define ERRSTR1(x,y,z) (x ":" y ": " z) + #define ERRSTR(x) ERRSTR1("_ssl.c", STRINGIFY2(__LINE__), x) + +-/* XXX It might be helpful to augment the error message generated +- below with the name of the SSL function that generated the error. +- I expect it's obvious most of the time. +-*/ ++ ++/* ++ * SSL errors. ++ */ ++ ++PyDoc_STRVAR(SSLError_doc, ++"An error occurred in the SSL implementation."); ++ ++PyDoc_STRVAR(SSLZeroReturnError_doc, ++"SSL/TLS session closed cleanly."); ++ ++PyDoc_STRVAR(SSLWantReadError_doc, ++"Non-blocking SSL socket needs to read more data\n" ++"before the requested operation can be completed."); ++ ++PyDoc_STRVAR(SSLWantWriteError_doc, ++"Non-blocking SSL socket needs to write more data\n" ++"before the requested operation can be completed."); ++ ++PyDoc_STRVAR(SSLSyscallError_doc, ++"System error when attempting SSL operation."); ++ ++PyDoc_STRVAR(SSLEOFError_doc, ++"SSL/TLS connection terminated abruptly."); ++ + + static PyObject * +-PySSL_SetError(PySSLObject *obj, int ret, char *filename, int lineno) ++SSLError_str(PyEnvironmentErrorObject *self) + { +- PyObject *v; +- char buf[2048]; +- char *errstr; ++ if (self->strerror != NULL) { ++ Py_INCREF(self->strerror); ++ return self->strerror; ++ } ++ else ++ return PyObject_Str(self->args); ++} ++ ++static void ++fill_and_set_sslerror(PyObject *type, int ssl_errno, const char *errstr, ++ int lineno, unsigned long errcode) ++{ ++ PyObject *err_value = NULL, *reason_obj = NULL, *lib_obj = NULL; ++ PyObject *init_value, *msg, *key; ++ ++ if (errcode != 0) { ++ int lib, reason; ++ ++ lib = ERR_GET_LIB(errcode); ++ reason = ERR_GET_REASON(errcode); ++ key = Py_BuildValue("ii", lib, reason); ++ if (key == NULL) ++ goto fail; ++ reason_obj = PyDict_GetItem(err_codes_to_names, key); ++ Py_DECREF(key); ++ if (reason_obj == NULL) { ++ /* XXX if reason < 100, it might reflect a library number (!!) 
*/ ++ PyErr_Clear(); ++ } ++ key = PyLong_FromLong(lib); ++ if (key == NULL) ++ goto fail; ++ lib_obj = PyDict_GetItem(lib_codes_to_names, key); ++ Py_DECREF(key); ++ if (lib_obj == NULL) { ++ PyErr_Clear(); ++ } ++ if (errstr == NULL) ++ errstr = ERR_reason_error_string(errcode); ++ } ++ if (errstr == NULL) ++ errstr = "unknown error"; ++ ++ if (reason_obj && lib_obj) ++ msg = PyUnicode_FromFormat("[%S: %S] %s (_ssl.c:%d)", ++ lib_obj, reason_obj, errstr, lineno); ++ else if (lib_obj) ++ msg = PyUnicode_FromFormat("[%S] %s (_ssl.c:%d)", ++ lib_obj, errstr, lineno); ++ else ++ msg = PyUnicode_FromFormat("%s (_ssl.c:%d)", errstr, lineno); ++ if (msg == NULL) ++ goto fail; ++ ++ init_value = Py_BuildValue("iN", ssl_errno, msg); ++ if (init_value == NULL) ++ goto fail; ++ ++ err_value = PyObject_CallObject(type, init_value); ++ Py_DECREF(init_value); ++ if (err_value == NULL) ++ goto fail; ++ ++ if (reason_obj == NULL) ++ reason_obj = Py_None; ++ if (PyObject_SetAttrString(err_value, "reason", reason_obj)) ++ goto fail; ++ if (lib_obj == NULL) ++ lib_obj = Py_None; ++ if (PyObject_SetAttrString(err_value, "library", lib_obj)) ++ goto fail; ++ PyErr_SetObject(type, err_value); ++fail: ++ Py_XDECREF(err_value); ++} ++ ++static PyObject * ++PySSL_SetError(PySSLSocket *obj, int ret, char *filename, int lineno) ++{ ++ PyObject *type = PySSLErrorObject; ++ char *errstr = NULL; + int err; + enum py_ssl_error p = PY_SSL_ERROR_NONE; ++ unsigned long e = 0; + + assert(ret <= 0); ++ e = ERR_peek_last_error(); + + if (obj->ssl != NULL) { + err = SSL_get_error(obj->ssl, ret); + + switch (err) { + case SSL_ERROR_ZERO_RETURN: +- errstr = "TLS/SSL connection has been closed"; ++ errstr = "TLS/SSL connection has been closed (EOF)"; ++ type = PySSLZeroReturnErrorObject; + p = PY_SSL_ERROR_ZERO_RETURN; + break; + case SSL_ERROR_WANT_READ: + errstr = "The operation did not complete (read)"; ++ type = PySSLWantReadErrorObject; + p = PY_SSL_ERROR_WANT_READ; + break; + case SSL_ERROR_WANT_WRITE: + p = PY_SSL_ERROR_WANT_WRITE; ++ type = PySSLWantWriteErrorObject; + errstr = "The operation did not complete (write)"; + break; + case SSL_ERROR_WANT_X509_LOOKUP: +@@ -191,210 +398,109 @@ PySSL_SetError(PySSLObject *obj, int ret + break; + case SSL_ERROR_SYSCALL: + { +- unsigned long e = ERR_get_error(); + if (e == 0) { +- if (ret == 0 || !obj->Socket) { ++ PySocketSockObject *s = obj->Socket; ++ if (ret == 0) { + p = PY_SSL_ERROR_EOF; ++ type = PySSLEOFErrorObject; + errstr = "EOF occurred in violation of protocol"; + } else if (ret == -1) { + /* underlying BIO reported an I/O error */ ++ Py_INCREF(s); + ERR_clear_error(); +- return obj->Socket->errorhandler(); ++ s->errorhandler(); ++ Py_DECREF(s); ++ return NULL; + } else { /* possible? */ + p = PY_SSL_ERROR_SYSCALL; ++ type = PySSLSyscallErrorObject; + errstr = "Some I/O error occurred"; + } + } else { + p = PY_SSL_ERROR_SYSCALL; +- /* XXX Protected by global interpreter lock */ +- errstr = ERR_error_string(e, NULL); + } + break; + } + case SSL_ERROR_SSL: + { +- unsigned long e = ERR_get_error(); + p = PY_SSL_ERROR_SSL; +- if (e != 0) +- /* XXX Protected by global interpreter lock */ +- errstr = ERR_error_string(e, NULL); +- else { /* possible? */ ++ if (e == 0) ++ /* possible? 
*/ + errstr = "A failure in the SSL library occurred"; +- } + break; + } + default: + p = PY_SSL_ERROR_INVALID_ERROR_CODE; + errstr = "Invalid error code"; + } +- } else { +- errstr = ERR_error_string(ERR_peek_last_error(), NULL); + } +- PyOS_snprintf(buf, sizeof(buf), "_ssl.c:%d: %s", lineno, errstr); ++ fill_and_set_sslerror(type, p, errstr, lineno, e); + ERR_clear_error(); +- v = Py_BuildValue("(is)", p, buf); +- if (v != NULL) { +- PyErr_SetObject(PySSLErrorObject, v); +- Py_DECREF(v); +- } + return NULL; + } + + static PyObject * + _setSSLError (char *errstr, int errcode, char *filename, int lineno) { + +- char buf[2048]; +- PyObject *v; +- +- if (errstr == NULL) { ++ if (errstr == NULL) + errcode = ERR_peek_last_error(); +- errstr = ERR_error_string(errcode, NULL); +- } +- PyOS_snprintf(buf, sizeof(buf), "_ssl.c:%d: %s", lineno, errstr); ++ else ++ errcode = 0; ++ fill_and_set_sslerror(PySSLErrorObject, errcode, errstr, lineno, errcode); + ERR_clear_error(); +- v = Py_BuildValue("(is)", errcode, buf); +- if (v != NULL) { +- PyErr_SetObject(PySSLErrorObject, v); +- Py_DECREF(v); +- } + return NULL; + } + +-static PySSLObject * +-newPySSLObject(PySocketSockObject *Sock, char *key_file, char *cert_file, ++/* ++ * SSL objects ++ */ ++ ++static PySSLSocket * ++newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, + enum py_ssl_server_or_client socket_type, +- enum py_ssl_cert_requirements certreq, +- enum py_ssl_version proto_version, +- char *cacerts_file, char *ciphers) ++ char *server_hostname, PyObject *ssl_sock) + { +- PySSLObject *self; +- char *errstr = NULL; +- int ret; +- int verification_mode; ++ PySSLSocket *self; ++ SSL_CTX *ctx = sslctx->ctx; ++ long mode; + +- self = PyObject_New(PySSLObject, &PySSL_Type); /* Create new object */ ++ self = PyObject_New(PySSLSocket, &PySSLSocket_Type); + if (self == NULL) + return NULL; +- memset(self->server, '\0', sizeof(char) * X509_NAME_MAXLEN); +- memset(self->issuer, '\0', sizeof(char) * X509_NAME_MAXLEN); ++ + self->peer_cert = NULL; + self->ssl = NULL; +- self->ctx = NULL; + self->Socket = NULL; ++ self->ssl_sock = NULL; ++ self->ctx = sslctx; + self->shutdown_seen_zero = 0; ++ self->handshake_done = 0; ++ Py_INCREF(sslctx); + + /* Make sure the SSL error state is initialized */ + (void) ERR_get_state(); + ERR_clear_error(); + +- if ((key_file && !cert_file) || (!key_file && cert_file)) { +- errstr = ERRSTR("Both the key & certificate files " +- "must be specified"); +- goto fail; +- } +- +- if ((socket_type == PY_SSL_SERVER) && +- ((key_file == NULL) || (cert_file == NULL))) { +- errstr = ERRSTR("Both the key & certificate files " +- "must be specified for server-side operation"); +- goto fail; +- } +- +- PySSL_BEGIN_ALLOW_THREADS +- if (proto_version == PY_SSL_VERSION_TLS1) +- self->ctx = SSL_CTX_new(TLSv1_method()); /* Set up context */ +- else if (proto_version == PY_SSL_VERSION_SSL3) +- self->ctx = SSL_CTX_new(SSLv3_method()); /* Set up context */ +-#ifndef OPENSSL_NO_SSL2 +- else if (proto_version == PY_SSL_VERSION_SSL2) +- self->ctx = SSL_CTX_new(SSLv2_method()); /* Set up context */ +-#endif +- else if (proto_version == PY_SSL_VERSION_SSL23) +- self->ctx = SSL_CTX_new(SSLv23_method()); /* Set up context */ +- PySSL_END_ALLOW_THREADS +- +- if (self->ctx == NULL) { +- errstr = ERRSTR("Invalid SSL protocol variant specified."); +- goto fail; +- } +- +- if (ciphers != NULL) { +- ret = SSL_CTX_set_cipher_list(self->ctx, ciphers); +- if (ret == 0) { +- errstr = ERRSTR("No cipher can be selected."); +- goto fail; +- } +- } 
+- +- if (certreq != PY_SSL_CERT_NONE) { +- if (cacerts_file == NULL) { +- errstr = ERRSTR("No root certificates specified for " +- "verification of other-side certificates."); +- goto fail; +- } else { +- PySSL_BEGIN_ALLOW_THREADS +- ret = SSL_CTX_load_verify_locations(self->ctx, +- cacerts_file, +- NULL); +- PySSL_END_ALLOW_THREADS +- if (ret != 1) { +- _setSSLError(NULL, 0, __FILE__, __LINE__); +- goto fail; +- } +- } +- } +- if (key_file) { +- PySSL_BEGIN_ALLOW_THREADS +- ret = SSL_CTX_use_PrivateKey_file(self->ctx, key_file, +- SSL_FILETYPE_PEM); +- PySSL_END_ALLOW_THREADS +- if (ret != 1) { +- _setSSLError(NULL, ret, __FILE__, __LINE__); +- goto fail; +- } +- +- PySSL_BEGIN_ALLOW_THREADS +- ret = SSL_CTX_use_certificate_chain_file(self->ctx, +- cert_file); +- PySSL_END_ALLOW_THREADS +- if (ret != 1) { +- /* +- fprintf(stderr, "ret is %d, errcode is %lu, %lu, with file \"%s\"\n", +- ret, ERR_peek_error(), ERR_peek_last_error(), cert_file); +- */ +- if (ERR_peek_last_error() != 0) { +- _setSSLError(NULL, ret, __FILE__, __LINE__); +- goto fail; +- } +- } +- } +- +- /* ssl compatibility */ +- SSL_CTX_set_options(self->ctx, +- SSL_OP_ALL & ~SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS); +- +- verification_mode = SSL_VERIFY_NONE; +- if (certreq == PY_SSL_CERT_OPTIONAL) +- verification_mode = SSL_VERIFY_PEER; +- else if (certreq == PY_SSL_CERT_REQUIRED) +- verification_mode = (SSL_VERIFY_PEER | +- SSL_VERIFY_FAIL_IF_NO_PEER_CERT); +- SSL_CTX_set_verify(self->ctx, verification_mode, +- NULL); /* set verify lvl */ +- + PySSL_BEGIN_ALLOW_THREADS +- self->ssl = SSL_new(self->ctx); /* New ssl struct */ ++ self->ssl = SSL_new(ctx); + PySSL_END_ALLOW_THREADS +- SSL_set_fd(self->ssl, Sock->sock_fd); /* Set the socket for SSL */ ++ SSL_set_app_data(self->ssl,self); ++ SSL_set_fd(self->ssl, Py_SAFE_DOWNCAST(sock->sock_fd, SOCKET_T, int)); ++ mode = SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER; + #ifdef SSL_MODE_AUTO_RETRY +- SSL_set_mode(self->ssl, SSL_MODE_AUTO_RETRY); ++ mode |= SSL_MODE_AUTO_RETRY; ++#endif ++ SSL_set_mode(self->ssl, mode); ++ ++#if HAVE_SNI ++ if (server_hostname != NULL) ++ SSL_set_tlsext_host_name(self->ssl, server_hostname); + #endif + + /* If the socket is in non-blocking mode or timeout mode, set the BIO + * to non-blocking mode (blocking is the default) + */ +- if (Sock->sock_timeout >= 0.0) { +- /* Set both the read and write BIO's to non-blocking mode */ ++ if (sock->sock_timeout >= 0.0) { + BIO_set_nbio(SSL_get_rbio(self->ssl), 1); + BIO_set_nbio(SSL_get_wbio(self->ssl), 1); + } +@@ -406,65 +512,31 @@ newPySSLObject(PySocketSockObject *Sock, + SSL_set_accept_state(self->ssl); + PySSL_END_ALLOW_THREADS + +- self->Socket = Sock; ++ self->socket_type = socket_type; ++ self->Socket = sock; + Py_INCREF(self->Socket); ++ self->ssl_sock = PyWeakref_NewRef(ssl_sock, NULL); ++ if (self->ssl_sock == NULL) { ++ Py_DECREF(self); ++ return NULL; ++ } + return self; +- fail: +- if (errstr) +- PyErr_SetString(PySSLErrorObject, errstr); +- Py_DECREF(self); +- return NULL; + } + +-static PyObject * +-PySSL_sslwrap(PyObject *self, PyObject *args) +-{ +- PySocketSockObject *Sock; +- int server_side = 0; +- int verification_mode = PY_SSL_CERT_NONE; +- int protocol = PY_SSL_VERSION_SSL23; +- char *key_file = NULL; +- char *cert_file = NULL; +- char *cacerts_file = NULL; +- char *ciphers = NULL; +- +- if (!PyArg_ParseTuple(args, "O!i|zziizz:sslwrap", +- PySocketModule.Sock_Type, +- &Sock, +- &server_side, +- &key_file, &cert_file, +- &verification_mode, &protocol, +- &cacerts_file, &ciphers)) +- return NULL; +- 
+- /* +- fprintf(stderr, +- "server_side is %d, keyfile %p, certfile %p, verify_mode %d, " +- "protocol %d, certs %p\n", +- server_side, key_file, cert_file, verification_mode, +- protocol, cacerts_file); +- */ +- +- return (PyObject *) newPySSLObject(Sock, key_file, cert_file, +- server_side, verification_mode, +- protocol, cacerts_file, +- ciphers); +-} +- +-PyDoc_STRVAR(ssl_doc, +-"sslwrap(socket, server_side, [keyfile, certfile, certs_mode, protocol,\n" +-" cacertsfile, ciphers]) -> sslobject"); + + /* SSL object methods */ + +-static PyObject *PySSL_SSLdo_handshake(PySSLObject *self) ++static PyObject *PySSL_SSLdo_handshake(PySSLSocket *self) + { + int ret; + int err; + int sockstate, nonblocking; ++ PySocketSockObject *sock = self->Socket; ++ ++ Py_INCREF(sock); + + /* just in case the blocking state of the socket has been changed */ +- nonblocking = (self->Socket->sock_timeout >= 0.0); ++ nonblocking = (sock->sock_timeout >= 0.0); + BIO_set_nbio(SSL_get_rbio(self->ssl), nonblocking); + BIO_set_nbio(SSL_get_wbio(self->ssl), nonblocking); + +@@ -475,60 +547,48 @@ static PyObject *PySSL_SSLdo_handshake(P + ret = SSL_do_handshake(self->ssl); + err = SSL_get_error(self->ssl, ret); + PySSL_END_ALLOW_THREADS +- if(PyErr_CheckSignals()) { +- return NULL; +- } ++ if (PyErr_CheckSignals()) ++ goto error; + if (err == SSL_ERROR_WANT_READ) { +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 0); ++ sockstate = check_socket_and_wait_for_timeout(sock, 0); + } else if (err == SSL_ERROR_WANT_WRITE) { +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 1); ++ sockstate = check_socket_and_wait_for_timeout(sock, 1); + } else { + sockstate = SOCKET_OPERATION_OK; + } + if (sockstate == SOCKET_HAS_TIMED_OUT) { + PyErr_SetString(PySSLErrorObject, + ERRSTR("The handshake operation timed out")); +- return NULL; ++ goto error; + } else if (sockstate == SOCKET_HAS_BEEN_CLOSED) { + PyErr_SetString(PySSLErrorObject, + ERRSTR("Underlying socket has been closed.")); +- return NULL; ++ goto error; + } else if (sockstate == SOCKET_TOO_LARGE_FOR_SELECT) { + PyErr_SetString(PySSLErrorObject, + ERRSTR("Underlying socket too large for select().")); +- return NULL; ++ goto error; + } else if (sockstate == SOCKET_IS_NONBLOCKING) { + break; + } + } while (err == SSL_ERROR_WANT_READ || err == SSL_ERROR_WANT_WRITE); ++ Py_DECREF(sock); + if (ret < 1) + return PySSL_SetError(self, ret, __FILE__, __LINE__); + + if (self->peer_cert) + X509_free (self->peer_cert); + PySSL_BEGIN_ALLOW_THREADS +- if ((self->peer_cert = SSL_get_peer_certificate(self->ssl))) { +- X509_NAME_oneline(X509_get_subject_name(self->peer_cert), +- self->server, X509_NAME_MAXLEN); +- X509_NAME_oneline(X509_get_issuer_name(self->peer_cert), +- self->issuer, X509_NAME_MAXLEN); +- } ++ self->peer_cert = SSL_get_peer_certificate(self->ssl); + PySSL_END_ALLOW_THREADS ++ self->handshake_done = 1; + + Py_INCREF(Py_None); + return Py_None; +-} +- +-static PyObject * +-PySSL_server(PySSLObject *self) +-{ +- return PyString_FromString(self->server); +-} + +-static PyObject * +-PySSL_issuer(PySSLObject *self) +-{ +- return PyString_FromString(self->issuer); ++error: ++ Py_DECREF(sock); ++ return NULL; + } + + static PyObject * +@@ -634,8 +694,8 @@ _create_tuple_for_X509_NAME (X509_NAME * + /* + fprintf(stderr, "RDN level %d, attribute %s: %s\n", + entry->set, +- PyString_AS_STRING(PyTuple_GET_ITEM(attr, 0)), +- PyString_AS_STRING(PyTuple_GET_ITEM(attr, 1))); ++ PyBytes_AS_STRING(PyTuple_GET_ITEM(attr, 0)), ++ 
PyBytes_AS_STRING(PyTuple_GET_ITEM(attr, 1))); + */ + if (attr == NULL) + goto fail1; +@@ -722,21 +782,24 @@ _get_peer_alt_names (X509 *certificate) + /* now decode the altName */ + ext = X509_get_ext(certificate, i); + if(!(method = X509V3_EXT_get(ext))) { +- PyErr_SetString(PySSLErrorObject, +- ERRSTR("No method for internalizing subjectAltName!")); ++ PyErr_SetString ++ (PySSLErrorObject, ++ ERRSTR("No method for internalizing subjectAltName!")); + goto fail; + } + + p = ext->value->data; + if (method->it) +- names = (GENERAL_NAMES*) (ASN1_item_d2i(NULL, +- &p, +- ext->value->length, +- ASN1_ITEM_ptr(method->it))); ++ names = (GENERAL_NAMES*) ++ (ASN1_item_d2i(NULL, ++ &p, ++ ext->value->length, ++ ASN1_ITEM_ptr(method->it))); + else +- names = (GENERAL_NAMES*) (method->d2i(NULL, +- &p, +- ext->value->length)); ++ names = (GENERAL_NAMES*) ++ (method->d2i(NULL, ++ &p, ++ ext->value->length)); + + for(j = 0; j < sk_GENERAL_NAME_num(names); j++) { + +@@ -885,7 +948,127 @@ _get_peer_alt_names (X509 *certificate) + } + + static PyObject * +-_decode_certificate (X509 *certificate, int verbose) { ++_get_aia_uri(X509 *certificate, int nid) { ++ PyObject *lst = NULL, *ostr = NULL; ++ int i, result; ++ AUTHORITY_INFO_ACCESS *info; ++ ++ info = X509_get_ext_d2i(certificate, NID_info_access, NULL, NULL); ++ if ((info == NULL) || (sk_ACCESS_DESCRIPTION_num(info) == 0)) { ++ return Py_None; ++ } ++ ++ if ((lst = PyList_New(0)) == NULL) { ++ goto fail; ++ } ++ ++ for (i = 0; i < sk_ACCESS_DESCRIPTION_num(info); i++) { ++ ACCESS_DESCRIPTION *ad = sk_ACCESS_DESCRIPTION_value(info, i); ++ ASN1_IA5STRING *uri; ++ ++ if ((OBJ_obj2nid(ad->method) != nid) || ++ (ad->location->type != GEN_URI)) { ++ continue; ++ } ++ uri = ad->location->d.uniformResourceIdentifier; ++ ostr = PyUnicode_FromStringAndSize((char *)uri->data, ++ uri->length); ++ if (ostr == NULL) { ++ goto fail; ++ } ++ result = PyList_Append(lst, ostr); ++ Py_DECREF(ostr); ++ if (result < 0) { ++ goto fail; ++ } ++ } ++ AUTHORITY_INFO_ACCESS_free(info); ++ ++ /* convert to tuple or None */ ++ if (PyList_Size(lst) == 0) { ++ Py_DECREF(lst); ++ return Py_None; ++ } else { ++ PyObject *tup; ++ tup = PyList_AsTuple(lst); ++ Py_DECREF(lst); ++ return tup; ++ } ++ ++ fail: ++ AUTHORITY_INFO_ACCESS_free(info); ++ Py_XDECREF(lst); ++ return NULL; ++} ++ ++static PyObject * ++_get_crl_dp(X509 *certificate) { ++ STACK_OF(DIST_POINT) *dps; ++ int i, j, result; ++ PyObject *lst; ++ ++#if OPENSSL_VERSION_NUMBER < 0x10001000L ++ dps = X509_get_ext_d2i(certificate, NID_crl_distribution_points, ++ NULL, NULL); ++#else ++ /* Calls x509v3_cache_extensions and sets up crldp */ ++ X509_check_ca(certificate); ++ dps = certificate->crldp; ++#endif ++ ++ if (dps == NULL) { ++ return Py_None; ++ } ++ ++ if ((lst = PyList_New(0)) == NULL) { ++ return NULL; ++ } ++ ++ for (i=0; i < sk_DIST_POINT_num(dps); i++) { ++ DIST_POINT *dp; ++ STACK_OF(GENERAL_NAME) *gns; ++ ++ dp = sk_DIST_POINT_value(dps, i); ++ gns = dp->distpoint->name.fullname; ++ ++ for (j=0; j < sk_GENERAL_NAME_num(gns); j++) { ++ GENERAL_NAME *gn; ++ ASN1_IA5STRING *uri; ++ PyObject *ouri; ++ ++ gn = sk_GENERAL_NAME_value(gns, j); ++ if (gn->type != GEN_URI) { ++ continue; ++ } ++ uri = gn->d.uniformResourceIdentifier; ++ ouri = PyUnicode_FromStringAndSize((char *)uri->data, ++ uri->length); ++ if (ouri == NULL) { ++ Py_DECREF(lst); ++ return NULL; ++ } ++ result = PyList_Append(lst, ouri); ++ Py_DECREF(ouri); ++ if (result < 0) { ++ Py_DECREF(lst); ++ return NULL; ++ } ++ } ++ } ++ /* convert to tuple or 
None */ ++ if (PyList_Size(lst) == 0) { ++ Py_DECREF(lst); ++ return Py_None; ++ } else { ++ PyObject *tup; ++ tup = PyList_AsTuple(lst); ++ Py_DECREF(lst); ++ return tup; ++ } ++} ++ ++static PyObject * ++_decode_certificate(X509 *certificate) { + + PyObject *retval = NULL; + BIO *biobuf = NULL; +@@ -894,9 +1077,10 @@ _decode_certificate (X509 *certificate, + PyObject *issuer; + PyObject *version; + PyObject *sn_obj; ++ PyObject *obj; + ASN1_INTEGER *serialNumber; + char buf[2048]; +- int len; ++ int len, result; + ASN1_TIME *notBefore, *notAfter; + PyObject *pnotBefore, *pnotAfter; + +@@ -914,65 +1098,62 @@ _decode_certificate (X509 *certificate, + } + Py_DECREF(peer); + +- if (verbose) { +- issuer = _create_tuple_for_X509_NAME( +- X509_get_issuer_name(certificate)); +- if (issuer == NULL) +- goto fail0; +- if (PyDict_SetItemString(retval, (const char *)"issuer", issuer) < 0) { +- Py_DECREF(issuer); +- goto fail0; +- } ++ issuer = _create_tuple_for_X509_NAME( ++ X509_get_issuer_name(certificate)); ++ if (issuer == NULL) ++ goto fail0; ++ if (PyDict_SetItemString(retval, (const char *)"issuer", issuer) < 0) { + Py_DECREF(issuer); ++ goto fail0; ++ } ++ Py_DECREF(issuer); + +- version = PyInt_FromLong(X509_get_version(certificate) + 1); +- if (PyDict_SetItemString(retval, "version", version) < 0) { +- Py_DECREF(version); +- goto fail0; +- } ++ version = PyLong_FromLong(X509_get_version(certificate) + 1); ++ if (version == NULL) ++ goto fail0; ++ if (PyDict_SetItemString(retval, "version", version) < 0) { + Py_DECREF(version); ++ goto fail0; + } ++ Py_DECREF(version); + + /* get a memory buffer */ + biobuf = BIO_new(BIO_s_mem()); + +- if (verbose) { +- +- (void) BIO_reset(biobuf); +- serialNumber = X509_get_serialNumber(certificate); +- /* should not exceed 20 octets, 160 bits, so buf is big enough */ +- i2a_ASN1_INTEGER(biobuf, serialNumber); +- len = BIO_gets(biobuf, buf, sizeof(buf)-1); +- if (len < 0) { +- _setSSLError(NULL, 0, __FILE__, __LINE__); +- goto fail1; +- } +- sn_obj = PyString_FromStringAndSize(buf, len); +- if (sn_obj == NULL) +- goto fail1; +- if (PyDict_SetItemString(retval, "serialNumber", sn_obj) < 0) { +- Py_DECREF(sn_obj); +- goto fail1; +- } ++ (void) BIO_reset(biobuf); ++ serialNumber = X509_get_serialNumber(certificate); ++ /* should not exceed 20 octets, 160 bits, so buf is big enough */ ++ i2a_ASN1_INTEGER(biobuf, serialNumber); ++ len = BIO_gets(biobuf, buf, sizeof(buf)-1); ++ if (len < 0) { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ goto fail1; ++ } ++ sn_obj = PyUnicode_FromStringAndSize(buf, len); ++ if (sn_obj == NULL) ++ goto fail1; ++ if (PyDict_SetItemString(retval, "serialNumber", sn_obj) < 0) { + Py_DECREF(sn_obj); ++ goto fail1; ++ } ++ Py_DECREF(sn_obj); + +- (void) BIO_reset(biobuf); +- notBefore = X509_get_notBefore(certificate); +- ASN1_TIME_print(biobuf, notBefore); +- len = BIO_gets(biobuf, buf, sizeof(buf)-1); +- if (len < 0) { +- _setSSLError(NULL, 0, __FILE__, __LINE__); +- goto fail1; +- } +- pnotBefore = PyString_FromStringAndSize(buf, len); +- if (pnotBefore == NULL) +- goto fail1; +- if (PyDict_SetItemString(retval, "notBefore", pnotBefore) < 0) { +- Py_DECREF(pnotBefore); +- goto fail1; +- } ++ (void) BIO_reset(biobuf); ++ notBefore = X509_get_notBefore(certificate); ++ ASN1_TIME_print(biobuf, notBefore); ++ len = BIO_gets(biobuf, buf, sizeof(buf)-1); ++ if (len < 0) { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ goto fail1; ++ } ++ pnotBefore = PyUnicode_FromStringAndSize(buf, len); ++ if (pnotBefore == NULL) ++ goto fail1; ++ 
if (PyDict_SetItemString(retval, "notBefore", pnotBefore) < 0) { + Py_DECREF(pnotBefore); ++ goto fail1; + } ++ Py_DECREF(pnotBefore); + + (void) BIO_reset(biobuf); + notAfter = X509_get_notAfter(certificate); +@@ -1005,6 +1186,41 @@ _decode_certificate (X509 *certificate, + Py_DECREF(peer_alt_names); + } + ++ /* Authority Information Access: OCSP URIs */ ++ obj = _get_aia_uri(certificate, NID_ad_OCSP); ++ if (obj == NULL) { ++ goto fail1; ++ } else if (obj != Py_None) { ++ result = PyDict_SetItemString(retval, "OCSP", obj); ++ Py_DECREF(obj); ++ if (result < 0) { ++ goto fail1; ++ } ++ } ++ ++ obj = _get_aia_uri(certificate, NID_ad_ca_issuers); ++ if (obj == NULL) { ++ goto fail1; ++ } else if (obj != Py_None) { ++ result = PyDict_SetItemString(retval, "caIssuers", obj); ++ Py_DECREF(obj); ++ if (result < 0) { ++ goto fail1; ++ } ++ } ++ ++ /* CDP (CRL distribution points) */ ++ obj = _get_crl_dp(certificate); ++ if (obj == NULL) { ++ goto fail1; ++ } else if (obj != Py_None) { ++ result = PyDict_SetItemString(retval, "crlDistributionPoints", obj); ++ Py_DECREF(obj); ++ if (result < 0) { ++ goto fail1; ++ } ++ } ++ + BIO_free(biobuf); + return retval; + +@@ -1016,6 +1232,24 @@ _decode_certificate (X509 *certificate, + return NULL; + } + ++static PyObject * ++_certificate_to_der(X509 *certificate) ++{ ++ unsigned char *bytes_buf = NULL; ++ int len; ++ PyObject *retval; ++ ++ bytes_buf = NULL; ++ len = i2d_X509(certificate, &bytes_buf); ++ if (len < 0) { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ return NULL; ++ } ++ /* this is actually an immutable bytes sequence */ ++ retval = PyBytes_FromStringAndSize((const char *) bytes_buf, len); ++ OPENSSL_free(bytes_buf); ++ return retval; ++} + + static PyObject * + PySSL_test_decode_certificate (PyObject *mod, PyObject *args) { +@@ -1024,28 +1258,30 @@ PySSL_test_decode_certificate (PyObject + char *filename = NULL; + X509 *x=NULL; + BIO *cert; +- int verbose = 1; + +- if (!PyArg_ParseTuple(args, "s|i:test_decode_certificate", &filename, &verbose)) ++ if (!PyArg_ParseTuple(args, "s:test_decode_certificate", &filename)) + return NULL; + + if ((cert=BIO_new(BIO_s_file())) == NULL) { +- PyErr_SetString(PySSLErrorObject, "Can't malloc memory to read file"); ++ PyErr_SetString(PySSLErrorObject, ++ "Can't malloc memory to read file"); + goto fail0; + } + + if (BIO_read_filename(cert,filename) <= 0) { +- PyErr_SetString(PySSLErrorObject, "Can't open file"); ++ PyErr_SetString(PySSLErrorObject, ++ "Can't open file"); + goto fail0; + } + + x = PEM_read_bio_X509_AUX(cert,NULL, NULL, NULL); + if (x == NULL) { +- PyErr_SetString(PySSLErrorObject, "Error decoding PEM-encoded file"); ++ PyErr_SetString(PySSLErrorObject, ++ "Error decoding PEM-encoded file"); + goto fail0; + } + +- retval = _decode_certificate(x, verbose); ++ retval = _decode_certificate(x); + X509_free(x); + + fail0: +@@ -1056,10 +1292,8 @@ PySSL_test_decode_certificate (PyObject + + + static PyObject * +-PySSL_peercert(PySSLObject *self, PyObject *args) ++PySSL_peercert(PySSLSocket *self, PyObject *args) + { +- PyObject *retval = NULL; +- int len; + int verification; + PyObject *binary_mode = Py_None; + int b; +@@ -1067,6 +1301,11 @@ PySSL_peercert(PySSLObject *self, PyObje + if (!PyArg_ParseTuple(args, "|O:peer_certificate", &binary_mode)) + return NULL; + ++ if (!self->handshake_done) { ++ PyErr_SetString(PyExc_ValueError, ++ "handshake not done yet"); ++ return NULL; ++ } + if (!self->peer_cert) + Py_RETURN_NONE; + +@@ -1075,26 +1314,13 @@ PySSL_peercert(PySSLObject *self, PyObje + 
return NULL; + if (b) { + /* return cert in DER-encoded format */ +- +- unsigned char *bytes_buf = NULL; +- +- bytes_buf = NULL; +- len = i2d_X509(self->peer_cert, &bytes_buf); +- if (len < 0) { +- PySSL_SetError(self, len, __FILE__, __LINE__); +- return NULL; +- } +- retval = PyString_FromStringAndSize((const char *) bytes_buf, len); +- OPENSSL_free(bytes_buf); +- return retval; +- ++ return _certificate_to_der(self->peer_cert); + } else { +- +- verification = SSL_CTX_get_verify_mode(self->ctx); ++ verification = SSL_CTX_get_verify_mode(SSL_get_SSL_CTX(self->ssl)); + if ((verification & SSL_VERIFY_PEER) == 0) + return PyDict_New(); + else +- return _decode_certificate (self->peer_cert, 0); ++ return _decode_certificate(self->peer_cert); + } + } + +@@ -1110,7 +1336,7 @@ If the optional argument is True, return + peer certificate, or None if no certificate was provided. This will\n\ + return the certificate even if it wasn't validated."); + +-static PyObject *PySSL_cipher (PySSLObject *self) { ++static PyObject *PySSL_cipher (PySSLSocket *self) { + + PyObject *retval, *v; + const SSL_CIPHER *current; +@@ -1137,7 +1363,7 @@ static PyObject *PySSL_cipher (PySSLObje + goto fail0; + PyTuple_SET_ITEM(retval, 0, v); + } +- cipher_protocol = SSL_CIPHER_get_version(current); ++ cipher_protocol = (char *) SSL_CIPHER_get_version(current); + if (cipher_protocol == NULL) { + Py_INCREF(Py_None); + PyTuple_SET_ITEM(retval, 1, Py_None); +@@ -1158,15 +1384,85 @@ static PyObject *PySSL_cipher (PySSLObje + return NULL; + } + +-static void PySSL_dealloc(PySSLObject *self) ++#ifdef OPENSSL_NPN_NEGOTIATED ++static PyObject *PySSL_selected_npn_protocol(PySSLSocket *self) { ++ const unsigned char *out; ++ unsigned int outlen; ++ ++ SSL_get0_next_proto_negotiated(self->ssl, ++ &out, &outlen); ++ ++ if (out == NULL) ++ Py_RETURN_NONE; ++ return PyUnicode_FromStringAndSize((char *) out, outlen); ++} ++#endif ++ ++static PyObject *PySSL_compression(PySSLSocket *self) { ++#ifdef OPENSSL_NO_COMP ++ Py_RETURN_NONE; ++#else ++ const COMP_METHOD *comp_method; ++ const char *short_name; ++ ++ if (self->ssl == NULL) ++ Py_RETURN_NONE; ++ comp_method = SSL_get_current_compression(self->ssl); ++ if (comp_method == NULL || comp_method->type == NID_undef) ++ Py_RETURN_NONE; ++ short_name = OBJ_nid2sn(comp_method->type); ++ if (short_name == NULL) ++ Py_RETURN_NONE; ++ return PyBytes_FromString(short_name); ++#endif ++} ++ ++static PySSLContext *PySSL_get_context(PySSLSocket *self, void *closure) { ++ Py_INCREF(self->ctx); ++ return self->ctx; ++} ++ ++static int PySSL_set_context(PySSLSocket *self, PyObject *value, ++ void *closure) { ++ ++ if (PyObject_TypeCheck(value, &PySSLContext_Type)) { ++#if !HAVE_SNI ++ PyErr_SetString(PyExc_NotImplementedError, "setting a socket's " ++ "context is not supported by your OpenSSL library"); ++ return -1; ++#else ++ Py_INCREF(value); ++ Py_DECREF(self->ctx); ++ self->ctx = (PySSLContext *) value; ++ SSL_set_SSL_CTX(self->ssl, self->ctx->ctx); ++#endif ++ } else { ++ PyErr_SetString(PyExc_TypeError, "The value must be a SSLContext"); ++ return -1; ++ } ++ ++ return 0; ++} ++ ++PyDoc_STRVAR(PySSL_set_context_doc, ++"_setter_context(ctx)\n\ ++\ ++This changes the context associated with the SSLSocket. 
This is typically\n\ ++used from within a callback function set by the set_servername_callback\n\ ++on the SSLContext to change the certificate information associated with the\n\ ++SSLSocket before the cryptographic exchange handshake messages\n"); ++ ++ ++ ++static void PySSL_dealloc(PySSLSocket *self) + { + if (self->peer_cert) /* Possible not to have one? */ + X509_free (self->peer_cert); + if (self->ssl) + SSL_free(self->ssl); +- if (self->ctx) +- SSL_CTX_free(self->ctx); + Py_XDECREF(self->Socket); ++ Py_XDECREF(self->ssl_sock); ++ Py_XDECREF(self->ctx); + PyObject_Del(self); + } + +@@ -1238,16 +1534,21 @@ normal_return: + return rc == 0 ? SOCKET_HAS_TIMED_OUT : SOCKET_OPERATION_OK; + } + +-static PyObject *PySSL_SSLwrite(PySSLObject *self, PyObject *args) ++static PyObject *PySSL_SSLwrite(PySSLSocket *self, PyObject *args) + { + Py_buffer buf; + int len; + int sockstate; + int err; + int nonblocking; ++ PySocketSockObject *sock = self->Socket; ++ ++ Py_INCREF(sock); + +- if (!PyArg_ParseTuple(args, "s*:write", &buf)) ++ if (!PyArg_ParseTuple(args, "s*:write", &buf)) { ++ Py_DECREF(sock); + return NULL; ++ } + + if (buf.len > INT_MAX) { + PyErr_Format(PyExc_OverflowError, +@@ -1256,11 +1557,11 @@ static PyObject *PySSL_SSLwrite(PySSLObj + } + + /* just in case the blocking state of the socket has been changed */ +- nonblocking = (self->Socket->sock_timeout >= 0.0); ++ nonblocking = (sock->sock_timeout >= 0.0); + BIO_set_nbio(SSL_get_rbio(self->ssl), nonblocking); + BIO_set_nbio(SSL_get_wbio(self->ssl), nonblocking); + +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 1); ++ sockstate = check_socket_and_wait_for_timeout(sock, 1); + if (sockstate == SOCKET_HAS_TIMED_OUT) { + PyErr_SetString(PySSLErrorObject, + "The write operation timed out"); +@@ -1283,9 +1584,9 @@ static PyObject *PySSL_SSLwrite(PySSLObj + goto error; + } + if (err == SSL_ERROR_WANT_READ) { +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 0); ++ sockstate = check_socket_and_wait_for_timeout(sock, 0); + } else if (err == SSL_ERROR_WANT_WRITE) { +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 1); ++ sockstate = check_socket_and_wait_for_timeout(sock, 1); + } else { + sockstate = SOCKET_OPERATION_OK; + } +@@ -1302,6 +1603,7 @@ static PyObject *PySSL_SSLwrite(PySSLObj + } + } while (err == SSL_ERROR_WANT_READ || err == SSL_ERROR_WANT_WRITE); + ++ Py_DECREF(sock); + PyBuffer_Release(&buf); + if (len > 0) + return PyInt_FromLong(len); +@@ -1309,6 +1611,7 @@ static PyObject *PySSL_SSLwrite(PySSLObj + return PySSL_SetError(self, len, __FILE__, __LINE__); + + error: ++ Py_DECREF(sock); + PyBuffer_Release(&buf); + return NULL; + } +@@ -1319,7 +1622,7 @@ PyDoc_STRVAR(PySSL_SSLwrite_doc, + Writes the string s into the SSL object. 
Returns the number\n\ + of bytes written."); + +-static PyObject *PySSL_SSLpending(PySSLObject *self) ++static PyObject *PySSL_SSLpending(PySSLSocket *self) + { + int count = 0; + +@@ -1338,23 +1641,46 @@ PyDoc_STRVAR(PySSL_SSLpending_doc, + Returns the number of already decrypted bytes available for read,\n\ + pending on the connection.\n"); + +-static PyObject *PySSL_SSLread(PySSLObject *self, PyObject *args) ++static PyObject *PySSL_SSLread(PySSLSocket *self, PyObject *args) + { +- PyObject *buf; +- int count = 0; +- int len = 1024; ++ PyObject *dest = NULL; ++ Py_buffer buf; ++ char *mem; ++ int len, count; ++ int buf_passed = 0; + int sockstate; + int err; + int nonblocking; ++ PySocketSockObject *sock = self->Socket; + +- if (!PyArg_ParseTuple(args, "|i:read", &len)) +- return NULL; ++ Py_INCREF(sock); + +- if (!(buf = PyString_FromStringAndSize((char *) 0, len))) +- return NULL; ++ buf.obj = NULL; ++ buf.buf = NULL; ++ if (!PyArg_ParseTuple(args, "i|w*:read", &len, &buf)) ++ goto error; ++ ++ if ((buf.buf == NULL) && (buf.obj == NULL)) { ++ dest = PyBytes_FromStringAndSize(NULL, len); ++ if (dest == NULL) ++ goto error; ++ mem = PyBytes_AS_STRING(dest); ++ } ++ else { ++ buf_passed = 1; ++ mem = buf.buf; ++ if (len <= 0 || len > buf.len) { ++ len = (int) buf.len; ++ if (buf.len != len) { ++ PyErr_SetString(PyExc_OverflowError, ++ "maximum length can't fit in a C 'int'"); ++ goto error; ++ } ++ } ++ } + + /* just in case the blocking state of the socket has been changed */ +- nonblocking = (self->Socket->sock_timeout >= 0.0); ++ nonblocking = (sock->sock_timeout >= 0.0); + BIO_set_nbio(SSL_get_rbio(self->ssl), nonblocking); + BIO_set_nbio(SSL_get_wbio(self->ssl), nonblocking); + +@@ -1364,70 +1690,71 @@ static PyObject *PySSL_SSLread(PySSLObje + PySSL_END_ALLOW_THREADS + + if (!count) { +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 0); ++ sockstate = check_socket_and_wait_for_timeout(sock, 0); + if (sockstate == SOCKET_HAS_TIMED_OUT) { + PyErr_SetString(PySSLErrorObject, + "The read operation timed out"); +- Py_DECREF(buf); +- return NULL; ++ goto error; + } else if (sockstate == SOCKET_TOO_LARGE_FOR_SELECT) { + PyErr_SetString(PySSLErrorObject, + "Underlying socket too large for select()."); +- Py_DECREF(buf); +- return NULL; ++ goto error; + } else if (sockstate == SOCKET_HAS_BEEN_CLOSED) { +- if (SSL_get_shutdown(self->ssl) != +- SSL_RECEIVED_SHUTDOWN) +- { +- Py_DECREF(buf); +- PyErr_SetString(PySSLErrorObject, +- "Socket closed without SSL shutdown handshake"); +- return NULL; +- } else { +- /* should contain a zero-length string */ +- _PyString_Resize(&buf, 0); +- return buf; +- } ++ count = 0; ++ goto done; + } + } + do { + PySSL_BEGIN_ALLOW_THREADS +- count = SSL_read(self->ssl, PyString_AsString(buf), len); ++ count = SSL_read(self->ssl, mem, len); + err = SSL_get_error(self->ssl, count); + PySSL_END_ALLOW_THREADS +- if(PyErr_CheckSignals()) { +- Py_DECREF(buf); +- return NULL; +- } ++ if (PyErr_CheckSignals()) ++ goto error; + if (err == SSL_ERROR_WANT_READ) { +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 0); ++ sockstate = check_socket_and_wait_for_timeout(sock, 0); + } else if (err == SSL_ERROR_WANT_WRITE) { +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 1); ++ sockstate = check_socket_and_wait_for_timeout(sock, 1); + } else if ((err == SSL_ERROR_ZERO_RETURN) && + (SSL_get_shutdown(self->ssl) == + SSL_RECEIVED_SHUTDOWN)) + { +- _PyString_Resize(&buf, 0); +- return buf; ++ count = 0; ++ goto done; + } else { + sockstate = 
SOCKET_OPERATION_OK; + } + if (sockstate == SOCKET_HAS_TIMED_OUT) { + PyErr_SetString(PySSLErrorObject, + "The read operation timed out"); +- Py_DECREF(buf); +- return NULL; ++ goto error; + } else if (sockstate == SOCKET_IS_NONBLOCKING) { + break; + } + } while (err == SSL_ERROR_WANT_READ || err == SSL_ERROR_WANT_WRITE); + if (count <= 0) { +- Py_DECREF(buf); +- return PySSL_SetError(self, count, __FILE__, __LINE__); ++ PySSL_SetError(self, count, __FILE__, __LINE__); ++ goto error; ++ } ++ ++done: ++ Py_DECREF(sock); ++ if (!buf_passed) { ++ _PyBytes_Resize(&dest, count); ++ return dest; + } +- if (count != len) +- _PyString_Resize(&buf, count); +- return buf; ++ else { ++ PyBuffer_Release(&buf); ++ return PyLong_FromLong(count); ++ } ++ ++error: ++ Py_DECREF(sock); ++ if (!buf_passed) ++ Py_XDECREF(dest); ++ else ++ PyBuffer_Release(&buf); ++ return NULL; + } + + PyDoc_STRVAR(PySSL_SSLread_doc, +@@ -1435,20 +1762,22 @@ PyDoc_STRVAR(PySSL_SSLread_doc, + \n\ + Read up to len bytes from the SSL socket."); + +-static PyObject *PySSL_SSLshutdown(PySSLObject *self) ++static PyObject *PySSL_SSLshutdown(PySSLSocket *self) + { + int err, ssl_err, sockstate, nonblocking; + int zeros = 0; ++ PySocketSockObject *sock = self->Socket; + + /* Guard against closed socket */ +- if (self->Socket->sock_fd < 0) { +- PyErr_SetString(PySSLErrorObject, +- "Underlying socket has been closed."); ++ if (sock->sock_fd < 0) { ++ _setSSLError("Underlying socket connection gone", ++ PY_SSL_ERROR_NO_SOCKET, __FILE__, __LINE__); + return NULL; + } ++ Py_INCREF(sock); + + /* Just in case the blocking state of the socket has been changed */ +- nonblocking = (self->Socket->sock_timeout >= 0.0); ++ nonblocking = (sock->sock_timeout >= 0.0); + BIO_set_nbio(SSL_get_rbio(self->ssl), nonblocking); + BIO_set_nbio(SSL_get_wbio(self->ssl), nonblocking); + +@@ -1483,9 +1812,9 @@ static PyObject *PySSL_SSLshutdown(PySSL + /* Possibly retry shutdown until timeout or failure */ + ssl_err = SSL_get_error(self->ssl, err); + if (ssl_err == SSL_ERROR_WANT_READ) +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 0); ++ sockstate = check_socket_and_wait_for_timeout(sock, 0); + else if (ssl_err == SSL_ERROR_WANT_WRITE) +- sockstate = check_socket_and_wait_for_timeout(self->Socket, 1); ++ sockstate = check_socket_and_wait_for_timeout(sock, 1); + else + break; + if (sockstate == SOCKET_HAS_TIMED_OUT) { +@@ -1495,24 +1824,29 @@ static PyObject *PySSL_SSLshutdown(PySSL + else + PyErr_SetString(PySSLErrorObject, + "The write operation timed out"); +- return NULL; ++ goto error; + } + else if (sockstate == SOCKET_TOO_LARGE_FOR_SELECT) { + PyErr_SetString(PySSLErrorObject, + "Underlying socket too large for select()."); +- return NULL; ++ goto error; + } + else if (sockstate != SOCKET_OPERATION_OK) + /* Retain the SSL error code */ + break; + } + +- if (err < 0) ++ if (err < 0) { ++ Py_DECREF(sock); + return PySSL_SetError(self, err, __FILE__, __LINE__); +- else { +- Py_INCREF(self->Socket); +- return (PyObject *) (self->Socket); + } ++ else ++ /* It's already INCREF'ed */ ++ return (PyObject *) sock; ++ ++error: ++ Py_DECREF(sock); ++ return NULL; + } + + PyDoc_STRVAR(PySSL_SSLshutdown_doc, +@@ -1521,6 +1855,47 @@ PyDoc_STRVAR(PySSL_SSLshutdown_doc, + Does the SSL shutdown handshake with the remote end, and returns\n\ + the underlying socket object."); + ++#if HAVE_OPENSSL_FINISHED ++static PyObject * ++PySSL_tls_unique_cb(PySSLSocket *self) ++{ ++ PyObject *retval = NULL; ++ char buf[PySSL_CB_MAXLEN]; ++ size_t len; ++ ++ if 
(SSL_session_reused(self->ssl) ^ !self->socket_type) { ++ /* if session is resumed XOR we are the client */ ++ len = SSL_get_finished(self->ssl, buf, PySSL_CB_MAXLEN); ++ } ++ else { ++ /* if a new session XOR we are the server */ ++ len = SSL_get_peer_finished(self->ssl, buf, PySSL_CB_MAXLEN); ++ } ++ ++ /* It cannot be negative in current OpenSSL version as of July 2011 */ ++ if (len == 0) ++ Py_RETURN_NONE; ++ ++ retval = PyBytes_FromStringAndSize(buf, len); ++ ++ return retval; ++} ++ ++PyDoc_STRVAR(PySSL_tls_unique_cb_doc, ++"tls_unique_cb() -> bytes\n\ ++\n\ ++Returns the 'tls-unique' channel binding data, as defined by RFC 5929.\n\ ++\n\ ++If the TLS handshake is not yet complete, None is returned"); ++ ++#endif /* HAVE_OPENSSL_FINISHED */ ++ ++static PyGetSetDef ssl_getsetlist[] = { ++ {"context", (getter) PySSL_get_context, ++ (setter) PySSL_set_context, PySSL_set_context_doc}, ++ {NULL}, /* sentinel */ ++}; ++ + static PyMethodDef PySSLMethods[] = { + {"do_handshake", (PyCFunction)PySSL_SSLdo_handshake, METH_NOARGS}, + {"write", (PyCFunction)PySSL_SSLwrite, METH_VARARGS, +@@ -1529,118 +1904,1786 @@ static PyMethodDef PySSLMethods[] = { + PySSL_SSLread_doc}, + {"pending", (PyCFunction)PySSL_SSLpending, METH_NOARGS, + PySSL_SSLpending_doc}, +- {"server", (PyCFunction)PySSL_server, METH_NOARGS}, +- {"issuer", (PyCFunction)PySSL_issuer, METH_NOARGS}, + {"peer_certificate", (PyCFunction)PySSL_peercert, METH_VARARGS, + PySSL_peercert_doc}, + {"cipher", (PyCFunction)PySSL_cipher, METH_NOARGS}, ++#ifdef OPENSSL_NPN_NEGOTIATED ++ {"selected_npn_protocol", (PyCFunction)PySSL_selected_npn_protocol, METH_NOARGS}, ++#endif ++ {"compression", (PyCFunction)PySSL_compression, METH_NOARGS}, + {"shutdown", (PyCFunction)PySSL_SSLshutdown, METH_NOARGS, + PySSL_SSLshutdown_doc}, ++#if HAVE_OPENSSL_FINISHED ++ {"tls_unique_cb", (PyCFunction)PySSL_tls_unique_cb, METH_NOARGS, ++ PySSL_tls_unique_cb_doc}, ++#endif + {NULL, NULL} + }; + +-static PyObject *PySSL_getattr(PySSLObject *self, char *name) +-{ +- return Py_FindMethod(PySSLMethods, (PyObject *)self, name); +-} +- +-static PyTypeObject PySSL_Type = { ++static PyTypeObject PySSLSocket_Type = { + PyVarObject_HEAD_INIT(NULL, 0) +- "ssl.SSLContext", /*tp_name*/ +- sizeof(PySSLObject), /*tp_basicsize*/ ++ "_ssl._SSLSocket", /*tp_name*/ ++ sizeof(PySSLSocket), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + /* methods */ + (destructor)PySSL_dealloc, /*tp_dealloc*/ + 0, /*tp_print*/ +- (getattrfunc)PySSL_getattr, /*tp_getattr*/ ++ 0, /*tp_getattr*/ + 0, /*tp_setattr*/ +- 0, /*tp_compare*/ ++ 0, /*tp_reserved*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ ++ 0, /*tp_call*/ ++ 0, /*tp_str*/ ++ 0, /*tp_getattro*/ ++ 0, /*tp_setattro*/ ++ 0, /*tp_as_buffer*/ ++ Py_TPFLAGS_DEFAULT, /*tp_flags*/ ++ 0, /*tp_doc*/ ++ 0, /*tp_traverse*/ ++ 0, /*tp_clear*/ ++ 0, /*tp_richcompare*/ ++ 0, /*tp_weaklistoffset*/ ++ 0, /*tp_iter*/ ++ 0, /*tp_iternext*/ ++ PySSLMethods, /*tp_methods*/ ++ 0, /*tp_members*/ ++ ssl_getsetlist, /*tp_getset*/ + }; + +-#ifdef HAVE_OPENSSL_RAND + +-/* helper routines for seeding the SSL PRNG */ ++/* ++ * _SSLContext objects ++ */ ++ + static PyObject * +-PySSL_RAND_add(PyObject *self, PyObject *args) ++context_new(PyTypeObject *type, PyObject *args, PyObject *kwds) + { +- char *buf; +- int len; +- double entropy; ++ char *kwlist[] = {"protocol", NULL}; ++ PySSLContext *self; ++ int proto_version = PY_SSL_VERSION_SSL23; ++ long options; ++ SSL_CTX *ctx = NULL; ++ ++ if 
(!PyArg_ParseTupleAndKeywords( ++ args, kwds, "i:_SSLContext", kwlist, ++ &proto_version)) ++ return NULL; + +- if (!PyArg_ParseTuple(args, "s#d:RAND_add", &buf, &len, &entropy)) ++ PySSL_BEGIN_ALLOW_THREADS ++ if (proto_version == PY_SSL_VERSION_TLS1) ++ ctx = SSL_CTX_new(TLSv1_method()); ++#if HAVE_TLSv1_2 ++ else if (proto_version == PY_SSL_VERSION_TLS1_1) ++ ctx = SSL_CTX_new(TLSv1_1_method()); ++ else if (proto_version == PY_SSL_VERSION_TLS1_2) ++ ctx = SSL_CTX_new(TLSv1_2_method()); ++#endif ++ else if (proto_version == PY_SSL_VERSION_SSL3) ++ ctx = SSL_CTX_new(SSLv3_method()); ++#ifndef OPENSSL_NO_SSL2 ++ else if (proto_version == PY_SSL_VERSION_SSL2) ++ ctx = SSL_CTX_new(SSLv2_method()); ++#endif ++ else if (proto_version == PY_SSL_VERSION_SSL23) ++ ctx = SSL_CTX_new(SSLv23_method()); ++ else ++ proto_version = -1; ++ PySSL_END_ALLOW_THREADS ++ ++ if (proto_version == -1) { ++ PyErr_SetString(PyExc_ValueError, ++ "invalid protocol version"); + return NULL; +- RAND_add(buf, len, entropy); +- Py_INCREF(Py_None); +- return Py_None; ++ } ++ if (ctx == NULL) { ++ PyErr_SetString(PySSLErrorObject, ++ "failed to allocate SSL context"); ++ return NULL; ++ } ++ ++ assert(type != NULL && type->tp_alloc != NULL); ++ self = (PySSLContext *) type->tp_alloc(type, 0); ++ if (self == NULL) { ++ SSL_CTX_free(ctx); ++ return NULL; ++ } ++ self->ctx = ctx; ++#ifdef OPENSSL_NPN_NEGOTIATED ++ self->npn_protocols = NULL; ++#endif ++#ifndef OPENSSL_NO_TLSEXT ++ self->set_hostname = NULL; ++#endif ++ /* Don't check host name by default */ ++ self->check_hostname = 0; ++ /* Defaults */ ++ SSL_CTX_set_verify(self->ctx, SSL_VERIFY_NONE, NULL); ++ options = SSL_OP_ALL & ~SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS; ++ SSL_CTX_set_options(self->ctx, options); ++ ++#ifndef OPENSSL_NO_ECDH ++ /* Allow automatic ECDH curve selection (on OpenSSL 1.0.2+), or use ++ prime256v1 by default. This is Apache mod_ssl's initialization ++ policy, so we should be safe. */ ++#if defined(SSL_CTX_set_ecdh_auto) ++ SSL_CTX_set_ecdh_auto(self->ctx, 1); ++#else ++ { ++ EC_KEY *key = EC_KEY_new_by_curve_name(NID_X9_62_prime256v1); ++ SSL_CTX_set_tmp_ecdh(self->ctx, key); ++ EC_KEY_free(key); ++ } ++#endif ++#endif ++ ++#define SID_CTX "Python" ++ SSL_CTX_set_session_id_context(self->ctx, (const unsigned char *) SID_CTX, ++ sizeof(SID_CTX)); ++#undef SID_CTX ++ ++ return (PyObject *)self; + } + +-PyDoc_STRVAR(PySSL_RAND_add_doc, +-"RAND_add(string, entropy)\n\ +-\n\ +-Mix string into the OpenSSL PRNG state. entropy (a float) is a lower\n\ +-bound on the entropy contained in string. 
See RFC 1750."); ++static int ++context_traverse(PySSLContext *self, visitproc visit, void *arg) ++{ ++#ifndef OPENSSL_NO_TLSEXT ++ Py_VISIT(self->set_hostname); ++#endif ++ return 0; ++} + +-static PyObject * +-PySSL_RAND_status(PyObject *self) ++static int ++context_clear(PySSLContext *self) + { +- return PyInt_FromLong(RAND_status()); ++#ifndef OPENSSL_NO_TLSEXT ++ Py_CLEAR(self->set_hostname); ++#endif ++ return 0; + } + +-PyDoc_STRVAR(PySSL_RAND_status_doc, +-"RAND_status() -> 0 or 1\n\ +-\n\ +-Returns 1 if the OpenSSL PRNG has been seeded with enough data and 0 if not.\n\ +-It is necessary to seed the PRNG with RAND_add() on some platforms before\n\ +-using the ssl() function."); ++static void ++context_dealloc(PySSLContext *self) ++{ ++ context_clear(self); ++ SSL_CTX_free(self->ctx); ++#ifdef OPENSSL_NPN_NEGOTIATED ++ PyMem_Free(self->npn_protocols); ++#endif ++ Py_TYPE(self)->tp_free(self); ++} + + static PyObject * +-PySSL_RAND_egd(PyObject *self, PyObject *arg) ++set_ciphers(PySSLContext *self, PyObject *args) + { +- int bytes; ++ int ret; ++ const char *cipherlist; + +- if (!PyString_Check(arg)) +- return PyErr_Format(PyExc_TypeError, +- "RAND_egd() expected string, found %s", +- Py_TYPE(arg)->tp_name); +- bytes = RAND_egd(PyString_AS_STRING(arg)); +- if (bytes == -1) { ++ if (!PyArg_ParseTuple(args, "s:set_ciphers", &cipherlist)) ++ return NULL; ++ ret = SSL_CTX_set_cipher_list(self->ctx, cipherlist); ++ if (ret == 0) { ++ /* Clearing the error queue is necessary on some OpenSSL versions, ++ otherwise the error will be reported again when another SSL call ++ is done. */ ++ ERR_clear_error(); + PyErr_SetString(PySSLErrorObject, +- "EGD connection failed or EGD did not return " +- "enough data to seed the PRNG"); ++ "No cipher can be selected."); + return NULL; + } +- return PyInt_FromLong(bytes); ++ Py_RETURN_NONE; + } + +-PyDoc_STRVAR(PySSL_RAND_egd_doc, +-"RAND_egd(path) -> bytes\n\ +-\n\ +-Queries the entropy gather daemon (EGD) on the socket named by 'path'.\n\ +-Returns number of bytes read. Raises SSLError if connection to EGD\n\ +-fails or if it does not provide enough data to seed PRNG."); ++#ifdef OPENSSL_NPN_NEGOTIATED ++/* this callback gets passed to SSL_CTX_set_next_protos_advertise_cb */ ++static int ++_advertiseNPN_cb(SSL *s, ++ const unsigned char **data, unsigned int *len, ++ void *args) ++{ ++ PySSLContext *ssl_ctx = (PySSLContext *) args; + +-#endif /* HAVE_OPENSSL_RAND */ ++ if (ssl_ctx->npn_protocols == NULL) { ++ *data = (unsigned char *) ""; ++ *len = 0; ++ } else { ++ *data = (unsigned char *) ssl_ctx->npn_protocols; ++ *len = ssl_ctx->npn_protocols_len; ++ } + +-/* List of functions exported by this module. 
*/ ++ return SSL_TLSEXT_ERR_OK; ++} ++/* this callback gets passed to SSL_CTX_set_next_proto_select_cb */ ++static int ++_selectNPN_cb(SSL *s, ++ unsigned char **out, unsigned char *outlen, ++ const unsigned char *server, unsigned int server_len, ++ void *args) ++{ ++ PySSLContext *ssl_ctx = (PySSLContext *) args; ++ ++ unsigned char *client = (unsigned char *) ssl_ctx->npn_protocols; ++ int client_len; ++ ++ if (client == NULL) { ++ client = (unsigned char *) ""; ++ client_len = 0; ++ } else { ++ client_len = ssl_ctx->npn_protocols_len; ++ } ++ ++ SSL_select_next_proto(out, outlen, ++ server, server_len, ++ client, client_len); ++ ++ return SSL_TLSEXT_ERR_OK; ++} ++#endif ++ ++static PyObject * ++_set_npn_protocols(PySSLContext *self, PyObject *args) ++{ ++#ifdef OPENSSL_NPN_NEGOTIATED ++ Py_buffer protos; ++ ++ if (!PyArg_ParseTuple(args, "s*:set_npn_protocols", &protos)) ++ return NULL; ++ ++ if (self->npn_protocols != NULL) { ++ PyMem_Free(self->npn_protocols); ++ } ++ ++ self->npn_protocols = PyMem_Malloc(protos.len); ++ if (self->npn_protocols == NULL) { ++ PyBuffer_Release(&protos); ++ return PyErr_NoMemory(); ++ } ++ memcpy(self->npn_protocols, protos.buf, protos.len); ++ self->npn_protocols_len = (int) protos.len; ++ ++ /* set both server and client callbacks, because the context can ++ * be used to create both types of sockets */ ++ SSL_CTX_set_next_protos_advertised_cb(self->ctx, ++ _advertiseNPN_cb, ++ self); ++ SSL_CTX_set_next_proto_select_cb(self->ctx, ++ _selectNPN_cb, ++ self); ++ ++ PyBuffer_Release(&protos); ++ Py_RETURN_NONE; ++#else ++ PyErr_SetString(PyExc_NotImplementedError, ++ "The NPN extension requires OpenSSL 1.0.1 or later."); ++ return NULL; ++#endif ++} ++ ++static PyObject * ++get_verify_mode(PySSLContext *self, void *c) ++{ ++ switch (SSL_CTX_get_verify_mode(self->ctx)) { ++ case SSL_VERIFY_NONE: ++ return PyLong_FromLong(PY_SSL_CERT_NONE); ++ case SSL_VERIFY_PEER: ++ return PyLong_FromLong(PY_SSL_CERT_OPTIONAL); ++ case SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT: ++ return PyLong_FromLong(PY_SSL_CERT_REQUIRED); ++ } ++ PyErr_SetString(PySSLErrorObject, ++ "invalid return value from SSL_CTX_get_verify_mode"); ++ return NULL; ++} ++ ++static int ++set_verify_mode(PySSLContext *self, PyObject *arg, void *c) ++{ ++ int n, mode; ++ if (!PyArg_Parse(arg, "i", &n)) ++ return -1; ++ if (n == PY_SSL_CERT_NONE) ++ mode = SSL_VERIFY_NONE; ++ else if (n == PY_SSL_CERT_OPTIONAL) ++ mode = SSL_VERIFY_PEER; ++ else if (n == PY_SSL_CERT_REQUIRED) ++ mode = SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT; ++ else { ++ PyErr_SetString(PyExc_ValueError, ++ "invalid value for verify_mode"); ++ return -1; ++ } ++ if (mode == SSL_VERIFY_NONE && self->check_hostname) { ++ PyErr_SetString(PyExc_ValueError, ++ "Cannot set verify_mode to CERT_NONE when " ++ "check_hostname is enabled."); ++ return -1; ++ } ++ SSL_CTX_set_verify(self->ctx, mode, NULL); ++ return 0; ++} ++ ++#ifdef HAVE_OPENSSL_VERIFY_PARAM ++static PyObject * ++get_verify_flags(PySSLContext *self, void *c) ++{ ++ X509_STORE *store; ++ unsigned long flags; ++ ++ store = SSL_CTX_get_cert_store(self->ctx); ++ flags = X509_VERIFY_PARAM_get_flags(store->param); ++ return PyLong_FromUnsignedLong(flags); ++} ++ ++static int ++set_verify_flags(PySSLContext *self, PyObject *arg, void *c) ++{ ++ X509_STORE *store; ++ unsigned long new_flags, flags, set, clear; ++ ++ if (!PyArg_Parse(arg, "k", &new_flags)) ++ return -1; ++ store = SSL_CTX_get_cert_store(self->ctx); ++ flags = X509_VERIFY_PARAM_get_flags(store->param); 
++ clear = flags & ~new_flags; ++ set = ~flags & new_flags; ++ if (clear) { ++ if (!X509_VERIFY_PARAM_clear_flags(store->param, clear)) { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ return -1; ++ } ++ } ++ if (set) { ++ if (!X509_VERIFY_PARAM_set_flags(store->param, set)) { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ return -1; ++ } ++ } ++ return 0; ++} ++#endif ++ ++static PyObject * ++get_options(PySSLContext *self, void *c) ++{ ++ return PyLong_FromLong(SSL_CTX_get_options(self->ctx)); ++} ++ ++static int ++set_options(PySSLContext *self, PyObject *arg, void *c) ++{ ++ long new_opts, opts, set, clear; ++ if (!PyArg_Parse(arg, "l", &new_opts)) ++ return -1; ++ opts = SSL_CTX_get_options(self->ctx); ++ clear = opts & ~new_opts; ++ set = ~opts & new_opts; ++ if (clear) { ++#ifdef HAVE_SSL_CTX_CLEAR_OPTIONS ++ SSL_CTX_clear_options(self->ctx, clear); ++#else ++ PyErr_SetString(PyExc_ValueError, ++ "can't clear options before OpenSSL 0.9.8m"); ++ return -1; ++#endif ++ } ++ if (set) ++ SSL_CTX_set_options(self->ctx, set); ++ return 0; ++} ++ ++static PyObject * ++get_check_hostname(PySSLContext *self, void *c) ++{ ++ return PyBool_FromLong(self->check_hostname); ++} ++ ++static int ++set_check_hostname(PySSLContext *self, PyObject *arg, void *c) ++{ ++ PyObject *py_check_hostname; ++ int check_hostname; ++ if (!PyArg_Parse(arg, "O", &py_check_hostname)) ++ return -1; ++ ++ check_hostname = PyObject_IsTrue(py_check_hostname); ++ if (check_hostname < 0) ++ return -1; ++ if (check_hostname && ++ SSL_CTX_get_verify_mode(self->ctx) == SSL_VERIFY_NONE) { ++ PyErr_SetString(PyExc_ValueError, ++ "check_hostname needs a SSL context with either " ++ "CERT_OPTIONAL or CERT_REQUIRED"); ++ return -1; ++ } ++ self->check_hostname = check_hostname; ++ return 0; ++} ++ ++ ++typedef struct { ++ PyThreadState *thread_state; ++ PyObject *callable; ++ char *password; ++ int size; ++ int error; ++} _PySSLPasswordInfo; ++ ++static int ++_pwinfo_set(_PySSLPasswordInfo *pw_info, PyObject* password, ++ const char *bad_type_error) ++{ ++ /* Set the password and size fields of a _PySSLPasswordInfo struct ++ from a unicode, bytes, or byte array object. 
++ The password field will be dynamically allocated and must be freed ++ by the caller */ ++ PyObject *password_bytes = NULL; ++ const char *data = NULL; ++ Py_ssize_t size; ++ ++ if (PyUnicode_Check(password)) { ++ password_bytes = PyUnicode_AsEncodedString(password, NULL, NULL); ++ if (!password_bytes) { ++ goto error; ++ } ++ data = PyBytes_AS_STRING(password_bytes); ++ size = PyBytes_GET_SIZE(password_bytes); ++ } else if (PyBytes_Check(password)) { ++ data = PyBytes_AS_STRING(password); ++ size = PyBytes_GET_SIZE(password); ++ } else if (PyByteArray_Check(password)) { ++ data = PyByteArray_AS_STRING(password); ++ size = PyByteArray_GET_SIZE(password); ++ } else { ++ PyErr_SetString(PyExc_TypeError, bad_type_error); ++ goto error; ++ } ++ ++ if (size > (Py_ssize_t)INT_MAX) { ++ PyErr_Format(PyExc_ValueError, ++ "password cannot be longer than %d bytes", INT_MAX); ++ goto error; ++ } ++ ++ PyMem_Free(pw_info->password); ++ pw_info->password = PyMem_Malloc(size); ++ if (!pw_info->password) { ++ PyErr_SetString(PyExc_MemoryError, ++ "unable to allocate password buffer"); ++ goto error; ++ } ++ memcpy(pw_info->password, data, size); ++ pw_info->size = (int)size; ++ ++ Py_XDECREF(password_bytes); ++ return 1; ++ ++error: ++ Py_XDECREF(password_bytes); ++ return 0; ++} ++ ++static int ++_password_callback(char *buf, int size, int rwflag, void *userdata) ++{ ++ _PySSLPasswordInfo *pw_info = (_PySSLPasswordInfo*) userdata; ++ PyObject *fn_ret = NULL; ++ ++ PySSL_END_ALLOW_THREADS_S(pw_info->thread_state); ++ ++ if (pw_info->callable) { ++ fn_ret = PyObject_CallFunctionObjArgs(pw_info->callable, NULL); ++ if (!fn_ret) { ++ /* TODO: It would be nice to move _ctypes_add_traceback() into the ++ core python API, so we could use it to add a frame here */ ++ goto error; ++ } ++ ++ if (!_pwinfo_set(pw_info, fn_ret, ++ "password callback must return a string")) { ++ goto error; ++ } ++ Py_CLEAR(fn_ret); ++ } ++ ++ if (pw_info->size > size) { ++ PyErr_Format(PyExc_ValueError, ++ "password cannot be longer than %d bytes", size); ++ goto error; ++ } ++ ++ PySSL_BEGIN_ALLOW_THREADS_S(pw_info->thread_state); ++ memcpy(buf, pw_info->password, pw_info->size); ++ return pw_info->size; ++ ++error: ++ Py_XDECREF(fn_ret); ++ PySSL_BEGIN_ALLOW_THREADS_S(pw_info->thread_state); ++ pw_info->error = 1; ++ return -1; ++} ++ ++static PyObject * ++load_cert_chain(PySSLContext *self, PyObject *args, PyObject *kwds) ++{ ++ char *kwlist[] = {"certfile", "keyfile", "password", NULL}; ++ PyObject *password = NULL; ++ char *certfile_bytes = NULL, *keyfile_bytes = NULL; ++ pem_password_cb *orig_passwd_cb = self->ctx->default_passwd_callback; ++ void *orig_passwd_userdata = self->ctx->default_passwd_callback_userdata; ++ _PySSLPasswordInfo pw_info = { NULL, NULL, NULL, 0, 0 }; ++ int r; ++ ++ errno = 0; ++ ERR_clear_error(); ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, ++ "et|etO:load_cert_chain", kwlist, ++ Py_FileSystemDefaultEncoding, &certfile_bytes, ++ Py_FileSystemDefaultEncoding, &keyfile_bytes, ++ &password)) ++ return NULL; ++ if (password && password != Py_None) { ++ if (PyCallable_Check(password)) { ++ pw_info.callable = password; ++ } else if (!_pwinfo_set(&pw_info, password, ++ "password should be a string or callable")) { ++ goto error; ++ } ++ SSL_CTX_set_default_passwd_cb(self->ctx, _password_callback); ++ SSL_CTX_set_default_passwd_cb_userdata(self->ctx, &pw_info); ++ } ++ PySSL_BEGIN_ALLOW_THREADS_S(pw_info.thread_state); ++ r = SSL_CTX_use_certificate_chain_file(self->ctx, certfile_bytes); ++ 
PySSL_END_ALLOW_THREADS_S(pw_info.thread_state); ++ if (r != 1) { ++ if (pw_info.error) { ++ ERR_clear_error(); ++ /* the password callback has already set the error information */ ++ } ++ else if (errno != 0) { ++ ERR_clear_error(); ++ PyErr_SetFromErrno(PyExc_IOError); ++ } ++ else { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ } ++ goto error; ++ } ++ PySSL_BEGIN_ALLOW_THREADS_S(pw_info.thread_state); ++ r = SSL_CTX_use_PrivateKey_file(self->ctx, ++ keyfile_bytes ? keyfile_bytes : certfile_bytes, ++ SSL_FILETYPE_PEM); ++ PySSL_END_ALLOW_THREADS_S(pw_info.thread_state); ++ if (r != 1) { ++ if (pw_info.error) { ++ ERR_clear_error(); ++ /* the password callback has already set the error information */ ++ } ++ else if (errno != 0) { ++ ERR_clear_error(); ++ PyErr_SetFromErrno(PyExc_IOError); ++ } ++ else { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ } ++ goto error; ++ } ++ PySSL_BEGIN_ALLOW_THREADS_S(pw_info.thread_state); ++ r = SSL_CTX_check_private_key(self->ctx); ++ PySSL_END_ALLOW_THREADS_S(pw_info.thread_state); ++ if (r != 1) { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ goto error; ++ } ++ SSL_CTX_set_default_passwd_cb(self->ctx, orig_passwd_cb); ++ SSL_CTX_set_default_passwd_cb_userdata(self->ctx, orig_passwd_userdata); ++ PyMem_Free(pw_info.password); ++ Py_RETURN_NONE; ++ ++error: ++ SSL_CTX_set_default_passwd_cb(self->ctx, orig_passwd_cb); ++ SSL_CTX_set_default_passwd_cb_userdata(self->ctx, orig_passwd_userdata); ++ PyMem_Free(pw_info.password); ++ PyMem_Free(keyfile_bytes); ++ PyMem_Free(certfile_bytes); ++ return NULL; ++} ++ ++/* internal helper function, returns -1 on error ++ */ ++static int ++_add_ca_certs(PySSLContext *self, void *data, Py_ssize_t len, ++ int filetype) ++{ ++ BIO *biobuf = NULL; ++ X509_STORE *store; ++ int retval = 0, err, loaded = 0; ++ ++ assert(filetype == SSL_FILETYPE_ASN1 || filetype == SSL_FILETYPE_PEM); ++ ++ if (len <= 0) { ++ PyErr_SetString(PyExc_ValueError, ++ "Empty certificate data"); ++ return -1; ++ } else if (len > INT_MAX) { ++ PyErr_SetString(PyExc_OverflowError, ++ "Certificate data is too long."); ++ return -1; ++ } ++ ++ biobuf = BIO_new_mem_buf(data, (int)len); ++ if (biobuf == NULL) { ++ _setSSLError("Can't allocate buffer", 0, __FILE__, __LINE__); ++ return -1; ++ } ++ ++ store = SSL_CTX_get_cert_store(self->ctx); ++ assert(store != NULL); ++ ++ while (1) { ++ X509 *cert = NULL; ++ int r; ++ ++ if (filetype == SSL_FILETYPE_ASN1) { ++ cert = d2i_X509_bio(biobuf, NULL); ++ } else { ++ cert = PEM_read_bio_X509(biobuf, NULL, ++ self->ctx->default_passwd_callback, ++ self->ctx->default_passwd_callback_userdata); ++ } ++ if (cert == NULL) { ++ break; ++ } ++ r = X509_STORE_add_cert(store, cert); ++ X509_free(cert); ++ if (!r) { ++ err = ERR_peek_last_error(); ++ if ((ERR_GET_LIB(err) == ERR_LIB_X509) && ++ (ERR_GET_REASON(err) == X509_R_CERT_ALREADY_IN_HASH_TABLE)) { ++ /* cert already in hash table, not an error */ ++ ERR_clear_error(); ++ } else { ++ break; ++ } ++ } ++ loaded++; ++ } ++ ++ err = ERR_peek_last_error(); ++ if ((filetype == SSL_FILETYPE_ASN1) && ++ (loaded > 0) && ++ (ERR_GET_LIB(err) == ERR_LIB_ASN1) && ++ (ERR_GET_REASON(err) == ASN1_R_HEADER_TOO_LONG)) { ++ /* EOF ASN1 file, not an error */ ++ ERR_clear_error(); ++ retval = 0; ++ } else if ((filetype == SSL_FILETYPE_PEM) && ++ (loaded > 0) && ++ (ERR_GET_LIB(err) == ERR_LIB_PEM) && ++ (ERR_GET_REASON(err) == PEM_R_NO_START_LINE)) { ++ /* EOF PEM file, not an error */ ++ ERR_clear_error(); ++ retval = 0; ++ } else { ++ _setSSLError(NULL, 0, __FILE__, 
__LINE__); ++ retval = -1; ++ } ++ ++ BIO_free(biobuf); ++ return retval; ++} ++ ++ ++static PyObject * ++load_verify_locations(PySSLContext *self, PyObject *args, PyObject *kwds) ++{ ++ char *kwlist[] = {"cafile", "capath", "cadata", NULL}; ++ PyObject *cadata = NULL, *cafile = NULL, *capath = NULL; ++ PyObject *cafile_bytes = NULL, *capath_bytes = NULL; ++ const char *cafile_buf = NULL, *capath_buf = NULL; ++ int r = 0, ok = 1; ++ ++ errno = 0; ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, ++ "|OOO:load_verify_locations", kwlist, ++ &cafile, &capath, &cadata)) ++ return NULL; ++ ++ if (cafile == Py_None) ++ cafile = NULL; ++ if (capath == Py_None) ++ capath = NULL; ++ if (cadata == Py_None) ++ cadata = NULL; ++ ++ if (cafile == NULL && capath == NULL && cadata == NULL) { ++ PyErr_SetString(PyExc_TypeError, ++ "cafile, capath and cadata cannot be all omitted"); ++ goto error; ++ } ++ ++ if (cafile) { ++ cafile_bytes = PyString_AsEncodedObject( ++ cafile, Py_FileSystemDefaultEncoding, "strict"); ++ if (!cafile_bytes) { ++ goto error; ++ } ++ } ++ if (capath) { ++ capath_bytes = PyString_AsEncodedObject( ++ capath, Py_FileSystemDefaultEncoding, "strict"); ++ if (!capath_bytes) { ++ goto error; ++ } ++ } ++ ++ /* validata cadata type and load cadata */ ++ if (cadata) { ++ Py_buffer buf; ++ PyObject *cadata_ascii = NULL; ++ ++ if (!PyUnicode_Check(cadata) && PyObject_GetBuffer(cadata, &buf, PyBUF_SIMPLE) == 0) { ++ if (!PyBuffer_IsContiguous(&buf, 'C') || buf.ndim > 1) { ++ PyBuffer_Release(&buf); ++ PyErr_SetString(PyExc_TypeError, ++ "cadata should be a contiguous buffer with " ++ "a single dimension"); ++ goto error; ++ } ++ r = _add_ca_certs(self, buf.buf, buf.len, SSL_FILETYPE_ASN1); ++ PyBuffer_Release(&buf); ++ if (r == -1) { ++ goto error; ++ } ++ } else { ++ PyErr_Clear(); ++ cadata_ascii = PyUnicode_AsASCIIString(cadata); ++ if (cadata_ascii == NULL) { ++ PyErr_SetString(PyExc_TypeError, ++ "cadata should be a ASCII string or a " ++ "bytes-like object"); ++ goto error; ++ } ++ r = _add_ca_certs(self, ++ PyBytes_AS_STRING(cadata_ascii), ++ PyBytes_GET_SIZE(cadata_ascii), ++ SSL_FILETYPE_PEM); ++ Py_DECREF(cadata_ascii); ++ if (r == -1) { ++ goto error; ++ } ++ } ++ } ++ ++ /* load cafile or capath */ ++ if (cafile_bytes || capath_bytes) { ++ if (cafile) ++ cafile_buf = PyBytes_AS_STRING(cafile_bytes); ++ if (capath) ++ capath_buf = PyBytes_AS_STRING(capath_bytes); ++ PySSL_BEGIN_ALLOW_THREADS ++ r = SSL_CTX_load_verify_locations( ++ self->ctx, ++ cafile_buf, ++ capath_buf); ++ PySSL_END_ALLOW_THREADS ++ if (r != 1) { ++ ok = 0; ++ if (errno != 0) { ++ ERR_clear_error(); ++ PyErr_SetFromErrno(PyExc_IOError); ++ } ++ else { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ } ++ goto error; ++ } ++ } ++ goto end; ++ ++ error: ++ ok = 0; ++ end: ++ Py_XDECREF(cafile_bytes); ++ Py_XDECREF(capath_bytes); ++ if (ok) { ++ Py_RETURN_NONE; ++ } else { ++ return NULL; ++ } ++} ++ ++static PyObject * ++load_dh_params(PySSLContext *self, PyObject *filepath) ++{ ++ BIO *bio; ++ DH *dh; ++ char *path = PyBytes_AsString(filepath); ++ if (!path) { ++ return NULL; ++ } ++ ++ bio = BIO_new_file(path, "r"); ++ if (bio == NULL) { ++ ERR_clear_error(); ++ PyErr_SetFromErrnoWithFilenameObject(PyExc_IOError, filepath); ++ return NULL; ++ } ++ errno = 0; ++ PySSL_BEGIN_ALLOW_THREADS ++ dh = PEM_read_bio_DHparams(bio, NULL, NULL, NULL); ++ BIO_free(bio); ++ PySSL_END_ALLOW_THREADS ++ if (dh == NULL) { ++ if (errno != 0) { ++ ERR_clear_error(); ++ PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, filepath); 
++ } ++ else { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ } ++ return NULL; ++ } ++ if (SSL_CTX_set_tmp_dh(self->ctx, dh) == 0) ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ DH_free(dh); ++ Py_RETURN_NONE; ++} ++ ++static PyObject * ++context_wrap_socket(PySSLContext *self, PyObject *args, PyObject *kwds) ++{ ++ char *kwlist[] = {"sock", "server_side", "server_hostname", "ssl_sock", NULL}; ++ PySocketSockObject *sock; ++ int server_side = 0; ++ char *hostname = NULL; ++ PyObject *hostname_obj, *ssl_sock = Py_None, *res; ++ ++ /* server_hostname is either None (or absent), or to be encoded ++ using the idna encoding. */ ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O!i|O!O:_wrap_socket", kwlist, ++ PySocketModule.Sock_Type, ++ &sock, &server_side, ++ Py_TYPE(Py_None), &hostname_obj, ++ &ssl_sock)) { ++ PyErr_Clear(); ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O!iet|O:_wrap_socket", kwlist, ++ PySocketModule.Sock_Type, ++ &sock, &server_side, ++ "idna", &hostname, &ssl_sock)) ++ return NULL; ++#if !HAVE_SNI ++ PyMem_Free(hostname); ++ PyErr_SetString(PyExc_ValueError, "server_hostname is not supported " ++ "by your OpenSSL library"); ++ return NULL; ++#endif ++ } ++ ++ res = (PyObject *) newPySSLSocket(self, sock, server_side, ++ hostname, ssl_sock); ++ if (hostname != NULL) ++ PyMem_Free(hostname); ++ return res; ++} ++ ++static PyObject * ++session_stats(PySSLContext *self, PyObject *unused) ++{ ++ int r; ++ PyObject *value, *stats = PyDict_New(); ++ if (!stats) ++ return NULL; ++ ++#define ADD_STATS(SSL_NAME, KEY_NAME) \ ++ value = PyLong_FromLong(SSL_CTX_sess_ ## SSL_NAME (self->ctx)); \ ++ if (value == NULL) \ ++ goto error; \ ++ r = PyDict_SetItemString(stats, KEY_NAME, value); \ ++ Py_DECREF(value); \ ++ if (r < 0) \ ++ goto error; ++ ++ ADD_STATS(number, "number"); ++ ADD_STATS(connect, "connect"); ++ ADD_STATS(connect_good, "connect_good"); ++ ADD_STATS(connect_renegotiate, "connect_renegotiate"); ++ ADD_STATS(accept, "accept"); ++ ADD_STATS(accept_good, "accept_good"); ++ ADD_STATS(accept_renegotiate, "accept_renegotiate"); ++ ADD_STATS(accept, "accept"); ++ ADD_STATS(hits, "hits"); ++ ADD_STATS(misses, "misses"); ++ ADD_STATS(timeouts, "timeouts"); ++ ADD_STATS(cache_full, "cache_full"); ++ ++#undef ADD_STATS ++ ++ return stats; ++ ++error: ++ Py_DECREF(stats); ++ return NULL; ++} ++ ++static PyObject * ++set_default_verify_paths(PySSLContext *self, PyObject *unused) ++{ ++ if (!SSL_CTX_set_default_verify_paths(self->ctx)) { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ return NULL; ++ } ++ Py_RETURN_NONE; ++} ++ ++#ifndef OPENSSL_NO_ECDH ++static PyObject * ++set_ecdh_curve(PySSLContext *self, PyObject *name) ++{ ++ char *name_bytes; ++ int nid; ++ EC_KEY *key; ++ ++ name_bytes = PyBytes_AsString(name); ++ if (!name_bytes) { ++ return NULL; ++ } ++ nid = OBJ_sn2nid(name_bytes); ++ if (nid == 0) { ++ PyErr_Format(PyExc_ValueError, ++ "unknown elliptic curve name %R", name); ++ return NULL; ++ } ++ key = EC_KEY_new_by_curve_name(nid); ++ if (key == NULL) { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ return NULL; ++ } ++ SSL_CTX_set_tmp_ecdh(self->ctx, key); ++ EC_KEY_free(key); ++ Py_RETURN_NONE; ++} ++#endif ++ ++#if HAVE_SNI && !defined(OPENSSL_NO_TLSEXT) ++static int ++_servername_callback(SSL *s, int *al, void *args) ++{ ++ int ret; ++ PySSLContext *ssl_ctx = (PySSLContext *) args; ++ PySSLSocket *ssl; ++ PyObject *servername_o; ++ PyObject *servername_idna; ++ PyObject *result; ++ /* The high-level ssl.SSLSocket object */ ++ PyObject *ssl_socket; ++ 
const char *servername = SSL_get_servername(s, TLSEXT_NAMETYPE_host_name); ++#ifdef WITH_THREAD ++ PyGILState_STATE gstate = PyGILState_Ensure(); ++#endif ++ ++ if (ssl_ctx->set_hostname == NULL) { ++ /* remove race condition in this the call back while if removing the ++ * callback is in progress */ ++#ifdef WITH_THREAD ++ PyGILState_Release(gstate); ++#endif ++ return SSL_TLSEXT_ERR_OK; ++ } ++ ++ ssl = SSL_get_app_data(s); ++ assert(PySSLSocket_Check(ssl)); ++ ssl_socket = PyWeakref_GetObject(ssl->ssl_sock); ++ Py_INCREF(ssl_socket); ++ if (ssl_socket == Py_None) { ++ goto error; ++ } ++ ++ if (servername == NULL) { ++ result = PyObject_CallFunctionObjArgs(ssl_ctx->set_hostname, ssl_socket, ++ Py_None, ssl_ctx, NULL); ++ } ++ else { ++ servername_o = PyBytes_FromString(servername); ++ if (servername_o == NULL) { ++ PyErr_WriteUnraisable((PyObject *) ssl_ctx); ++ goto error; ++ } ++ servername_idna = PyUnicode_FromEncodedObject(servername_o, "idna", NULL); ++ if (servername_idna == NULL) { ++ PyErr_WriteUnraisable(servername_o); ++ Py_DECREF(servername_o); ++ goto error; ++ } ++ Py_DECREF(servername_o); ++ result = PyObject_CallFunctionObjArgs(ssl_ctx->set_hostname, ssl_socket, ++ servername_idna, ssl_ctx, NULL); ++ Py_DECREF(servername_idna); ++ } ++ Py_DECREF(ssl_socket); ++ ++ if (result == NULL) { ++ PyErr_WriteUnraisable(ssl_ctx->set_hostname); ++ *al = SSL_AD_HANDSHAKE_FAILURE; ++ ret = SSL_TLSEXT_ERR_ALERT_FATAL; ++ } ++ else { ++ if (result != Py_None) { ++ *al = (int) PyLong_AsLong(result); ++ if (PyErr_Occurred()) { ++ PyErr_WriteUnraisable(result); ++ *al = SSL_AD_INTERNAL_ERROR; ++ } ++ ret = SSL_TLSEXT_ERR_ALERT_FATAL; ++ } ++ else { ++ ret = SSL_TLSEXT_ERR_OK; ++ } ++ Py_DECREF(result); ++ } ++ ++#ifdef WITH_THREAD ++ PyGILState_Release(gstate); ++#endif ++ return ret; ++ ++error: ++ Py_DECREF(ssl_socket); ++ *al = SSL_AD_INTERNAL_ERROR; ++ ret = SSL_TLSEXT_ERR_ALERT_FATAL; ++#ifdef WITH_THREAD ++ PyGILState_Release(gstate); ++#endif ++ return ret; ++} ++#endif ++ ++PyDoc_STRVAR(PySSL_set_servername_callback_doc, ++"set_servername_callback(method)\n\ ++\n\ ++This sets a callback that will be called when a server name is provided by\n\ ++the SSL/TLS client in the SNI extension.\n\ ++\n\ ++If the argument is None then the callback is disabled. 
The method is called\n\ ++with the SSLSocket, the server name as a string, and the SSLContext object.\n\ ++See RFC 6066 for details of the SNI extension."); ++ ++static PyObject * ++set_servername_callback(PySSLContext *self, PyObject *args) ++{ ++#if HAVE_SNI && !defined(OPENSSL_NO_TLSEXT) ++ PyObject *cb; ++ ++ if (!PyArg_ParseTuple(args, "O", &cb)) ++ return NULL; ++ ++ Py_CLEAR(self->set_hostname); ++ if (cb == Py_None) { ++ SSL_CTX_set_tlsext_servername_callback(self->ctx, NULL); ++ } ++ else { ++ if (!PyCallable_Check(cb)) { ++ SSL_CTX_set_tlsext_servername_callback(self->ctx, NULL); ++ PyErr_SetString(PyExc_TypeError, ++ "not a callable object"); ++ return NULL; ++ } ++ Py_INCREF(cb); ++ self->set_hostname = cb; ++ SSL_CTX_set_tlsext_servername_callback(self->ctx, _servername_callback); ++ SSL_CTX_set_tlsext_servername_arg(self->ctx, self); ++ } ++ Py_RETURN_NONE; ++#else ++ PyErr_SetString(PyExc_NotImplementedError, ++ "The TLS extension servername callback, " ++ "SSL_CTX_set_tlsext_servername_callback, " ++ "is not in the current OpenSSL library."); ++ return NULL; ++#endif ++} ++ ++PyDoc_STRVAR(PySSL_get_stats_doc, ++"cert_store_stats() -> {'crl': int, 'x509_ca': int, 'x509': int}\n\ ++\n\ ++Returns quantities of loaded X.509 certificates. X.509 certificates with a\n\ ++CA extension and certificate revocation lists inside the context's cert\n\ ++store.\n\ ++NOTE: Certificates in a capath directory aren't loaded unless they have\n\ ++been used at least once."); ++ ++static PyObject * ++cert_store_stats(PySSLContext *self) ++{ ++ X509_STORE *store; ++ X509_OBJECT *obj; ++ int x509 = 0, crl = 0, pkey = 0, ca = 0, i; ++ ++ store = SSL_CTX_get_cert_store(self->ctx); ++ for (i = 0; i < sk_X509_OBJECT_num(store->objs); i++) { ++ obj = sk_X509_OBJECT_value(store->objs, i); ++ switch (obj->type) { ++ case X509_LU_X509: ++ x509++; ++ if (X509_check_ca(obj->data.x509)) { ++ ca++; ++ } ++ break; ++ case X509_LU_CRL: ++ crl++; ++ break; ++ case X509_LU_PKEY: ++ pkey++; ++ break; ++ default: ++ /* Ignore X509_LU_FAIL, X509_LU_RETRY, X509_LU_PKEY. ++ * As far as I can tell they are internal states and never ++ * stored in a cert store */ ++ break; ++ } ++ } ++ return Py_BuildValue("{sisisi}", "x509", x509, "crl", crl, ++ "x509_ca", ca); ++} ++ ++PyDoc_STRVAR(PySSL_get_ca_certs_doc, ++"get_ca_certs(binary_form=False) -> list of loaded certificate\n\ ++\n\ ++Returns a list of dicts with information of loaded CA certs. 
If the\n\ ++optional argument is True, returns a DER-encoded copy of the CA certificate.\n\ ++NOTE: Certificates in a capath directory aren't loaded unless they have\n\ ++been used at least once."); ++ ++static PyObject * ++get_ca_certs(PySSLContext *self, PyObject *args, PyObject *kwds) ++{ ++ char *kwlist[] = {"binary_form", NULL}; ++ X509_STORE *store; ++ PyObject *ci = NULL, *rlist = NULL, *py_binary_mode = Py_False; ++ int i; ++ int binary_mode = 0; ++ ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O:get_ca_certs", ++ kwlist, &py_binary_mode)) { ++ return NULL; ++ } ++ binary_mode = PyObject_IsTrue(py_binary_mode); ++ if (binary_mode < 0) { ++ return NULL; ++ } ++ ++ if ((rlist = PyList_New(0)) == NULL) { ++ return NULL; ++ } ++ ++ store = SSL_CTX_get_cert_store(self->ctx); ++ for (i = 0; i < sk_X509_OBJECT_num(store->objs); i++) { ++ X509_OBJECT *obj; ++ X509 *cert; ++ ++ obj = sk_X509_OBJECT_value(store->objs, i); ++ if (obj->type != X509_LU_X509) { ++ /* not a x509 cert */ ++ continue; ++ } ++ /* CA for any purpose */ ++ cert = obj->data.x509; ++ if (!X509_check_ca(cert)) { ++ continue; ++ } ++ if (binary_mode) { ++ ci = _certificate_to_der(cert); ++ } else { ++ ci = _decode_certificate(cert); ++ } ++ if (ci == NULL) { ++ goto error; ++ } ++ if (PyList_Append(rlist, ci) == -1) { ++ goto error; ++ } ++ Py_CLEAR(ci); ++ } ++ return rlist; ++ ++ error: ++ Py_XDECREF(ci); ++ Py_XDECREF(rlist); ++ return NULL; ++} ++ ++ ++static PyGetSetDef context_getsetlist[] = { ++ {"check_hostname", (getter) get_check_hostname, ++ (setter) set_check_hostname, NULL}, ++ {"options", (getter) get_options, ++ (setter) set_options, NULL}, ++#ifdef HAVE_OPENSSL_VERIFY_PARAM ++ {"verify_flags", (getter) get_verify_flags, ++ (setter) set_verify_flags, NULL}, ++#endif ++ {"verify_mode", (getter) get_verify_mode, ++ (setter) set_verify_mode, NULL}, ++ {NULL}, /* sentinel */ ++}; ++ ++static struct PyMethodDef context_methods[] = { ++ {"_wrap_socket", (PyCFunction) context_wrap_socket, ++ METH_VARARGS | METH_KEYWORDS, NULL}, ++ {"set_ciphers", (PyCFunction) set_ciphers, ++ METH_VARARGS, NULL}, ++ {"_set_npn_protocols", (PyCFunction) _set_npn_protocols, ++ METH_VARARGS, NULL}, ++ {"load_cert_chain", (PyCFunction) load_cert_chain, ++ METH_VARARGS | METH_KEYWORDS, NULL}, ++ {"load_dh_params", (PyCFunction) load_dh_params, ++ METH_O, NULL}, ++ {"load_verify_locations", (PyCFunction) load_verify_locations, ++ METH_VARARGS | METH_KEYWORDS, NULL}, ++ {"session_stats", (PyCFunction) session_stats, ++ METH_NOARGS, NULL}, ++ {"set_default_verify_paths", (PyCFunction) set_default_verify_paths, ++ METH_NOARGS, NULL}, ++#ifndef OPENSSL_NO_ECDH ++ {"set_ecdh_curve", (PyCFunction) set_ecdh_curve, ++ METH_O, NULL}, ++#endif ++ {"set_servername_callback", (PyCFunction) set_servername_callback, ++ METH_VARARGS, PySSL_set_servername_callback_doc}, ++ {"cert_store_stats", (PyCFunction) cert_store_stats, ++ METH_NOARGS, PySSL_get_stats_doc}, ++ {"get_ca_certs", (PyCFunction) get_ca_certs, ++ METH_VARARGS | METH_KEYWORDS, PySSL_get_ca_certs_doc}, ++ {NULL, NULL} /* sentinel */ ++}; ++ ++static PyTypeObject PySSLContext_Type = { ++ PyVarObject_HEAD_INIT(NULL, 0) ++ "_ssl._SSLContext", /*tp_name*/ ++ sizeof(PySSLContext), /*tp_basicsize*/ ++ 0, /*tp_itemsize*/ ++ (destructor)context_dealloc, /*tp_dealloc*/ ++ 0, /*tp_print*/ ++ 0, /*tp_getattr*/ ++ 0, /*tp_setattr*/ ++ 0, /*tp_reserved*/ ++ 0, /*tp_repr*/ ++ 0, /*tp_as_number*/ ++ 0, /*tp_as_sequence*/ ++ 0, /*tp_as_mapping*/ ++ 0, /*tp_hash*/ ++ 0, /*tp_call*/ ++ 0, /*tp_str*/ ++ 
0, /*tp_getattro*/ ++ 0, /*tp_setattro*/ ++ 0, /*tp_as_buffer*/ ++ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /*tp_flags*/ ++ 0, /*tp_doc*/ ++ (traverseproc) context_traverse, /*tp_traverse*/ ++ (inquiry) context_clear, /*tp_clear*/ ++ 0, /*tp_richcompare*/ ++ 0, /*tp_weaklistoffset*/ ++ 0, /*tp_iter*/ ++ 0, /*tp_iternext*/ ++ context_methods, /*tp_methods*/ ++ 0, /*tp_members*/ ++ context_getsetlist, /*tp_getset*/ ++ 0, /*tp_base*/ ++ 0, /*tp_dict*/ ++ 0, /*tp_descr_get*/ ++ 0, /*tp_descr_set*/ ++ 0, /*tp_dictoffset*/ ++ 0, /*tp_init*/ ++ 0, /*tp_alloc*/ ++ context_new, /*tp_new*/ ++}; ++ ++ ++ ++#ifdef HAVE_OPENSSL_RAND ++ ++/* helper routines for seeding the SSL PRNG */ ++static PyObject * ++PySSL_RAND_add(PyObject *self, PyObject *args) ++{ ++ char *buf; ++ Py_ssize_t len, written; ++ double entropy; ++ ++ if (!PyArg_ParseTuple(args, "s#d:RAND_add", &buf, &len, &entropy)) ++ return NULL; ++ do { ++ if (len >= INT_MAX) { ++ written = INT_MAX; ++ } else { ++ written = len; ++ } ++ RAND_add(buf, (int)written, entropy); ++ buf += written; ++ len -= written; ++ } while (len); ++ Py_INCREF(Py_None); ++ return Py_None; ++} ++ ++PyDoc_STRVAR(PySSL_RAND_add_doc, ++"RAND_add(string, entropy)\n\ ++\n\ ++Mix string into the OpenSSL PRNG state. entropy (a float) is a lower\n\ ++bound on the entropy contained in string. See RFC 1750."); ++ ++static PyObject * ++PySSL_RAND_status(PyObject *self) ++{ ++ return PyLong_FromLong(RAND_status()); ++} ++ ++PyDoc_STRVAR(PySSL_RAND_status_doc, ++"RAND_status() -> 0 or 1\n\ ++\n\ ++Returns 1 if the OpenSSL PRNG has been seeded with enough data and 0 if not.\n\ ++It is necessary to seed the PRNG with RAND_add() on some platforms before\n\ ++using the ssl() function."); ++ ++static PyObject * ++PySSL_RAND_egd(PyObject *self, PyObject *arg) ++{ ++ int bytes; ++ ++ if (!PyString_Check(arg)) ++ return PyErr_Format(PyExc_TypeError, ++ "RAND_egd() expected string, found %s", ++ Py_TYPE(arg)->tp_name); ++ bytes = RAND_egd(PyString_AS_STRING(arg)); ++ if (bytes == -1) { ++ PyErr_SetString(PySSLErrorObject, ++ "EGD connection failed or EGD did not return " ++ "enough data to seed the PRNG"); ++ return NULL; ++ } ++ return PyInt_FromLong(bytes); ++} ++ ++PyDoc_STRVAR(PySSL_RAND_egd_doc, ++"RAND_egd(path) -> bytes\n\ ++\n\ ++Queries the entropy gather daemon (EGD) on the socket named by 'path'.\n\ ++Returns number of bytes read. Raises SSLError if connection to EGD\n\ ++fails or if it does not provide enough data to seed PRNG."); ++ ++#endif /* HAVE_OPENSSL_RAND */ ++ ++ ++PyDoc_STRVAR(PySSL_get_default_verify_paths_doc, ++"get_default_verify_paths() -> tuple\n\ ++\n\ ++Return search paths and environment vars that are used by SSLContext's\n\ ++set_default_verify_paths() to load default CAs. 
The values are\n\ ++'cert_file_env', 'cert_file', 'cert_dir_env', 'cert_dir'."); ++ ++static PyObject * ++PySSL_get_default_verify_paths(PyObject *self) ++{ ++ PyObject *ofile_env = NULL; ++ PyObject *ofile = NULL; ++ PyObject *odir_env = NULL; ++ PyObject *odir = NULL; ++ ++#define convert(info, target) { \ ++ const char *tmp = (info); \ ++ target = NULL; \ ++ if (!tmp) { Py_INCREF(Py_None); target = Py_None; } \ ++ else { target = PyBytes_FromString(tmp); } \ ++ if (!target) goto error; \ ++ } while(0) ++ ++ convert(X509_get_default_cert_file_env(), ofile_env); ++ convert(X509_get_default_cert_file(), ofile); ++ convert(X509_get_default_cert_dir_env(), odir_env); ++ convert(X509_get_default_cert_dir(), odir); ++#undef convert ++ ++ return Py_BuildValue("NNNN", ofile_env, ofile, odir_env, odir); ++ ++ error: ++ Py_XDECREF(ofile_env); ++ Py_XDECREF(ofile); ++ Py_XDECREF(odir_env); ++ Py_XDECREF(odir); ++ return NULL; ++} ++ ++static PyObject* ++asn1obj2py(ASN1_OBJECT *obj) ++{ ++ int nid; ++ const char *ln, *sn; ++ char buf[100]; ++ Py_ssize_t buflen; ++ ++ nid = OBJ_obj2nid(obj); ++ if (nid == NID_undef) { ++ PyErr_Format(PyExc_ValueError, "Unknown object"); ++ return NULL; ++ } ++ sn = OBJ_nid2sn(nid); ++ ln = OBJ_nid2ln(nid); ++ buflen = OBJ_obj2txt(buf, sizeof(buf), obj, 1); ++ if (buflen < 0) { ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ return NULL; ++ } ++ if (buflen) { ++ return Py_BuildValue("isss#", nid, sn, ln, buf, buflen); ++ } else { ++ return Py_BuildValue("issO", nid, sn, ln, Py_None); ++ } ++} ++ ++PyDoc_STRVAR(PySSL_txt2obj_doc, ++"txt2obj(txt, name=False) -> (nid, shortname, longname, oid)\n\ ++\n\ ++Lookup NID, short name, long name and OID of an ASN1_OBJECT. By default\n\ ++objects are looked up by OID. With name=True short and long name are also\n\ ++matched."); ++ ++static PyObject* ++PySSL_txt2obj(PyObject *self, PyObject *args, PyObject *kwds) ++{ ++ char *kwlist[] = {"txt", "name", NULL}; ++ PyObject *result = NULL; ++ char *txt; ++ PyObject *pyname = Py_None; ++ int name = 0; ++ ASN1_OBJECT *obj; ++ ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|O:txt2obj", ++ kwlist, &txt, &pyname)) { ++ return NULL; ++ } ++ name = PyObject_IsTrue(pyname); ++ if (name < 0) ++ return NULL; ++ obj = OBJ_txt2obj(txt, name ? 
0 : 1); ++ if (obj == NULL) { ++ PyErr_Format(PyExc_ValueError, "unknown object '%.100s'", txt); ++ return NULL; ++ } ++ result = asn1obj2py(obj); ++ ASN1_OBJECT_free(obj); ++ return result; ++} ++ ++PyDoc_STRVAR(PySSL_nid2obj_doc, ++"nid2obj(nid) -> (nid, shortname, longname, oid)\n\ ++\n\ ++Lookup NID, short name, long name and OID of an ASN1_OBJECT by NID."); ++ ++static PyObject* ++PySSL_nid2obj(PyObject *self, PyObject *args) ++{ ++ PyObject *result = NULL; ++ int nid; ++ ASN1_OBJECT *obj; ++ ++ if (!PyArg_ParseTuple(args, "i:nid2obj", &nid)) { ++ return NULL; ++ } ++ if (nid < NID_undef) { ++ PyErr_SetString(PyExc_ValueError, "NID must be positive."); ++ return NULL; ++ } ++ obj = OBJ_nid2obj(nid); ++ if (obj == NULL) { ++ PyErr_Format(PyExc_ValueError, "unknown NID %i", nid); ++ return NULL; ++ } ++ result = asn1obj2py(obj); ++ ASN1_OBJECT_free(obj); ++ return result; ++} ++ ++#ifdef _MSC_VER ++ ++static PyObject* ++certEncodingType(DWORD encodingType) ++{ ++ static PyObject *x509_asn = NULL; ++ static PyObject *pkcs_7_asn = NULL; ++ ++ if (x509_asn == NULL) { ++ x509_asn = PyString_InternFromString("x509_asn"); ++ if (x509_asn == NULL) ++ return NULL; ++ } ++ if (pkcs_7_asn == NULL) { ++ pkcs_7_asn = PyString_InternFromString("pkcs_7_asn"); ++ if (pkcs_7_asn == NULL) ++ return NULL; ++ } ++ switch(encodingType) { ++ case X509_ASN_ENCODING: ++ Py_INCREF(x509_asn); ++ return x509_asn; ++ case PKCS_7_ASN_ENCODING: ++ Py_INCREF(pkcs_7_asn); ++ return pkcs_7_asn; ++ default: ++ return PyInt_FromLong(encodingType); ++ } ++} ++ ++static PyObject* ++parseKeyUsage(PCCERT_CONTEXT pCertCtx, DWORD flags) ++{ ++ CERT_ENHKEY_USAGE *usage; ++ DWORD size, error, i; ++ PyObject *retval; ++ ++ if (!CertGetEnhancedKeyUsage(pCertCtx, flags, NULL, &size)) { ++ error = GetLastError(); ++ if (error == CRYPT_E_NOT_FOUND) { ++ Py_RETURN_TRUE; ++ } ++ return PyErr_SetFromWindowsErr(error); ++ } ++ ++ usage = (CERT_ENHKEY_USAGE*)PyMem_Malloc(size); ++ if (usage == NULL) { ++ return PyErr_NoMemory(); ++ } ++ ++ /* Now get the actual enhanced usage property */ ++ if (!CertGetEnhancedKeyUsage(pCertCtx, flags, usage, &size)) { ++ PyMem_Free(usage); ++ error = GetLastError(); ++ if (error == CRYPT_E_NOT_FOUND) { ++ Py_RETURN_TRUE; ++ } ++ return PyErr_SetFromWindowsErr(error); ++ } ++ retval = PySet_New(NULL); ++ if (retval == NULL) { ++ goto error; ++ } ++ for (i = 0; i < usage->cUsageIdentifier; ++i) { ++ if (usage->rgpszUsageIdentifier[i]) { ++ PyObject *oid; ++ int err; ++ oid = PyString_FromString(usage->rgpszUsageIdentifier[i]); ++ if (oid == NULL) { ++ Py_CLEAR(retval); ++ goto error; ++ } ++ err = PySet_Add(retval, oid); ++ Py_DECREF(oid); ++ if (err == -1) { ++ Py_CLEAR(retval); ++ goto error; ++ } ++ } ++ } ++ error: ++ PyMem_Free(usage); ++ return retval; ++} ++ ++PyDoc_STRVAR(PySSL_enum_certificates_doc, ++"enum_certificates(store_name) -> []\n\ ++\n\ ++Retrieve certificates from Windows' cert store. store_name may be one of\n\ ++'CA', 'ROOT' or 'MY'. The system may provide more cert storages, too.\n\ ++The function returns a list of (bytes, encoding_type, trust) tuples. The\n\ ++encoding_type flag can be interpreted with X509_ASN_ENCODING or\n\ ++PKCS_7_ASN_ENCODING. 
The trust setting is either a set of OIDs or the\n\ ++boolean True."); ++ ++static PyObject * ++PySSL_enum_certificates(PyObject *self, PyObject *args, PyObject *kwds) ++{ ++ char *kwlist[] = {"store_name", NULL}; ++ char *store_name; ++ HCERTSTORE hStore = NULL; ++ PCCERT_CONTEXT pCertCtx = NULL; ++ PyObject *keyusage = NULL, *cert = NULL, *enc = NULL, *tup = NULL; ++ PyObject *result = NULL; ++ ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "s:enum_certificates", ++ kwlist, &store_name)) { ++ return NULL; ++ } ++ result = PyList_New(0); ++ if (result == NULL) { ++ return NULL; ++ } ++ hStore = CertOpenSystemStore((HCRYPTPROV)NULL, store_name); ++ if (hStore == NULL) { ++ Py_DECREF(result); ++ return PyErr_SetFromWindowsErr(GetLastError()); ++ } ++ ++ while (pCertCtx = CertEnumCertificatesInStore(hStore, pCertCtx)) { ++ cert = PyBytes_FromStringAndSize((const char*)pCertCtx->pbCertEncoded, ++ pCertCtx->cbCertEncoded); ++ if (!cert) { ++ Py_CLEAR(result); ++ break; ++ } ++ if ((enc = certEncodingType(pCertCtx->dwCertEncodingType)) == NULL) { ++ Py_CLEAR(result); ++ break; ++ } ++ keyusage = parseKeyUsage(pCertCtx, CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG); ++ if (keyusage == Py_True) { ++ Py_DECREF(keyusage); ++ keyusage = parseKeyUsage(pCertCtx, CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG); ++ } ++ if (keyusage == NULL) { ++ Py_CLEAR(result); ++ break; ++ } ++ if ((tup = PyTuple_New(3)) == NULL) { ++ Py_CLEAR(result); ++ break; ++ } ++ PyTuple_SET_ITEM(tup, 0, cert); ++ cert = NULL; ++ PyTuple_SET_ITEM(tup, 1, enc); ++ enc = NULL; ++ PyTuple_SET_ITEM(tup, 2, keyusage); ++ keyusage = NULL; ++ if (PyList_Append(result, tup) < 0) { ++ Py_CLEAR(result); ++ break; ++ } ++ Py_CLEAR(tup); ++ } ++ if (pCertCtx) { ++ /* loop ended with an error, need to clean up context manually */ ++ CertFreeCertificateContext(pCertCtx); ++ } ++ ++ /* In error cases cert, enc and tup may not be NULL */ ++ Py_XDECREF(cert); ++ Py_XDECREF(enc); ++ Py_XDECREF(keyusage); ++ Py_XDECREF(tup); ++ ++ if (!CertCloseStore(hStore, 0)) { ++ /* This error case might shadow another exception.*/ ++ Py_XDECREF(result); ++ return PyErr_SetFromWindowsErr(GetLastError()); ++ } ++ return result; ++} ++ ++PyDoc_STRVAR(PySSL_enum_crls_doc, ++"enum_crls(store_name) -> []\n\ ++\n\ ++Retrieve CRLs from Windows' cert store. store_name may be one of\n\ ++'CA', 'ROOT' or 'MY'. The system may provide more cert storages, too.\n\ ++The function returns a list of (bytes, encoding_type) tuples. 
The\n\ ++encoding_type flag can be interpreted with X509_ASN_ENCODING or\n\ ++PKCS_7_ASN_ENCODING."); ++ ++static PyObject * ++PySSL_enum_crls(PyObject *self, PyObject *args, PyObject *kwds) ++{ ++ char *kwlist[] = {"store_name", NULL}; ++ char *store_name; ++ HCERTSTORE hStore = NULL; ++ PCCRL_CONTEXT pCrlCtx = NULL; ++ PyObject *crl = NULL, *enc = NULL, *tup = NULL; ++ PyObject *result = NULL; ++ ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "s:enum_crls", ++ kwlist, &store_name)) { ++ return NULL; ++ } ++ result = PyList_New(0); ++ if (result == NULL) { ++ return NULL; ++ } ++ hStore = CertOpenSystemStore((HCRYPTPROV)NULL, store_name); ++ if (hStore == NULL) { ++ Py_DECREF(result); ++ return PyErr_SetFromWindowsErr(GetLastError()); ++ } ++ ++ while (pCrlCtx = CertEnumCRLsInStore(hStore, pCrlCtx)) { ++ crl = PyBytes_FromStringAndSize((const char*)pCrlCtx->pbCrlEncoded, ++ pCrlCtx->cbCrlEncoded); ++ if (!crl) { ++ Py_CLEAR(result); ++ break; ++ } ++ if ((enc = certEncodingType(pCrlCtx->dwCertEncodingType)) == NULL) { ++ Py_CLEAR(result); ++ break; ++ } ++ if ((tup = PyTuple_New(2)) == NULL) { ++ Py_CLEAR(result); ++ break; ++ } ++ PyTuple_SET_ITEM(tup, 0, crl); ++ crl = NULL; ++ PyTuple_SET_ITEM(tup, 1, enc); ++ enc = NULL; ++ ++ if (PyList_Append(result, tup) < 0) { ++ Py_CLEAR(result); ++ break; ++ } ++ Py_CLEAR(tup); ++ } ++ if (pCrlCtx) { ++ /* loop ended with an error, need to clean up context manually */ ++ CertFreeCRLContext(pCrlCtx); ++ } ++ ++ /* In error cases cert, enc and tup may not be NULL */ ++ Py_XDECREF(crl); ++ Py_XDECREF(enc); ++ Py_XDECREF(tup); ++ ++ if (!CertCloseStore(hStore, 0)) { ++ /* This error case might shadow another exception.*/ ++ Py_XDECREF(result); ++ return PyErr_SetFromWindowsErr(GetLastError()); ++ } ++ return result; ++} ++ ++#endif /* _MSC_VER */ ++ ++/* List of functions exported by this module. */ + + static PyMethodDef PySSL_methods[] = { +- {"sslwrap", PySSL_sslwrap, +- METH_VARARGS, ssl_doc}, + {"_test_decode_cert", PySSL_test_decode_certificate, + METH_VARARGS}, + #ifdef HAVE_OPENSSL_RAND + {"RAND_add", PySSL_RAND_add, METH_VARARGS, + PySSL_RAND_add_doc}, +- {"RAND_egd", PySSL_RAND_egd, METH_O, ++ {"RAND_egd", PySSL_RAND_egd, METH_VARARGS, + PySSL_RAND_egd_doc}, + {"RAND_status", (PyCFunction)PySSL_RAND_status, METH_NOARGS, + PySSL_RAND_status_doc}, + #endif ++ {"get_default_verify_paths", (PyCFunction)PySSL_get_default_verify_paths, ++ METH_NOARGS, PySSL_get_default_verify_paths_doc}, ++#ifdef _MSC_VER ++ {"enum_certificates", (PyCFunction)PySSL_enum_certificates, ++ METH_VARARGS | METH_KEYWORDS, PySSL_enum_certificates_doc}, ++ {"enum_crls", (PyCFunction)PySSL_enum_crls, ++ METH_VARARGS | METH_KEYWORDS, PySSL_enum_crls_doc}, ++#endif ++ {"txt2obj", (PyCFunction)PySSL_txt2obj, ++ METH_VARARGS | METH_KEYWORDS, PySSL_txt2obj_doc}, ++ {"nid2obj", (PyCFunction)PySSL_nid2obj, ++ METH_VARARGS, PySSL_nid2obj_doc}, + {NULL, NULL} /* Sentinel */ + }; + +@@ -1656,16 +3701,17 @@ static unsigned long _ssl_thread_id_func + return PyThread_get_thread_ident(); + } + +-static void _ssl_thread_locking_function (int mode, int n, const char *file, int line) { ++static void _ssl_thread_locking_function ++ (int mode, int n, const char *file, int line) { + /* this function is needed to perform locking on shared data + structures. (Note that OpenSSL uses a number of global data +- structures that will be implicitly shared whenever multiple threads +- use OpenSSL.) Multi-threaded applications will crash at random if +- it is not set. 
+- +- locking_function() must be able to handle up to CRYPTO_num_locks() +- different mutex locks. It sets the n-th lock if mode & CRYPTO_LOCK, and +- releases it otherwise. ++ structures that will be implicitly shared whenever multiple ++ threads use OpenSSL.) Multi-threaded applications will ++ crash at random if it is not set. ++ ++ locking_function() must be able to handle up to ++ CRYPTO_num_locks() different mutex locks. It sets the n-th ++ lock if mode & CRYPTO_LOCK, and releases it otherwise. + + file and line are the file number of the function setting the + lock. They can be useful for debugging. +@@ -1689,10 +3735,11 @@ static int _setup_ssl_threads(void) { + if (_ssl_locks == NULL) { + _ssl_locks_count = CRYPTO_num_locks(); + _ssl_locks = (PyThread_type_lock *) +- malloc(sizeof(PyThread_type_lock) * _ssl_locks_count); ++ PyMem_Malloc(sizeof(PyThread_type_lock) * _ssl_locks_count); + if (_ssl_locks == NULL) + return 0; +- memset(_ssl_locks, 0, sizeof(PyThread_type_lock) * _ssl_locks_count); ++ memset(_ssl_locks, 0, ++ sizeof(PyThread_type_lock) * _ssl_locks_count); + for (i = 0; i < _ssl_locks_count; i++) { + _ssl_locks[i] = PyThread_allocate_lock(); + if (_ssl_locks[i] == NULL) { +@@ -1700,7 +3747,7 @@ static int _setup_ssl_threads(void) { + for (j = 0; j < i; j++) { + PyThread_free_lock(_ssl_locks[j]); + } +- free(_ssl_locks); ++ PyMem_Free(_ssl_locks); + return 0; + } + } +@@ -1716,14 +3763,39 @@ PyDoc_STRVAR(module_doc, + "Implementation module for SSL socket operations. See the socket module\n\ + for documentation."); + ++ ++ ++ ++static void ++parse_openssl_version(unsigned long libver, ++ unsigned int *major, unsigned int *minor, ++ unsigned int *fix, unsigned int *patch, ++ unsigned int *status) ++{ ++ *status = libver & 0xF; ++ libver >>= 4; ++ *patch = libver & 0xFF; ++ libver >>= 8; ++ *fix = libver & 0xFF; ++ libver >>= 8; ++ *minor = libver & 0xFF; ++ libver >>= 8; ++ *major = libver & 0xFF; ++} ++ + PyMODINIT_FUNC + init_ssl(void) + { + PyObject *m, *d, *r; + unsigned long libver; + unsigned int major, minor, fix, patch, status; ++ struct py_ssl_error_code *errcode; ++ struct py_ssl_library_code *libcode; + +- Py_TYPE(&PySSL_Type) = &PyType_Type; ++ if (PyType_Ready(&PySSLContext_Type) < 0) ++ return; ++ if (PyType_Ready(&PySSLSocket_Type) < 0) ++ return; + + m = Py_InitModule3("_ssl", PySSL_methods, module_doc); + if (m == NULL) +@@ -1746,15 +3818,53 @@ init_ssl(void) + OpenSSL_add_all_algorithms(); + + /* Add symbols to module dict */ +- PySSLErrorObject = PyErr_NewException("ssl.SSLError", +- PySocketModule.error, +- NULL); ++ PySSLErrorObject = PyErr_NewExceptionWithDoc( ++ "ssl.SSLError", SSLError_doc, ++ PySocketModule.error, NULL); + if (PySSLErrorObject == NULL) + return; +- if (PyDict_SetItemString(d, "SSLError", PySSLErrorObject) != 0) ++ ((PyTypeObject *)PySSLErrorObject)->tp_str = (reprfunc)SSLError_str; ++ ++ PySSLZeroReturnErrorObject = PyErr_NewExceptionWithDoc( ++ "ssl.SSLZeroReturnError", SSLZeroReturnError_doc, ++ PySSLErrorObject, NULL); ++ PySSLWantReadErrorObject = PyErr_NewExceptionWithDoc( ++ "ssl.SSLWantReadError", SSLWantReadError_doc, ++ PySSLErrorObject, NULL); ++ PySSLWantWriteErrorObject = PyErr_NewExceptionWithDoc( ++ "ssl.SSLWantWriteError", SSLWantWriteError_doc, ++ PySSLErrorObject, NULL); ++ PySSLSyscallErrorObject = PyErr_NewExceptionWithDoc( ++ "ssl.SSLSyscallError", SSLSyscallError_doc, ++ PySSLErrorObject, NULL); ++ PySSLEOFErrorObject = PyErr_NewExceptionWithDoc( ++ "ssl.SSLEOFError", SSLEOFError_doc, ++ PySSLErrorObject, 
NULL); ++ if (PySSLZeroReturnErrorObject == NULL ++ || PySSLWantReadErrorObject == NULL ++ || PySSLWantWriteErrorObject == NULL ++ || PySSLSyscallErrorObject == NULL ++ || PySSLEOFErrorObject == NULL) ++ return; ++ ++ ((PyTypeObject *)PySSLZeroReturnErrorObject)->tp_str = (reprfunc)SSLError_str; ++ ((PyTypeObject *)PySSLWantReadErrorObject)->tp_str = (reprfunc)SSLError_str; ++ ((PyTypeObject *)PySSLWantWriteErrorObject)->tp_str = (reprfunc)SSLError_str; ++ ((PyTypeObject *)PySSLSyscallErrorObject)->tp_str = (reprfunc)SSLError_str; ++ ((PyTypeObject *)PySSLEOFErrorObject)->tp_str = (reprfunc)SSLError_str; ++ ++ if (PyDict_SetItemString(d, "SSLError", PySSLErrorObject) != 0 ++ || PyDict_SetItemString(d, "SSLZeroReturnError", PySSLZeroReturnErrorObject) != 0 ++ || PyDict_SetItemString(d, "SSLWantReadError", PySSLWantReadErrorObject) != 0 ++ || PyDict_SetItemString(d, "SSLWantWriteError", PySSLWantWriteErrorObject) != 0 ++ || PyDict_SetItemString(d, "SSLSyscallError", PySSLSyscallErrorObject) != 0 ++ || PyDict_SetItemString(d, "SSLEOFError", PySSLEOFErrorObject) != 0) ++ return; ++ if (PyDict_SetItemString(d, "_SSLContext", ++ (PyObject *)&PySSLContext_Type) != 0) + return; +- if (PyDict_SetItemString(d, "SSLType", +- (PyObject *)&PySSL_Type) != 0) ++ if (PyDict_SetItemString(d, "_SSLSocket", ++ (PyObject *)&PySSLSocket_Type) != 0) + return; + PyModule_AddIntConstant(m, "SSL_ERROR_ZERO_RETURN", + PY_SSL_ERROR_ZERO_RETURN); +@@ -1782,6 +3892,66 @@ init_ssl(void) + PY_SSL_CERT_OPTIONAL); + PyModule_AddIntConstant(m, "CERT_REQUIRED", + PY_SSL_CERT_REQUIRED); ++ /* CRL verification for verification_flags */ ++ PyModule_AddIntConstant(m, "VERIFY_DEFAULT", ++ 0); ++ PyModule_AddIntConstant(m, "VERIFY_CRL_CHECK_LEAF", ++ X509_V_FLAG_CRL_CHECK); ++ PyModule_AddIntConstant(m, "VERIFY_CRL_CHECK_CHAIN", ++ X509_V_FLAG_CRL_CHECK|X509_V_FLAG_CRL_CHECK_ALL); ++ PyModule_AddIntConstant(m, "VERIFY_X509_STRICT", ++ X509_V_FLAG_X509_STRICT); ++ ++ /* Alert Descriptions from ssl.h */ ++ /* note RESERVED constants no longer intended for use have been removed */ ++ /* http://www.iana.org/assignments/tls-parameters/tls-parameters.xml#tls-parameters-6 */ ++ ++#define ADD_AD_CONSTANT(s) \ ++ PyModule_AddIntConstant(m, "ALERT_DESCRIPTION_"#s, \ ++ SSL_AD_##s) ++ ++ ADD_AD_CONSTANT(CLOSE_NOTIFY); ++ ADD_AD_CONSTANT(UNEXPECTED_MESSAGE); ++ ADD_AD_CONSTANT(BAD_RECORD_MAC); ++ ADD_AD_CONSTANT(RECORD_OVERFLOW); ++ ADD_AD_CONSTANT(DECOMPRESSION_FAILURE); ++ ADD_AD_CONSTANT(HANDSHAKE_FAILURE); ++ ADD_AD_CONSTANT(BAD_CERTIFICATE); ++ ADD_AD_CONSTANT(UNSUPPORTED_CERTIFICATE); ++ ADD_AD_CONSTANT(CERTIFICATE_REVOKED); ++ ADD_AD_CONSTANT(CERTIFICATE_EXPIRED); ++ ADD_AD_CONSTANT(CERTIFICATE_UNKNOWN); ++ ADD_AD_CONSTANT(ILLEGAL_PARAMETER); ++ ADD_AD_CONSTANT(UNKNOWN_CA); ++ ADD_AD_CONSTANT(ACCESS_DENIED); ++ ADD_AD_CONSTANT(DECODE_ERROR); ++ ADD_AD_CONSTANT(DECRYPT_ERROR); ++ ADD_AD_CONSTANT(PROTOCOL_VERSION); ++ ADD_AD_CONSTANT(INSUFFICIENT_SECURITY); ++ ADD_AD_CONSTANT(INTERNAL_ERROR); ++ ADD_AD_CONSTANT(USER_CANCELLED); ++ ADD_AD_CONSTANT(NO_RENEGOTIATION); ++ /* Not all constants are in old OpenSSL versions */ ++#ifdef SSL_AD_UNSUPPORTED_EXTENSION ++ ADD_AD_CONSTANT(UNSUPPORTED_EXTENSION); ++#endif ++#ifdef SSL_AD_CERTIFICATE_UNOBTAINABLE ++ ADD_AD_CONSTANT(CERTIFICATE_UNOBTAINABLE); ++#endif ++#ifdef SSL_AD_UNRECOGNIZED_NAME ++ ADD_AD_CONSTANT(UNRECOGNIZED_NAME); ++#endif ++#ifdef SSL_AD_BAD_CERTIFICATE_STATUS_RESPONSE ++ ADD_AD_CONSTANT(BAD_CERTIFICATE_STATUS_RESPONSE); ++#endif ++#ifdef SSL_AD_BAD_CERTIFICATE_HASH_VALUE ++ 
ADD_AD_CONSTANT(BAD_CERTIFICATE_HASH_VALUE); ++#endif ++#ifdef SSL_AD_UNKNOWN_PSK_IDENTITY ++ ADD_AD_CONSTANT(UNKNOWN_PSK_IDENTITY); ++#endif ++ ++#undef ADD_AD_CONSTANT + + /* protocol versions */ + #ifndef OPENSSL_NO_SSL2 +@@ -1794,6 +3964,109 @@ init_ssl(void) + PY_SSL_VERSION_SSL23); + PyModule_AddIntConstant(m, "PROTOCOL_TLSv1", + PY_SSL_VERSION_TLS1); ++#if HAVE_TLSv1_2 ++ PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_1", ++ PY_SSL_VERSION_TLS1_1); ++ PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_2", ++ PY_SSL_VERSION_TLS1_2); ++#endif ++ ++ /* protocol options */ ++ PyModule_AddIntConstant(m, "OP_ALL", ++ SSL_OP_ALL & ~SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS); ++ PyModule_AddIntConstant(m, "OP_NO_SSLv2", SSL_OP_NO_SSLv2); ++ PyModule_AddIntConstant(m, "OP_NO_SSLv3", SSL_OP_NO_SSLv3); ++ PyModule_AddIntConstant(m, "OP_NO_TLSv1", SSL_OP_NO_TLSv1); ++#if HAVE_TLSv1_2 ++ PyModule_AddIntConstant(m, "OP_NO_TLSv1_1", SSL_OP_NO_TLSv1_1); ++ PyModule_AddIntConstant(m, "OP_NO_TLSv1_2", SSL_OP_NO_TLSv1_2); ++#endif ++ PyModule_AddIntConstant(m, "OP_CIPHER_SERVER_PREFERENCE", ++ SSL_OP_CIPHER_SERVER_PREFERENCE); ++ PyModule_AddIntConstant(m, "OP_SINGLE_DH_USE", SSL_OP_SINGLE_DH_USE); ++#ifdef SSL_OP_SINGLE_ECDH_USE ++ PyModule_AddIntConstant(m, "OP_SINGLE_ECDH_USE", SSL_OP_SINGLE_ECDH_USE); ++#endif ++#ifdef SSL_OP_NO_COMPRESSION ++ PyModule_AddIntConstant(m, "OP_NO_COMPRESSION", ++ SSL_OP_NO_COMPRESSION); ++#endif ++ ++#if HAVE_SNI ++ r = Py_True; ++#else ++ r = Py_False; ++#endif ++ Py_INCREF(r); ++ PyModule_AddObject(m, "HAS_SNI", r); ++ ++#if HAVE_OPENSSL_FINISHED ++ r = Py_True; ++#else ++ r = Py_False; ++#endif ++ Py_INCREF(r); ++ PyModule_AddObject(m, "HAS_TLS_UNIQUE", r); ++ ++#ifdef OPENSSL_NO_ECDH ++ r = Py_False; ++#else ++ r = Py_True; ++#endif ++ Py_INCREF(r); ++ PyModule_AddObject(m, "HAS_ECDH", r); ++ ++#ifdef OPENSSL_NPN_NEGOTIATED ++ r = Py_True; ++#else ++ r = Py_False; ++#endif ++ Py_INCREF(r); ++ PyModule_AddObject(m, "HAS_NPN", r); ++ ++ /* Mappings for error codes */ ++ err_codes_to_names = PyDict_New(); ++ err_names_to_codes = PyDict_New(); ++ if (err_codes_to_names == NULL || err_names_to_codes == NULL) ++ return; ++ errcode = error_codes; ++ while (errcode->mnemonic != NULL) { ++ PyObject *mnemo, *key; ++ mnemo = PyUnicode_FromString(errcode->mnemonic); ++ key = Py_BuildValue("ii", errcode->library, errcode->reason); ++ if (mnemo == NULL || key == NULL) ++ return; ++ if (PyDict_SetItem(err_codes_to_names, key, mnemo)) ++ return; ++ if (PyDict_SetItem(err_names_to_codes, mnemo, key)) ++ return; ++ Py_DECREF(key); ++ Py_DECREF(mnemo); ++ errcode++; ++ } ++ if (PyModule_AddObject(m, "err_codes_to_names", err_codes_to_names)) ++ return; ++ if (PyModule_AddObject(m, "err_names_to_codes", err_names_to_codes)) ++ return; ++ ++ lib_codes_to_names = PyDict_New(); ++ if (lib_codes_to_names == NULL) ++ return; ++ libcode = library_codes; ++ while (libcode->library != NULL) { ++ PyObject *mnemo, *key; ++ key = PyLong_FromLong(libcode->code); ++ mnemo = PyUnicode_FromString(libcode->library); ++ if (key == NULL || mnemo == NULL) ++ return; ++ if (PyDict_SetItem(lib_codes_to_names, key, mnemo)) ++ return; ++ Py_DECREF(key); ++ Py_DECREF(mnemo); ++ libcode++; ++ } ++ if (PyModule_AddObject(m, "lib_codes_to_names", lib_codes_to_names)) ++ return; + + /* OpenSSL version */ + /* SSLeay() gives us the version of the library linked against, +@@ -1805,19 +4078,17 @@ init_ssl(void) + return; + if (PyModule_AddObject(m, "OPENSSL_VERSION_NUMBER", r)) + return; +- status = libver & 0xF; +- libver >>= 4; 
+- patch = libver & 0xFF; +- libver >>= 8; +- fix = libver & 0xFF; +- libver >>= 8; +- minor = libver & 0xFF; +- libver >>= 8; +- major = libver & 0xFF; ++ parse_openssl_version(libver, &major, &minor, &fix, &patch, &status); + r = Py_BuildValue("IIIII", major, minor, fix, patch, status); + if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION_INFO", r)) + return; + r = PyString_FromString(SSLeay_version(SSLEAY_VERSION)); + if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION", r)) + return; ++ ++ libver = OPENSSL_VERSION_NUMBER; ++ parse_openssl_version(libver, &major, &minor, &fix, &patch, &status); ++ r = Py_BuildValue("IIIII", major, minor, fix, patch, status); ++ if (r == NULL || PyModule_AddObject(m, "_OPENSSL_API_VERSION", r)) ++ return; + } +diff --git a/Lib/ssl.py b/Lib/ssl.py +--- a/Lib/ssl.py ++++ b/Lib/ssl.py +@@ -1,8 +1,7 @@ + # Wrapper module for _ssl, providing some additional facilities + # implemented in Python. Written by Bill Janssen. + +-"""\ +-This module provides some more Pythonic support for SSL. ++"""This module provides some more Pythonic support for SSL. + + Object types: + +@@ -53,62 +52,461 @@ PROTOCOL_SSLv2 + PROTOCOL_SSLv3 + PROTOCOL_SSLv23 + PROTOCOL_TLSv1 ++PROTOCOL_TLSv1_1 ++PROTOCOL_TLSv1_2 ++ ++The following constants identify various SSL alert message descriptions as per ++http://www.iana.org/assignments/tls-parameters/tls-parameters.xml#tls-parameters-6 ++ ++ALERT_DESCRIPTION_CLOSE_NOTIFY ++ALERT_DESCRIPTION_UNEXPECTED_MESSAGE ++ALERT_DESCRIPTION_BAD_RECORD_MAC ++ALERT_DESCRIPTION_RECORD_OVERFLOW ++ALERT_DESCRIPTION_DECOMPRESSION_FAILURE ++ALERT_DESCRIPTION_HANDSHAKE_FAILURE ++ALERT_DESCRIPTION_BAD_CERTIFICATE ++ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE ++ALERT_DESCRIPTION_CERTIFICATE_REVOKED ++ALERT_DESCRIPTION_CERTIFICATE_EXPIRED ++ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN ++ALERT_DESCRIPTION_ILLEGAL_PARAMETER ++ALERT_DESCRIPTION_UNKNOWN_CA ++ALERT_DESCRIPTION_ACCESS_DENIED ++ALERT_DESCRIPTION_DECODE_ERROR ++ALERT_DESCRIPTION_DECRYPT_ERROR ++ALERT_DESCRIPTION_PROTOCOL_VERSION ++ALERT_DESCRIPTION_INSUFFICIENT_SECURITY ++ALERT_DESCRIPTION_INTERNAL_ERROR ++ALERT_DESCRIPTION_USER_CANCELLED ++ALERT_DESCRIPTION_NO_RENEGOTIATION ++ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION ++ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE ++ALERT_DESCRIPTION_UNRECOGNIZED_NAME ++ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE ++ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE ++ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY + """ + + import textwrap ++import re ++import sys ++import os ++from collections import namedtuple ++from contextlib import closing + + import _ssl # if we can't import it, let the error propagate + + from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION +-from _ssl import SSLError ++from _ssl import _SSLContext ++from _ssl import ( ++ SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError, ++ SSLSyscallError, SSLEOFError, ++ ) + from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED ++from _ssl import (VERIFY_DEFAULT, VERIFY_CRL_CHECK_LEAF, VERIFY_CRL_CHECK_CHAIN, ++ VERIFY_X509_STRICT) ++from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj + from _ssl import RAND_status, RAND_egd, RAND_add +-from _ssl import \ +- SSL_ERROR_ZERO_RETURN, \ +- SSL_ERROR_WANT_READ, \ +- SSL_ERROR_WANT_WRITE, \ +- SSL_ERROR_WANT_X509_LOOKUP, \ +- SSL_ERROR_SYSCALL, \ +- SSL_ERROR_SSL, \ +- SSL_ERROR_WANT_CONNECT, \ +- SSL_ERROR_EOF, \ +- SSL_ERROR_INVALID_ERROR_CODE +-from _ssl import PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1 
+-_PROTOCOL_NAMES = { +- PROTOCOL_TLSv1: "TLSv1", +- PROTOCOL_SSLv23: "SSLv23", +- PROTOCOL_SSLv3: "SSLv3", +-} ++ ++def _import_symbols(prefix): ++ for n in dir(_ssl): ++ if n.startswith(prefix): ++ globals()[n] = getattr(_ssl, n) ++ ++_import_symbols('OP_') ++_import_symbols('ALERT_DESCRIPTION_') ++_import_symbols('SSL_ERROR_') ++_import_symbols('PROTOCOL_') ++ ++from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN ++ ++from _ssl import _OPENSSL_API_VERSION ++ ++_PROTOCOL_NAMES = {value: name for name, value in globals().items() if name.startswith('PROTOCOL_')} ++ + try: +- from _ssl import PROTOCOL_SSLv2 + _SSLv2_IF_EXISTS = PROTOCOL_SSLv2 +-except ImportError: ++except NameError: + _SSLv2_IF_EXISTS = None +-else: +- _PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2" + + from socket import socket, _fileobject, _delegate_methods, error as socket_error +-from socket import getnameinfo as _getnameinfo +-from socket import SOL_SOCKET, SO_TYPE, SOCK_STREAM ++if sys.platform == "win32": ++ from _ssl import enum_certificates, enum_crls ++ ++from socket import socket, AF_INET, SOCK_STREAM, create_connection ++from socket import SOL_SOCKET, SO_TYPE + import base64 # for DER-to-PEM translation + import errno + ++if _ssl.HAS_TLS_UNIQUE: ++ CHANNEL_BINDING_TYPES = ['tls-unique'] ++else: ++ CHANNEL_BINDING_TYPES = [] ++ + # Disable weak or insecure ciphers by default + # (OpenSSL's default setting is 'DEFAULT:!aNULL:!eNULL') +-_DEFAULT_CIPHERS = 'DEFAULT:!aNULL:!eNULL:!LOW:!EXPORT:!SSLv2' ++# Enable a better set of ciphers by default ++# This list has been explicitly chosen to: ++# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE) ++# * Prefer ECDHE over DHE for better performance ++# * Prefer any AES-GCM over any AES-CBC for better performance and security ++# * Then Use HIGH cipher suites as a fallback ++# * Then Use 3DES as fallback which is secure but slow ++# * Finally use RC4 as a fallback which is problematic but needed for ++# compatibility some times. ++# * Disable NULL authentication, NULL encryption, and MD5 MACs for security ++# reasons ++_DEFAULT_CIPHERS = ( ++ 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' ++ 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:' ++ 'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5' ++) + ++# Restricted and more secure ciphers for the server side ++# This list has been explicitly chosen to: ++# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE) ++# * Prefer ECDHE over DHE for better performance ++# * Prefer any AES-GCM over any AES-CBC for better performance and security ++# * Then Use HIGH cipher suites as a fallback ++# * Then Use 3DES as fallback which is secure but slow ++# * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, and RC4 for ++# security reasons ++_RESTRICTED_SERVER_CIPHERS = ( ++ 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' ++ 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:' ++ '!eNULL:!MD5:!DSS:!RC4' ++) ++ ++ ++class CertificateError(ValueError): ++ pass ++ ++ ++def _dnsname_match(dn, hostname, max_wildcards=1): ++ """Matching according to RFC 6125, section 6.4.3 ++ ++ http://tools.ietf.org/html/rfc6125#section-6.4.3 ++ """ ++ pats = [] ++ if not dn: ++ return False ++ ++ pieces = dn.split(r'.') ++ leftmost = pieces[0] ++ remainder = pieces[1:] ++ ++ wildcards = leftmost.count('*') ++ if wildcards > max_wildcards: ++ # Issue #17980: avoid denials of service by refusing more ++ # than one wildcard per fragment. 
A survery of established ++ # policy among SSL implementations showed it to be a ++ # reasonable choice. ++ raise CertificateError( ++ "too many wildcards in certificate DNS name: " + repr(dn)) ++ ++ # speed up common case w/o wildcards ++ if not wildcards: ++ return dn.lower() == hostname.lower() ++ ++ # RFC 6125, section 6.4.3, subitem 1. ++ # The client SHOULD NOT attempt to match a presented identifier in which ++ # the wildcard character comprises a label other than the left-most label. ++ if leftmost == '*': ++ # When '*' is a fragment by itself, it matches a non-empty dotless ++ # fragment. ++ pats.append('[^.]+') ++ elif leftmost.startswith('xn--') or hostname.startswith('xn--'): ++ # RFC 6125, section 6.4.3, subitem 3. ++ # The client SHOULD NOT attempt to match a presented identifier ++ # where the wildcard character is embedded within an A-label or ++ # U-label of an internationalized domain name. ++ pats.append(re.escape(leftmost)) ++ else: ++ # Otherwise, '*' matches any dotless string, e.g. www* ++ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) ++ ++ # add the remaining fragments, ignore any wildcards ++ for frag in remainder: ++ pats.append(re.escape(frag)) ++ ++ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) ++ return pat.match(hostname) ++ ++ ++def match_hostname(cert, hostname): ++ """Verify that *cert* (in decoded format as returned by ++ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 ++ rules are followed, but IP addresses are not accepted for *hostname*. ++ ++ CertificateError is raised on failure. On success, the function ++ returns nothing. ++ """ ++ if not cert: ++ raise ValueError("empty or no certificate, match_hostname needs a " ++ "SSL socket or SSL context with either " ++ "CERT_OPTIONAL or CERT_REQUIRED") ++ dnsnames = [] ++ san = cert.get('subjectAltName', ()) ++ for key, value in san: ++ if key == 'DNS': ++ if _dnsname_match(value, hostname): ++ return ++ dnsnames.append(value) ++ if not dnsnames: ++ # The subject is only checked when there is no dNSName entry ++ # in subjectAltName ++ for sub in cert.get('subject', ()): ++ for key, value in sub: ++ # XXX according to RFC 2818, the most specific Common Name ++ # must be used. ++ if key == 'commonName': ++ if _dnsname_match(value, hostname): ++ return ++ dnsnames.append(value) ++ if len(dnsnames) > 1: ++ raise CertificateError("hostname %r " ++ "doesn't match either of %s" ++ % (hostname, ', '.join(map(repr, dnsnames)))) ++ elif len(dnsnames) == 1: ++ raise CertificateError("hostname %r " ++ "doesn't match %r" ++ % (hostname, dnsnames[0])) ++ else: ++ raise CertificateError("no appropriate commonName or " ++ "subjectAltName fields were found") ++ ++ ++DefaultVerifyPaths = namedtuple("DefaultVerifyPaths", ++ "cafile capath openssl_cafile_env openssl_cafile openssl_capath_env " ++ "openssl_capath") ++ ++def get_default_verify_paths(): ++ """Return paths to default cafile and capath. 
++ """ ++ parts = _ssl.get_default_verify_paths() ++ ++ # environment vars shadow paths ++ cafile = os.environ.get(parts[0], parts[1]) ++ capath = os.environ.get(parts[2], parts[3]) ++ ++ return DefaultVerifyPaths(cafile if os.path.isfile(cafile) else None, ++ capath if os.path.isdir(capath) else None, ++ *parts) ++ ++ ++class _ASN1Object(namedtuple("_ASN1Object", "nid shortname longname oid")): ++ """ASN.1 object identifier lookup ++ """ ++ __slots__ = () ++ ++ def __new__(cls, oid): ++ return super(_ASN1Object, cls).__new__(cls, *_txt2obj(oid, name=False)) ++ ++ @classmethod ++ def fromnid(cls, nid): ++ """Create _ASN1Object from OpenSSL numeric ID ++ """ ++ return super(_ASN1Object, cls).__new__(cls, *_nid2obj(nid)) ++ ++ @classmethod ++ def fromname(cls, name): ++ """Create _ASN1Object from short name, long name or OID ++ """ ++ return super(_ASN1Object, cls).__new__(cls, *_txt2obj(name, name=True)) ++ ++ ++class Purpose(_ASN1Object): ++ """SSLContext purpose flags with X509v3 Extended Key Usage objects ++ """ ++ ++Purpose.SERVER_AUTH = Purpose('1.3.6.1.5.5.7.3.1') ++Purpose.CLIENT_AUTH = Purpose('1.3.6.1.5.5.7.3.2') ++ ++ ++class SSLContext(_SSLContext): ++ """An SSLContext holds various SSL-related configuration options and ++ data, such as certificates and possibly a private key.""" ++ ++ __slots__ = ('protocol', '__weakref__') ++ _windows_cert_stores = ("CA", "ROOT") ++ ++ def __new__(cls, protocol, *args, **kwargs): ++ self = _SSLContext.__new__(cls, protocol) ++ if protocol != _SSLv2_IF_EXISTS: ++ self.set_ciphers(_DEFAULT_CIPHERS) ++ return self ++ ++ def __init__(self, protocol): ++ self.protocol = protocol ++ ++ def wrap_socket(self, sock, server_side=False, ++ do_handshake_on_connect=True, ++ suppress_ragged_eofs=True, ++ server_hostname=None): ++ return SSLSocket(sock=sock, server_side=server_side, ++ do_handshake_on_connect=do_handshake_on_connect, ++ suppress_ragged_eofs=suppress_ragged_eofs, ++ server_hostname=server_hostname, ++ _context=self) ++ ++ def set_npn_protocols(self, npn_protocols): ++ protos = bytearray() ++ for protocol in npn_protocols: ++ b = protocol.encode('ascii') ++ if len(b) == 0 or len(b) > 255: ++ raise SSLError('NPN protocols must be 1 to 255 in length') ++ protos.append(len(b)) ++ protos.extend(b) ++ ++ self._set_npn_protocols(protos) ++ ++ def _load_windows_store_certs(self, storename, purpose): ++ certs = bytearray() ++ for cert, encoding, trust in enum_certificates(storename): ++ # CA certs are never PKCS#7 encoded ++ if encoding == "x509_asn": ++ if trust is True or purpose.oid in trust: ++ certs.extend(cert) ++ self.load_verify_locations(cadata=certs) ++ return certs ++ ++ def load_default_certs(self, purpose=Purpose.SERVER_AUTH): ++ if not isinstance(purpose, _ASN1Object): ++ raise TypeError(purpose) ++ if sys.platform == "win32": ++ for storename in self._windows_cert_stores: ++ self._load_windows_store_certs(storename, purpose) ++ else: ++ self.set_default_verify_paths() ++ ++ ++def create_default_context(purpose=Purpose.SERVER_AUTH, cafile=None, ++ capath=None, cadata=None): ++ """Create a SSLContext object with default settings. ++ ++ NOTE: The protocol and settings may change anytime without prior ++ deprecation. The values represent a fair balance between maximum ++ compatibility and security. ++ """ ++ if not isinstance(purpose, _ASN1Object): ++ raise TypeError(purpose) ++ ++ context = SSLContext(PROTOCOL_SSLv23) ++ ++ # SSLv2 considered harmful. 
++ context.options |= OP_NO_SSLv2 ++ ++ # SSLv3 has problematic security and is only required for really old ++ # clients such as IE6 on Windows XP ++ context.options |= OP_NO_SSLv3 ++ ++ # disable compression to prevent CRIME attacks (OpenSSL 1.0+) ++ context.options |= getattr(_ssl, "OP_NO_COMPRESSION", 0) ++ ++ if purpose == Purpose.SERVER_AUTH: ++ # verify certs and host name in client mode ++ context.verify_mode = CERT_REQUIRED ++ context.check_hostname = True ++ elif purpose == Purpose.CLIENT_AUTH: ++ # Prefer the server's ciphers by default so that we get stronger ++ # encryption ++ context.options |= getattr(_ssl, "OP_CIPHER_SERVER_PREFERENCE", 0) ++ ++ # Use single use keys in order to improve forward secrecy ++ context.options |= getattr(_ssl, "OP_SINGLE_DH_USE", 0) ++ context.options |= getattr(_ssl, "OP_SINGLE_ECDH_USE", 0) ++ ++ # disallow ciphers with known vulnerabilities ++ context.set_ciphers(_RESTRICTED_SERVER_CIPHERS) ++ ++ if cafile or capath or cadata: ++ context.load_verify_locations(cafile, capath, cadata) ++ elif context.verify_mode != CERT_NONE: ++ # no explicit cafile, capath or cadata but the verify mode is ++ # CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system ++ # root CA certificates for the given purpose. This may fail silently. ++ context.load_default_certs(purpose) ++ return context ++ ++ ++def _create_stdlib_context(protocol=PROTOCOL_SSLv23, cert_reqs=None, ++ check_hostname=False, purpose=Purpose.SERVER_AUTH, ++ certfile=None, keyfile=None, ++ cafile=None, capath=None, cadata=None): ++ """Create a SSLContext object for Python stdlib modules ++ ++ All Python stdlib modules shall use this function to create SSLContext ++ objects in order to keep common settings in one place. The configuration ++ is less restrict than create_default_context()'s to increase backward ++ compatibility. ++ """ ++ if not isinstance(purpose, _ASN1Object): ++ raise TypeError(purpose) ++ ++ context = SSLContext(protocol) ++ # SSLv2 considered harmful. ++ context.options |= OP_NO_SSLv2 ++ ++ if cert_reqs is not None: ++ context.verify_mode = cert_reqs ++ context.check_hostname = check_hostname ++ ++ if keyfile and not certfile: ++ raise ValueError("certfile must be specified") ++ if certfile or keyfile: ++ context.load_cert_chain(certfile, keyfile) ++ ++ # load CA root certs ++ if cafile or capath or cadata: ++ context.load_verify_locations(cafile, capath, cadata) ++ elif context.verify_mode != CERT_NONE: ++ # no explicit cafile, capath or cadata but the verify mode is ++ # CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system ++ # root CA certificates for the given purpose. This may fail silently. 
++ context.load_default_certs(purpose) ++ ++ return context + + class SSLSocket(socket): +- + """This class implements a subtype of socket.socket that wraps + the underlying OS socket in an SSL context when necessary, and + provides read and write methods over that channel.""" + +- def __init__(self, sock, keyfile=None, certfile=None, ++ def __init__(self, sock=None, keyfile=None, certfile=None, + server_side=False, cert_reqs=CERT_NONE, + ssl_version=PROTOCOL_SSLv23, ca_certs=None, + do_handshake_on_connect=True, +- suppress_ragged_eofs=True, ciphers=None): ++ family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None, ++ suppress_ragged_eofs=True, npn_protocols=None, ciphers=None, ++ server_hostname=None, ++ _context=None): ++ ++ if _context: ++ self._context = _context ++ else: ++ if server_side and not certfile: ++ raise ValueError("certfile must be specified for server-side " ++ "operations") ++ if keyfile and not certfile: ++ raise ValueError("certfile must be specified") ++ if certfile and not keyfile: ++ keyfile = certfile ++ self._context = SSLContext(ssl_version) ++ self._context.verify_mode = cert_reqs ++ if ca_certs: ++ self._context.load_verify_locations(ca_certs) ++ if certfile: ++ self._context.load_cert_chain(certfile, keyfile) ++ if npn_protocols: ++ self._context.set_npn_protocols(npn_protocols) ++ if ciphers: ++ self._context.set_ciphers(ciphers) ++ self.keyfile = keyfile ++ self.certfile = certfile ++ self.cert_reqs = cert_reqs ++ self.ssl_version = ssl_version ++ self.ca_certs = ca_certs ++ self.ciphers = ciphers + # Can't use sock.type as other flags (such as SOCK_NONBLOCK) get + # mixed in. + if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM: +@@ -122,98 +520,161 @@ class SSLSocket(socket): + delattr(self, attr) + except AttributeError: + pass ++ if server_side and server_hostname: ++ raise ValueError("server_hostname can only be specified " ++ "in client mode") ++ if self._context.check_hostname and not server_hostname: ++ if HAS_SNI: ++ raise ValueError("check_hostname requires server_hostname") ++ else: ++ raise ValueError("check_hostname requires server_hostname, " ++ "but it's not supported by your OpenSSL " ++ "library") ++ self.server_side = server_side ++ self.server_hostname = server_hostname ++ self.do_handshake_on_connect = do_handshake_on_connect ++ self.suppress_ragged_eofs = suppress_ragged_eofs + +- if ciphers is None and ssl_version != _SSLv2_IF_EXISTS: +- ciphers = _DEFAULT_CIPHERS +- +- if certfile and not keyfile: +- keyfile = certfile +- # see if it's connected ++ # See if we are connected + try: +- socket.getpeername(self) +- except socket_error, e: ++ self.getpeername() ++ except socket_error as e: + if e.errno != errno.ENOTCONN: + raise +- # no, no connection yet +- self._connected = False +- self._sslobj = None ++ connected = False + else: +- # yes, create the SSL object +- self._connected = True +- self._sslobj = _ssl.sslwrap(self._sock, server_side, +- keyfile, certfile, +- cert_reqs, ssl_version, ca_certs, +- ciphers) +- if do_handshake_on_connect: +- self.do_handshake() +- self.keyfile = keyfile +- self.certfile = certfile +- self.cert_reqs = cert_reqs +- self.ssl_version = ssl_version +- self.ca_certs = ca_certs +- self.ciphers = ciphers +- self.do_handshake_on_connect = do_handshake_on_connect +- self.suppress_ragged_eofs = suppress_ragged_eofs ++ connected = True ++ ++ self._closed = False ++ self._sslobj = None ++ self._connected = connected ++ if connected: ++ # create the SSL object ++ try: ++ self._sslobj = 
self._context._wrap_socket(self._sock, server_side, ++ server_hostname, ssl_sock=self) ++ if do_handshake_on_connect: ++ timeout = self.gettimeout() ++ if timeout == 0.0: ++ # non-blocking ++ raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets") ++ self.do_handshake() ++ ++ except (OSError, ValueError): ++ self.close() ++ raise + self._makefile_refs = 0 + +- def read(self, len=1024): ++ @property ++ def context(self): ++ return self._context + ++ @context.setter ++ def context(self, ctx): ++ self._context = ctx ++ self._sslobj.context = ctx ++ ++ def dup(self): ++ raise NotImplemented("Can't dup() %s instances" % ++ self.__class__.__name__) ++ ++ def _checkClosed(self, msg=None): ++ # raise an exception here if you wish to check for spurious closes ++ pass ++ ++ def _check_connected(self): ++ if not self._connected: ++ # getpeername() will raise ENOTCONN if the socket is really ++ # not connected; note that we can be connected even without ++ # _connected being set, e.g. if connect() first returned ++ # EAGAIN. ++ self.getpeername() ++ ++ def read(self, len=0, buffer=None): + """Read up to LEN bytes and return them. + Return zero-length string on EOF.""" + ++ self._checkClosed() ++ if not self._sslobj: ++ raise ValueError("Read on closed or unwrapped SSL socket.") + try: +- return self._sslobj.read(len) +- except SSLError, x: ++ if buffer is not None: ++ v = self._sslobj.read(len, buffer) ++ else: ++ v = self._sslobj.read(len or 1024) ++ return v ++ except SSLError as x: + if x.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs: +- return '' ++ if buffer is not None: ++ return 0 ++ else: ++ return b'' + else: + raise + + def write(self, data): +- + """Write DATA to the underlying SSL channel. Returns + number of bytes of DATA actually transmitted.""" + ++ self._checkClosed() ++ if not self._sslobj: ++ raise ValueError("Write on closed or unwrapped SSL socket.") + return self._sslobj.write(data) + + def getpeercert(self, binary_form=False): +- + """Returns a formatted version of the data in the + certificate provided by the other end of the SSL channel. 
+ Return None if no certificate was provided, {} if a + certificate was provided, but not validated.""" + ++ self._checkClosed() ++ self._check_connected() + return self._sslobj.peer_certificate(binary_form) + ++ def selected_npn_protocol(self): ++ self._checkClosed() ++ if not self._sslobj or not _ssl.HAS_NPN: ++ return None ++ else: ++ return self._sslobj.selected_npn_protocol() ++ + def cipher(self): +- ++ self._checkClosed() + if not self._sslobj: + return None + else: + return self._sslobj.cipher() + ++ def compression(self): ++ self._checkClosed() ++ if not self._sslobj: ++ return None ++ else: ++ return self._sslobj.compression() ++ + def send(self, data, flags=0): ++ self._checkClosed() + if self._sslobj: + if flags != 0: + raise ValueError( + "non-zero flags not allowed in calls to send() on %s" % + self.__class__) +- while True: +- try: +- v = self._sslobj.write(data) +- except SSLError, x: +- if x.args[0] == SSL_ERROR_WANT_READ: +- return 0 +- elif x.args[0] == SSL_ERROR_WANT_WRITE: +- return 0 +- else: +- raise ++ try: ++ v = self._sslobj.write(data) ++ except SSLError as x: ++ if x.args[0] == SSL_ERROR_WANT_READ: ++ return 0 ++ elif x.args[0] == SSL_ERROR_WANT_WRITE: ++ return 0 + else: +- return v ++ raise ++ else: ++ return v + else: + return self._sock.send(data, flags) + + def sendto(self, data, flags_or_addr, addr=None): ++ self._checkClosed() + if self._sslobj: + raise ValueError("sendto not allowed on instances of %s" % + self.__class__) +@@ -222,7 +683,9 @@ class SSLSocket(socket): + else: + return self._sock.sendto(data, flags_or_addr, addr) + ++ + def sendall(self, data, flags=0): ++ self._checkClosed() + if self._sslobj: + if flags != 0: + raise ValueError( +@@ -238,6 +701,7 @@ class SSLSocket(socket): + return socket.sendall(self, data, flags) + + def recv(self, buflen=1024, flags=0): ++ self._checkClosed() + if self._sslobj: + if flags != 0: + raise ValueError( +@@ -248,6 +712,7 @@ class SSLSocket(socket): + return self._sock.recv(buflen, flags) + + def recv_into(self, buffer, nbytes=None, flags=0): ++ self._checkClosed() + if buffer and (nbytes is None): + nbytes = len(buffer) + elif nbytes is None: +@@ -257,14 +722,12 @@ class SSLSocket(socket): + raise ValueError( + "non-zero flags not allowed in calls to recv_into() on %s" % + self.__class__) +- tmp_buffer = self.read(nbytes) +- v = len(tmp_buffer) +- buffer[:v] = tmp_buffer +- return v ++ return self.read(nbytes, buffer) + else: + return self._sock.recv_into(buffer, nbytes, flags) + + def recvfrom(self, buflen=1024, flags=0): ++ self._checkClosed() + if self._sslobj: + raise ValueError("recvfrom not allowed on instances of %s" % + self.__class__) +@@ -272,27 +735,23 @@ class SSLSocket(socket): + return self._sock.recvfrom(buflen, flags) + + def recvfrom_into(self, buffer, nbytes=None, flags=0): ++ self._checkClosed() + if self._sslobj: + raise ValueError("recvfrom_into not allowed on instances of %s" % + self.__class__) + else: + return self._sock.recvfrom_into(buffer, nbytes, flags) + ++ + def pending(self): ++ self._checkClosed() + if self._sslobj: + return self._sslobj.pending() + else: + return 0 + +- def unwrap(self): +- if self._sslobj: +- s = self._sslobj.shutdown() +- self._sslobj = None +- return s +- else: +- raise ValueError("No SSL wrapper around " + str(self)) +- + def shutdown(self, how): ++ self._checkClosed() + self._sslobj = None + socket.shutdown(self, how) + +@@ -303,32 +762,55 @@ class SSLSocket(socket): + else: + self._makefile_refs -= 1 + +- def do_handshake(self): ++ def unwrap(self): 
++ if self._sslobj: ++ s = self._sslobj.shutdown() ++ self._sslobj = None ++ return s ++ else: ++ raise ValueError("No SSL wrapper around " + str(self)) + ++ def _real_close(self): ++ self._sslobj = None ++ socket._real_close(self) ++ ++ def do_handshake(self, block=False): + """Perform a TLS/SSL handshake.""" ++ self._check_connected() ++ timeout = self.gettimeout() ++ try: ++ if timeout == 0.0 and block: ++ self.settimeout(None) ++ self._sslobj.do_handshake() ++ finally: ++ self.settimeout(timeout) + +- self._sslobj.do_handshake() ++ if self.context.check_hostname: ++ if not self.server_hostname: ++ raise ValueError("check_hostname needs server_hostname " ++ "argument") ++ match_hostname(self.getpeercert(), self.server_hostname) + +- def _real_connect(self, addr, return_errno): ++ def _real_connect(self, addr, connect_ex): ++ if self.server_side: ++ raise ValueError("can't connect in server-side mode") + # Here we assume that the socket is client-side, and not + # connected at the time of the call. We connect it, then wrap it. + if self._connected: + raise ValueError("attempt to connect already-connected SSLSocket!") +- self._sslobj = _ssl.sslwrap(self._sock, False, self.keyfile, self.certfile, +- self.cert_reqs, self.ssl_version, +- self.ca_certs, self.ciphers) ++ self._sslobj = self.context._wrap_socket(self._sock, False, self.server_hostname, ssl_sock=self) + try: +- if return_errno: ++ if connect_ex: + rc = socket.connect_ex(self, addr) + else: + rc = None + socket.connect(self, addr) + if not rc: ++ self._connected = True + if self.do_handshake_on_connect: + self.do_handshake() +- self._connected = True + return rc +- except socket_error: ++ except (OSError, ValueError): + self._sslobj = None + raise + +@@ -343,27 +825,16 @@ class SSLSocket(socket): + return self._real_connect(addr, True) + + def accept(self): +- + """Accepts a new connection from a remote client, and returns + a tuple containing that new connection wrapped with a server-side + SSL channel, and the address of the remote client.""" + + newsock, addr = socket.accept(self) +- try: +- return (SSLSocket(newsock, +- keyfile=self.keyfile, +- certfile=self.certfile, +- server_side=True, +- cert_reqs=self.cert_reqs, +- ssl_version=self.ssl_version, +- ca_certs=self.ca_certs, +- ciphers=self.ciphers, +- do_handshake_on_connect=self.do_handshake_on_connect, +- suppress_ragged_eofs=self.suppress_ragged_eofs), +- addr) +- except socket_error as e: +- newsock.close() +- raise e ++ newsock = self.context.wrap_socket(newsock, ++ do_handshake_on_connect=self.do_handshake_on_connect, ++ suppress_ragged_eofs=self.suppress_ragged_eofs, ++ server_side=True) ++ return newsock, addr + + def makefile(self, mode='r', bufsize=-1): + +@@ -376,54 +847,81 @@ class SSLSocket(socket): + # the file-like object. + return _fileobject(self, mode, bufsize, close=True) + ++ def get_channel_binding(self, cb_type="tls-unique"): ++ """Get channel binding data for current connection. Raise ValueError ++ if the requested `cb_type` is not supported. Return bytes of the data ++ or None if the data is not available (e.g. before the handshake). 
++ """ ++ if cb_type not in CHANNEL_BINDING_TYPES: ++ raise ValueError("Unsupported channel binding type") ++ if cb_type != "tls-unique": ++ raise NotImplementedError( ++ "{0} channel binding type not implemented" ++ .format(cb_type)) ++ if self._sslobj is None: ++ return None ++ return self._sslobj.tls_unique_cb() + + + def wrap_socket(sock, keyfile=None, certfile=None, + server_side=False, cert_reqs=CERT_NONE, + ssl_version=PROTOCOL_SSLv23, ca_certs=None, + do_handshake_on_connect=True, +- suppress_ragged_eofs=True, ciphers=None): ++ suppress_ragged_eofs=True, ++ ciphers=None): + +- return SSLSocket(sock, keyfile=keyfile, certfile=certfile, ++ return SSLSocket(sock=sock, keyfile=keyfile, certfile=certfile, + server_side=server_side, cert_reqs=cert_reqs, + ssl_version=ssl_version, ca_certs=ca_certs, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, + ciphers=ciphers) + +- + # some utility functions + + def cert_time_to_seconds(cert_time): ++ """Return the time in seconds since the Epoch, given the timestring ++ representing the "notBefore" or "notAfter" date from a certificate ++ in ``"%b %d %H:%M:%S %Y %Z"`` strptime format (C locale). + +- """Takes a date-time string in standard ASN1_print form +- ("MON DAY 24HOUR:MINUTE:SEC YEAR TIMEZONE") and return +- a Python time value in seconds past the epoch.""" ++ "notBefore" or "notAfter" dates must use UTC (RFC 5280). + +- import time +- return time.mktime(time.strptime(cert_time, "%b %d %H:%M:%S %Y GMT")) ++ Month is one of: Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec ++ UTC should be specified as GMT (see ASN1_TIME_print()) ++ """ ++ from time import strptime ++ from calendar import timegm ++ ++ months = ( ++ "Jan","Feb","Mar","Apr","May","Jun", ++ "Jul","Aug","Sep","Oct","Nov","Dec" ++ ) ++ time_format = ' %d %H:%M:%S %Y GMT' # NOTE: no month, fixed GMT ++ try: ++ month_number = months.index(cert_time[:3].title()) + 1 ++ except ValueError: ++ raise ValueError('time data %r does not match ' ++ 'format "%%b%s"' % (cert_time, time_format)) ++ else: ++ # found valid month ++ tt = strptime(cert_time[3:], time_format) ++ # return an integer, the previous mktime()-based implementation ++ # returned a float (fractional seconds are always zero here). 
++ return timegm((tt[0], month_number) + tt[2:6]) + + PEM_HEADER = "-----BEGIN CERTIFICATE-----" + PEM_FOOTER = "-----END CERTIFICATE-----" + + def DER_cert_to_PEM_cert(der_cert_bytes): +- + """Takes a certificate in binary DER format and returns the + PEM version of it as a string.""" + +- if hasattr(base64, 'standard_b64encode'): +- # preferred because older API gets line-length wrong +- f = base64.standard_b64encode(der_cert_bytes) +- return (PEM_HEADER + '\n' + +- textwrap.fill(f, 64) + '\n' + +- PEM_FOOTER + '\n') +- else: +- return (PEM_HEADER + '\n' + +- base64.encodestring(der_cert_bytes) + +- PEM_FOOTER + '\n') ++ f = base64.standard_b64encode(der_cert_bytes).decode('ascii') ++ return (PEM_HEADER + '\n' + ++ textwrap.fill(f, 64) + '\n' + ++ PEM_FOOTER + '\n') + + def PEM_cert_to_DER_cert(pem_cert_string): +- + """Takes a certificate in ASCII PEM format and returns the + DER-encoded version of it as a byte sequence""" + +@@ -434,25 +932,25 @@ def PEM_cert_to_DER_cert(pem_cert_string + raise ValueError("Invalid PEM encoding; must end with %s" + % PEM_FOOTER) + d = pem_cert_string.strip()[len(PEM_HEADER):-len(PEM_FOOTER)] +- return base64.decodestring(d) ++ return base64.decodestring(d.encode('ASCII', 'strict')) + +-def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv3, ca_certs=None): +- ++def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv23, ca_certs=None): + """Retrieve the certificate from the server at the specified address, + and return it as a PEM-encoded string. + If 'ca_certs' is specified, validate the server cert against it. + If 'ssl_version' is specified, use it in the connection attempt.""" + + host, port = addr +- if (ca_certs is not None): ++ if ca_certs is not None: + cert_reqs = CERT_REQUIRED + else: + cert_reqs = CERT_NONE +- s = wrap_socket(socket(), ssl_version=ssl_version, +- cert_reqs=cert_reqs, ca_certs=ca_certs) +- s.connect(addr) +- dercert = s.getpeercert(True) +- s.close() ++ context = _create_stdlib_context(ssl_version, ++ cert_reqs=cert_reqs, ++ cafile=ca_certs) ++ with closing(create_connection(addr)) as sock: ++ with closing(context.wrap_socket(sock)) as sslsock: ++ dercert = sslsock.getpeercert(True) + return DER_cert_to_PEM_cert(dercert) + + def get_protocol_name(protocol_code): +diff -up Python-2.7.5/Lib/ssl.py.makefile Python-2.7.5/Lib/ssl.py +--- Python-2.7.5/Lib/ssl.py.makefile 2015-03-04 16:48:10.040698640 +0100 ++++ Python-2.7.5/Lib/ssl.py 2015-03-04 16:48:32.885909530 +0100 +@@ -481,6 +481,7 @@ class SSLSocket(socket): + server_hostname=None, + _context=None): + ++ self._makefile_refs = 0 + if _context: + self._context = _context + else: +@@ -563,7 +564,6 @@ class SSLSocket(socket): + except (OSError, ValueError): + self.close() + raise +- self._makefile_refs = 0 + + @property + def context(self): + +diff --git a/Lib/test/capath/4e1295a3.0 b/Lib/test/capath/4e1295a3.0 +new file mode 100644 +--- /dev/null ++++ b/Lib/test/capath/4e1295a3.0 +@@ -0,0 +1,14 @@ ++-----BEGIN CERTIFICATE----- ++MIICLDCCAdYCAQAwDQYJKoZIhvcNAQEEBQAwgaAxCzAJBgNVBAYTAlBUMRMwEQYD ++VQQIEwpRdWVlbnNsYW5kMQ8wDQYDVQQHEwZMaXNib2ExFzAVBgNVBAoTDk5ldXJv ++bmlvLCBMZGEuMRgwFgYDVQQLEw9EZXNlbnZvbHZpbWVudG8xGzAZBgNVBAMTEmJy ++dXR1cy5uZXVyb25pby5wdDEbMBkGCSqGSIb3DQEJARYMc2FtcG9AaWtpLmZpMB4X ++DTk2MDkwNTAzNDI0M1oXDTk2MTAwNTAzNDI0M1owgaAxCzAJBgNVBAYTAlBUMRMw ++EQYDVQQIEwpRdWVlbnNsYW5kMQ8wDQYDVQQHEwZMaXNib2ExFzAVBgNVBAoTDk5l ++dXJvbmlvLCBMZGEuMRgwFgYDVQQLEw9EZXNlbnZvbHZpbWVudG8xGzAZBgNVBAMT ++EmJydXR1cy5uZXVyb25pby5wdDEbMBkGCSqGSIb3DQEJARYMc2FtcG9AaWtpLmZp 
++MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAL7+aty3S1iBA/+yxjxv4q1MUTd1kjNw ++L4lYKbpzzlmC5beaQXeQ2RmGMTXU+mDvuqItjVHOK3DvPK7lTcSGftUCAwEAATAN ++BgkqhkiG9w0BAQQFAANBAFqPEKFjk6T6CKTHvaQeEAsX0/8YHPHqH/9AnhSjrwuX ++9EBc0n6bVGhN7XaXd6sJ7dym9sbsWxb+pJdurnkxjx4= ++-----END CERTIFICATE----- +diff --git a/Lib/test/capath/5ed36f99.0 b/Lib/test/capath/5ed36f99.0 +new file mode 100644 +--- /dev/null ++++ b/Lib/test/capath/5ed36f99.0 +@@ -0,0 +1,41 @@ ++-----BEGIN CERTIFICATE----- ++MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 ++IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB ++IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA ++Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO ++BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi ++MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ ++ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC ++CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ ++8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 ++zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y ++fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 ++w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc ++G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k ++epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q ++laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ ++QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU ++fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 ++YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w ++ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY ++gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe ++MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 ++IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy ++dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw ++czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 ++dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl ++aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC ++AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg ++b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB ++ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc ++nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg ++18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c ++gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl ++Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY ++sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T ++SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF ++CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum ++GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk ++zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW ++omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD ++-----END CERTIFICATE----- +diff --git a/Lib/test/capath/6e88d7b8.0 b/Lib/test/capath/6e88d7b8.0 +new file mode 100644 +--- /dev/null ++++ b/Lib/test/capath/6e88d7b8.0 +@@ -0,0 +1,14 @@ ++-----BEGIN CERTIFICATE----- ++MIICLDCCAdYCAQAwDQYJKoZIhvcNAQEEBQAwgaAxCzAJBgNVBAYTAlBUMRMwEQYD ++VQQIEwpRdWVlbnNsYW5kMQ8wDQYDVQQHEwZMaXNib2ExFzAVBgNVBAoTDk5ldXJv ++bmlvLCBMZGEuMRgwFgYDVQQLEw9EZXNlbnZvbHZpbWVudG8xGzAZBgNVBAMTEmJy ++dXR1cy5uZXVyb25pby5wdDEbMBkGCSqGSIb3DQEJARYMc2FtcG9AaWtpLmZpMB4X 
++DTk2MDkwNTAzNDI0M1oXDTk2MTAwNTAzNDI0M1owgaAxCzAJBgNVBAYTAlBUMRMw ++EQYDVQQIEwpRdWVlbnNsYW5kMQ8wDQYDVQQHEwZMaXNib2ExFzAVBgNVBAoTDk5l ++dXJvbmlvLCBMZGEuMRgwFgYDVQQLEw9EZXNlbnZvbHZpbWVudG8xGzAZBgNVBAMT ++EmJydXR1cy5uZXVyb25pby5wdDEbMBkGCSqGSIb3DQEJARYMc2FtcG9AaWtpLmZp ++MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAL7+aty3S1iBA/+yxjxv4q1MUTd1kjNw ++L4lYKbpzzlmC5beaQXeQ2RmGMTXU+mDvuqItjVHOK3DvPK7lTcSGftUCAwEAATAN ++BgkqhkiG9w0BAQQFAANBAFqPEKFjk6T6CKTHvaQeEAsX0/8YHPHqH/9AnhSjrwuX ++9EBc0n6bVGhN7XaXd6sJ7dym9sbsWxb+pJdurnkxjx4= ++-----END CERTIFICATE----- +diff --git a/Lib/test/capath/99d0fa06.0 b/Lib/test/capath/99d0fa06.0 +new file mode 100644 +--- /dev/null ++++ b/Lib/test/capath/99d0fa06.0 +@@ -0,0 +1,41 @@ ++-----BEGIN CERTIFICATE----- ++MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 ++IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB ++IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA ++Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO ++BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi ++MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ ++ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC ++CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ ++8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 ++zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y ++fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 ++w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc ++G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k ++epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q ++laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ ++QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU ++fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 ++YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w ++ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY ++gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe ++MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 ++IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy ++dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw ++czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 ++dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl ++aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC ++AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg ++b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB ++ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc ++nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg ++18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c ++gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl ++Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY ++sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T ++SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF ++CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum ++GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk ++zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW ++omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD ++-----END CERTIFICATE----- +diff --git a/Lib/test/dh512.pem b/Lib/test/dh512.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/dh512.pem +@@ -0,0 +1,9 @@ ++-----BEGIN DH PARAMETERS----- 
++MEYCQQD1Kv884bEpQBgRjXyEpwpy1obEAxnIByl6ypUM2Zafq9AKUJsCRtMIPWak ++XUGfnHy9iUsiGSa6q6Jew1XpKgVfAgEC ++-----END DH PARAMETERS----- ++ ++These are the 512 bit DH parameters from "Assigned Number for SKIP Protocols" ++(http://www.skip-vpn.org/spec/numbers.html). ++See there for how they were generated. ++Note that g is not a generator, but this is not a problem since p is a safe prime. +diff --git a/Lib/test/keycert.passwd.pem b/Lib/test/keycert.passwd.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/keycert.passwd.pem +@@ -0,0 +1,33 @@ ++-----BEGIN RSA PRIVATE KEY----- ++Proc-Type: 4,ENCRYPTED ++DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A ++ ++kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c ++u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA ++AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr ++Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ ++YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P ++6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ ++noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 ++94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l ++7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo ++cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO ++zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt ++L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo ++2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== ++-----END RSA PRIVATE KEY----- ++-----BEGIN CERTIFICATE----- ++MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV ++BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u ++IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw ++MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH ++Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k ++YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw ++gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 ++6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt ++pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw ++FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd ++BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G ++lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 ++CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX ++-----END CERTIFICATE----- +diff --git a/Lib/test/keycert3.pem b/Lib/test/keycert3.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/keycert3.pem +@@ -0,0 +1,73 @@ ++-----BEGIN PRIVATE KEY----- ++MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAMLgD0kAKDb5cFyP ++jbwNfR5CtewdXC+kMXAWD8DLxiTTvhMW7qVnlwOm36mZlszHKvsRf05lT4pegiFM ++9z2j1OlaN+ci/X7NU22TNN6crYSiN77FjYJP464j876ndSxyD+rzys386T+1r1aZ ++aggEdkj1TsSsv1zWIYKlPIjlvhuxAgMBAAECgYA0aH+T2Vf3WOPv8KdkcJg6gCRe ++yJKXOWgWRcicx/CUzOEsTxmFIDPLxqAWA3k7v0B+3vjGw5Y9lycV/5XqXNoQI14j ++y09iNsumds13u5AKkGdTJnZhQ7UKdoVHfuP44ZdOv/rJ5/VD6F4zWywpe90pcbK+ ++AWDVtusgGQBSieEl1QJBAOyVrUG5l2yoUBtd2zr/kiGm/DYyXlIthQO/A3/LngDW ++5/ydGxVsT7lAVOgCsoT+0L4efTh90PjzW8LPQrPBWVMCQQDS3h/FtYYd5lfz+FNL ++9CEe1F1w9l8P749uNUD0g317zv1tatIqVCsQWHfVHNdVvfQ+vSFw38OORO00Xqs9 ++1GJrAkBkoXXEkxCZoy4PteheO/8IWWLGGr6L7di6MzFl1lIqwT6D8L9oaV2vynFT ++DnKop0pa09Unhjyw57KMNmSE2SUJAkEArloTEzpgRmCq4IK2/NpCeGdHS5uqRlbh ++1VIa/xGps7EWQl5Mn8swQDel/YP3WGHTjfx7pgSegQfkyaRtGpZ9OQJAa9Vumj8m ++JAAtI0Bnga8hgQx7BhTQY4CadDxyiRGOGYhwUzYVCqkb2sbVRH9HnwUaJT7cWBY3 ++RnJdHOMXWem7/w== ++-----END PRIVATE KEY----- ++Certificate: 
++ Data: ++ Version: 1 (0x0) ++ Serial Number: 12723342612721443281 (0xb09264b1f2da21d1) ++ Signature Algorithm: sha1WithRSAEncryption ++ Issuer: C=XY, O=Python Software Foundation CA, CN=our-ca-server ++ Validity ++ Not Before: Jan 4 19:47:07 2013 GMT ++ Not After : Nov 13 19:47:07 2022 GMT ++ Subject: C=XY, L=Castle Anthrax, O=Python Software Foundation, CN=localhost ++ Subject Public Key Info: ++ Public Key Algorithm: rsaEncryption ++ Public-Key: (1024 bit) ++ Modulus: ++ 00:c2:e0:0f:49:00:28:36:f9:70:5c:8f:8d:bc:0d: ++ 7d:1e:42:b5:ec:1d:5c:2f:a4:31:70:16:0f:c0:cb: ++ c6:24:d3:be:13:16:ee:a5:67:97:03:a6:df:a9:99: ++ 96:cc:c7:2a:fb:11:7f:4e:65:4f:8a:5e:82:21:4c: ++ f7:3d:a3:d4:e9:5a:37:e7:22:fd:7e:cd:53:6d:93: ++ 34:de:9c:ad:84:a2:37:be:c5:8d:82:4f:e3:ae:23: ++ f3:be:a7:75:2c:72:0f:ea:f3:ca:cd:fc:e9:3f:b5: ++ af:56:99:6a:08:04:76:48:f5:4e:c4:ac:bf:5c:d6: ++ 21:82:a5:3c:88:e5:be:1b:b1 ++ Exponent: 65537 (0x10001) ++ Signature Algorithm: sha1WithRSAEncryption ++ 2f:42:5f:a3:09:2c:fa:51:88:c7:37:7f:ea:0e:63:f0:a2:9a: ++ e5:5a:e2:c8:20:f0:3f:60:bc:c8:0f:b6:c6:76:ce:db:83:93: ++ f5:a3:33:67:01:8e:04:cd:00:9a:73:fd:f3:35:86:fa:d7:13: ++ e2:46:c6:9d:c0:29:53:d4:a9:90:b8:77:4b:e6:83:76:e4:92: ++ d6:9c:50:cf:43:d0:c6:01:77:61:9a:de:9b:70:f7:72:cd:59: ++ 00:31:69:d9:b4:ca:06:9c:6d:c3:c7:80:8c:68:e6:b5:a2:f8: ++ ef:1d:bb:16:9f:77:77:ef:87:62:22:9b:4d:69:a4:3a:1a:f1: ++ 21:5e:8c:32:ac:92:fd:15:6b:18:c2:7f:15:0d:98:30:ca:75: ++ 8f:1a:71:df:da:1d:b2:ef:9a:e8:2d:2e:02:fd:4a:3c:aa:96: ++ 0b:06:5d:35:b3:3d:24:87:4b:e0:b0:58:60:2f:45:ac:2e:48: ++ 8a:b0:99:10:65:27:ff:cc:b1:d8:fd:bd:26:6b:b9:0c:05:2a: ++ f4:45:63:35:51:07:ed:83:85:fe:6f:69:cb:bb:40:a8:ae:b6: ++ 3b:56:4a:2d:a4:ed:6d:11:2c:4d:ed:17:24:fd:47:bc:d3:41: ++ a2:d3:06:fe:0c:90:d8:d8:94:26:c4:ff:cc:a1:d8:42:77:eb: ++ fc:a9:94:71 ++-----BEGIN CERTIFICATE----- ++MIICpDCCAYwCCQCwkmSx8toh0TANBgkqhkiG9w0BAQUFADBNMQswCQYDVQQGEwJY ++WTEmMCQGA1UECgwdUHl0aG9uIFNvZnR3YXJlIEZvdW5kYXRpb24gQ0ExFjAUBgNV ++BAMMDW91ci1jYS1zZXJ2ZXIwHhcNMTMwMTA0MTk0NzA3WhcNMjIxMTEzMTk0NzA3 ++WjBfMQswCQYDVQQGEwJYWTEXMBUGA1UEBxMOQ2FzdGxlIEFudGhyYXgxIzAhBgNV ++BAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMRIwEAYDVQQDEwlsb2NhbGhv ++c3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMLgD0kAKDb5cFyPjbwNfR5C ++tewdXC+kMXAWD8DLxiTTvhMW7qVnlwOm36mZlszHKvsRf05lT4pegiFM9z2j1Ola ++N+ci/X7NU22TNN6crYSiN77FjYJP464j876ndSxyD+rzys386T+1r1aZaggEdkj1 ++TsSsv1zWIYKlPIjlvhuxAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAC9CX6MJLPpR ++iMc3f+oOY/CimuVa4sgg8D9gvMgPtsZ2ztuDk/WjM2cBjgTNAJpz/fM1hvrXE+JG ++xp3AKVPUqZC4d0vmg3bkktacUM9D0MYBd2Ga3ptw93LNWQAxadm0ygacbcPHgIxo ++5rWi+O8duxafd3fvh2Iim01ppDoa8SFejDKskv0VaxjCfxUNmDDKdY8acd/aHbLv ++mugtLgL9SjyqlgsGXTWzPSSHS+CwWGAvRawuSIqwmRBlJ//Msdj9vSZruQwFKvRF ++YzVRB+2Dhf5vacu7QKiutjtWSi2k7W0RLE3tFyT9R7zTQaLTBv4MkNjYlCbE/8yh ++2EJ36/yplHE= ++-----END CERTIFICATE----- +diff --git a/Lib/test/keycert4.pem b/Lib/test/keycert4.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/keycert4.pem +@@ -0,0 +1,73 @@ ++-----BEGIN PRIVATE KEY----- ++MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAK5UQiMI5VkNs2Qv ++L7gUaiDdFevNUXRjU4DHAe3ZzzYLZNE69h9gO9VCSS16tJ5fT5VEu0EZyGr0e3V2 ++NkX0ZoU0Hc/UaY4qx7LHmn5SYZpIxhJnkf7SyHJK1zUaGlU0/LxYqIuGCtF5dqx1 ++L2OQhEx1GM6RydHdgX69G64LXcY5AgMBAAECgYAhsRMfJkb9ERLMl/oG/5sLQu9L ++pWDKt6+ZwdxzlZbggQ85CMYshjLKIod2DLL/sLf2x1PRXyRG131M1E3k8zkkz6de ++R1uDrIN/x91iuYzfLQZGh8bMY7Yjd2eoroa6R/7DjpElGejLxOAaDWO0ST2IFQy9 ++myTGS2jSM97wcXfsSQJBANP3jelJoS5X6BRjTSneY21wcocxVuQh8pXpErALVNsT ++drrFTeaBuZp7KvbtnIM5g2WRNvaxLZlAY/hXPJvi6ncCQQDSix1cebml6EmPlEZS 
++Mm8gwI2F9ufUunwJmBJcz826Do0ZNGByWDAM/JQZH4FX4GfAFNuj8PUb+GQfadkx ++i1DPAkEA0lVsNHojvuDsIo8HGuzarNZQT2beWjJ1jdxh9t7HrTx7LIps6rb/fhOK ++Zs0R6gVAJaEbcWAPZ2tFyECInAdnsQJAUjaeXXjuxFkjOFym5PvqpvhpivEx78Bu ++JPTr3rAKXmfGMxxfuOa0xK1wSyshP6ZR/RBn/+lcXPKubhHQDOegwwJAJF1DBQnN +++/tLmOPULtDwfP4Zixn+/8GmGOahFoRcu6VIGHmRilJTn6MOButw7Glv2YdeC6l/ ++e83Gq6ffLVfKNQ== ++-----END PRIVATE KEY----- ++Certificate: ++ Data: ++ Version: 1 (0x0) ++ Serial Number: 12723342612721443282 (0xb09264b1f2da21d2) ++ Signature Algorithm: sha1WithRSAEncryption ++ Issuer: C=XY, O=Python Software Foundation CA, CN=our-ca-server ++ Validity ++ Not Before: Jan 4 19:47:07 2013 GMT ++ Not After : Nov 13 19:47:07 2022 GMT ++ Subject: C=XY, L=Castle Anthrax, O=Python Software Foundation, CN=fakehostname ++ Subject Public Key Info: ++ Public Key Algorithm: rsaEncryption ++ Public-Key: (1024 bit) ++ Modulus: ++ 00:ae:54:42:23:08:e5:59:0d:b3:64:2f:2f:b8:14: ++ 6a:20:dd:15:eb:cd:51:74:63:53:80:c7:01:ed:d9: ++ cf:36:0b:64:d1:3a:f6:1f:60:3b:d5:42:49:2d:7a: ++ b4:9e:5f:4f:95:44:bb:41:19:c8:6a:f4:7b:75:76: ++ 36:45:f4:66:85:34:1d:cf:d4:69:8e:2a:c7:b2:c7: ++ 9a:7e:52:61:9a:48:c6:12:67:91:fe:d2:c8:72:4a: ++ d7:35:1a:1a:55:34:fc:bc:58:a8:8b:86:0a:d1:79: ++ 76:ac:75:2f:63:90:84:4c:75:18:ce:91:c9:d1:dd: ++ 81:7e:bd:1b:ae:0b:5d:c6:39 ++ Exponent: 65537 (0x10001) ++ Signature Algorithm: sha1WithRSAEncryption ++ ad:45:8a:8e:ef:c6:ef:04:41:5c:2c:4a:84:dc:02:76:0c:d0: ++ 66:0f:f0:16:04:58:4d:fd:68:b7:b8:d3:a8:41:a5:5c:3c:6f: ++ 65:3c:d1:f8:ce:43:35:e7:41:5f:53:3d:c9:2c:c3:7d:fc:56: ++ 4a:fa:47:77:38:9d:bb:97:28:0a:3b:91:19:7f:bc:74:ae:15: ++ 6b:bd:20:36:67:45:a5:1e:79:d7:75:e6:89:5c:6d:54:84:d1: ++ 95:d7:a7:b4:33:3c:af:37:c4:79:8f:5e:75:dc:75:c2:18:fb: ++ 61:6f:2d:dc:38:65:5b:ba:67:28:d0:88:d7:8d:b9:23:5a:8e: ++ e8:c6:bb:db:ce:d5:b8:41:2a:ce:93:08:b6:95:ad:34:20:18: ++ d5:3b:37:52:74:50:0b:07:2c:b0:6d:a4:4c:7b:f4:e0:fd:d1: ++ af:17:aa:20:cd:62:e3:f0:9d:37:69:db:41:bd:d4:1c:fb:53: ++ 20:da:88:9d:76:26:67:ce:01:90:a7:80:1d:a9:5b:39:73:68: ++ 54:0a:d1:2a:03:1b:8f:3c:43:5d:5d:c4:51:f1:a7:e7:11:da: ++ 31:2c:49:06:af:04:f4:b8:3c:99:c4:20:b9:06:36:a2:00:92: ++ 61:1d:0c:6d:24:05:e2:82:e1:47:db:a0:5f:ba:b9:fb:ba:fa: ++ 49:12:1e:ce ++-----BEGIN CERTIFICATE----- ++MIICpzCCAY8CCQCwkmSx8toh0jANBgkqhkiG9w0BAQUFADBNMQswCQYDVQQGEwJY ++WTEmMCQGA1UECgwdUHl0aG9uIFNvZnR3YXJlIEZvdW5kYXRpb24gQ0ExFjAUBgNV ++BAMMDW91ci1jYS1zZXJ2ZXIwHhcNMTMwMTA0MTk0NzA3WhcNMjIxMTEzMTk0NzA3 ++WjBiMQswCQYDVQQGEwJYWTEXMBUGA1UEBxMOQ2FzdGxlIEFudGhyYXgxIzAhBgNV ++BAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMRUwEwYDVQQDEwxmYWtlaG9z ++dG5hbWUwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAK5UQiMI5VkNs2QvL7gU ++aiDdFevNUXRjU4DHAe3ZzzYLZNE69h9gO9VCSS16tJ5fT5VEu0EZyGr0e3V2NkX0 ++ZoU0Hc/UaY4qx7LHmn5SYZpIxhJnkf7SyHJK1zUaGlU0/LxYqIuGCtF5dqx1L2OQ ++hEx1GM6RydHdgX69G64LXcY5AgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAK1Fio7v ++xu8EQVwsSoTcAnYM0GYP8BYEWE39aLe406hBpVw8b2U80fjOQzXnQV9TPcksw338 ++Vkr6R3c4nbuXKAo7kRl/vHSuFWu9IDZnRaUeedd15olcbVSE0ZXXp7QzPK83xHmP ++XnXcdcIY+2FvLdw4ZVu6ZyjQiNeNuSNajujGu9vO1bhBKs6TCLaVrTQgGNU7N1J0 ++UAsHLLBtpEx79OD90a8XqiDNYuPwnTdp20G91Bz7UyDaiJ12JmfOAZCngB2pWzlz ++aFQK0SoDG488Q11dxFHxp+cR2jEsSQavBPS4PJnEILkGNqIAkmEdDG0kBeKC4Ufb ++oF+6ufu6+kkSHs4= ++-----END CERTIFICATE----- +diff --git a/Lib/test/make_ssl_certs.py b/Lib/test/make_ssl_certs.py +new file mode 100644 +--- /dev/null ++++ b/Lib/test/make_ssl_certs.py +@@ -0,0 +1,176 @@ ++"""Make the custom certificate and private key files used by test_ssl ++and friends.""" ++ ++import os ++import shutil ++import sys ++import tempfile ++from subprocess import * 
++ ++req_template = """ ++ [req] ++ distinguished_name = req_distinguished_name ++ x509_extensions = req_x509_extensions ++ prompt = no ++ ++ [req_distinguished_name] ++ C = XY ++ L = Castle Anthrax ++ O = Python Software Foundation ++ CN = {hostname} ++ ++ [req_x509_extensions] ++ subjectAltName = DNS:{hostname} ++ ++ [ ca ] ++ default_ca = CA_default ++ ++ [ CA_default ] ++ dir = cadir ++ database = $dir/index.txt ++ crlnumber = $dir/crl.txt ++ default_md = sha1 ++ default_days = 3600 ++ default_crl_days = 3600 ++ certificate = pycacert.pem ++ private_key = pycakey.pem ++ serial = $dir/serial ++ RANDFILE = $dir/.rand ++ ++ policy = policy_match ++ ++ [ policy_match ] ++ countryName = match ++ stateOrProvinceName = optional ++ organizationName = match ++ organizationalUnitName = optional ++ commonName = supplied ++ emailAddress = optional ++ ++ [ policy_anything ] ++ countryName = optional ++ stateOrProvinceName = optional ++ localityName = optional ++ organizationName = optional ++ organizationalUnitName = optional ++ commonName = supplied ++ emailAddress = optional ++ ++ ++ [ v3_ca ] ++ ++ subjectKeyIdentifier=hash ++ authorityKeyIdentifier=keyid:always,issuer ++ basicConstraints = CA:true ++ ++ """ ++ ++here = os.path.abspath(os.path.dirname(__file__)) ++ ++def make_cert_key(hostname, sign=False): ++ print("creating cert for " + hostname) ++ tempnames = [] ++ for i in range(3): ++ with tempfile.NamedTemporaryFile(delete=False) as f: ++ tempnames.append(f.name) ++ req_file, cert_file, key_file = tempnames ++ try: ++ with open(req_file, 'w') as f: ++ f.write(req_template.format(hostname=hostname)) ++ args = ['req', '-new', '-days', '3650', '-nodes', ++ '-newkey', 'rsa:1024', '-keyout', key_file, ++ '-config', req_file] ++ if sign: ++ with tempfile.NamedTemporaryFile(delete=False) as f: ++ tempnames.append(f.name) ++ reqfile = f.name ++ args += ['-out', reqfile ] ++ ++ else: ++ args += ['-x509', '-out', cert_file ] ++ check_call(['openssl'] + args) ++ ++ if sign: ++ args = ['ca', '-config', req_file, '-out', cert_file, '-outdir', 'cadir', ++ '-policy', 'policy_anything', '-batch', '-infiles', reqfile ] ++ check_call(['openssl'] + args) ++ ++ ++ with open(cert_file, 'r') as f: ++ cert = f.read() ++ with open(key_file, 'r') as f: ++ key = f.read() ++ return cert, key ++ finally: ++ for name in tempnames: ++ os.remove(name) ++ ++TMP_CADIR = 'cadir' ++ ++def unmake_ca(): ++ shutil.rmtree(TMP_CADIR) ++ ++def make_ca(): ++ os.mkdir(TMP_CADIR) ++ with open(os.path.join('cadir','index.txt'),'a+') as f: ++ pass # empty file ++ with open(os.path.join('cadir','crl.txt'),'a+') as f: ++ f.write("00") ++ with open(os.path.join('cadir','index.txt.attr'),'w+') as f: ++ f.write('unique_subject = no') ++ ++ with tempfile.NamedTemporaryFile("w") as t: ++ t.write(req_template.format(hostname='our-ca-server')) ++ t.flush() ++ with tempfile.NamedTemporaryFile() as f: ++ args = ['req', '-new', '-days', '3650', '-extensions', 'v3_ca', '-nodes', ++ '-newkey', 'rsa:2048', '-keyout', 'pycakey.pem', ++ '-out', f.name, ++ '-subj', '/C=XY/L=Castle Anthrax/O=Python Software Foundation CA/CN=our-ca-server'] ++ check_call(['openssl'] + args) ++ args = ['ca', '-config', t.name, '-create_serial', ++ '-out', 'pycacert.pem', '-batch', '-outdir', TMP_CADIR, ++ '-keyfile', 'pycakey.pem', '-days', '3650', ++ '-selfsign', '-extensions', 'v3_ca', '-infiles', f.name ] ++ check_call(['openssl'] + args) ++ args = ['ca', '-config', t.name, '-gencrl', '-out', 'revocation.crl'] ++ check_call(['openssl'] + args) ++ ++if __name__ == 
'__main__': ++ os.chdir(here) ++ cert, key = make_cert_key('localhost') ++ with open('ssl_cert.pem', 'w') as f: ++ f.write(cert) ++ with open('ssl_key.pem', 'w') as f: ++ f.write(key) ++ print("password protecting ssl_key.pem in ssl_key.passwd.pem") ++ check_call(['openssl','rsa','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-des3','-passout','pass:somepass']) ++ check_call(['openssl','rsa','-in','ssl_key.pem','-out','keycert.passwd.pem','-des3','-passout','pass:somepass']) ++ ++ with open('keycert.pem', 'w') as f: ++ f.write(key) ++ f.write(cert) ++ ++ with open('keycert.passwd.pem', 'a+') as f: ++ f.write(cert) ++ ++ # For certificate matching tests ++ make_ca() ++ cert, key = make_cert_key('fakehostname') ++ with open('keycert2.pem', 'w') as f: ++ f.write(key) ++ f.write(cert) ++ ++ cert, key = make_cert_key('localhost', True) ++ with open('keycert3.pem', 'w') as f: ++ f.write(key) ++ f.write(cert) ++ ++ cert, key = make_cert_key('fakehostname', True) ++ with open('keycert4.pem', 'w') as f: ++ f.write(key) ++ f.write(cert) ++ ++ unmake_ca() ++ print("\n\nPlease change the values in test_ssl.py, test_parse_cert function related to notAfter,notBefore and serialNumber") ++ check_call(['openssl','x509','-in','keycert.pem','-dates','-serial','-noout']) +diff --git a/Lib/test/pycacert.pem b/Lib/test/pycacert.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/pycacert.pem +@@ -0,0 +1,78 @@ ++Certificate: ++ Data: ++ Version: 3 (0x2) ++ Serial Number: 12723342612721443280 (0xb09264b1f2da21d0) ++ Signature Algorithm: sha1WithRSAEncryption ++ Issuer: C=XY, O=Python Software Foundation CA, CN=our-ca-server ++ Validity ++ Not Before: Jan 4 19:47:07 2013 GMT ++ Not After : Jan 2 19:47:07 2023 GMT ++ Subject: C=XY, O=Python Software Foundation CA, CN=our-ca-server ++ Subject Public Key Info: ++ Public Key Algorithm: rsaEncryption ++ Public-Key: (2048 bit) ++ Modulus: ++ 00:e7:de:e9:e3:0c:9f:00:b6:a1:fd:2b:5b:96:d2: ++ 6f:cc:e0:be:86:b9:20:5e:ec:03:7a:55:ab:ea:a4: ++ e9:f9:49:85:d2:66:d5:ed:c7:7a:ea:56:8e:2d:8f: ++ e7:42:e2:62:28:a9:9f:d6:1b:8e:eb:b5:b4:9c:9f: ++ 14:ab:df:e6:94:8b:76:1d:3e:6d:24:61:ed:0c:bf: ++ 00:8a:61:0c:df:5c:c8:36:73:16:00:cd:47:ba:6d: ++ a4:a4:74:88:83:23:0a:19:fc:09:a7:3c:4a:4b:d3: ++ e7:1d:2d:e4:ea:4c:54:21:f3:26:db:89:37:18:d4: ++ 02:bb:40:32:5f:a4:ff:2d:1c:f7:d4:bb:ec:8e:cf: ++ 5c:82:ac:e6:7c:08:6c:48:85:61:07:7f:25:e0:5c: ++ e0:bc:34:5f:e0:b9:04:47:75:c8:47:0b:8d:bc:d6: ++ c8:68:5f:33:83:62:d2:20:44:35:b1:ad:81:1a:8a: ++ cd:bc:35:b0:5c:8b:47:d6:18:e9:9c:18:97:cc:01: ++ 3c:29:cc:e8:1e:e4:e4:c1:b8:de:e7:c2:11:18:87: ++ 5a:93:34:d8:a6:25:f7:14:71:eb:e4:21:a2:d2:0f: ++ 2e:2e:d4:62:00:35:d3:d6:ef:5c:60:4b:4c:a9:14: ++ e2:dd:15:58:46:37:33:26:b7:e7:2e:5d:ed:42:e4: ++ c5:4d ++ Exponent: 65537 (0x10001) ++ X509v3 extensions: ++ X509v3 Subject Key Identifier: ++ BC:DD:62:D9:76:DA:1B:D2:54:6B:CF:E0:66:9B:1E:1E:7B:56:0C:0B ++ X509v3 Authority Key Identifier: ++ keyid:BC:DD:62:D9:76:DA:1B:D2:54:6B:CF:E0:66:9B:1E:1E:7B:56:0C:0B ++ ++ X509v3 Basic Constraints: ++ CA:TRUE ++ Signature Algorithm: sha1WithRSAEncryption ++ 7d:0a:f5:cb:8d:d3:5d:bd:99:8e:f8:2b:0f:ba:eb:c2:d9:a6: ++ 27:4f:2e:7b:2f:0e:64:d8:1c:35:50:4e:ee:fc:90:b9:8d:6d: ++ a8:c5:c6:06:b0:af:f3:2d:bf:3b:b8:42:07:dd:18:7d:6d:95: ++ 54:57:85:18:60:47:2f:eb:78:1b:f9:e8:17:fd:5a:0d:87:17: ++ 28:ac:4c:6a:e6:bc:29:f4:f4:55:70:29:42:de:85:ea:ab:6c: ++ 23:06:64:30:75:02:8e:53:bc:5e:01:33:37:cc:1e:cd:b8:a4: ++ fd:ca:e4:5f:65:3b:83:1c:86:f1:55:02:a0:3a:8f:db:91:b7: ++ 40:14:b4:e7:8d:d2:ee:73:ba:e3:e5:34:2d:bc:94:6f:4e:24: ++ 
06:f7:5f:8b:0e:a7:8e:6b:de:5e:75:f4:32:9a:50:b1:44:33: ++ 9a:d0:05:e2:78:82:ff:db:da:8a:63:eb:a9:dd:d1:bf:a0:61: ++ ad:e3:9e:8a:24:5d:62:0e:e7:4c:91:7f:ef:df:34:36:3b:2f: ++ 5d:f5:84:b2:2f:c4:6d:93:96:1a:6f:30:28:f1:da:12:9a:64: ++ b4:40:33:1d:bd:de:2b:53:a8:ea:be:d6:bc:4e:96:f5:44:fb: ++ 32:18:ae:d5:1f:f6:69:af:b6:4e:7b:1d:58:ec:3b:a9:53:a3: ++ 5e:58:c8:9e ++-----BEGIN CERTIFICATE----- ++MIIDbTCCAlWgAwIBAgIJALCSZLHy2iHQMA0GCSqGSIb3DQEBBQUAME0xCzAJBgNV ++BAYTAlhZMSYwJAYDVQQKDB1QeXRob24gU29mdHdhcmUgRm91bmRhdGlvbiBDQTEW ++MBQGA1UEAwwNb3VyLWNhLXNlcnZlcjAeFw0xMzAxMDQxOTQ3MDdaFw0yMzAxMDIx ++OTQ3MDdaME0xCzAJBgNVBAYTAlhZMSYwJAYDVQQKDB1QeXRob24gU29mdHdhcmUg ++Rm91bmRhdGlvbiBDQTEWMBQGA1UEAwwNb3VyLWNhLXNlcnZlcjCCASIwDQYJKoZI ++hvcNAQEBBQADggEPADCCAQoCggEBAOfe6eMMnwC2of0rW5bSb8zgvoa5IF7sA3pV ++q+qk6flJhdJm1e3HeupWji2P50LiYiipn9Ybjuu1tJyfFKvf5pSLdh0+bSRh7Qy/ ++AIphDN9cyDZzFgDNR7ptpKR0iIMjChn8Cac8SkvT5x0t5OpMVCHzJtuJNxjUArtA ++Ml+k/y0c99S77I7PXIKs5nwIbEiFYQd/JeBc4Lw0X+C5BEd1yEcLjbzWyGhfM4Ni ++0iBENbGtgRqKzbw1sFyLR9YY6ZwYl8wBPCnM6B7k5MG43ufCERiHWpM02KYl9xRx ++6+QhotIPLi7UYgA109bvXGBLTKkU4t0VWEY3Mya35y5d7ULkxU0CAwEAAaNQME4w ++HQYDVR0OBBYEFLzdYtl22hvSVGvP4GabHh57VgwLMB8GA1UdIwQYMBaAFLzdYtl2 ++2hvSVGvP4GabHh57VgwLMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB ++AH0K9cuN0129mY74Kw+668LZpidPLnsvDmTYHDVQTu78kLmNbajFxgawr/Mtvzu4 ++QgfdGH1tlVRXhRhgRy/reBv56Bf9Wg2HFyisTGrmvCn09FVwKULeheqrbCMGZDB1 ++Ao5TvF4BMzfMHs24pP3K5F9lO4MchvFVAqA6j9uRt0AUtOeN0u5zuuPlNC28lG9O ++JAb3X4sOp45r3l519DKaULFEM5rQBeJ4gv/b2opj66nd0b+gYa3jnookXWIO50yR ++f+/fNDY7L131hLIvxG2TlhpvMCjx2hKaZLRAMx293itTqOq+1rxOlvVE+zIYrtUf ++9mmvtk57HVjsO6lTo15YyJ4= ++-----END CERTIFICATE----- +diff --git a/Lib/test/revocation.crl b/Lib/test/revocation.crl +new file mode 100644 +--- /dev/null ++++ b/Lib/test/revocation.crl +@@ -0,0 +1,11 @@ ++-----BEGIN X509 CRL----- ++MIIBpjCBjwIBATANBgkqhkiG9w0BAQUFADBNMQswCQYDVQQGEwJYWTEmMCQGA1UE ++CgwdUHl0aG9uIFNvZnR3YXJlIEZvdW5kYXRpb24gQ0ExFjAUBgNVBAMMDW91ci1j ++YS1zZXJ2ZXIXDTEzMTEyMTE3MDg0N1oXDTIzMDkzMDE3MDg0N1qgDjAMMAoGA1Ud ++FAQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQCNJXC2mVKauEeN3LlQ3ZtM5gkH3ExH +++i4bmJjtJn497WwvvoIeUdrmVXgJQR93RtV37hZwN0SXMLlNmUZPH4rHhihayw4m ++unCzVj/OhCCY7/TPjKuJ1O/0XhaLBpBVjQN7R/1ujoRKbSia/CD3vcn7Fqxzw7LK ++fSRCKRGTj1CZiuxrphtFchwALXSiFDy9mr2ZKhImcyq1PydfgEzU78APpOkMQsIC ++UNJ/cf3c9emzf+dUtcMEcejQ3mynBo4eIGg1EW42bz4q4hSjzQlKcBV0muw5qXhc ++HOxH2iTFhQ7SrvVuK/dM14rYM4B5mSX3nRC1kNmXpS9j3wJDhuwmjHed ++-----END X509 CRL----- +diff --git a/Lib/test/sha256.pem b/Lib/test/sha256.pem +--- a/Lib/test/sha256.pem ++++ b/Lib/test/sha256.pem +@@ -1,128 +1,128 @@ + # Certificate chain for https://sha256.tbs-internet.com +- 0 s:/C=FR/postalCode=14000/ST=Calvados/L=CAEN/street=22 rue de Bretagne/O=TBS INTERNET/OU=0002 440443810/OU=Certificats TBS X509/CN=ecom.tbs-x509.com +- i:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA business ++ 0 s:/C=FR/postalCode=14000/ST=Calvados/L=CAEN/street=22 rue de Bretagne/O=TBS INTERNET/OU=0002 440443810/OU=sha-256 production/CN=sha256.tbs-internet.com ++ i:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC + -----BEGIN CERTIFICATE----- +-MIIGTjCCBTagAwIBAgIQOh3d9dNDPq1cSdJmEiMpqDANBgkqhkiG9w0BAQUFADCB +-yTELMAkGA1UEBhMCRlIxETAPBgNVBAgTCENhbHZhZG9zMQ0wCwYDVQQHEwRDYWVu +-MRUwEwYDVQQKEwxUQlMgSU5URVJORVQxSDBGBgNVBAsTP1Rlcm1zIGFuZCBDb25k +-aXRpb25zOiBodHRwOi8vd3d3LnRicy1pbnRlcm5ldC5jb20vQ0EvcmVwb3NpdG9y 
+-eTEYMBYGA1UECxMPVEJTIElOVEVSTkVUIENBMR0wGwYDVQQDExRUQlMgWDUwOSBD +-QSBidXNpbmVzczAeFw0xMTAxMjUwMDAwMDBaFw0xMzAyMDUyMzU5NTlaMIHHMQsw +-CQYDVQQGEwJGUjEOMAwGA1UEERMFMTQwMDAxETAPBgNVBAgTCENhbHZhZG9zMQ0w +-CwYDVQQHEwRDQUVOMRswGQYDVQQJExIyMiBydWUgZGUgQnJldGFnbmUxFTATBgNV +-BAoTDFRCUyBJTlRFUk5FVDEXMBUGA1UECxMOMDAwMiA0NDA0NDM4MTAxHTAbBgNV +-BAsTFENlcnRpZmljYXRzIFRCUyBYNTA5MRowGAYDVQQDExFlY29tLnRicy14NTA5 +-LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKRrlHUnJ++1lpcg +-jtYco7cdmRe+EEfTmwPfCdfV3G1QfsTSvY6FfMpm/83pqHfT+4ANwr18wD9ZrAEN +-G16mf9VdCGK12+TP7DmqeZyGIqlFFoahQnmb8EarvE43/1UeQ2CV9XmzwZvpqeli +-LfXsFonawrY3H6ZnMwS64St61Z+9gdyuZ/RbsoZBbT5KUjDEG844QRU4OT1IGeEI +-eY5NM5RNIh6ZNhVtqeeCxMS7afONkHQrOco73RdSTRck/Hj96Ofl3MHNHryr+AMK +-DGFk1kLCZGpPdXtkxXvaDeQoiYDlil26CWc+YK6xyDPMdsWvoG14ZLyCpzMXA7/7 +-4YAQRH0CAwEAAaOCAjAwggIsMB8GA1UdIwQYMBaAFBoJBMz5CY+7HqDO1KQUf0vV +-I1jNMB0GA1UdDgQWBBQgOU8HsWzbmD4WZP5Wtdw7jca2WDAOBgNVHQ8BAf8EBAMC +-BaAwDAYDVR0TAQH/BAIwADAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIw +-TAYDVR0gBEUwQzBBBgsrBgEEAYDlNwIBATAyMDAGCCsGAQUFBwIBFiRodHRwczov +-L3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL0NQUzEwdwYDVR0fBHAwbjA3oDWgM4Yx +-aHR0cDovL2NybC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQWJ1c2luZXNzLmNy +-bDAzoDGgL4YtaHR0cDovL2NybC50YnMteDUwOS5jb20vVEJTWDUwOUNBYnVzaW5l +-c3MuY3JsMIGwBggrBgEFBQcBAQSBozCBoDA9BggrBgEFBQcwAoYxaHR0cDovL2Ny +-dC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQWJ1c2luZXNzLmNydDA5BggrBgEF +-BQcwAoYtaHR0cDovL2NydC50YnMteDUwOS5jb20vVEJTWDUwOUNBYnVzaW5lc3Mu +-Y3J0MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC50YnMteDUwOS5jb20wMwYDVR0R +-BCwwKoIRZWNvbS50YnMteDUwOS5jb22CFXd3dy5lY29tLnRicy14NTA5LmNvbTAN +-BgkqhkiG9w0BAQUFAAOCAQEArT4NHfbY87bGAw8lPV4DmHlmuDuVp/y7ltO3Ynse +-3Rz8RxW2AzuO0Oy2F0Cu4yWKtMyEyMXyHqWtae7ElRbdTu5w5GwVBLJHClCzC8S9 +-SpgMMQTx3Rgn8vjkHuU9VZQlulZyiPK7yunjc7c310S9FRZ7XxOwf8Nnx4WnB+No +-WrfApzhhQl31w+RyrNxZe58hCfDDHmevRvwLjQ785ZoQXJDj2j3qAD4aI2yB8lB5 +-oaE1jlCJzC7Kmz/Y9jzfmv/zAs1LQTm9ktevv4BTUFaGjv9jxnQ1xnS862ZiouLW +-zZYIlYPf4F6JjXGiIQgQRglILUfq3ftJd9/ok9W9ZF8h8w== ++MIIGXDCCBUSgAwIBAgIRAKpVmHgg9nfCodAVwcP4siwwDQYJKoZIhvcNAQELBQAw ++gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl ++bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u ++ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv ++cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg ++Q0EgU0dDMB4XDTEyMDEwNDAwMDAwMFoXDTE0MDIxNzIzNTk1OVowgcsxCzAJBgNV ++BAYTAkZSMQ4wDAYDVQQREwUxNDAwMDERMA8GA1UECBMIQ2FsdmFkb3MxDTALBgNV ++BAcTBENBRU4xGzAZBgNVBAkTEjIyIHJ1ZSBkZSBCcmV0YWduZTEVMBMGA1UEChMM ++VEJTIElOVEVSTkVUMRcwFQYDVQQLEw4wMDAyIDQ0MDQ0MzgxMDEbMBkGA1UECxMS ++c2hhLTI1NiBwcm9kdWN0aW9uMSAwHgYDVQQDExdzaGEyNTYudGJzLWludGVybmV0 ++LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKQIX/zdJcyxty0m ++PM1XQSoSSifueS3AVcgqMsaIKS/u+rYzsv4hQ/qA6vLn5m5/ewUcZDj7zdi6rBVf ++PaVNXJ6YinLX0tkaW8TEjeVuZG5yksGZlhCt1CJ1Ho9XLiLaP4uJ7MCoNUntpJ+E ++LfrOdgsIj91kPmwjDJeztVcQCvKzhjVJA/KxdInc0JvOATn7rpaSmQI5bvIjufgo ++qVsTPwVFzuUYULXBk7KxRT7MiEqnd5HvviNh0285QC478zl3v0I0Fb5El4yD3p49 ++IthcRnxzMKc0UhU5ogi0SbONyBfm/mzONVfSxpM+MlyvZmJqrbuuLoEDzJD+t8PU ++xSuzgbcCAwEAAaOCAj4wggI6MB8GA1UdIwQYMBaAFAdEdoWTKLx/bXjSCuv6TEvf ++2YIfMB0GA1UdDgQWBBT/qTGYdaj+f61c2IRFL/B1eEsM8DAOBgNVHQ8BAf8EBAMC ++BaAwDAYDVR0TAQH/BAIwADA0BgNVHSUELTArBggrBgEFBQcDAQYIKwYBBQUHAwIG ++CisGAQQBgjcKAwMGCWCGSAGG+EIEATBLBgNVHSAERDBCMEAGCisGAQQB5TcCBAEw ++MjAwBggrBgEFBQcCARYkaHR0cHM6Ly93d3cudGJzLWludGVybmV0LmNvbS9DQS9D ++UFM0MG0GA1UdHwRmMGQwMqAwoC6GLGh0dHA6Ly9jcmwudGJzLWludGVybmV0LmNv ++bS9UQlNYNTA5Q0FTR0MuY3JsMC6gLKAqhihodHRwOi8vY3JsLnRicy14NTA5LmNv 
++bS9UQlNYNTA5Q0FTR0MuY3JsMIGmBggrBgEFBQcBAQSBmTCBljA4BggrBgEFBQcw ++AoYsaHR0cDovL2NydC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQVNHQy5jcnQw ++NAYIKwYBBQUHMAKGKGh0dHA6Ly9jcnQudGJzLXg1MDkuY29tL1RCU1g1MDlDQVNH ++Qy5jcnQwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLnRicy14NTA5LmNvbTA/BgNV ++HREEODA2ghdzaGEyNTYudGJzLWludGVybmV0LmNvbYIbd3d3LnNoYTI1Ni50YnMt ++aW50ZXJuZXQuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQA0pOuL8QvAa5yksTbGShzX ++ABApagunUGoEydv4YJT1MXy9tTp7DrWaozZSlsqBxrYAXP1d9r2fuKbEniYHxaQ0 ++UYaf1VSIlDo1yuC8wE7wxbHDIpQ/E5KAyxiaJ8obtDhFstWAPAH+UoGXq0kj2teN ++21sFQ5dXgA95nldvVFsFhrRUNB6xXAcaj0VZFhttI0ZfQZmQwEI/P+N9Jr40OGun ++aa+Dn0TMeUH4U20YntfLbu2nDcJcYfyurm+8/0Tr4HznLnedXu9pCPYj0TaddrgT ++XO0oFiyy7qGaY6+qKh71yD64Y3ycCJ/HR9Wm39mjZYc9ezYwT4noP6r7Lk8YO7/q + -----END CERTIFICATE----- +- 1 s:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA business ++ 1 s:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC + i:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root + -----BEGIN CERTIFICATE----- +-MIIFPzCCBCegAwIBAgIQDlBz/++iRSmLDeVRHT/hADANBgkqhkiG9w0BAQUFADBv ++MIIFVjCCBD6gAwIBAgIQXpDZ0ETJMV02WTx3GTnhhTANBgkqhkiG9w0BAQUFADBv + MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFk + ZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBF +-eHRlcm5hbCBDQSBSb290MB4XDTA1MTIwMTAwMDAwMFoXDTE5MDcwOTE4MTkyMlow +-gckxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl ++eHRlcm5hbCBDQSBSb290MB4XDTA1MTIwMTAwMDAwMFoXDTE5MDYyNDE5MDYzMFow ++gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl + bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u + ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv +-cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEdMBsGA1UEAxMUVEJTIFg1MDkg +-Q0EgYnVzaW5lc3MwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDB1PAU +-qudCcz3tmyGcf+u6EkZqonKKHrV4gZYbvVkIRojmmlhfi/jwvpHvo8bqSt/9Rj5S +-jhCDW0pcbI+IPPtD1Jy+CHNSfnMqVDy6CKQ3p5maTzCMG6ZT+XjnvcND5v+FtaiB +-xk1iCX6uvt0jeUtdZvYbyytsSDE6c3Y5//wRxOF8tM1JxibwO3pyER26jbbN2gQz +-m/EkdGjLdJ4svPk23WDAvQ6G0/z2LcAaJB+XLfqRwfQpHQvfKa1uTi8PivC8qtip +-rmNQMMPMjxSK2azX8cKjjTDJiUKaCb4VHlJDWKEsCFRpgJAoAuX8f7Yfs1M4esGo +-sWb3PGspK3O22uIlAgMBAAGjggF6MIIBdjAdBgNVHQ4EFgQUGgkEzPkJj7seoM7U +-pBR/S9UjWM0wDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQAwGAYD +-VR0gBBEwDzANBgsrBgEEAYDlNwIBATB7BgNVHR8EdDByMDigNqA0hjJodHRwOi8v +-Y3JsLmNvbW9kb2NhLmNvbS9BZGRUcnVzdEV4dGVybmFsQ0FSb290LmNybDA2oDSg +-MoYwaHR0cDovL2NybC5jb21vZG8ubmV0L0FkZFRydXN0RXh0ZXJuYWxDQVJvb3Qu +-Y3JsMIGGBggrBgEFBQcBAQR6MHgwOwYIKwYBBQUHMAKGL2h0dHA6Ly9jcnQuY29t +-b2RvY2EuY29tL0FkZFRydXN0VVROU2VydmVyQ0EuY3J0MDkGCCsGAQUFBzAChi1o +-dHRwOi8vY3J0LmNvbW9kby5uZXQvQWRkVHJ1c3RVVE5TZXJ2ZXJDQS5jcnQwEQYJ +-YIZIAYb4QgEBBAQDAgIEMA0GCSqGSIb3DQEBBQUAA4IBAQA7mqrMgk/MrE6QnbNA +-h4nRCn2ti4bg4w2C3lB6bSvRPnYwuNw9Jb8vuKkNFzRDxNJXqVDZdfFW5CVQJuyd +-nfAx83+wk+spzvFaE1KhFYfN9G9pQfXUfvDRoIcJgPEKUXL1wRiOG+IjU3VVI8pg +-IgqHkr7ylln5i5zCiFAPuIJmYUSFg/gxH5xkCNcjJqqrHrHatJr6Qrrke93joupw +-oU1njfAcZtYp6fbiK6u2b1pJqwkVBE8RsfLnPhRj+SFbpvjv8Od7o/ieJhFIYQNU +-k2jX2u8qZnAiNw93LZW9lpYjtuvMXq8QQppENNja5b53q7UwI+lU7ZGjZ7quuESp +-J6/5 ++cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg ++Q0EgU0dDMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsgOkO3f7wzN6 ++rOjg45tR5vjBfzK7qmV9IBxb/QW9EEXxG+E7FNhZqQLtwGBKoSsHTnQqV75wWMk0 ++9tinWvftBkSpj5sTi/8cbzJfUvTSVYh3Qxv6AVVjMMH/ruLjE6y+4PoaPs8WoYAQ 
++ts5R4Z1g8c/WnTepLst2x0/Wv7GmuoQi+gXvHU6YrBiu7XkeYhzc95QdviWSJRDk ++owhb5K43qhcvjRmBfO/paGlCliDGZp8mHwrI21mwobWpVjTxZRwYO3bd4+TGcI4G ++Ie5wmHwE8F7SK1tgSqbBacKjDa93j7txKkfz/Yd2n7TGqOXiHPsJpG655vrKtnXk ++9vs1zoDeJQIDAQABo4IBljCCAZIwHQYDVR0OBBYEFAdEdoWTKLx/bXjSCuv6TEvf ++2YIfMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMCAGA1UdJQQZ ++MBcGCisGAQQBgjcKAwMGCWCGSAGG+EIEATAYBgNVHSAEETAPMA0GCysGAQQBgOU3 ++AgQBMHsGA1UdHwR0MHIwOKA2oDSGMmh0dHA6Ly9jcmwuY29tb2RvY2EuY29tL0Fk ++ZFRydXN0RXh0ZXJuYWxDQVJvb3QuY3JsMDagNKAyhjBodHRwOi8vY3JsLmNvbW9k ++by5uZXQvQWRkVHJ1c3RFeHRlcm5hbENBUm9vdC5jcmwwgYAGCCsGAQUFBwEBBHQw ++cjA4BggrBgEFBQcwAoYsaHR0cDovL2NydC5jb21vZG9jYS5jb20vQWRkVHJ1c3RV ++VE5TR0NDQS5jcnQwNgYIKwYBBQUHMAKGKmh0dHA6Ly9jcnQuY29tb2RvLm5ldC9B ++ZGRUcnVzdFVUTlNHQ0NBLmNydDARBglghkgBhvhCAQEEBAMCAgQwDQYJKoZIhvcN ++AQEFBQADggEBAK2zEzs+jcIrVK9oDkdDZNvhuBYTdCfpxfFs+OAujW0bIfJAy232 ++euVsnJm6u/+OrqKudD2tad2BbejLLXhMZViaCmK7D9nrXHx4te5EP8rL19SUVqLY ++1pTnv5dhNgEgvA7n5lIzDSYs7yRLsr7HJsYPr6SeYSuZizyX1SNz7ooJ32/F3X98 ++RB0Mlc/E0OyOrkQ9/y5IrnpnaSora8CnUrV5XNOg+kyCz9edCyx4D5wXYcwZPVWz ++8aDqquESrezPyjtfi4WRO4s/VD3HLZvOxzMrWAVYCDG9FxaOhF0QGuuG1F7F3GKV ++v6prNyCl016kRl2j1UT+a7gLd8fA25A4C9E= + -----END CERTIFICATE----- + 2 s:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root +- i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware ++ i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC + -----BEGIN CERTIFICATE----- +-MIIETzCCAzegAwIBAgIQHM5EYpUZep1jUvnyI6m2mDANBgkqhkiG9w0BAQUFADCB +-lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug ++MIIEZjCCA06gAwIBAgIQUSYKkxzif5zDpV954HKugjANBgkqhkiG9w0BAQUFADCB ++kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug + Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt +-SGFyZHdhcmUwHhcNMDUwNjA3MDgwOTEwWhcNMTkwNzA5MTgxOTIyWjBvMQswCQYD +-VQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0 +-IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5h +-bCBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt/caM+by +-AAQtOeBOW+0fvGwPzbX6I7bO3psRM5ekKUx9k5+9SryT7QMa44/P5W1QWtaXKZRa +-gLBJetsulf24yr83OC0ePpFBrXBWx/BPP+gynnTKyJBU6cZfD3idmkA8Dqxhql4U +-j56HoWpQ3NeaTq8Fs6ZxlJxxs1BgCscTnTgHhgKo6ahpJhiQq0ywTyOrOk+E2N/O +-n+Fpb7vXQtdrROTHre5tQV9yWnEIN7N5ZaRZoJQ39wAvDcKSctrQOHLbFKhFxF0q +-fbe01sTurM0TRLfJK91DACX6YblpalgjEbenM49WdVn1zSnXRrcKK2W200JvFbK4 +-e/vv6V1T1TRaJwIDAQABo4G9MIG6MB8GA1UdIwQYMBaAFKFyXyYbKJhDlV0HN9WF +-lp1L0sNFMB0GA1UdDgQWBBStvZh6NLQm9/rEJlTvA73gJMtUGjAOBgNVHQ8BAf8E +-BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zARBglghkgBhvhCAQEEBAMCAQIwRAYDVR0f +-BD0wOzA5oDegNYYzaHR0cDovL2NybC51c2VydHJ1c3QuY29tL1VUTi1VU0VSRmly +-c3QtSGFyZHdhcmUuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQByQhANOs4kClrwF8BW +-onvUOGCSjRK52zYZgDXYNjDtmr5rJ6NyPFDNn+JxkLpjYetIFMTbSRe679Bt8m7a +-gIAoQYFQtxMuyLnJegB2aEbQiIxh/tC21UcFF7ktdnDoTlA6w3pLuvunaI84Of3o +-2YBrhzkTbCfaYk5JRlTpudW9DkUkHBsyx3nknPKnplkIGaK0jgn8E0n+SFabYaHk +-I9LroYT/+JtLefh9lgBdAgVv0UPbzoGfuDsrk/Zh+UrgbLFpHoVnElhzbkh64Z0X +-OGaJunQc68cCZu5HTn/aK7fBGMcVflRCXLVEQpU9PIAdGA8Ynvg684t8GMaKsRl1 +-jIGZ ++dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw ++IFNHQzAeFw0wNTA2MDcwODA5MTBaFw0xOTA2MjQxOTA2MzBaMG8xCzAJBgNVBAYT ++AlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0 ++ZXJuYWwgVFRQIE5ldHdvcmsxIjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENB 
++IFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC39xoz5vIABC05 ++4E5b7R+8bA/Ntfojts7emxEzl6QpTH2Tn71KvJPtAxrjj8/lbVBa1pcplFqAsEl6 ++2y6V/bjKvzc4LR4+kUGtcFbH8E8/6DKedMrIkFTpxl8PeJ2aQDwOrGGqXhSPnoeh ++alDc15pOrwWzpnGUnHGzUGAKxxOdOAeGAqjpqGkmGJCrTLBPI6s6T4TY386f4Wlv ++u9dC12tE5Met7m1BX3JacQg3s3llpFmglDf3AC8NwpJy2tA4ctsUqEXEXSp9t7TW ++xO6szRNEt8kr3UMAJfphuWlqWCMRt6czj1Z1WfXNKddGtworZbbTQm8Vsrh7++/p ++XVPVNFonAgMBAAGjgdgwgdUwHwYDVR0jBBgwFoAUUzLRs89/+uDxoF2FTpLSnkUd ++tE8wHQYDVR0OBBYEFK29mHo0tCb3+sQmVO8DveAky1QaMA4GA1UdDwEB/wQEAwIB ++BjAPBgNVHRMBAf8EBTADAQH/MBEGCWCGSAGG+EIBAQQEAwIBAjAgBgNVHSUEGTAX ++BgorBgEEAYI3CgMDBglghkgBhvhCBAEwPQYDVR0fBDYwNDAyoDCgLoYsaHR0cDov ++L2NybC51c2VydHJ1c3QuY29tL1VUTi1EQVRBQ29ycFNHQy5jcmwwDQYJKoZIhvcN ++AQEFBQADggEBAMbuUxdoFLJRIh6QWA2U/b3xcOWGLcM2MY9USEbnLQg3vGwKYOEO ++rVE04BKT6b64q7gmtOmWPSiPrmQH/uAB7MXjkesYoPF1ftsK5p+R26+udd8jkWjd ++FwBaS/9kbHDrARrQkNnHptZt9hPk/7XJ0h4qy7ElQyZ42TCbTg0evmnv3+r+LbPM +++bDdtRTKkdSytaX7ARmjR3mfnYyVhzT4HziS2jamEfpr62vp3EV4FTkG101B5CHI ++3C+H0be/SGB1pWLLJN47YaApIKa+xWycxOkKaSLvkTr6Jq/RW0GnOuL4OAdCq8Fb +++M5tug8EPzI0rNwEKNdwMBQmBsTkm5jVz3g= + -----END CERTIFICATE----- +- 3 s:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware +- i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware ++ 3 s:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC ++ i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC + -----BEGIN CERTIFICATE----- +-MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB +-lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug ++MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB ++kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug + Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt +-SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG +-A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe +-MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v +-d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh +-cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn +-0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ +-M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a +-MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd +-oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI +-DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy +-oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD +-VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 +-dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy +-bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF +-BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM +-//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli +-CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE +-CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t +-3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS +-KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== ++dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw ++IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG ++EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD 
++VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu ++dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN ++BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 ++E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ ++D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK ++4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq ++lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW ++bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB ++o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT ++MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js ++LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr ++BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB ++AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft ++Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj ++j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH ++KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv ++2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 ++mfnGV/TJVTl4uix5yaaIK/QI + -----END CERTIFICATE----- +diff --git a/Lib/test/ssl_cert.pem b/Lib/test/ssl_cert.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/ssl_cert.pem +@@ -0,0 +1,15 @@ ++-----BEGIN CERTIFICATE----- ++MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV ++BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u ++IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw ++MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH ++Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k ++YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw ++gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 ++6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt ++pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw ++FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd ++BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G ++lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 ++CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX ++-----END CERTIFICATE----- +diff --git a/Lib/test/ssl_key.passwd.pem b/Lib/test/ssl_key.passwd.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/ssl_key.passwd.pem +@@ -0,0 +1,18 @@ ++-----BEGIN RSA PRIVATE KEY----- ++Proc-Type: 4,ENCRYPTED ++DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A ++ ++kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c ++u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA ++AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr ++Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ ++YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P ++6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ ++noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 ++94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l ++7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo ++cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO ++zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt ++L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo ++2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== ++-----END RSA PRIVATE KEY----- +diff --git a/Lib/test/ssl_key.pem b/Lib/test/ssl_key.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/ssl_key.pem +@@ -0,0 +1,16 @@ ++-----BEGIN 
PRIVATE KEY----- ++MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANtb0+YrKuxevGpm ++LrjaUhZSgz6zFAmuGFmKmUbdjmfv9zSmmdsQIksK++jK0Be9LeZy20j6ahOfuVa0 ++ufEmPoP7Fy4hXegKZR9cCWcIe/A6H2xWF1IIJLRTLaU8ol/I7T+um5HD5AwAwNPP ++USNU0Eegmvp+xxWu3NX2m1Veot85AgMBAAECgYA3ZdZ673X0oexFlq7AAmrutkHt ++CL7LvwrpOiaBjhyTxTeSNWzvtQBkIU8DOI0bIazA4UreAFffwtvEuPmonDb3F+Iq ++SMAu42XcGyVZEl+gHlTPU9XRX7nTOXVt+MlRRRxL6t9GkGfUAXI3XxJDXW3c0vBK ++UL9xqD8cORXOfE06rQJBAP8mEX1ERkR64Ptsoe4281vjTlNfIbs7NMPkUnrn9N/Y ++BLhjNIfQ3HFZG8BTMLfX7kCS9D593DW5tV4Z9BP/c6cCQQDcFzCcVArNh2JSywOQ ++ZfTfRbJg/Z5Lt9Fkngv1meeGNPgIMLN8Sg679pAOOWmzdMO3V706rNPzSVMME7E5 ++oPIfAkEA8pDddarP5tCvTTgUpmTFbakm0KoTZm2+FzHcnA4jRh+XNTjTOv98Y6Ik ++eO5d1ZnKXseWvkZncQgxfdnMqqpj5wJAcNq/RVne1DbYlwWchT2Si65MYmmJ8t+F ++0mcsULqjOnEMwf5e+ptq5LzwbyrHZYq5FNk7ocufPv/ZQrcSSC+cFwJBAKvOJByS ++x56qyGeZLOQlWS2JS3KJo59XuLFGqcbgN9Om9xFa41Yb4N9NvplFivsvZdw3m1Q/ ++SPIXQuT8RMPDVNQ= ++-----END PRIVATE KEY----- +diff --git a/Lib/test/ssl_servers.py b/Lib/test/ssl_servers.py +new file mode 100644 +--- /dev/null ++++ b/Lib/test/ssl_servers.py +@@ -0,0 +1,209 @@ ++import os ++import sys ++import ssl ++import pprint ++import urllib ++import urlparse ++# Rename HTTPServer to _HTTPServer so as to avoid confusion with HTTPSServer. ++from BaseHTTPServer import HTTPServer as _HTTPServer, BaseHTTPRequestHandler ++from SimpleHTTPServer import SimpleHTTPRequestHandler ++ ++from test import test_support as support ++threading = support.import_module("threading") ++ ++here = os.path.dirname(__file__) ++ ++HOST = support.HOST ++CERTFILE = os.path.join(here, 'keycert.pem') ++ ++# This one's based on HTTPServer, which is based on SocketServer ++ ++class HTTPSServer(_HTTPServer): ++ ++ def __init__(self, server_address, handler_class, context): ++ _HTTPServer.__init__(self, server_address, handler_class) ++ self.context = context ++ ++ def __str__(self): ++ return ('<%s %s:%s>' % ++ (self.__class__.__name__, ++ self.server_name, ++ self.server_port)) ++ ++ def get_request(self): ++ # override this to wrap socket with SSL ++ try: ++ sock, addr = self.socket.accept() ++ sslconn = self.context.wrap_socket(sock, server_side=True) ++ except OSError as e: ++ # socket errors are silenced by the caller, print them here ++ if support.verbose: ++ sys.stderr.write("Got an error:\n%s\n" % e) ++ raise ++ return sslconn, addr ++ ++class RootedHTTPRequestHandler(SimpleHTTPRequestHandler): ++ # need to override translate_path to get a known root, ++ # instead of using os.curdir, since the test could be ++ # run from anywhere ++ ++ server_version = "TestHTTPS/1.0" ++ root = here ++ # Avoid hanging when a request gets interrupted by the client ++ timeout = 5 ++ ++ def translate_path(self, path): ++ """Translate a /-separated PATH to the local filename syntax. ++ ++ Components that mean special things to the local file system ++ (e.g. drive or directory names) are ignored. (XXX They should ++ probably be diagnosed.) 
++ ++ """ ++ # abandon query parameters ++ path = urlparse.urlparse(path)[2] ++ path = os.path.normpath(urllib.unquote(path)) ++ words = path.split('/') ++ words = filter(None, words) ++ path = self.root ++ for word in words: ++ drive, word = os.path.splitdrive(word) ++ head, word = os.path.split(word) ++ path = os.path.join(path, word) ++ return path ++ ++ def log_message(self, format, *args): ++ # we override this to suppress logging unless "verbose" ++ if support.verbose: ++ sys.stdout.write(" server (%s:%d %s):\n [%s] %s\n" % ++ (self.server.server_address, ++ self.server.server_port, ++ self.request.cipher(), ++ self.log_date_time_string(), ++ format%args)) ++ ++ ++class StatsRequestHandler(BaseHTTPRequestHandler): ++ """Example HTTP request handler which returns SSL statistics on GET ++ requests. ++ """ ++ ++ server_version = "StatsHTTPS/1.0" ++ ++ def do_GET(self, send_body=True): ++ """Serve a GET request.""" ++ sock = self.rfile.raw._sock ++ context = sock.context ++ stats = { ++ 'session_cache': context.session_stats(), ++ 'cipher': sock.cipher(), ++ 'compression': sock.compression(), ++ } ++ body = pprint.pformat(stats) ++ body = body.encode('utf-8') ++ self.send_response(200) ++ self.send_header("Content-type", "text/plain; charset=utf-8") ++ self.send_header("Content-Length", str(len(body))) ++ self.end_headers() ++ if send_body: ++ self.wfile.write(body) ++ ++ def do_HEAD(self): ++ """Serve a HEAD request.""" ++ self.do_GET(send_body=False) ++ ++ def log_request(self, format, *args): ++ if support.verbose: ++ BaseHTTPRequestHandler.log_request(self, format, *args) ++ ++ ++class HTTPSServerThread(threading.Thread): ++ ++ def __init__(self, context, host=HOST, handler_class=None): ++ self.flag = None ++ self.server = HTTPSServer((host, 0), ++ handler_class or RootedHTTPRequestHandler, ++ context) ++ self.port = self.server.server_port ++ threading.Thread.__init__(self) ++ self.daemon = True ++ ++ def __str__(self): ++ return "<%s %s>" % (self.__class__.__name__, self.server) ++ ++ def start(self, flag=None): ++ self.flag = flag ++ threading.Thread.start(self) ++ ++ def run(self): ++ if self.flag: ++ self.flag.set() ++ try: ++ self.server.serve_forever(0.05) ++ finally: ++ self.server.server_close() ++ ++ def stop(self): ++ self.server.shutdown() ++ ++ ++def make_https_server(case, context=None, certfile=CERTFILE, ++ host=HOST, handler_class=None): ++ if context is None: ++ context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ++ # We assume the certfile contains both private key and certificate ++ context.load_cert_chain(certfile) ++ server = HTTPSServerThread(context, host, handler_class) ++ flag = threading.Event() ++ server.start(flag) ++ flag.wait() ++ def cleanup(): ++ if support.verbose: ++ sys.stdout.write('stopping HTTPS server\n') ++ server.stop() ++ if support.verbose: ++ sys.stdout.write('joining HTTPS thread\n') ++ server.join() ++ case.addCleanup(cleanup) ++ return server ++ ++ ++if __name__ == "__main__": ++ import argparse ++ parser = argparse.ArgumentParser( ++ description='Run a test HTTPS server. 
' ++ 'By default, the current directory is served.') ++ parser.add_argument('-p', '--port', type=int, default=4433, ++ help='port to listen on (default: %(default)s)') ++ parser.add_argument('-q', '--quiet', dest='verbose', default=True, ++ action='store_false', help='be less verbose') ++ parser.add_argument('-s', '--stats', dest='use_stats_handler', default=False, ++ action='store_true', help='always return stats page') ++ parser.add_argument('--curve-name', dest='curve_name', type=str, ++ action='store', ++ help='curve name for EC-based Diffie-Hellman') ++ parser.add_argument('--ciphers', dest='ciphers', type=str, ++ help='allowed cipher list') ++ parser.add_argument('--dh', dest='dh_file', type=str, action='store', ++ help='PEM file containing DH parameters') ++ args = parser.parse_args() ++ ++ support.verbose = args.verbose ++ if args.use_stats_handler: ++ handler_class = StatsRequestHandler ++ else: ++ handler_class = RootedHTTPRequestHandler ++ handler_class.root = os.getcwd() ++ context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ++ context.load_cert_chain(CERTFILE) ++ if args.curve_name: ++ context.set_ecdh_curve(args.curve_name) ++ if args.dh_file: ++ context.load_dh_params(args.dh_file) ++ if args.ciphers: ++ context.set_ciphers(args.ciphers) ++ ++ server = HTTPSServer(("", args.port), handler_class, context) ++ if args.verbose: ++ print("Listening on https://localhost:{0.port}".format(args)) ++ server.serve_forever(0.1) +diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py +--- a/Lib/test/test_support.py ++++ b/Lib/test/test_support.py +@@ -39,7 +39,7 @@ except ImportError: + "threading_cleanup", "reap_children", "cpython_only", + "check_impl_detail", "get_attribute", "py3k_bytes", + "import_fresh_module", "threading_cleanup", "reap_children", +- "strip_python_stderr"] ++ "strip_python_stderr", "IPV6_ENABLED"] + + class Error(Exception): + """Base class for regression test exceptions.""" +@@ -465,6 +465,23 @@ def bind_port(sock, host=HOST): + port = sock.getsockname()[1] + return port + ++def _is_ipv6_enabled(): ++ """Check whether IPv6 is enabled on this host.""" ++ if socket.has_ipv6: ++ sock = None ++ try: ++ sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) ++ sock.bind((HOSTv6, 0)) ++ return True ++ except OSError: ++ pass ++ finally: ++ if sock: ++ sock.close() ++ return False ++ ++IPV6_ENABLED = _is_ipv6_enabled() ++ + FUZZ = 1e-6 + + def fcmp(x, y): # fuzzy comparison function +diff --git a/Tools/ssl/make_ssl_data.py b/Tools/ssl/make_ssl_data.py +new file mode 100755 +--- /dev/null ++++ b/Tools/ssl/make_ssl_data.py +@@ -0,0 +1,68 @@ ++#! /usr/bin/env python3 ++ ++""" ++This script should be called *manually* when we want to upgrade SSLError ++`library` and `reason` mnemnonics to a more recent OpenSSL version. ++ ++It takes two arguments: ++- the path to the OpenSSL include files' directory ++ (e.g. 
openssl-1.0.1-beta3/include/openssl/) ++- the path to the C file to be generated ++ (probably Modules/_ssl_data.h) ++""" ++ ++import datetime ++import os ++import re ++import sys ++ ++ ++def parse_error_codes(h_file, prefix): ++ pat = re.compile(r"#define\W+(%s([\w]+))\W+(\d+)\b" % re.escape(prefix)) ++ codes = [] ++ with open(h_file, "r", encoding="latin1") as f: ++ for line in f: ++ match = pat.search(line) ++ if match: ++ code, name, num = match.groups() ++ num = int(num) ++ codes.append((code, name, num)) ++ return codes ++ ++if __name__ == "__main__": ++ openssl_inc = sys.argv[1] ++ outfile = sys.argv[2] ++ use_stdout = outfile == '-' ++ f = sys.stdout if use_stdout else open(outfile, "w") ++ error_libraries = ( ++ # (library code, mnemonic, error prefix, header file) ++ ('ERR_LIB_PEM', 'PEM', 'PEM_R_', 'pem.h'), ++ ('ERR_LIB_SSL', 'SSL', 'SSL_R_', 'ssl.h'), ++ ('ERR_LIB_X509', 'X509', 'X509_R_', 'x509.h'), ++ ) ++ def w(l): ++ f.write(l + "\n") ++ w("/* File generated by Tools/ssl/make_ssl_data.py */") ++ w("/* Generated on %s */" % datetime.datetime.now().isoformat()) ++ w("") ++ ++ w("static struct py_ssl_library_code library_codes[] = {") ++ for libcode, mnemo, _, _ in error_libraries: ++ w(' {"%s", %s},' % (mnemo, libcode)) ++ w(' { NULL }') ++ w('};') ++ w("") ++ ++ w("static struct py_ssl_error_code error_codes[] = {") ++ for libcode, _, prefix, h_file in error_libraries: ++ codes = parse_error_codes(os.path.join(openssl_inc, h_file), prefix) ++ for code, name, num in sorted(codes): ++ w(' #ifdef %s' % (code)) ++ w(' {"%s", %s, %s},' % (name, libcode, code)) ++ w(' #else') ++ w(' {"%s", %s, %d},' % (name, libcode, num)) ++ w(' #endif') ++ w(' { NULL }') ++ w('};') ++ if not use_stdout: ++ f.close() +diff --git a/Tools/ssl/test_multiple_versions.py b/Tools/ssl/test_multiple_versions.py +new file mode 100644 +--- /dev/null ++++ b/Tools/ssl/test_multiple_versions.py +@@ -0,0 +1,241 @@ ++#./python ++"""Run Python tests with multiple installations of OpenSSL ++ ++The script ++ ++ (1) downloads OpenSSL tar bundle ++ (2) extracts it to ../openssl/src/openssl-VERSION/ ++ (3) compiles OpenSSL ++ (4) installs OpenSSL into ../openssl/VERSION/ ++ (5) forces a recompilation of Python modules using the ++ header and library files from ../openssl/VERSION/ ++ (6) runs Python's test suite ++ ++The script must be run with Python's build directory as current working ++directory: ++ ++ ./python Tools/ssl/test_multiple_versions.py ++ ++The script uses LD_RUN_PATH, LD_LIBRARY_PATH, CPPFLAGS and LDFLAGS to bend ++search paths for header files and shared libraries. It's known to work on ++Linux with GCC 4.x. 
++ ++(c) 2013 Christian Heimes ++""" ++import logging ++import os ++import tarfile ++import shutil ++import subprocess ++import sys ++from urllib import urlopen ++ ++log = logging.getLogger("multissl") ++ ++OPENSSL_VERSIONS = [ ++ "0.9.7m", "0.9.8i", "0.9.8l", "0.9.8m", "0.9.8y", "1.0.0k", "1.0.1e" ++] ++FULL_TESTS = [ ++ "test_asyncio", "test_ftplib", "test_hashlib", "test_httplib", ++ "test_imaplib", "test_nntplib", "test_poplib", "test_smtplib", ++ "test_smtpnet", "test_urllib2_localnet", "test_venv" ++] ++MINIMAL_TESTS = ["test_ssl", "test_hashlib"] ++CADEFAULT = True ++HERE = os.path.abspath(os.getcwd()) ++DEST_DIR = os.path.abspath(os.path.join(HERE, os.pardir, "openssl")) ++ ++ ++class BuildSSL(object): ++ url_template = "https://www.openssl.org/source/openssl-{}.tar.gz" ++ ++ module_files = ["Modules/_ssl.c", ++ "Modules/socketmodule.c", ++ "Modules/_hashopenssl.c"] ++ ++ def __init__(self, version, openssl_compile_args=(), destdir=DEST_DIR): ++ self._check_python_builddir() ++ self.version = version ++ self.openssl_compile_args = openssl_compile_args ++ # installation directory ++ self.install_dir = os.path.join(destdir, version) ++ # source file ++ self.src_file = os.path.join(destdir, "src", ++ "openssl-{}.tar.gz".format(version)) ++ # build directory (removed after install) ++ self.build_dir = os.path.join(destdir, "src", ++ "openssl-{}".format(version)) ++ ++ @property ++ def openssl_cli(self): ++ """openssl CLI binary""" ++ return os.path.join(self.install_dir, "bin", "openssl") ++ ++ @property ++ def openssl_version(self): ++ """output of 'bin/openssl version'""" ++ env = os.environ.copy() ++ env["LD_LIBRARY_PATH"] = self.lib_dir ++ cmd = [self.openssl_cli, "version"] ++ return self._subprocess_output(cmd, env=env) ++ ++ @property ++ def pyssl_version(self): ++ """Value of ssl.OPENSSL_VERSION""" ++ env = os.environ.copy() ++ env["LD_LIBRARY_PATH"] = self.lib_dir ++ cmd = ["./python", "-c", "import ssl; print(ssl.OPENSSL_VERSION)"] ++ return self._subprocess_output(cmd, env=env) ++ ++ @property ++ def include_dir(self): ++ return os.path.join(self.install_dir, "include") ++ ++ @property ++ def lib_dir(self): ++ return os.path.join(self.install_dir, "lib") ++ ++ @property ++ def has_openssl(self): ++ return os.path.isfile(self.openssl_cli) ++ ++ @property ++ def has_src(self): ++ return os.path.isfile(self.src_file) ++ ++ def _subprocess_call(self, cmd, stdout=subprocess.DEVNULL, env=None, ++ **kwargs): ++ log.debug("Call '{}'".format(" ".join(cmd))) ++ return subprocess.check_call(cmd, stdout=stdout, env=env, **kwargs) ++ ++ def _subprocess_output(self, cmd, env=None, **kwargs): ++ log.debug("Call '{}'".format(" ".join(cmd))) ++ out = subprocess.check_output(cmd, env=env) ++ return out.strip().decode("utf-8") ++ ++ def _check_python_builddir(self): ++ if not os.path.isfile("python") or not os.path.isfile("setup.py"): ++ raise ValueError("Script must be run in Python build directory") ++ ++ def _download_openssl(self): ++ """Download OpenSSL source dist""" ++ src_dir = os.path.dirname(self.src_file) ++ if not os.path.isdir(src_dir): ++ os.makedirs(src_dir) ++ url = self.url_template.format(self.version) ++ log.info("Downloading OpenSSL from {}".format(url)) ++ req = urlopen(url, cadefault=CADEFAULT) ++ # KISS, read all, write all ++ data = req.read() ++ log.info("Storing {}".format(self.src_file)) ++ with open(self.src_file, "wb") as f: ++ f.write(data) ++ ++ def _unpack_openssl(self): ++ """Unpack tar.gz bundle""" ++ # cleanup ++ if os.path.isdir(self.build_dir): ++ 
shutil.rmtree(self.build_dir) ++ os.makedirs(self.build_dir) ++ ++ tf = tarfile.open(self.src_file) ++ base = "openssl-{}/".format(self.version) ++ # force extraction into build dir ++ members = tf.getmembers() ++ for member in members: ++ if not member.name.startswith(base): ++ raise ValueError(member.name) ++ member.name = member.name[len(base):] ++ log.info("Unpacking files to {}".format(self.build_dir)) ++ tf.extractall(self.build_dir, members) ++ ++ def _build_openssl(self): ++ """Now build openssl""" ++ log.info("Running build in {}".format(self.install_dir)) ++ cwd = self.build_dir ++ cmd = ["./config", "shared", "--prefix={}".format(self.install_dir)] ++ cmd.extend(self.openssl_compile_args) ++ self._subprocess_call(cmd, cwd=cwd) ++ self._subprocess_call(["make"], cwd=cwd) ++ ++ def _install_openssl(self, remove=True): ++ self._subprocess_call(["make", "install"], cwd=self.build_dir) ++ if remove: ++ shutil.rmtree(self.build_dir) ++ ++ def install_openssl(self): ++ if not self.has_openssl: ++ if not self.has_src: ++ self._download_openssl() ++ else: ++ log.debug("Already has src {}".format(self.src_file)) ++ self._unpack_openssl() ++ self._build_openssl() ++ self._install_openssl() ++ else: ++ log.info("Already has installation {}".format(self.install_dir)) ++ # validate installation ++ version = self.openssl_version ++ if self.version not in version: ++ raise ValueError(version) ++ ++ def touch_pymods(self): ++ # force a rebuild of all modules that use OpenSSL APIs ++ for fname in self.module_files: ++ os.utime(fname) ++ ++ def recompile_pymods(self): ++ log.info("Using OpenSSL build from {}".format(self.build_dir)) ++ # overwrite header and library search paths ++ env = os.environ.copy() ++ env["CPPFLAGS"] = "-I{}".format(self.include_dir) ++ env["LDFLAGS"] = "-L{}".format(self.lib_dir) ++ # set rpath ++ env["LD_RUN_PATH"] = self.lib_dir ++ ++ log.info("Rebuilding Python modules") ++ self.touch_pymods() ++ cmd = ["./python", "setup.py", "build"] ++ self._subprocess_call(cmd, env=env) ++ ++ def check_pyssl(self): ++ version = self.pyssl_version ++ if self.version not in version: ++ raise ValueError(version) ++ ++ def run_pytests(self, *args): ++ cmd = ["./python", "-m", "test"] ++ cmd.extend(args) ++ self._subprocess_call(cmd, stdout=None) ++ ++ def run_python_tests(self, *args): ++ self.recompile_pymods() ++ self.check_pyssl() ++ self.run_pytests(*args) ++ ++ ++def main(*args): ++ builders = [] ++ for version in OPENSSL_VERSIONS: ++ if version in ("0.9.8i", "0.9.8l"): ++ openssl_compile_args = ("no-asm",) ++ else: ++ openssl_compile_args = () ++ builder = BuildSSL(version, openssl_compile_args) ++ builder.install_openssl() ++ builders.append(builder) ++ ++ for builder in builders: ++ builder.run_python_tests(*args) ++ # final touch ++ builder.touch_pymods() ++ ++ ++if __name__ == "__main__": ++ logging.basicConfig(level=logging.INFO, ++ format="*** %(levelname)s %(message)s") ++ args = sys.argv[1:] ++ if not args: ++ args = ["-unetwork", "-v"] ++ args.extend(FULL_TESTS) ++ main(*args) + +diff -up Python-2.7.5/Doc/library/ssl.rst.ssl Python-2.7.5/Doc/library/ssl.rst +--- Python-2.7.5/Doc/library/ssl.rst.ssl 2015-02-24 11:37:44.278196135 +0100 ++++ Python-2.7.5/Doc/library/ssl.rst 2015-02-24 09:24:25.135911989 +0100 +@@ -28,7 +28,8 @@ probably additional platforms, as long a + + Some behavior may be platform dependent, since calls are made to the + operating system socket APIs. The installed version of OpenSSL may also +- cause variations in behavior. 
++ cause variations in behavior. For example, TLSv1.1 and TLSv1.2 come with ++ openssl version 1.0.1. + + This section documents the objects and functions in the ``ssl`` module; for more + general information about TLS, SSL, and certificates, the reader is referred to +@@ -37,23 +38,101 @@ the documents in the "See Also" section + This module provides a class, :class:`ssl.SSLSocket`, which is derived from the + :class:`socket.socket` type, and provides a socket-like wrapper that also + encrypts and decrypts the data going over the socket with SSL. It supports +-additional :meth:`read` and :meth:`write` methods, along with a method, +-:meth:`getpeercert`, to retrieve the certificate of the other side of the +-connection, and a method, :meth:`cipher`, to retrieve the cipher being used for +-the secure connection. ++additional methods such as :meth:`getpeercert`, which retrieves the ++certificate of the other side of the connection, and :meth:`cipher`,which ++retrieves the cipher being used for the secure connection. ++ ++For more sophisticated applications, the :class:`ssl.SSLContext` class ++helps manage settings and certificates, which can then be inherited ++by SSL sockets created through the :meth:`SSLContext.wrap_socket` method. ++ + + Functions, Constants, and Exceptions + ------------------------------------ + + .. exception:: SSLError + +- Raised to signal an error from the underlying SSL implementation. This +- signifies some problem in the higher-level encryption and authentication +- layer that's superimposed on the underlying network connection. This error +- is a subtype of :exc:`socket.error`, which in turn is a subtype of +- :exc:`IOError`. ++ Raised to signal an error from the underlying SSL implementation (currently ++ provided by the OpenSSL library). This signifies some problem in the ++ higher-level encryption and authentication layer that's superimposed on the ++ underlying network connection. This error is a subtype of ++ :exc:`socket.error`, which in turn is a subtype of :exc:`IOError`. The ++ error code and message of :exc:`SSLError` instances are provided by the ++ OpenSSL library. ++ ++ .. attribute:: library ++ ++ A string mnemonic designating the OpenSSL submodule in which the error ++ occurred, such as ``SSL``, ``PEM`` or ``X509``. The range of possible ++ values depends on the OpenSSL version. ++ ++ .. versionadded:: 2.7.9 ++ ++ .. attribute:: reason ++ ++ A string mnemonic designating the reason this error occurred, for ++ example ``CERTIFICATE_VERIFY_FAILED``. The range of possible ++ values depends on the OpenSSL version. ++ ++ .. versionadded:: 2.7.9 ++ ++.. exception:: SSLZeroReturnError ++ ++ A subclass of :exc:`SSLError` raised when trying to read or write and ++ the SSL connection has been closed cleanly. Note that this doesn't ++ mean that the underlying transport (read TCP) has been closed. ++ ++ .. versionadded:: 2.7.9 ++ ++.. exception:: SSLWantReadError ++ ++ A subclass of :exc:`SSLError` raised by a :ref:`non-blocking SSL socket ++ ` when trying to read or write data, but more data needs ++ to be received on the underlying TCP transport before the request can be ++ fulfilled. ++ ++ .. versionadded:: 2.7.9 ++ ++.. exception:: SSLWantWriteError ++ ++ A subclass of :exc:`SSLError` raised by a :ref:`non-blocking SSL socket ++ ` when trying to read or write data, but more data needs ++ to be sent on the underlying TCP transport before the request can be ++ fulfilled. ++ ++ .. versionadded:: 2.7.9 + +-.. 
function:: wrap_socket (sock, keyfile=None, certfile=None, server_side=False, cert_reqs=CERT_NONE, ssl_version={see docs}, ca_certs=None, do_handshake_on_connect=True, suppress_ragged_eofs=True, ciphers=None) ++.. exception:: SSLSyscallError ++ ++ A subclass of :exc:`SSLError` raised when a system error was encountered ++ while trying to fulfill an operation on a SSL socket. Unfortunately, ++ there is no easy way to inspect the original errno number. ++ ++ .. versionadded:: 2.7.9 ++ ++.. exception:: SSLEOFError ++ ++ A subclass of :exc:`SSLError` raised when the SSL connection has been ++ terminated abruptly. Generally, you shouldn't try to reuse the underlying ++ transport when this error is encountered. ++ ++ .. versionadded:: 2.7.9 ++ ++.. exception:: CertificateError ++ ++ Raised to signal an error with a certificate (such as mismatching ++ hostname). Certificate errors detected by OpenSSL, though, raise ++ an :exc:`SSLError`. ++ ++ ++Socket creation ++^^^^^^^^^^^^^^^ ++ ++The following function allows for standalone socket creation. Starting from ++Python 2.7.9, it can be more flexible to use :meth:`SSLContext.wrap_socket` ++instead. ++ ++.. function:: wrap_socket(sock, keyfile=None, certfile=None, server_side=False, cert_reqs=CERT_NONE, ssl_version={see docs}, ca_certs=None, do_handshake_on_connect=True, suppress_ragged_eofs=True, ciphers=None) + + Takes an instance ``sock`` of :class:`socket.socket`, and returns an instance + of :class:`ssl.SSLSocket`, a subtype of :class:`socket.socket`, which wraps +@@ -70,19 +149,6 @@ Functions, Constants, and Exceptions + connection. See the discussion of :ref:`ssl-certificates` for more + information on how the certificate is stored in the ``certfile``. + +- Often the private key is stored in the same file as the certificate; in this +- case, only the ``certfile`` parameter need be passed. If the private key is +- stored in a separate file, both parameters must be used. If the private key +- is stored in the ``certfile``, it should come before the first certificate in +- the certificate chain:: +- +- -----BEGIN RSA PRIVATE KEY----- +- ... (private key in base64 encoding) ... +- -----END RSA PRIVATE KEY----- +- -----BEGIN CERTIFICATE----- +- ... (certificate in base64 PEM encoding) ... +- -----END CERTIFICATE----- +- + The parameter ``server_side`` is a boolean which identifies whether + server-side or client-side behavior is desired from this socket. + +@@ -112,14 +178,16 @@ Functions, Constants, and Exceptions + + .. table:: + +- ======================== ========= ========= ========== ========= +- *client* / **server** **SSLv2** **SSLv3** **SSLv23** **TLSv1** +- ------------------------ --------- --------- ---------- --------- +- *SSLv2* yes no yes no +- *SSLv3* no yes yes no +- *SSLv23* yes no yes no +- *TLSv1* no no yes yes +- ======================== ========= ========= ========== ========= ++ ======================== ========= ========= ========== ========= =========== =========== ++ *client* / **server** **SSLv2** **SSLv3** **SSLv23** **TLSv1** **TLSv1.1** **TLSv1.2** ++ ------------------------ --------- --------- ---------- --------- ----------- ----------- ++ *SSLv2* yes no yes no no no ++ *SSLv3* no yes yes no no no ++ *SSLv23* yes no yes no no no ++ *TLSv1* no no yes yes no no ++ *TLSv1.1* no no yes no yes no ++ *TLSv1.2* no no yes no no yes ++ ======================== ========= ========= ========== ========= =========== =========== + + .. 
note:: + +@@ -146,22 +214,79 @@ Functions, Constants, and Exceptions + The parameter ``suppress_ragged_eofs`` specifies how the + :meth:`SSLSocket.read` method should signal unexpected EOF from the other end + of the connection. If specified as :const:`True` (the default), it returns a +- normal EOF in response to unexpected EOF errors raised from the underlying +- socket; if :const:`False`, it will raise the exceptions back to the caller. ++ normal EOF (an empty bytes object) in response to unexpected EOF errors ++ raised from the underlying socket; if :const:`False`, it will raise the ++ exceptions back to the caller. + + .. versionchanged:: 2.7 + New optional argument *ciphers*. + ++ ++Context creation ++^^^^^^^^^^^^^^^^ ++ ++A convenience function helps create :class:`SSLContext` objects for common ++purposes. ++ ++.. function:: create_default_context(purpose=Purpose.SERVER_AUTH, cafile=None, capath=None, cadata=None) ++ ++ Return a new :class:`SSLContext` object with default settings for ++ the given *purpose*. The settings are chosen by the :mod:`ssl` module, ++ and usually represent a higher security level than when calling the ++ :class:`SSLContext` constructor directly. ++ ++ *cafile*, *capath*, *cadata* represent optional CA certificates to ++ trust for certificate verification, as in ++ :meth:`SSLContext.load_verify_locations`. If all three are ++ :const:`None`, this function can choose to trust the system's default ++ CA certificates instead. ++ ++ The settings in Python 2.7.9 are: :data:`PROTOCOL_SSLv23`, ++ :data:`OP_NO_SSLv2`, and :data:`OP_NO_SSLv3` with high encryption cipher ++ suites without RC4 and without unauthenticated cipher suites. Passing ++ :data:`~Purpose.SERVER_AUTH` as *purpose* sets ++ :data:`~SSLContext.verify_mode` to :data:`CERT_REQUIRED` and either loads CA ++ certificates (when at least one of *cafile*, *capath* or *cadata* is given) ++ or uses :meth:`SSLContext.load_default_certs` to load default CA ++ certificates. ++ ++ .. note:: ++ The protocol, options, cipher and other settings may change to more ++ restrictive values anytime without prior deprecation. The values ++ represent a fair balance between compatibility and security. ++ ++ If your application needs specific settings, you should create a ++ :class:`SSLContext` and apply the settings yourself. ++ ++ .. note:: ++ If you find that when certain older clients or servers attempt to connect ++ with a :class:`SSLContext` created by this function that they get an ++ error stating "Protocol or cipher suite mismatch", it may be that they ++ only support SSL3.0 which this function excludes using the ++ :data:`OP_NO_SSLv3`. SSL3.0 has problematic security due to a number of ++ poor implementations and it's reliance on MD5 within the protocol. If you ++ wish to continue to use this function but still allow SSL 3.0 connections ++ you can re-enable them using:: ++ ++ ctx = ssl.create_default_context(Purpose.CLIENT_AUTH) ++ ctx.options &= ~ssl.OP_NO_SSLv3 ++ ++ .. versionadded:: 2.7.9 ++ ++ ++Random generation ++^^^^^^^^^^^^^^^^^ ++ + .. function:: RAND_status() + + Returns True if the SSL pseudo-random number generator has been seeded with +- 'enough' randomness, and False otherwise. You can use :func:`ssl.RAND_egd` ++ 'enough' randomness, and ``False`` otherwise. You can use :func:`ssl.RAND_egd` + and :func:`ssl.RAND_add` to increase the randomness of the pseudo-random + number generator. + + .. 
function:: RAND_egd(path) + +- If you are running an entropy-gathering daemon (EGD) somewhere, and ``path`` ++ If you are running an entropy-gathering daemon (EGD) somewhere, and *path* + is the pathname of a socket connection open to it, this will read 256 bytes + of randomness from the socket, and add it to the SSL pseudo-random number + generator to increase the security of generated secret keys. This is +@@ -172,28 +297,66 @@ Functions, Constants, and Exceptions + + .. function:: RAND_add(bytes, entropy) + +- Mixes the given ``bytes`` into the SSL pseudo-random number generator. The +- parameter ``entropy`` (a float) is a lower bound on the entropy contained in ++ Mixes the given *bytes* into the SSL pseudo-random number generator. The ++ parameter *entropy* (a float) is a lower bound on the entropy contained in + string (so you can always use :const:`0.0`). See :rfc:`1750` for more + information on sources of entropy. + +-.. function:: cert_time_to_seconds(timestring) ++Certificate handling ++^^^^^^^^^^^^^^^^^^^^ ++ ++.. function:: match_hostname(cert, hostname) ++ ++ Verify that *cert* (in decoded format as returned by ++ :meth:`SSLSocket.getpeercert`) matches the given *hostname*. The rules ++ applied are those for checking the identity of HTTPS servers as outlined ++ in :rfc:`2818` and :rfc:`6125`, except that IP addresses are not currently ++ supported. In addition to HTTPS, this function should be suitable for ++ checking the identity of servers in various SSL-based protocols such as ++ FTPS, IMAPS, POPS and others. + +- Returns a floating-point value containing a normal seconds-after-the-epoch +- time value, given the time-string representing the "notBefore" or "notAfter" +- date from a certificate. +- +- Here's an example:: +- +- >>> import ssl +- >>> ssl.cert_time_to_seconds("May 9 00:00:00 2007 GMT") +- 1178694000.0 +- >>> import time +- >>> time.ctime(ssl.cert_time_to_seconds("May 9 00:00:00 2007 GMT")) +- 'Wed May 9 00:00:00 2007' +- >>> ++ :exc:`CertificateError` is raised on failure. On success, the function ++ returns nothing:: + +-.. function:: get_server_certificate (addr, ssl_version=PROTOCOL_SSLv3, ca_certs=None) ++ >>> cert = {'subject': ((('commonName', 'example.com'),),)} ++ >>> ssl.match_hostname(cert, "example.com") ++ >>> ssl.match_hostname(cert, "example.org") ++ Traceback (most recent call last): ++ File "", line 1, in ++ File "/home/py3k/Lib/ssl.py", line 130, in match_hostname ++ ssl.CertificateError: hostname 'example.org' doesn't match 'example.com' ++ ++ .. versionadded:: 2.7.9 ++ ++ ++.. function:: cert_time_to_seconds(cert_time) ++ ++ Return the time in seconds since the Epoch, given the ``cert_time`` ++ string representing the "notBefore" or "notAfter" date from a ++ certificate in ``"%b %d %H:%M:%S %Y %Z"`` strptime format (C ++ locale). ++ ++ Here's an example: ++ ++ .. doctest:: newcontext ++ ++ >>> import ssl ++ >>> timestamp = ssl.cert_time_to_seconds("Jan 5 09:34:43 2018 GMT") ++ >>> timestamp ++ 1515144883 ++ >>> from datetime import datetime ++ >>> print(datetime.utcfromtimestamp(timestamp)) ++ 2018-01-05 09:34:43 ++ ++ "notBefore" or "notAfter" dates must use GMT (:rfc:`5280`). ++ ++ .. versionchanged:: 2.7.9 ++ Interpret the input time as a time in UTC as specified by 'GMT' ++ timezone in the input string. Local timezone was used ++ previously. Return an integer (no fractions of a second in the ++ input format) ++ ++.. 
function:: get_server_certificate(addr, ssl_version=PROTOCOL_SSLv23, ca_certs=None) + + Given the address ``addr`` of an SSL-protected server, as a (*hostname*, + *port-number*) pair, fetches the server's certificate, and returns it as a +@@ -204,36 +367,144 @@ Functions, Constants, and Exceptions + will attempt to validate the server certificate against that set of root + certificates, and will fail if the validation attempt fails. + +-.. function:: DER_cert_to_PEM_cert (DER_cert_bytes) ++ .. versionchanged:: 2.7.9 ++ ++ This function is now IPv6-compatible, and the default *ssl_version* is ++ changed from :data:`PROTOCOL_SSLv3` to :data:`PROTOCOL_SSLv23` for ++ maximum compatibility with modern servers. ++ ++.. function:: DER_cert_to_PEM_cert(DER_cert_bytes) + + Given a certificate as a DER-encoded blob of bytes, returns a PEM-encoded + string version of the same certificate. + +-.. function:: PEM_cert_to_DER_cert (PEM_cert_string) ++.. function:: PEM_cert_to_DER_cert(PEM_cert_string) + + Given a certificate as an ASCII PEM string, returns a DER-encoded sequence of + bytes for that same certificate. + ++.. function:: get_default_verify_paths() ++ ++ Returns a named tuple with paths to OpenSSL's default cafile and capath. ++ The paths are the same as used by ++ :meth:`SSLContext.set_default_verify_paths`. The return value is a ++ :term:`named tuple` ``DefaultVerifyPaths``: ++ ++ * :attr:`cafile` - resolved path to cafile or None if the file doesn't exist, ++ * :attr:`capath` - resolved path to capath or None if the directory doesn't exist, ++ * :attr:`openssl_cafile_env` - OpenSSL's environment key that points to a cafile, ++ * :attr:`openssl_cafile` - hard coded path to a cafile, ++ * :attr:`openssl_capath_env` - OpenSSL's environment key that points to a capath, ++ * :attr:`openssl_capath` - hard coded path to a capath directory ++ ++ .. versionadded:: 2.7.9 ++ ++.. function:: enum_certificates(store_name) ++ ++ Retrieve certificates from Windows' system cert store. *store_name* may be ++ one of ``CA``, ``ROOT`` or ``MY``. Windows may provide additional cert ++ stores, too. ++ ++ The function returns a list of (cert_bytes, encoding_type, trust) tuples. ++ The encoding_type specifies the encoding of cert_bytes. It is either ++ :const:`x509_asn` for X.509 ASN.1 data or :const:`pkcs_7_asn` for ++ PKCS#7 ASN.1 data. Trust specifies the purpose of the certificate as a set ++ of OIDS or exactly ``True`` if the certificate is trustworthy for all ++ purposes. ++ ++ Example:: ++ ++ >>> ssl.enum_certificates("CA") ++ [(b'data...', 'x509_asn', {'1.3.6.1.5.5.7.3.1', '1.3.6.1.5.5.7.3.2'}), ++ (b'data...', 'x509_asn', True)] ++ ++ Availability: Windows. ++ ++ .. versionadded:: 2.7.9 ++ ++.. function:: enum_crls(store_name) ++ ++ Retrieve CRLs from Windows' system cert store. *store_name* may be ++ one of ``CA``, ``ROOT`` or ``MY``. Windows may provide additional cert ++ stores, too. ++ ++ The function returns a list of (cert_bytes, encoding_type, trust) tuples. ++ The encoding_type specifies the encoding of cert_bytes. It is either ++ :const:`x509_asn` for X.509 ASN.1 data or :const:`pkcs_7_asn` for ++ PKCS#7 ASN.1 data. ++ ++ Availability: Windows. ++ ++ .. versionadded:: 2.7.9 ++ ++ ++Constants ++^^^^^^^^^ ++ + .. data:: CERT_NONE + +- Value to pass to the ``cert_reqs`` parameter to :func:`sslobject` when no +- certificates will be required or validated from the other side of the socket +- connection. 
++ Possible value for :attr:`SSLContext.verify_mode`, or the ``cert_reqs`` ++ parameter to :func:`wrap_socket`. In this mode (the default), no ++ certificates will be required from the other side of the socket connection. ++ If a certificate is received from the other end, no attempt to validate it ++ is made. ++ ++ See the discussion of :ref:`ssl-security` below. + + .. data:: CERT_OPTIONAL + +- Value to pass to the ``cert_reqs`` parameter to :func:`sslobject` when no +- certificates will be required from the other side of the socket connection, +- but if they are provided, will be validated. Note that use of this setting +- requires a valid certificate validation file also be passed as a value of the +- ``ca_certs`` parameter. ++ Possible value for :attr:`SSLContext.verify_mode`, or the ``cert_reqs`` ++ parameter to :func:`wrap_socket`. In this mode no certificates will be ++ required from the other side of the socket connection; but if they ++ are provided, validation will be attempted and an :class:`SSLError` ++ will be raised on failure. ++ ++ Use of this setting requires a valid set of CA certificates to ++ be passed, either to :meth:`SSLContext.load_verify_locations` or as a ++ value of the ``ca_certs`` parameter to :func:`wrap_socket`. + + .. data:: CERT_REQUIRED + +- Value to pass to the ``cert_reqs`` parameter to :func:`sslobject` when +- certificates will be required from the other side of the socket connection. +- Note that use of this setting requires a valid certificate validation file +- also be passed as a value of the ``ca_certs`` parameter. ++ Possible value for :attr:`SSLContext.verify_mode`, or the ``cert_reqs`` ++ parameter to :func:`wrap_socket`. In this mode, certificates are ++ required from the other side of the socket connection; an :class:`SSLError` ++ will be raised if no certificate is provided, or if its validation fails. ++ ++ Use of this setting requires a valid set of CA certificates to ++ be passed, either to :meth:`SSLContext.load_verify_locations` or as a ++ value of the ``ca_certs`` parameter to :func:`wrap_socket`. ++ ++.. data:: VERIFY_DEFAULT ++ ++ Possible value for :attr:`SSLContext.verify_flags`. In this mode, ++ certificate revocation lists (CRLs) are not checked. By default OpenSSL ++ does neither require nor verify CRLs. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: VERIFY_CRL_CHECK_LEAF ++ ++ Possible value for :attr:`SSLContext.verify_flags`. In this mode, only the ++ peer cert is check but non of the intermediate CA certificates. The mode ++ requires a valid CRL that is signed by the peer cert's issuer (its direct ++ ancestor CA). If no proper has been loaded ++ :attr:`SSLContext.load_verify_locations`, validation will fail. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: VERIFY_CRL_CHECK_CHAIN ++ ++ Possible value for :attr:`SSLContext.verify_flags`. In this mode, CRLs of ++ all certificates in the peer cert chain are checked. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: VERIFY_X509_STRICT ++ ++ Possible value for :attr:`SSLContext.verify_flags` to disable workarounds ++ for broken X.509 certificates. ++ ++ .. versionadded:: 2.7.9 + + .. data:: PROTOCOL_SSLv2 + +@@ -260,9 +531,136 @@ Functions, Constants, and Exceptions + + .. data:: PROTOCOL_TLSv1 + +- Selects TLS version 1 as the channel encryption protocol. This is the most ++ Selects TLS version 1.0 as the channel encryption protocol. ++ ++.. data:: PROTOCOL_TLSv1_1 ++ ++ Selects TLS version 1.1 as the channel encryption protocol. ++ Available only with openssl version 1.0.1+. ++ ++ .. 
versionadded:: 2.7.9 ++ ++.. data:: PROTOCOL_TLSv1_2 ++ ++ Selects TLS version 1.2 as the channel encryption protocol. This is the most + modern version, and probably the best choice for maximum protection, if both +- sides can speak it. ++ sides can speak it. Available only with openssl version 1.0.1+. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_ALL ++ ++ Enables workarounds for various bugs present in other SSL implementations. ++ This option is set by default. It does not necessarily set the same ++ flags as OpenSSL's ``SSL_OP_ALL`` constant. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_NO_SSLv2 ++ ++ Prevents an SSLv2 connection. This option is only applicable in ++ conjunction with :const:`PROTOCOL_SSLv23`. It prevents the peers from ++ choosing SSLv2 as the protocol version. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_NO_SSLv3 ++ ++ Prevents an SSLv3 connection. This option is only applicable in ++ conjunction with :const:`PROTOCOL_SSLv23`. It prevents the peers from ++ choosing SSLv3 as the protocol version. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_NO_TLSv1 ++ ++ Prevents a TLSv1 connection. This option is only applicable in ++ conjunction with :const:`PROTOCOL_SSLv23`. It prevents the peers from ++ choosing TLSv1 as the protocol version. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_NO_TLSv1_1 ++ ++ Prevents a TLSv1.1 connection. This option is only applicable in conjunction ++ with :const:`PROTOCOL_SSLv23`. It prevents the peers from choosing TLSv1.1 as ++ the protocol version. Available only with openssl version 1.0.1+. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_NO_TLSv1_2 ++ ++ Prevents a TLSv1.2 connection. This option is only applicable in conjunction ++ with :const:`PROTOCOL_SSLv23`. It prevents the peers from choosing TLSv1.2 as ++ the protocol version. Available only with openssl version 1.0.1+. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_CIPHER_SERVER_PREFERENCE ++ ++ Use the server's cipher ordering preference, rather than the client's. ++ This option has no effect on client sockets and SSLv2 server sockets. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_SINGLE_DH_USE ++ ++ Prevents re-use of the same DH key for distinct SSL sessions. This ++ improves forward secrecy but requires more computational resources. ++ This option only applies to server sockets. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_SINGLE_ECDH_USE ++ ++ Prevents re-use of the same ECDH key for distinct SSL sessions. This ++ improves forward secrecy but requires more computational resources. ++ This option only applies to server sockets. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: OP_NO_COMPRESSION ++ ++ Disable compression on the SSL channel. This is useful if the application ++ protocol supports its own compression scheme. ++ ++ This option is only available with OpenSSL 1.0.0 and later. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: HAS_ECDH ++ ++ Whether the OpenSSL library has built-in support for Elliptic Curve-based ++ Diffie-Hellman key exchange. This should be true unless the feature was ++ explicitly disabled by the distributor. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: HAS_SNI ++ ++ Whether the OpenSSL library has built-in support for the *Server Name ++ Indication* extension to the SSLv3 and TLSv1 protocols (as defined in ++ :rfc:`4366`). When true, you can use the *server_hostname* argument to ++ :meth:`SSLContext.wrap_socket`. ++ ++ .. versionadded:: 2.7.9 ++ ++.. 
data:: HAS_NPN ++ ++ Whether the OpenSSL library has built-in support for *Next Protocol ++ Negotiation* as described in the `NPN draft specification ++ `_. When true, ++ you can use the :meth:`SSLContext.set_npn_protocols` method to advertise ++ which protocols you want to support. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: CHANNEL_BINDING_TYPES ++ ++ List of supported TLS channel binding types. Strings in this list ++ can be used as arguments to :meth:`SSLSocket.get_channel_binding`. ++ ++ .. versionadded:: 2.7.9 + + .. data:: OPENSSL_VERSION + +@@ -294,9 +692,40 @@ Functions, Constants, and Exceptions + + .. versionadded:: 2.7 + ++.. data:: ALERT_DESCRIPTION_HANDSHAKE_FAILURE ++ ALERT_DESCRIPTION_INTERNAL_ERROR ++ ALERT_DESCRIPTION_* ++ ++ Alert Descriptions from :rfc:`5246` and others. The `IANA TLS Alert Registry ++ `_ ++ contains this list and references to the RFCs where their meaning is defined. ++ ++ Used as the return value of the callback function in ++ :meth:`SSLContext.set_servername_callback`. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: Purpose.SERVER_AUTH ++ ++ Option for :func:`create_default_context` and ++ :meth:`SSLContext.load_default_certs`. This value indicates that the ++ context may be used to authenticate Web servers (therefore, it will ++ be used to create client-side sockets). ++ ++ .. versionadded:: 2.7.9 + +-SSLSocket Objects +------------------ ++.. data:: Purpose.CLIENT_AUTH ++ ++ Option for :func:`create_default_context` and ++ :meth:`SSLContext.load_default_certs`. This value indicates that the ++ context may be used to authenticate Web clients (therefore, it will ++ be used to create server-side sockets). ++ ++ .. versionadded:: 2.7.9 ++ ++ ++SSL Sockets ++----------- + + SSL sockets provide the following methods of :ref:`socket-objects`: + +@@ -319,37 +748,64 @@ SSL sockets provide the following method + + However, since the SSL (and TLS) protocol has its own framing atop + of TCP, the SSL sockets abstraction can, in certain respects, diverge from +-the specification of normal, OS-level sockets. ++the specification of normal, OS-level sockets. See especially the ++:ref:`notes on non-blocking sockets `. + + SSL sockets also have the following additional methods and attributes: + ++.. method:: SSLSocket.do_handshake() ++ ++ Perform the SSL setup handshake. ++ ++ .. versionchanged:: 2.7.9 ++ ++ The handshake method also performs :func:`match_hostname` when the ++ :attr:`~SSLContext.check_hostname` attribute of the socket's ++ :attr:`~SSLSocket.context` is true. ++ + .. method:: SSLSocket.getpeercert(binary_form=False) + + If there is no certificate for the peer on the other end of the connection, +- returns ``None``. ++ return ``None``. If the SSL handshake hasn't been done yet, raise ++ :exc:`ValueError`. + + If the ``binary_form`` parameter is :const:`False`, and a certificate was + received from the peer, this method returns a :class:`dict` instance. If the + certificate was not validated, the dict is empty. If the certificate was +- validated, it returns a dict with the keys ``subject`` (the principal for +- which the certificate was issued), and ``notAfter`` (the time after which the +- certificate should not be trusted). The certificate was already validated, +- so the ``notBefore`` and ``issuer`` fields are not returned. If a +- certificate contains an instance of the *Subject Alternative Name* extension +- (see :rfc:`3280`), there will also be a ``subjectAltName`` key in the +- dictionary. 
+- +- The "subject" field is a tuple containing the sequence of relative +- distinguished names (RDNs) given in the certificate's data structure for the +- principal, and each RDN is a sequence of name-value pairs:: +- +- {'notAfter': 'Feb 16 16:54:50 2013 GMT', +- 'subject': ((('countryName', u'US'),), +- (('stateOrProvinceName', u'Delaware'),), +- (('localityName', u'Wilmington'),), +- (('organizationName', u'Python Software Foundation'),), +- (('organizationalUnitName', u'SSL'),), +- (('commonName', u'somemachine.python.org'),))} ++ validated, it returns a dict with several keys, amongst them ``subject`` ++ (the principal for which the certificate was issued) and ``issuer`` ++ (the principal issuing the certificate). If a certificate contains an ++ instance of the *Subject Alternative Name* extension (see :rfc:`3280`), ++ there will also be a ``subjectAltName`` key in the dictionary. ++ ++ The ``subject`` and ``issuer`` fields are tuples containing the sequence ++ of relative distinguished names (RDNs) given in the certificate's data ++ structure for the respective fields, and each RDN is a sequence of ++ name-value pairs. Here is a real-world example:: ++ ++ {'issuer': ((('countryName', 'IL'),), ++ (('organizationName', 'StartCom Ltd.'),), ++ (('organizationalUnitName', ++ 'Secure Digital Certificate Signing'),), ++ (('commonName', ++ 'StartCom Class 2 Primary Intermediate Server CA'),)), ++ 'notAfter': 'Nov 22 08:15:19 2013 GMT', ++ 'notBefore': 'Nov 21 03:09:52 2011 GMT', ++ 'serialNumber': '95F0', ++ 'subject': ((('description', '571208-SLe257oHY9fVQ07Z'),), ++ (('countryName', 'US'),), ++ (('stateOrProvinceName', 'California'),), ++ (('localityName', 'San Francisco'),), ++ (('organizationName', 'Electronic Frontier Foundation, Inc.'),), ++ (('commonName', '*.eff.org'),), ++ (('emailAddress', 'hostmaster@eff.org'),)), ++ 'subjectAltName': (('DNS', '*.eff.org'), ('DNS', 'eff.org')), ++ 'version': 3} ++ ++ .. note:: ++ ++ To validate a certificate for a particular service, you can use the ++ :func:`match_hostname` function. + + If the ``binary_form`` parameter is :const:`True`, and a certificate was + provided, this method returns the DER-encoded form of the entire certificate +@@ -365,40 +821,388 @@ SSL sockets also have the following addi + :const:`None` if you used :const:`CERT_NONE` (rather than + :const:`CERT_OPTIONAL` or :const:`CERT_REQUIRED`). + ++ .. versionchanged:: 2.7.9 ++ The returned dictionary includes additional items such as ``issuer`` and ++ ``notBefore``. Additionall :exc:`ValueError` is raised when the handshake ++ isn't done. The returned dictionary includes additional X509v3 extension ++ items such as ``crlDistributionPoints``, ``caIssuers`` and ``OCSP`` URIs. ++ + .. method:: SSLSocket.cipher() + + Returns a three-value tuple containing the name of the cipher being used, the + version of the SSL protocol that defines its use, and the number of secret + bits being used. If no connection has been established, returns ``None``. + +-.. method:: SSLSocket.do_handshake() ++.. method:: SSLSocket.compression() ++ ++ Return the compression algorithm being used as a string, or ``None`` ++ if the connection isn't compressed. ++ ++ If the higher-level protocol supports its own compression mechanism, ++ you can use :data:`OP_NO_COMPRESSION` to disable SSL-level compression. ++ ++ .. versionadded:: 2.7.9 ++ ++.. method:: SSLSocket.get_channel_binding(cb_type="tls-unique") ++ ++ Get channel binding data for current connection, as a bytes object. 
Returns ++ ``None`` if not connected or the handshake has not been completed. ++ ++ The *cb_type* parameter allow selection of the desired channel binding ++ type. Valid channel binding types are listed in the ++ :data:`CHANNEL_BINDING_TYPES` list. Currently only the 'tls-unique' channel ++ binding, defined by :rfc:`5929`, is supported. :exc:`ValueError` will be ++ raised if an unsupported channel binding type is requested. ++ ++ .. versionadded:: 2.7.9 ++ ++.. method:: SSLSocket.selected_npn_protocol() ++ ++ Returns the protocol that was selected during the TLS/SSL handshake. If ++ :meth:`SSLContext.set_npn_protocols` was not called, or if the other party ++ does not support NPN, or if the handshake has not yet happened, this will ++ return ``None``. + +- Perform a TLS/SSL handshake. If this is used with a non-blocking socket, it +- may raise :exc:`SSLError` with an ``arg[0]`` of :const:`SSL_ERROR_WANT_READ` +- or :const:`SSL_ERROR_WANT_WRITE`, in which case it must be called again until +- it completes successfully. For example, to simulate the behavior of a +- blocking socket, one might write:: +- +- while True: +- try: +- s.do_handshake() +- break +- except ssl.SSLError as err: +- if err.args[0] == ssl.SSL_ERROR_WANT_READ: +- select.select([s], [], []) +- elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE: +- select.select([], [s], []) +- else: +- raise ++ .. versionadded:: 2.7.9 + + .. method:: SSLSocket.unwrap() + + Performs the SSL shutdown handshake, which removes the TLS layer from the + underlying socket, and returns the underlying socket object. This can be + used to go from encrypted operation over a connection to unencrypted. The +- socket instance returned should always be used for further communication with +- the other side of the connection, rather than the original socket instance +- (which may not function properly after the unwrap). ++ returned socket should always be used for further communication with the ++ other side of the connection, rather than the original socket. ++ ++.. attribute:: SSLSocket.context ++ ++ The :class:`SSLContext` object this SSL socket is tied to. If the SSL ++ socket was created using the top-level :func:`wrap_socket` function ++ (rather than :meth:`SSLContext.wrap_socket`), this is a custom context ++ object created for this SSL socket. ++ ++ .. versionadded:: 2.7.9 ++ ++ ++SSL Contexts ++------------ ++ ++.. versionadded:: 2.7.9 ++ ++An SSL context holds various data longer-lived than single SSL connections, ++such as SSL configuration options, certificate(s) and private key(s). ++It also manages a cache of SSL sessions for server-side sockets, in order ++to speed up repeated connections from the same clients. ++ ++.. class:: SSLContext(protocol) ++ ++ Create a new SSL context. You must pass *protocol* which must be one ++ of the ``PROTOCOL_*`` constants defined in this module. ++ :data:`PROTOCOL_SSLv23` is currently recommended for maximum ++ interoperability. ++ ++ .. seealso:: ++ :func:`create_default_context` lets the :mod:`ssl` module choose ++ security settings for a given purpose. ++ ++ ++:class:`SSLContext` objects have the following methods and attributes: ++ ++.. method:: SSLContext.cert_store_stats() ++ ++ Get statistics about quantities of loaded X.509 certificates, count of ++ X.509 certificates flagged as CA certificates and certificate revocation ++ lists as dictionary. ++ ++ Example for a context with one CA cert and one other cert:: ++ ++ >>> context.cert_store_stats() ++ {'crl': 0, 'x509_ca': 1, 'x509': 2} ++ ++ ++.. 
method:: SSLContext.load_cert_chain(certfile, keyfile=None, password=None) ++ ++ Load a private key and the corresponding certificate. The *certfile* ++ string must be the path to a single file in PEM format containing the ++ certificate as well as any number of CA certificates needed to establish ++ the certificate's authenticity. The *keyfile* string, if present, must ++ point to a file containing the private key in. Otherwise the private ++ key will be taken from *certfile* as well. See the discussion of ++ :ref:`ssl-certificates` for more information on how the certificate ++ is stored in the *certfile*. ++ ++ The *password* argument may be a function to call to get the password for ++ decrypting the private key. It will only be called if the private key is ++ encrypted and a password is necessary. It will be called with no arguments, ++ and it should return a string, bytes, or bytearray. If the return value is ++ a string it will be encoded as UTF-8 before using it to decrypt the key. ++ Alternatively a string, bytes, or bytearray value may be supplied directly ++ as the *password* argument. It will be ignored if the private key is not ++ encrypted and no password is needed. ++ ++ If the *password* argument is not specified and a password is required, ++ OpenSSL's built-in password prompting mechanism will be used to ++ interactively prompt the user for a password. ++ ++ An :class:`SSLError` is raised if the private key doesn't ++ match with the certificate. ++ ++.. method:: SSLContext.load_default_certs(purpose=Purpose.SERVER_AUTH) ++ ++ Load a set of default "certification authority" (CA) certificates from ++ default locations. On Windows it loads CA certs from the ``CA`` and ++ ``ROOT`` system stores. On other systems it calls ++ :meth:`SSLContext.set_default_verify_paths`. In the future the method may ++ load CA certificates from other locations, too. ++ ++ The *purpose* flag specifies what kind of CA certificates are loaded. The ++ default settings :data:`Purpose.SERVER_AUTH` loads certificates, that are ++ flagged and trusted for TLS web server authentication (client side ++ sockets). :data:`Purpose.CLIENT_AUTH` loads CA certificates for client ++ certificate verification on the server side. ++ ++.. method:: SSLContext.load_verify_locations(cafile=None, capath=None, cadata=None) ++ ++ Load a set of "certification authority" (CA) certificates used to validate ++ other peers' certificates when :data:`verify_mode` is other than ++ :data:`CERT_NONE`. At least one of *cafile* or *capath* must be specified. ++ ++ This method can also load certification revocation lists (CRLs) in PEM or ++ DER format. In order to make use of CRLs, :attr:`SSLContext.verify_flags` ++ must be configured properly. ++ ++ The *cafile* string, if present, is the path to a file of concatenated ++ CA certificates in PEM format. See the discussion of ++ :ref:`ssl-certificates` for more information about how to arrange the ++ certificates in this file. ++ ++ The *capath* string, if present, is ++ the path to a directory containing several CA certificates in PEM format, ++ following an `OpenSSL specific layout ++ `_. ++ ++ The *cadata* object, if present, is either an ASCII string of one or more ++ PEM-encoded certificates or a bytes-like object of DER-encoded ++ certificates. Like with *capath* extra lines around PEM-encoded ++ certificates are ignored but at least one certificate must be present. ++ ++.. 
method:: SSLContext.get_ca_certs(binary_form=False) ++ ++ Get a list of loaded "certification authority" (CA) certificates. If the ++ ``binary_form`` parameter is :const:`False` each list ++ entry is a dict like the output of :meth:`SSLSocket.getpeercert`. Otherwise ++ the method returns a list of DER-encoded certificates. The returned list ++ does not contain certificates from *capath* unless a certificate was ++ requested and loaded by a SSL connection. ++ ++.. method:: SSLContext.set_default_verify_paths() ++ ++ Load a set of default "certification authority" (CA) certificates from ++ a filesystem path defined when building the OpenSSL library. Unfortunately, ++ there's no easy way to know whether this method succeeds: no error is ++ returned if no certificates are to be found. When the OpenSSL library is ++ provided as part of the operating system, though, it is likely to be ++ configured properly. ++ ++.. method:: SSLContext.set_ciphers(ciphers) ++ ++ Set the available ciphers for sockets created with this context. ++ It should be a string in the `OpenSSL cipher list format ++ `_. ++ If no cipher can be selected (because compile-time options or other ++ configuration forbids use of all the specified ciphers), an ++ :class:`SSLError` will be raised. ++ ++ .. note:: ++ when connected, the :meth:`SSLSocket.cipher` method of SSL sockets will ++ give the currently selected cipher. ++ ++.. method:: SSLContext.set_npn_protocols(protocols) ++ ++ Specify which protocols the socket should advertise during the SSL/TLS ++ handshake. It should be a list of strings, like ``['http/1.1', 'spdy/2']``, ++ ordered by preference. The selection of a protocol will happen during the ++ handshake, and will play out according to the `NPN draft specification ++ `_. After a ++ successful handshake, the :meth:`SSLSocket.selected_npn_protocol` method will ++ return the agreed-upon protocol. ++ ++ This method will raise :exc:`NotImplementedError` if :data:`HAS_NPN` is ++ False. ++ ++.. method:: SSLContext.set_servername_callback(server_name_callback) ++ ++ Register a callback function that will be called after the TLS Client Hello ++ handshake message has been received by the SSL/TLS server when the TLS client ++ specifies a server name indication. The server name indication mechanism ++ is specified in :rfc:`6066` section 3 - Server Name Indication. ++ ++ Only one callback can be set per ``SSLContext``. If *server_name_callback* ++ is ``None`` then the callback is disabled. Calling this function a ++ subsequent time will disable the previously registered callback. ++ ++ The callback function, *server_name_callback*, will be called with three ++ arguments; the first being the :class:`ssl.SSLSocket`, the second is a string ++ that represents the server name that the client is intending to communicate ++ (or :const:`None` if the TLS Client Hello does not contain a server name) ++ and the third argument is the original :class:`SSLContext`. The server name ++ argument is the IDNA decoded server name. ++ ++ A typical use of this callback is to change the :class:`ssl.SSLSocket`'s ++ :attr:`SSLSocket.context` attribute to a new object of type ++ :class:`SSLContext` representing a certificate chain that matches the server ++ name. ++ ++ Due to the early negotiation phase of the TLS connection, only limited ++ methods and attributes are usable like ++ :meth:`SSLSocket.selected_npn_protocol` and :attr:`SSLSocket.context`. 
++ :meth:`SSLSocket.getpeercert`, :meth:`SSLSocket.getpeercert`, ++ :meth:`SSLSocket.cipher` and :meth:`SSLSocket.compress` methods require that ++ the TLS connection has progressed beyond the TLS Client Hello and therefore ++ will not contain return meaningful values nor can they be called safely. ++ ++ The *server_name_callback* function must return ``None`` to allow the ++ TLS negotiation to continue. If a TLS failure is required, a constant ++ :const:`ALERT_DESCRIPTION_* ` can be ++ returned. Other return values will result in a TLS fatal error with ++ :const:`ALERT_DESCRIPTION_INTERNAL_ERROR`. ++ ++ If there is an IDNA decoding error on the server name, the TLS connection ++ will terminate with an :const:`ALERT_DESCRIPTION_INTERNAL_ERROR` fatal TLS ++ alert message to the client. ++ ++ If an exception is raised from the *server_name_callback* function the TLS ++ connection will terminate with a fatal TLS alert message ++ :const:`ALERT_DESCRIPTION_HANDSHAKE_FAILURE`. ++ ++ This method will raise :exc:`NotImplementedError` if the OpenSSL library ++ had OPENSSL_NO_TLSEXT defined when it was built. ++ ++.. method:: SSLContext.load_dh_params(dhfile) ++ ++ Load the key generation parameters for Diffie-Helman (DH) key exchange. ++ Using DH key exchange improves forward secrecy at the expense of ++ computational resources (both on the server and on the client). ++ The *dhfile* parameter should be the path to a file containing DH ++ parameters in PEM format. ++ ++ This setting doesn't apply to client sockets. You can also use the ++ :data:`OP_SINGLE_DH_USE` option to further improve security. ++ ++.. method:: SSLContext.set_ecdh_curve(curve_name) ++ ++ Set the curve name for Elliptic Curve-based Diffie-Hellman (ECDH) key ++ exchange. ECDH is significantly faster than regular DH while arguably ++ as secure. The *curve_name* parameter should be a string describing ++ a well-known elliptic curve, for example ``prime256v1`` for a widely ++ supported curve. ++ ++ This setting doesn't apply to client sockets. You can also use the ++ :data:`OP_SINGLE_ECDH_USE` option to further improve security. ++ ++ This method is not available if :data:`HAS_ECDH` is False. ++ ++ .. seealso:: ++ `SSL/TLS & Perfect Forward Secrecy `_ ++ Vincent Bernat. ++ ++.. method:: SSLContext.wrap_socket(sock, server_side=False, \ ++ do_handshake_on_connect=True, suppress_ragged_eofs=True, \ ++ server_hostname=None) ++ ++ Wrap an existing Python socket *sock* and return an :class:`SSLSocket` ++ object. *sock* must be a :data:`~socket.SOCK_STREAM` socket; other socket ++ types are unsupported. ++ ++ The returned SSL socket is tied to the context, its settings and ++ certificates. The parameters *server_side*, *do_handshake_on_connect* ++ and *suppress_ragged_eofs* have the same meaning as in the top-level ++ :func:`wrap_socket` function. ++ ++ On client connections, the optional parameter *server_hostname* specifies ++ the hostname of the service which we are connecting to. This allows a ++ single server to host multiple SSL-based services with distinct certificates, ++ quite similarly to HTTP virtual hosts. Specifying *server_hostname* ++ will raise a :exc:`ValueError` if the OpenSSL library doesn't have support ++ for it (that is, if :data:`HAS_SNI` is :const:`False`). Specifying ++ *server_hostname* will also raise a :exc:`ValueError` if *server_side* ++ is true. ++ ++.. method:: SSLContext.session_stats() ++ ++ Get statistics about the SSL sessions created or managed by this context. 
++ A dictionary is returned which maps the names of each `piece of information ++ `_ to their ++ numeric values. For example, here is the total number of hits and misses ++ in the session cache since the context was created:: ++ ++ >>> stats = context.session_stats() ++ >>> stats['hits'], stats['misses'] ++ (0, 0) ++ ++.. method:: SSLContext.get_ca_certs(binary_form=False) ++ ++ Returns a list of dicts with information of loaded CA certs. If the ++ optional argument is true, returns a DER-encoded copy of the CA ++ certificate. ++ ++ .. note:: ++ Certificates in a capath directory aren't loaded unless they have ++ been used at least once. ++ ++.. attribute:: SSLContext.check_hostname ++ ++ Wether to match the peer cert's hostname with :func:`match_hostname` in ++ :meth:`SSLSocket.do_handshake`. The context's ++ :attr:`~SSLContext.verify_mode` must be set to :data:`CERT_OPTIONAL` or ++ :data:`CERT_REQUIRED`, and you must pass *server_hostname* to ++ :meth:`~SSLContext.wrap_socket` in order to match the hostname. ++ ++ Example:: ++ ++ import socket, ssl ++ ++ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context.verify_mode = ssl.CERT_REQUIRED ++ context.check_hostname = True ++ context.load_default_certs() ++ ++ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) ++ ssl_sock = context.wrap_socket(s, server_hostname='www.verisign.com') ++ ssl_sock.connect(('www.verisign.com', 443)) ++ ++ .. note:: ++ ++ This features requires OpenSSL 0.9.8f or newer. ++ ++.. attribute:: SSLContext.options ++ ++ An integer representing the set of SSL options enabled on this context. ++ The default value is :data:`OP_ALL`, but you can specify other options ++ such as :data:`OP_NO_SSLv2` by ORing them together. ++ ++ .. note:: ++ With versions of OpenSSL older than 0.9.8m, it is only possible ++ to set options, not to clear them. Attempting to clear an option ++ (by resetting the corresponding bits) will raise a ``ValueError``. ++ ++.. attribute:: SSLContext.protocol ++ ++ The protocol version chosen when constructing the context. This attribute ++ is read-only. ++ ++.. attribute:: SSLContext.verify_flags ++ ++ The flags for certificate verification operations. You can set flags like ++ :data:`VERIFY_CRL_CHECK_LEAF` by ORing them together. By default OpenSSL ++ does neither require nor verify certificate revocation lists (CRLs). ++ Available only with openssl version 0.9.8+. ++ ++.. attribute:: SSLContext.verify_mode ++ ++ Whether to try to verify other peers' certificates and how to behave ++ if verification fails. This attribute must be one of ++ :data:`CERT_NONE`, :data:`CERT_OPTIONAL` or :data:`CERT_REQUIRED`. ++ + + .. index:: single: certificates + +@@ -445,6 +1249,9 @@ and a footer line:: + ... (certificate in base64 PEM encoding) ... + -----END CERTIFICATE----- + ++Certificate chains ++^^^^^^^^^^^^^^^^^^ ++ + The Python files which contain certificates can contain a sequence of + certificates, sometimes called a *certificate chain*. This chain should start + with the specific certificate for the principal who "is" the client or server, +@@ -468,25 +1275,35 @@ certification authority's certificate:: + ... (the root certificate for the CA's issuer)... + -----END CERTIFICATE----- + ++CA certificates ++^^^^^^^^^^^^^^^ ++ + If you are going to require validation of the other side of the connection's + certificate, you need to provide a "CA certs" file, filled with the certificate + chains for each issuer you are willing to trust. Again, this file just contains + these chains concatenated together. 
For validation, Python will use the first +-chain it finds in the file which matches. ++chain it finds in the file which matches. The platform's certificates file can ++be used by calling :meth:`SSLContext.load_default_certs`, this is done ++automatically with :func:`.create_default_context`. ++ ++Combined key and certificate ++^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ++ ++Often the private key is stored in the same file as the certificate; in this ++case, only the ``certfile`` parameter to :meth:`SSLContext.load_cert_chain` ++and :func:`wrap_socket` needs to be passed. If the private key is stored ++with the certificate, it should come before the first certificate in ++the certificate chain:: ++ ++ -----BEGIN RSA PRIVATE KEY----- ++ ... (private key in base64 encoding) ... ++ -----END RSA PRIVATE KEY----- ++ -----BEGIN CERTIFICATE----- ++ ... (certificate in base64 PEM encoding) ... ++ -----END CERTIFICATE----- + +-Some "standard" root certificates are available from various certification +-authorities: `CACert.org `_, `Thawte +-`_, `Verisign +-`_, `Positive SSL +-`_ +-(used by python.org), `Equifax and GeoTrust +-`_. +- +-In general, if you are using SSL3 or TLS1, you don't need to put the full chain +-in your "CA certs" file; you only need the root certificates, and the remote +-peer is supposed to furnish the other certificates necessary to chain from its +-certificate to a root certificate. See :rfc:`4158` for more discussion of the +-way in which certification chains can be built. ++Self-signed certificates ++^^^^^^^^^^^^^^^^^^^^^^^^ + + If you are going to create a server that provides SSL-encrypted connection + services, you will need to acquire a certificate for that service. There are +@@ -541,87 +1358,156 @@ should use the following idiom:: + Client-side operation + ^^^^^^^^^^^^^^^^^^^^^ + +-This example connects to an SSL server, prints the server's address and +-certificate, sends some bytes, and reads part of the response:: ++This example connects to an SSL server and prints the server's certificate:: + + import socket, ssl, pprint + + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) +- + # require a certificate from the server + ssl_sock = ssl.wrap_socket(s, + ca_certs="/etc/ca_certs_file", + cert_reqs=ssl.CERT_REQUIRED) +- + ssl_sock.connect(('www.verisign.com', 443)) + +- print repr(ssl_sock.getpeername()) +- print ssl_sock.cipher() +- print pprint.pformat(ssl_sock.getpeercert()) +- +- # Set a simple HTTP request -- use httplib in actual code. +- ssl_sock.write("""GET / HTTP/1.0\r +- Host: www.verisign.com\r\n\r\n""") +- +- # Read a chunk of data. Will not necessarily +- # read all the data returned by the server. 
+- data = ssl_sock.read() +- ++ pprint.pprint(ssl_sock.getpeercert()) + # note that closing the SSLSocket will also close the underlying socket + ssl_sock.close() + +-As of September 6, 2007, the certificate printed by this program looked like ++As of January 6, 2012, the certificate printed by this program looks like + this:: + +- {'notAfter': 'May 8 23:59:59 2009 GMT', +- 'subject': ((('serialNumber', u'2497886'),), +- (('1.3.6.1.4.1.311.60.2.1.3', u'US'),), +- (('1.3.6.1.4.1.311.60.2.1.2', u'Delaware'),), +- (('countryName', u'US'),), +- (('postalCode', u'94043'),), +- (('stateOrProvinceName', u'California'),), +- (('localityName', u'Mountain View'),), +- (('streetAddress', u'487 East Middlefield Road'),), +- (('organizationName', u'VeriSign, Inc.'),), +- (('organizationalUnitName', +- u'Production Security Services'),), +- (('organizationalUnitName', +- u'Terms of use at www.verisign.com/rpa (c)06'),), +- (('commonName', u'www.verisign.com'),))} ++ {'issuer': ((('countryName', 'US'),), ++ (('organizationName', 'VeriSign, Inc.'),), ++ (('organizationalUnitName', 'VeriSign Trust Network'),), ++ (('organizationalUnitName', ++ 'Terms of use at https://www.verisign.com/rpa (c)06'),), ++ (('commonName', ++ 'VeriSign Class 3 Extended Validation SSL SGC CA'),)), ++ 'notAfter': 'May 25 23:59:59 2012 GMT', ++ 'notBefore': 'May 26 00:00:00 2010 GMT', ++ 'serialNumber': '53D2BEF924A7245E83CA01E46CAA2477', ++ 'subject': ((('1.3.6.1.4.1.311.60.2.1.3', 'US'),), ++ (('1.3.6.1.4.1.311.60.2.1.2', 'Delaware'),), ++ (('businessCategory', 'V1.0, Clause 5.(b)'),), ++ (('serialNumber', '2497886'),), ++ (('countryName', 'US'),), ++ (('postalCode', '94043'),), ++ (('stateOrProvinceName', 'California'),), ++ (('localityName', 'Mountain View'),), ++ (('streetAddress', '487 East Middlefield Road'),), ++ (('organizationName', 'VeriSign, Inc.'),), ++ (('organizationalUnitName', ' Production Security Services'),), ++ (('commonName', 'www.verisign.com'),)), ++ 'subjectAltName': (('DNS', 'www.verisign.com'), ++ ('DNS', 'verisign.com'), ++ ('DNS', 'www.verisign.net'), ++ ('DNS', 'verisign.net'), ++ ('DNS', 'www.verisign.mobi'), ++ ('DNS', 'verisign.mobi'), ++ ('DNS', 'www.verisign.eu'), ++ ('DNS', 'verisign.eu')), ++ 'version': 3} ++ ++This other example first creates an SSL context, instructs it to verify ++certificates sent by peers, and feeds it a set of recognized certificate ++authorities (CA):: ++ ++ >>> context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ >>> context.verify_mode = ssl.CERT_REQUIRED ++ >>> context.load_verify_locations("/etc/ssl/certs/ca-bundle.crt") ++ ++(it is assumed your operating system places a bundle of all CA certificates ++in ``/etc/ssl/certs/ca-bundle.crt``; if not, you'll get an error and have ++to adjust the location) ++ ++When you use the context to connect to a server, :const:`CERT_REQUIRED` ++validates the server certificate: it ensures that the server certificate ++was signed with one of the CA certificates, and checks the signature for ++correctness:: ++ ++ >>> conn = context.wrap_socket(socket.socket(socket.AF_INET)) ++ >>> conn.connect(("linuxfr.org", 443)) ++ ++You should then fetch the certificate and check its fields for conformity:: ++ ++ >>> cert = conn.getpeercert() ++ >>> ssl.match_hostname(cert, "linuxfr.org") ++ ++Visual inspection shows that the certificate does identify the desired service ++(that is, the HTTPS host ``linuxfr.org``):: ++ ++ >>> pprint.pprint(cert) ++ {'issuer': ((('organizationName', 'CAcert Inc.'),), ++ (('organizationalUnitName', 
'http://www.CAcert.org'),), ++ (('commonName', 'CAcert Class 3 Root'),)), ++ 'notAfter': 'Jun 7 21:02:24 2013 GMT', ++ 'notBefore': 'Jun 8 21:02:24 2011 GMT', ++ 'serialNumber': 'D3E9', ++ 'subject': ((('commonName', 'linuxfr.org'),),), ++ 'subjectAltName': (('DNS', 'linuxfr.org'), ++ ('othername', ''), ++ ('DNS', 'linuxfr.org'), ++ ('othername', ''), ++ ('DNS', 'dev.linuxfr.org'), ++ ('othername', ''), ++ ('DNS', 'prod.linuxfr.org'), ++ ('othername', ''), ++ ('DNS', 'alpha.linuxfr.org'), ++ ('othername', ''), ++ ('DNS', '*.linuxfr.org'), ++ ('othername', '')), ++ 'version': 3} ++ ++Now that you are assured of its authenticity, you can proceed to talk with ++the server:: ++ ++ >>> conn.sendall(b"HEAD / HTTP/1.0\r\nHost: linuxfr.org\r\n\r\n") ++ >>> pprint.pprint(conn.recv(1024).split(b"\r\n")) ++ [b'HTTP/1.1 302 Found', ++ b'Date: Sun, 16 May 2010 13:43:28 GMT', ++ b'Server: Apache/2.2', ++ b'Location: https://linuxfr.org/pub/', ++ b'Vary: Accept-Encoding', ++ b'Connection: close', ++ b'Content-Type: text/html; charset=iso-8859-1', ++ b'', ++ b''] ++ ++See the discussion of :ref:`ssl-security` below. + +-which is a fairly poorly-formed ``subject`` field. + + Server-side operation + ^^^^^^^^^^^^^^^^^^^^^ + +-For server operation, typically you'd need to have a server certificate, and +-private key, each in a file. You'd open a socket, bind it to a port, call +-:meth:`listen` on it, then start waiting for clients to connect:: ++For server operation, typically you'll need to have a server certificate, and ++private key, each in a file. You'll first create a context holding the key ++and the certificate, so that clients can check your authenticity. Then ++you'll open a socket, bind it to a port, call :meth:`listen` on it, and start ++waiting for clients to connect:: + + import socket, ssl + ++ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ context.load_cert_chain(certfile="mycertfile", keyfile="mykeyfile") ++ + bindsocket = socket.socket() + bindsocket.bind(('myaddr.mydomain.com', 10023)) + bindsocket.listen(5) + +-When one did, you'd call :meth:`accept` on the socket to get the new socket from +-the other end, and use :func:`wrap_socket` to create a server-side SSL context +-for it:: ++When a client connects, you'll call :meth:`accept` on the socket to get the ++new socket from the other end, and use the context's :meth:`SSLContext.wrap_socket` ++method to create a server-side SSL socket for the connection:: + + while True: + newsocket, fromaddr = bindsocket.accept() +- connstream = ssl.wrap_socket(newsocket, +- server_side=True, +- certfile="mycertfile", +- keyfile="mykeyfile", +- ssl_version=ssl.PROTOCOL_TLSv1) ++ connstream = context.wrap_socket(newsocket, server_side=True) + try: + deal_with_client(connstream) + finally: + connstream.shutdown(socket.SHUT_RDWR) + connstream.close() + +-Then you'd read data from the ``connstream`` and do something with it till you ++Then you'll read data from the ``connstream`` and do something with it till you + are finished with the client (or the client is finished with you):: + + def deal_with_client(connstream): +@@ -635,7 +1521,138 @@ are finished with the client (or the cli + data = connstream.read() + # finished with client + +-And go back to listening for new client connections. ++And go back to listening for new client connections (of course, a real server ++would probably handle each client connection in a separate thread, or put ++the sockets in non-blocking mode and use an event loop). ++ ++ ++.. 
_ssl-nonblocking:
++
++Notes on non-blocking sockets
++-----------------------------
++
++When working with non-blocking sockets, there are several things you need
++to be aware of:
++
++- Calling :func:`~select.select` tells you that the OS-level socket can be
++  read from (or written to), but it does not imply that there is sufficient
++  data at the upper SSL layer. For example, only part of an SSL frame might
++  have arrived. Therefore, you must be ready to handle :meth:`SSLSocket.recv`
++  and :meth:`SSLSocket.send` failures, and retry after another call to
++  :func:`~select.select`.
++
++- Conversely, since the SSL layer has its own framing, an SSL socket may
++  still have data available for reading without :func:`~select.select`
++  being aware of it. Therefore, you should first call
++  :meth:`SSLSocket.recv` to drain any potentially available data, and then
++  only block on a :func:`~select.select` call if still necessary.
++
++  (of course, similar provisions apply when using other primitives such as
++  :func:`~select.poll`, or those in the :mod:`selectors` module)
++
++- The SSL handshake itself will be non-blocking: the
++  :meth:`SSLSocket.do_handshake` method has to be retried until it returns
++  successfully. Here is a synopsis using :func:`~select.select` to wait for
++  the socket's readiness::
++
++    while True:
++        try:
++            sock.do_handshake()
++            break
++        except ssl.SSLWantReadError:
++            select.select([sock], [], [])
++        except ssl.SSLWantWriteError:
++            select.select([], [sock], [])
++
++
++.. _ssl-security:
++
++Security considerations
++-----------------------
++
++Best defaults
++^^^^^^^^^^^^^
++
++For **client use**, if you don't have any special requirements for your
++security policy, it is highly recommended that you use the
++:func:`create_default_context` function to create your SSL context.
++It will load the system's trusted CA certificates, enable certificate
++validation and hostname checking, and try to choose reasonably secure
++protocol and cipher settings.
++
++If a client certificate is needed for the connection, it can be added with
++:meth:`SSLContext.load_cert_chain`.
++
++By contrast, if you create the SSL context by calling the :class:`SSLContext`
++constructor yourself, it will not have certificate validation nor hostname
++checking enabled by default. If you do so, please read the paragraphs below
++to achieve a good security level.
++
++Manual settings
++^^^^^^^^^^^^^^^
++
++Verifying certificates
++''''''''''''''''''''''
++
++When calling the :class:`SSLContext` constructor directly,
++:const:`CERT_NONE` is the default. Since it does not authenticate the other
++peer, it can be insecure, especially in client mode where most of the time you
++would like to ensure the authenticity of the server you're talking to.
++Therefore, when in client mode, it is highly recommended to use
++:const:`CERT_REQUIRED`. However, it is not in itself sufficient; you also
++have to check that the server certificate, which can be obtained by calling
++:meth:`SSLSocket.getpeercert`, matches the desired service. For many
++protocols and applications, the service can be identified by the hostname;
++in this case, the :func:`match_hostname` function can be used. This common
++check is automatically performed when :attr:`SSLContext.check_hostname` is
++enabled.
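++
++For instance, a verifying client might look like the following sketch (the
++host name ``www.python.org`` and port 443 are placeholders; it assumes the
++system CA store contains a suitable root certificate and that OpenSSL was
++built with SNI support)::
++
++   import socket, ssl
++
++   context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
++   context.verify_mode = ssl.CERT_REQUIRED  # must be set before check_hostname
++   context.check_hostname = True            # hostname is matched during the handshake
++   context.load_default_certs()             # trust the system's CA certificates
++
++   sock = context.wrap_socket(socket.socket(socket.AF_INET),
++                              server_hostname='www.python.org')
++   # connect() performs the handshake; it raises SSLError if verification
++   # fails and CertificateError if the hostname does not match.
++   sock.connect(('www.python.org', 443))
++   sock.close()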
++ ++In server mode, if you want to authenticate your clients using the SSL layer ++(rather than using a higher-level authentication mechanism), you'll also have ++to specify :const:`CERT_REQUIRED` and similarly check the client certificate. ++ ++ .. note:: ++ ++ In client mode, :const:`CERT_OPTIONAL` and :const:`CERT_REQUIRED` are ++ equivalent unless anonymous ciphers are enabled (they are disabled ++ by default). ++ ++Protocol versions ++''''''''''''''''' ++ ++SSL version 2 is considered insecure and is therefore dangerous to use. If ++you want maximum compatibility between clients and servers, it is recommended ++to use :const:`PROTOCOL_SSLv23` as the protocol version and then disable ++SSLv2 explicitly using the :data:`SSLContext.options` attribute:: ++ ++ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ context.options |= ssl.OP_NO_SSLv2 ++ ++The SSL context created above will allow SSLv3 and TLSv1 (and later, if ++supported by your system) connections, but not SSLv2. ++ ++Cipher selection ++'''''''''''''''' ++ ++If you have advanced security requirements, fine-tuning of the ciphers ++enabled when negotiating a SSL session is possible through the ++:meth:`SSLContext.set_ciphers` method. Starting from Python 2.7.9, the ++ssl module disables certain weak ciphers by default, but you may want ++to further restrict the cipher choice. Be sure to read OpenSSL's documentation ++about the `cipher list format `_. ++If you want to check which ciphers are enabled by a given cipher list, use the ++``openssl ciphers`` command on your system. ++ ++Multi-processing ++^^^^^^^^^^^^^^^^ ++ ++If using this module as part of a multi-processed application (using, ++for example the :mod:`multiprocessing` or :mod:`concurrent.futures` modules), ++be aware that OpenSSL's internal random number generator does not properly ++handle forked processes. Applications must change the PRNG state of the ++parent process if they use any SSL feature with :func:`os.fork`. Any ++successful call of :func:`~ssl.RAND_add`, :func:`~ssl.RAND_bytes` or ++:func:`~ssl.RAND_pseudo_bytes` is sufficient. + + + .. seealso:: +@@ -654,3 +1671,15 @@ And go back to listening for new client + + `RFC 3280: Internet X.509 Public Key Infrastructure Certificate and CRL Profile `_ + Housley et. al. ++ ++ `RFC 4366: Transport Layer Security (TLS) Extensions `_ ++ Blake-Wilson et. al. ++ ++ `RFC 5246: The Transport Layer Security (TLS) Protocol Version 1.2 `_ ++ T. Dierks et. al. ++ ++ `RFC 6066: Transport Layer Security (TLS) Extensions `_ ++ D. 
Eastlake ++ ++ `IANA TLS: Transport Layer Security (TLS) Parameters `_ ++ IANA +diff -up Python-2.7.5/Lib/test/test_ssl.py.ssl Python-2.7.5/Lib/test/test_ssl.py +--- Python-2.7.5/Lib/test/test_ssl.py.ssl 2015-02-24 11:37:12.270937480 +0100 ++++ Python-2.7.5/Lib/test/test_ssl.py 2015-02-24 10:17:44.084768602 +0100 +@@ -1,35 +1,78 @@ ++# -*- coding: utf-8 -*- + # Test the support for SSL and sockets + + import sys + import unittest +-from test import test_support ++from test import test_support as support + import asyncore + import socket + import select + import time ++import datetime + import gc + import os + import errno + import pprint +-import urllib, urlparse ++import tempfile ++import urllib + import traceback + import weakref +-import functools + import platform ++import functools ++from contextlib import closing ++ ++ssl = support.import_module("ssl") + +-from BaseHTTPServer import HTTPServer +-from SimpleHTTPServer import SimpleHTTPRequestHandler ++PROTOCOLS = sorted(ssl._PROTOCOL_NAMES) ++HOST = support.HOST + +-ssl = test_support.import_module("ssl") ++def data_file(*name): ++ return os.path.join(os.path.dirname(__file__), *name) ++ ++# The custom key and certificate files used in test_ssl are generated ++# using Lib/test/make_ssl_certs.py. ++# Other certificates are simply fetched from the Internet servers they ++# are meant to authenticate. ++ ++CERTFILE = data_file("keycert.pem") ++BYTES_CERTFILE = CERTFILE.encode(sys.getfilesystemencoding()) ++ONLYCERT = data_file("ssl_cert.pem") ++ONLYKEY = data_file("ssl_key.pem") ++BYTES_ONLYCERT = ONLYCERT.encode(sys.getfilesystemencoding()) ++BYTES_ONLYKEY = ONLYKEY.encode(sys.getfilesystemencoding()) ++CERTFILE_PROTECTED = data_file("keycert.passwd.pem") ++ONLYKEY_PROTECTED = data_file("ssl_key.passwd.pem") ++KEY_PASSWORD = "somepass" ++CAPATH = data_file("capath") ++BYTES_CAPATH = CAPATH.encode(sys.getfilesystemencoding()) ++CAFILE_NEURONIO = data_file("capath", "4e1295a3.0") ++CAFILE_CACERT = data_file("capath", "5ed36f99.0") ++ ++ ++# empty CRL ++CRLFILE = data_file("revocation.crl") ++ ++# Two keys and certs signed by the same CA (for SNI tests) ++SIGNED_CERTFILE = data_file("keycert3.pem") ++SIGNED_CERTFILE2 = data_file("keycert4.pem") ++SIGNING_CA = data_file("pycacert.pem") ++ ++SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem") ++ ++EMPTYCERT = data_file("nullcert.pem") ++BADCERT = data_file("badcert.pem") ++WRONGCERT = data_file("XXXnonexisting.pem") ++BADKEY = data_file("badkey.pem") ++NOKIACERT = data_file("nokia.pem") ++NULLBYTECERT = data_file("nullbytecert.pem") ++ ++DHFILE = data_file("dh512.pem") ++BYTES_DHFILE = DHFILE.encode(sys.getfilesystemencoding()) + +-HOST = test_support.HOST +-CERTFILE = None +-SVN_PYTHON_ORG_ROOT_CERT = None +-NULLBYTECERT = None + + def handle_error(prefix): + exc_format = ' '.join(traceback.format_exception(*sys.exc_info())) +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write(prefix + exc_format) + + +@@ -51,48 +94,76 @@ class BasicTests(unittest.TestCase): + pass + else: + raise ++def can_clear_options(): ++ # 0.9.8m or higher ++ return ssl._OPENSSL_API_VERSION >= (0, 9, 8, 13, 15) ++ ++def no_sslv2_implies_sslv3_hello(): ++ # 0.9.7h or higher ++ return ssl.OPENSSL_VERSION_INFO >= (0, 9, 7, 8, 15) ++ ++def have_verify_flags(): ++ # 0.9.8 or higher ++ return ssl.OPENSSL_VERSION_INFO >= (0, 9, 8, 0, 15) ++ ++def utc_offset(): #NOTE: ignore issues like #1647654 ++ # local time = utc time + utc offset ++ if time.daylight and time.localtime().tm_isdst > 0: ++ 
return -time.altzone # seconds ++ return -time.timezone ++ ++def asn1time(cert_time): ++ # Some versions of OpenSSL ignore seconds, see #18207 ++ # 0.9.8.i ++ if ssl._OPENSSL_API_VERSION == (0, 9, 8, 9, 15): ++ fmt = "%b %d %H:%M:%S %Y GMT" ++ dt = datetime.datetime.strptime(cert_time, fmt) ++ dt = dt.replace(second=0) ++ cert_time = dt.strftime(fmt) ++ # %d adds leading zero but ASN1_TIME_print() uses leading space ++ if cert_time[4] == "0": ++ cert_time = cert_time[:4] + " " + cert_time[5:] ++ ++ return cert_time + + # Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2 + def skip_if_broken_ubuntu_ssl(func): + if hasattr(ssl, 'PROTOCOL_SSLv2'): +- # We need to access the lower-level wrapper in order to create an +- # implicit SSL context without trying to connect or listen. +- try: +- import _ssl +- except ImportError: +- # The returned function won't get executed, just ignore the error +- pass + @functools.wraps(func) + def f(*args, **kwargs): + try: +- s = socket.socket(socket.AF_INET) +- _ssl.sslwrap(s._sock, 0, None, None, +- ssl.CERT_NONE, ssl.PROTOCOL_SSLv2, None, None) +- except ssl.SSLError as e: ++ ssl.SSLContext(ssl.PROTOCOL_SSLv2) ++ except ssl.SSLError: + if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and +- platform.linux_distribution() == ('debian', 'squeeze/sid', '') +- and 'Invalid SSL protocol variant specified' in str(e)): ++ platform.linux_distribution() == ('debian', 'squeeze/sid', '')): + raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour") + return func(*args, **kwargs) + return f + else: + return func + ++needs_sni = unittest.skipUnless(ssl.HAS_SNI, "SNI support needed for this test") ++ + + class BasicSocketTests(unittest.TestCase): + + def test_constants(self): +- #ssl.PROTOCOL_SSLv2 +- ssl.PROTOCOL_SSLv23 +- ssl.PROTOCOL_SSLv3 +- ssl.PROTOCOL_TLSv1 + ssl.CERT_NONE + ssl.CERT_OPTIONAL + ssl.CERT_REQUIRED ++ ssl.OP_CIPHER_SERVER_PREFERENCE ++ ssl.OP_SINGLE_DH_USE ++ if ssl.HAS_ECDH: ++ ssl.OP_SINGLE_ECDH_USE ++ if ssl.OPENSSL_VERSION_INFO >= (1, 0): ++ ssl.OP_NO_COMPRESSION ++ self.assertIn(ssl.HAS_SNI, {True, False}) ++ self.assertIn(ssl.HAS_ECDH, {True, False}) ++ + + def test_random(self): + v = ssl.RAND_status() +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n RAND_status is %d (%s)\n" + % (v, (v and "sufficient randomness") or + "insufficient randomness")) +@@ -104,9 +175,19 @@ class BasicSocketTests(unittest.TestCase + # note that this uses an 'unofficial' function in _ssl.c, + # provided solely for this test, to exercise the certificate + # parsing code +- p = ssl._ssl._test_decode_cert(CERTFILE, False) +- if test_support.verbose: ++ p = ssl._ssl._test_decode_cert(CERTFILE) ++ if support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") ++ self.assertEqual(p['issuer'], ++ ((('countryName', 'XY'),), ++ (('localityName', 'Castle Anthrax'),), ++ (('organizationName', 'Python Software Foundation'),), ++ (('commonName', 'localhost'),)) ++ ) ++ # Note the next three asserts will fail if the keys are regenerated ++ self.assertEqual(p['notAfter'], asn1time('Oct 5 23:01:56 2020 GMT')) ++ self.assertEqual(p['notBefore'], asn1time('Oct 8 23:01:56 2010 GMT')) ++ self.assertEqual(p['serialNumber'], 'D7C7381919AFC24E') + self.assertEqual(p['subject'], + ((('countryName', 'XY'),), + (('localityName', 'Castle Anthrax'),), +@@ -117,16 +198,23 @@ class BasicSocketTests(unittest.TestCase + # Issue #13034: the subjectAltName in some certificates + # (notably projects.developer.nokia.com:443) wasn't parsed 
+ p = ssl._ssl._test_decode_cert(NOKIACERT) +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") + self.assertEqual(p['subjectAltName'], + (('DNS', 'projects.developer.nokia.com'), + ('DNS', 'projects.forum.nokia.com')) + ) ++ # extra OCSP and AIA fields ++ self.assertEqual(p['OCSP'], ('http://ocsp.verisign.com',)) ++ self.assertEqual(p['caIssuers'], ++ ('http://SVRIntl-G3-aia.verisign.com/SVRIntlG3.cer',)) ++ self.assertEqual(p['crlDistributionPoints'], ++ ('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',)) ++ + + def test_parse_cert_CVE_2013_4073(self): + p = ssl._ssl._test_decode_cert(NULLBYTECERT) +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") + subject = ((('countryName', 'US'),), + (('stateOrProvinceName', 'Oregon'),), +@@ -184,25 +272,7 @@ class BasicSocketTests(unittest.TestCase + self.assertTrue(s.startswith("OpenSSL {:d}.{:d}.{:d}".format(major, minor, fix)), + (s, t)) + +- def test_ciphers(self): +- if not test_support.is_resource_enabled('network'): +- return +- remote = ("svn.python.org", 443) +- with test_support.transient_internet(remote[0]): +- s = ssl.wrap_socket(socket.socket(socket.AF_INET), +- cert_reqs=ssl.CERT_NONE, ciphers="ALL") +- s.connect(remote) +- s = ssl.wrap_socket(socket.socket(socket.AF_INET), +- cert_reqs=ssl.CERT_NONE, ciphers="DEFAULT") +- s.connect(remote) +- # Error checking occurs when connecting, because the SSL context +- # isn't created before. +- s = ssl.wrap_socket(socket.socket(socket.AF_INET), +- cert_reqs=ssl.CERT_NONE, ciphers="^$:,;?*'dorothyx") +- with self.assertRaisesRegexp(ssl.SSLError, "No cipher can be selected"): +- s.connect(remote) +- +- @test_support.cpython_only ++ @support.cpython_only + def test_refcycle(self): + # Issue #7943: an SSL object doesn't create reference cycles with + # itself. +@@ -213,53 +283,917 @@ class BasicSocketTests(unittest.TestCase + self.assertEqual(wr(), None) + + def test_wrapped_unconnected(self): +- # The _delegate_methods in socket.py are correctly delegated to by an +- # unconnected SSLSocket, so they will raise a socket.error rather than +- # something unexpected like TypeError. ++ # Methods on an unconnected SSLSocket propagate the original ++ # socket.error raise by the underlying socket object. + s = socket.socket(socket.AF_INET) +- ss = ssl.wrap_socket(s) +- self.assertRaises(socket.error, ss.recv, 1) +- self.assertRaises(socket.error, ss.recv_into, bytearray(b'x')) +- self.assertRaises(socket.error, ss.recvfrom, 1) +- self.assertRaises(socket.error, ss.recvfrom_into, bytearray(b'x'), 1) +- self.assertRaises(socket.error, ss.send, b'x') +- self.assertRaises(socket.error, ss.sendto, b'x', ('0.0.0.0', 0)) ++ with closing(ssl.wrap_socket(s)) as ss: ++ self.assertRaises(socket.error, ss.recv, 1) ++ self.assertRaises(socket.error, ss.recv_into, bytearray(b'x')) ++ self.assertRaises(socket.error, ss.recvfrom, 1) ++ self.assertRaises(socket.error, ss.recvfrom_into, bytearray(b'x'), 1) ++ self.assertRaises(socket.error, ss.send, b'x') ++ self.assertRaises(socket.error, ss.sendto, b'x', ('0.0.0.0', 0)) ++ ++ def test_timeout(self): ++ # Issue #8524: when creating an SSL socket, the timeout of the ++ # original socket should be retained. 
++ for timeout in (None, 0.0, 5.0): ++ s = socket.socket(socket.AF_INET) ++ s.settimeout(timeout) ++ with closing(ssl.wrap_socket(s)) as ss: ++ self.assertEqual(timeout, ss.gettimeout()) ++ ++ def test_errors(self): ++ sock = socket.socket() ++ self.assertRaisesRegexp(ValueError, ++ "certfile must be specified", ++ ssl.wrap_socket, sock, keyfile=CERTFILE) ++ self.assertRaisesRegexp(ValueError, ++ "certfile must be specified for server-side operations", ++ ssl.wrap_socket, sock, server_side=True) ++ self.assertRaisesRegexp(ValueError, ++ "certfile must be specified for server-side operations", ++ ssl.wrap_socket, sock, server_side=True, certfile="") ++ with closing(ssl.wrap_socket(sock, server_side=True, certfile=CERTFILE)) as s: ++ self.assertRaisesRegexp(ValueError, "can't connect in server-side mode", ++ s.connect, (HOST, 8080)) ++ with self.assertRaises(IOError) as cm: ++ with closing(socket.socket()) as sock: ++ ssl.wrap_socket(sock, certfile=WRONGCERT) ++ self.assertEqual(cm.exception.errno, errno.ENOENT) ++ with self.assertRaises(IOError) as cm: ++ with closing(socket.socket()) as sock: ++ ssl.wrap_socket(sock, certfile=CERTFILE, keyfile=WRONGCERT) ++ self.assertEqual(cm.exception.errno, errno.ENOENT) ++ with self.assertRaises(IOError) as cm: ++ with closing(socket.socket()) as sock: ++ ssl.wrap_socket(sock, certfile=WRONGCERT, keyfile=WRONGCERT) ++ self.assertEqual(cm.exception.errno, errno.ENOENT) ++ ++ def test_match_hostname(self): ++ def ok(cert, hostname): ++ ssl.match_hostname(cert, hostname) ++ def fail(cert, hostname): ++ self.assertRaises(ssl.CertificateError, ++ ssl.match_hostname, cert, hostname) ++ ++ cert = {'subject': ((('commonName', 'example.com'),),)} ++ ok(cert, 'example.com') ++ ok(cert, 'ExAmple.cOm') ++ fail(cert, 'www.example.com') ++ fail(cert, '.example.com') ++ fail(cert, 'example.org') ++ fail(cert, 'exampleXcom') ++ ++ cert = {'subject': ((('commonName', '*.a.com'),),)} ++ ok(cert, 'foo.a.com') ++ fail(cert, 'bar.foo.a.com') ++ fail(cert, 'a.com') ++ fail(cert, 'Xa.com') ++ fail(cert, '.a.com') ++ ++ # only match one left-most wildcard ++ cert = {'subject': ((('commonName', 'f*.com'),),)} ++ ok(cert, 'foo.com') ++ ok(cert, 'f.com') ++ fail(cert, 'bar.com') ++ fail(cert, 'foo.a.com') ++ fail(cert, 'bar.foo.com') ++ ++ # NULL bytes are bad, CVE-2013-4073 ++ cert = {'subject': ((('commonName', ++ 'null.python.org\x00example.org'),),)} ++ ok(cert, 'null.python.org\x00example.org') # or raise an error? ++ fail(cert, 'example.org') ++ fail(cert, 'null.python.org') ++ ++ # error cases with wildcards ++ cert = {'subject': ((('commonName', '*.*.a.com'),),)} ++ fail(cert, 'bar.foo.a.com') ++ fail(cert, 'a.com') ++ fail(cert, 'Xa.com') ++ fail(cert, '.a.com') ++ ++ cert = {'subject': ((('commonName', 'a.*.com'),),)} ++ fail(cert, 'a.foo.com') ++ fail(cert, 'a..com') ++ fail(cert, 'a.com') ++ ++ # wildcard doesn't match IDNA prefix 'xn--' ++ idna = u'püthon.python.org'.encode("idna").decode("ascii") ++ cert = {'subject': ((('commonName', idna),),)} ++ ok(cert, idna) ++ cert = {'subject': ((('commonName', 'x*.python.org'),),)} ++ fail(cert, idna) ++ cert = {'subject': ((('commonName', 'xn--p*.python.org'),),)} ++ fail(cert, idna) ++ ++ # wildcard in first fragment and IDNA A-labels in sequent fragments ++ # are supported. 
++ idna = u'www*.pythön.org'.encode("idna").decode("ascii") ++ cert = {'subject': ((('commonName', idna),),)} ++ ok(cert, u'www.pythön.org'.encode("idna").decode("ascii")) ++ ok(cert, u'www1.pythön.org'.encode("idna").decode("ascii")) ++ fail(cert, u'ftp.pythön.org'.encode("idna").decode("ascii")) ++ fail(cert, u'pythön.org'.encode("idna").decode("ascii")) ++ ++ # Slightly fake real-world example ++ cert = {'notAfter': 'Jun 26 21:41:46 2011 GMT', ++ 'subject': ((('commonName', 'linuxfrz.org'),),), ++ 'subjectAltName': (('DNS', 'linuxfr.org'), ++ ('DNS', 'linuxfr.com'), ++ ('othername', ''))} ++ ok(cert, 'linuxfr.org') ++ ok(cert, 'linuxfr.com') ++ # Not a "DNS" entry ++ fail(cert, '') ++ # When there is a subjectAltName, commonName isn't used ++ fail(cert, 'linuxfrz.org') ++ ++ # A pristine real-world example ++ cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT', ++ 'subject': ((('countryName', 'US'),), ++ (('stateOrProvinceName', 'California'),), ++ (('localityName', 'Mountain View'),), ++ (('organizationName', 'Google Inc'),), ++ (('commonName', 'mail.google.com'),))} ++ ok(cert, 'mail.google.com') ++ fail(cert, 'gmail.com') ++ # Only commonName is considered ++ fail(cert, 'California') ++ ++ # Neither commonName nor subjectAltName ++ cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT', ++ 'subject': ((('countryName', 'US'),), ++ (('stateOrProvinceName', 'California'),), ++ (('localityName', 'Mountain View'),), ++ (('organizationName', 'Google Inc'),))} ++ fail(cert, 'mail.google.com') ++ ++ # No DNS entry in subjectAltName but a commonName ++ cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT', ++ 'subject': ((('countryName', 'US'),), ++ (('stateOrProvinceName', 'California'),), ++ (('localityName', 'Mountain View'),), ++ (('commonName', 'mail.google.com'),)), ++ 'subjectAltName': (('othername', 'blabla'), )} ++ ok(cert, 'mail.google.com') ++ ++ # No DNS entry subjectAltName and no commonName ++ cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT', ++ 'subject': ((('countryName', 'US'),), ++ (('stateOrProvinceName', 'California'),), ++ (('localityName', 'Mountain View'),), ++ (('organizationName', 'Google Inc'),)), ++ 'subjectAltName': (('othername', 'blabla'),)} ++ fail(cert, 'google.com') ++ ++ # Empty cert / no cert ++ self.assertRaises(ValueError, ssl.match_hostname, None, 'example.com') ++ self.assertRaises(ValueError, ssl.match_hostname, {}, 'example.com') ++ ++ # Issue #17980: avoid denials of service by refusing more than one ++ # wildcard per fragment. 
++ cert = {'subject': ((('commonName', 'a*b.com'),),)} ++ ok(cert, 'axxb.com') ++ cert = {'subject': ((('commonName', 'a*b.co*'),),)} ++ fail(cert, 'axxb.com') ++ cert = {'subject': ((('commonName', 'a*b*.com'),),)} ++ with self.assertRaises(ssl.CertificateError) as cm: ++ ssl.match_hostname(cert, 'axxbxxc.com') ++ self.assertIn("too many wildcards", str(cm.exception)) ++ ++ def test_server_side(self): ++ # server_hostname doesn't work for server sockets ++ ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ with closing(socket.socket()) as sock: ++ self.assertRaises(ValueError, ctx.wrap_socket, sock, True, ++ server_hostname="some.hostname") ++ ++ def test_unknown_channel_binding(self): ++ # should raise ValueError for unknown type ++ s = socket.socket(socket.AF_INET) ++ with closing(ssl.wrap_socket(s)) as ss: ++ with self.assertRaises(ValueError): ++ ss.get_channel_binding("unknown-type") ++ ++ @unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES, ++ "'tls-unique' channel binding not available") ++ def test_tls_unique_channel_binding(self): ++ # unconnected should return None for known type ++ s = socket.socket(socket.AF_INET) ++ with closing(ssl.wrap_socket(s)) as ss: ++ self.assertIsNone(ss.get_channel_binding("tls-unique")) ++ # the same for server-side ++ s = socket.socket(socket.AF_INET) ++ with closing(ssl.wrap_socket(s, server_side=True, certfile=CERTFILE)) as ss: ++ self.assertIsNone(ss.get_channel_binding("tls-unique")) ++ ++ def test_get_default_verify_paths(self): ++ paths = ssl.get_default_verify_paths() ++ self.assertEqual(len(paths), 6) ++ self.assertIsInstance(paths, ssl.DefaultVerifyPaths) ++ ++ with support.EnvironmentVarGuard() as env: ++ env["SSL_CERT_DIR"] = CAPATH ++ env["SSL_CERT_FILE"] = CERTFILE ++ paths = ssl.get_default_verify_paths() ++ self.assertEqual(paths.cafile, CERTFILE) ++ self.assertEqual(paths.capath, CAPATH) ++ ++ @unittest.skipUnless(sys.platform == "win32", "Windows specific") ++ def test_enum_certificates(self): ++ self.assertTrue(ssl.enum_certificates("CA")) ++ self.assertTrue(ssl.enum_certificates("ROOT")) ++ ++ self.assertRaises(TypeError, ssl.enum_certificates) ++ self.assertRaises(WindowsError, ssl.enum_certificates, "") ++ ++ trust_oids = set() ++ for storename in ("CA", "ROOT"): ++ store = ssl.enum_certificates(storename) ++ self.assertIsInstance(store, list) ++ for element in store: ++ self.assertIsInstance(element, tuple) ++ self.assertEqual(len(element), 3) ++ cert, enc, trust = element ++ self.assertIsInstance(cert, bytes) ++ self.assertIn(enc, {"x509_asn", "pkcs_7_asn"}) ++ self.assertIsInstance(trust, (set, bool)) ++ if isinstance(trust, set): ++ trust_oids.update(trust) ++ ++ serverAuth = "1.3.6.1.5.5.7.3.1" ++ self.assertIn(serverAuth, trust_oids) ++ ++ @unittest.skipUnless(sys.platform == "win32", "Windows specific") ++ def test_enum_crls(self): ++ self.assertTrue(ssl.enum_crls("CA")) ++ self.assertRaises(TypeError, ssl.enum_crls) ++ self.assertRaises(WindowsError, ssl.enum_crls, "") ++ ++ crls = ssl.enum_crls("CA") ++ self.assertIsInstance(crls, list) ++ for element in crls: ++ self.assertIsInstance(element, tuple) ++ self.assertEqual(len(element), 2) ++ self.assertIsInstance(element[0], bytes) ++ self.assertIn(element[1], {"x509_asn", "pkcs_7_asn"}) ++ ++ ++ def test_asn1object(self): ++ expected = (129, 'serverAuth', 'TLS Web Server Authentication', ++ '1.3.6.1.5.5.7.3.1') ++ ++ val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1') ++ self.assertEqual(val, expected) ++ self.assertEqual(val.nid, 129) ++ self.assertEqual(val.shortname, 
'serverAuth') ++ self.assertEqual(val.longname, 'TLS Web Server Authentication') ++ self.assertEqual(val.oid, '1.3.6.1.5.5.7.3.1') ++ self.assertIsInstance(val, ssl._ASN1Object) ++ self.assertRaises(ValueError, ssl._ASN1Object, 'serverAuth') ++ ++ val = ssl._ASN1Object.fromnid(129) ++ self.assertEqual(val, expected) ++ self.assertIsInstance(val, ssl._ASN1Object) ++ self.assertRaises(ValueError, ssl._ASN1Object.fromnid, -1) ++ with self.assertRaisesRegexp(ValueError, "unknown NID 100000"): ++ ssl._ASN1Object.fromnid(100000) ++ for i in range(1000): ++ try: ++ obj = ssl._ASN1Object.fromnid(i) ++ except ValueError: ++ pass ++ else: ++ self.assertIsInstance(obj.nid, int) ++ self.assertIsInstance(obj.shortname, str) ++ self.assertIsInstance(obj.longname, str) ++ self.assertIsInstance(obj.oid, (str, type(None))) ++ ++ val = ssl._ASN1Object.fromname('TLS Web Server Authentication') ++ self.assertEqual(val, expected) ++ self.assertIsInstance(val, ssl._ASN1Object) ++ self.assertEqual(ssl._ASN1Object.fromname('serverAuth'), expected) ++ self.assertEqual(ssl._ASN1Object.fromname('1.3.6.1.5.5.7.3.1'), ++ expected) ++ with self.assertRaisesRegexp(ValueError, "unknown object 'serverauth'"): ++ ssl._ASN1Object.fromname('serverauth') ++ ++ def test_purpose_enum(self): ++ val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1') ++ self.assertIsInstance(ssl.Purpose.SERVER_AUTH, ssl._ASN1Object) ++ self.assertEqual(ssl.Purpose.SERVER_AUTH, val) ++ self.assertEqual(ssl.Purpose.SERVER_AUTH.nid, 129) ++ self.assertEqual(ssl.Purpose.SERVER_AUTH.shortname, 'serverAuth') ++ self.assertEqual(ssl.Purpose.SERVER_AUTH.oid, ++ '1.3.6.1.5.5.7.3.1') ++ ++ val = ssl._ASN1Object('1.3.6.1.5.5.7.3.2') ++ self.assertIsInstance(ssl.Purpose.CLIENT_AUTH, ssl._ASN1Object) ++ self.assertEqual(ssl.Purpose.CLIENT_AUTH, val) ++ self.assertEqual(ssl.Purpose.CLIENT_AUTH.nid, 130) ++ self.assertEqual(ssl.Purpose.CLIENT_AUTH.shortname, 'clientAuth') ++ self.assertEqual(ssl.Purpose.CLIENT_AUTH.oid, ++ '1.3.6.1.5.5.7.3.2') ++ ++ def cert_time_ok(self, timestring, timestamp): ++ self.assertEqual(ssl.cert_time_to_seconds(timestring), timestamp) ++ ++ def cert_time_fail(self, timestring): ++ with self.assertRaises(ValueError): ++ ssl.cert_time_to_seconds(timestring) ++ ++ @unittest.skipUnless(utc_offset(), ++ 'local time needs to be different from UTC') ++ def test_cert_time_to_seconds_timezone(self): ++ # Issue #19940: ssl.cert_time_to_seconds() returns wrong ++ # results if local timezone is not UTC ++ self.cert_time_ok("May 9 00:00:00 2007 GMT", 1178668800.0) ++ self.cert_time_ok("Jan 5 09:34:43 2018 GMT", 1515144883.0) ++ ++ def test_cert_time_to_seconds(self): ++ timestring = "Jan 5 09:34:43 2018 GMT" ++ ts = 1515144883.0 ++ self.cert_time_ok(timestring, ts) ++ # accept keyword parameter, assert its name ++ self.assertEqual(ssl.cert_time_to_seconds(cert_time=timestring), ts) ++ # accept both %e and %d (space or zero generated by strftime) ++ self.cert_time_ok("Jan 05 09:34:43 2018 GMT", ts) ++ # case-insensitive ++ self.cert_time_ok("JaN 5 09:34:43 2018 GmT", ts) ++ self.cert_time_fail("Jan 5 09:34 2018 GMT") # no seconds ++ self.cert_time_fail("Jan 5 09:34:43 2018") # no GMT ++ self.cert_time_fail("Jan 5 09:34:43 2018 UTC") # not GMT timezone ++ self.cert_time_fail("Jan 35 09:34:43 2018 GMT") # invalid day ++ self.cert_time_fail("Jon 5 09:34:43 2018 GMT") # invalid month ++ self.cert_time_fail("Jan 5 24:00:00 2018 GMT") # invalid hour ++ self.cert_time_fail("Jan 5 09:60:43 2018 GMT") # invalid minute ++ ++ newyear_ts = 1230768000.0 ++ # leap seconds 
++ self.cert_time_ok("Dec 31 23:59:60 2008 GMT", newyear_ts) ++ # same timestamp ++ self.cert_time_ok("Jan 1 00:00:00 2009 GMT", newyear_ts) ++ ++ self.cert_time_ok("Jan 5 09:34:59 2018 GMT", 1515144899) ++ # allow 60th second (even if it is not a leap second) ++ self.cert_time_ok("Jan 5 09:34:60 2018 GMT", 1515144900) ++ # allow 2nd leap second for compatibility with time.strptime() ++ self.cert_time_ok("Jan 5 09:34:61 2018 GMT", 1515144901) ++ self.cert_time_fail("Jan 5 09:34:62 2018 GMT") # invalid seconds ++ ++ # no special treatement for the special value: ++ # 99991231235959Z (rfc 5280) ++ self.cert_time_ok("Dec 31 23:59:59 9999 GMT", 253402300799.0) ++ ++ @support.run_with_locale('LC_ALL', '') ++ def test_cert_time_to_seconds_locale(self): ++ # `cert_time_to_seconds()` should be locale independent ++ ++ def local_february_name(): ++ return time.strftime('%b', (1, 2, 3, 4, 5, 6, 0, 0, 0)) ++ ++ if local_february_name().lower() == 'feb': ++ self.skipTest("locale-specific month name needs to be " ++ "different from C locale") ++ ++ # locale-independent ++ self.cert_time_ok("Feb 9 00:00:00 2007 GMT", 1170979200.0) ++ self.cert_time_fail(local_february_name() + " 9 00:00:00 2007 GMT") ++ ++ ++class ContextTests(unittest.TestCase): ++ ++ @skip_if_broken_ubuntu_ssl ++ def test_constructor(self): ++ for protocol in PROTOCOLS: ++ ssl.SSLContext(protocol) ++ self.assertRaises(TypeError, ssl.SSLContext) ++ self.assertRaises(ValueError, ssl.SSLContext, -1) ++ self.assertRaises(ValueError, ssl.SSLContext, 42) ++ ++ @skip_if_broken_ubuntu_ssl ++ def test_protocol(self): ++ for proto in PROTOCOLS: ++ ctx = ssl.SSLContext(proto) ++ self.assertEqual(ctx.protocol, proto) ++ ++ def test_ciphers(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.set_ciphers("ALL") ++ ctx.set_ciphers("DEFAULT") ++ with self.assertRaisesRegexp(ssl.SSLError, "No cipher can be selected"): ++ ctx.set_ciphers("^$:,;?*'dorothyx") ++ ++ @skip_if_broken_ubuntu_ssl ++ def test_options(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ # OP_ALL | OP_NO_SSLv2 is the default value ++ self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2, ++ ctx.options) ++ ctx.options |= ssl.OP_NO_SSLv3 ++ self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3, ++ ctx.options) ++ if can_clear_options(): ++ ctx.options = (ctx.options & ~ssl.OP_NO_SSLv2) | ssl.OP_NO_TLSv1 ++ self.assertEqual(ssl.OP_ALL | ssl.OP_NO_TLSv1 | ssl.OP_NO_SSLv3, ++ ctx.options) ++ ctx.options = 0 ++ self.assertEqual(0, ctx.options) ++ else: ++ with self.assertRaises(ValueError): ++ ctx.options = 0 ++ ++ def test_verify_mode(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ # Default value ++ self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) ++ ctx.verify_mode = ssl.CERT_OPTIONAL ++ self.assertEqual(ctx.verify_mode, ssl.CERT_OPTIONAL) ++ ctx.verify_mode = ssl.CERT_REQUIRED ++ self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) ++ ctx.verify_mode = ssl.CERT_NONE ++ self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) ++ with self.assertRaises(TypeError): ++ ctx.verify_mode = None ++ with self.assertRaises(ValueError): ++ ctx.verify_mode = 42 ++ ++ @unittest.skipUnless(have_verify_flags(), ++ "verify_flags need OpenSSL > 0.9.8") ++ def test_verify_flags(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ # default value by OpenSSL ++ self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT) ++ ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF ++ self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_LEAF) ++ ctx.verify_flags = ssl.VERIFY_CRL_CHECK_CHAIN ++ 
self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_CHAIN) ++ ctx.verify_flags = ssl.VERIFY_DEFAULT ++ self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT) ++ # supports any value ++ ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT ++ self.assertEqual(ctx.verify_flags, ++ ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT) ++ with self.assertRaises(TypeError): ++ ctx.verify_flags = None ++ ++ def test_load_cert_chain(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ # Combined key and cert in a single file ++ ctx.load_cert_chain(CERTFILE) ++ ctx.load_cert_chain(CERTFILE, keyfile=CERTFILE) ++ self.assertRaises(TypeError, ctx.load_cert_chain, keyfile=CERTFILE) ++ with self.assertRaises(IOError) as cm: ++ ctx.load_cert_chain(WRONGCERT) ++ self.assertEqual(cm.exception.errno, errno.ENOENT) ++ with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"): ++ ctx.load_cert_chain(BADCERT) ++ with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"): ++ ctx.load_cert_chain(EMPTYCERT) ++ # Separate key and cert ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.load_cert_chain(ONLYCERT, ONLYKEY) ++ ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY) ++ ctx.load_cert_chain(certfile=BYTES_ONLYCERT, keyfile=BYTES_ONLYKEY) ++ with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"): ++ ctx.load_cert_chain(ONLYCERT) ++ with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"): ++ ctx.load_cert_chain(ONLYKEY) ++ with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"): ++ ctx.load_cert_chain(certfile=ONLYKEY, keyfile=ONLYCERT) ++ # Mismatching key and cert ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ with self.assertRaisesRegexp(ssl.SSLError, "key values mismatch"): ++ ctx.load_cert_chain(SVN_PYTHON_ORG_ROOT_CERT, ONLYKEY) ++ # Password protected key and cert ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD) ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode()) ++ ctx.load_cert_chain(CERTFILE_PROTECTED, ++ password=bytearray(KEY_PASSWORD.encode())) ++ ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD) ++ ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD.encode()) ++ ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, ++ bytearray(KEY_PASSWORD.encode())) ++ with self.assertRaisesRegexp(TypeError, "should be a string"): ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=True) ++ with self.assertRaises(ssl.SSLError): ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password="badpass") ++ with self.assertRaisesRegexp(ValueError, "cannot be longer"): ++ # openssl has a fixed limit on the password buffer. ++ # PEM_BUFSIZE is generally set to 1kb. ++ # Return a string larger than this. 
++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=b'a' * 102400) ++ # Password callback ++ def getpass_unicode(): ++ return KEY_PASSWORD ++ def getpass_bytes(): ++ return KEY_PASSWORD.encode() ++ def getpass_bytearray(): ++ return bytearray(KEY_PASSWORD.encode()) ++ def getpass_badpass(): ++ return "badpass" ++ def getpass_huge(): ++ return b'a' * (1024 * 1024) ++ def getpass_bad_type(): ++ return 9 ++ def getpass_exception(): ++ raise Exception('getpass error') ++ class GetPassCallable: ++ def __call__(self): ++ return KEY_PASSWORD ++ def getpass(self): ++ return KEY_PASSWORD ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_unicode) ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytes) ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytearray) ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=GetPassCallable()) ++ ctx.load_cert_chain(CERTFILE_PROTECTED, ++ password=GetPassCallable().getpass) ++ with self.assertRaises(ssl.SSLError): ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_badpass) ++ with self.assertRaisesRegexp(ValueError, "cannot be longer"): ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_huge) ++ with self.assertRaisesRegexp(TypeError, "must return a string"): ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bad_type) ++ with self.assertRaisesRegexp(Exception, "getpass error"): ++ ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_exception) ++ # Make sure the password function isn't called if it isn't needed ++ ctx.load_cert_chain(CERTFILE, password=getpass_exception) ++ ++ def test_load_verify_locations(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.load_verify_locations(CERTFILE) ++ ctx.load_verify_locations(cafile=CERTFILE, capath=None) ++ ctx.load_verify_locations(BYTES_CERTFILE) ++ ctx.load_verify_locations(cafile=BYTES_CERTFILE, capath=None) ++ self.assertRaises(TypeError, ctx.load_verify_locations) ++ self.assertRaises(TypeError, ctx.load_verify_locations, None, None, None) ++ with self.assertRaises(IOError) as cm: ++ ctx.load_verify_locations(WRONGCERT) ++ self.assertEqual(cm.exception.errno, errno.ENOENT) ++ with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"): ++ ctx.load_verify_locations(BADCERT) ++ ctx.load_verify_locations(CERTFILE, CAPATH) ++ ctx.load_verify_locations(CERTFILE, capath=BYTES_CAPATH) ++ ++ # Issue #10989: crash if the second argument type is invalid ++ self.assertRaises(TypeError, ctx.load_verify_locations, None, True) ++ ++ def test_load_verify_cadata(self): ++ # test cadata ++ with open(CAFILE_CACERT) as f: ++ cacert_pem = f.read().decode("ascii") ++ cacert_der = ssl.PEM_cert_to_DER_cert(cacert_pem) ++ with open(CAFILE_NEURONIO) as f: ++ neuronio_pem = f.read().decode("ascii") ++ neuronio_der = ssl.PEM_cert_to_DER_cert(neuronio_pem) ++ ++ # test PEM ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ self.assertEqual(ctx.cert_store_stats()["x509_ca"], 0) ++ ctx.load_verify_locations(cadata=cacert_pem) ++ self.assertEqual(ctx.cert_store_stats()["x509_ca"], 1) ++ ctx.load_verify_locations(cadata=neuronio_pem) ++ self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) ++ # cert already in hash table ++ ctx.load_verify_locations(cadata=neuronio_pem) ++ self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) ++ ++ # combined ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ combined = "\n".join((cacert_pem, neuronio_pem)) ++ ctx.load_verify_locations(cadata=combined) ++ self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) ++ ++ # with junk around the certs ++ ctx = 
ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ combined = ["head", cacert_pem, "other", neuronio_pem, "again", ++ neuronio_pem, "tail"] ++ ctx.load_verify_locations(cadata="\n".join(combined)) ++ self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) ++ ++ # test DER ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.load_verify_locations(cadata=cacert_der) ++ ctx.load_verify_locations(cadata=neuronio_der) ++ self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) ++ # cert already in hash table ++ ctx.load_verify_locations(cadata=cacert_der) ++ self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) ++ ++ # combined ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ combined = b"".join((cacert_der, neuronio_der)) ++ ctx.load_verify_locations(cadata=combined) ++ self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) ++ ++ # error cases ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ self.assertRaises(TypeError, ctx.load_verify_locations, cadata=object) ++ ++ with self.assertRaisesRegexp(ssl.SSLError, "no start line"): ++ ctx.load_verify_locations(cadata=u"broken") ++ with self.assertRaisesRegexp(ssl.SSLError, "not enough data"): ++ ctx.load_verify_locations(cadata=b"broken") ++ ++ ++ def test_load_dh_params(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.load_dh_params(DHFILE) ++ if os.name != 'nt': ++ ctx.load_dh_params(BYTES_DHFILE) ++ self.assertRaises(TypeError, ctx.load_dh_params) ++ self.assertRaises(TypeError, ctx.load_dh_params, None) ++ with self.assertRaises(IOError) as cm: ++ ctx.load_dh_params(WRONGCERT) ++ self.assertEqual(cm.exception.errno, errno.ENOENT) ++ with self.assertRaises(ssl.SSLError) as cm: ++ ctx.load_dh_params(CERTFILE) ++ ++ @skip_if_broken_ubuntu_ssl ++ def test_session_stats(self): ++ for proto in PROTOCOLS: ++ ctx = ssl.SSLContext(proto) ++ self.assertEqual(ctx.session_stats(), { ++ 'number': 0, ++ 'connect': 0, ++ 'connect_good': 0, ++ 'connect_renegotiate': 0, ++ 'accept': 0, ++ 'accept_good': 0, ++ 'accept_renegotiate': 0, ++ 'hits': 0, ++ 'misses': 0, ++ 'timeouts': 0, ++ 'cache_full': 0, ++ }) ++ ++ def test_set_default_verify_paths(self): ++ # There's not much we can do to test that it acts as expected, ++ # so just check it doesn't crash or raise an exception. ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.set_default_verify_paths() ++ ++ @unittest.skipUnless(ssl.HAS_ECDH, "ECDH disabled on this OpenSSL build") ++ def test_set_ecdh_curve(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.set_ecdh_curve("prime256v1") ++ ctx.set_ecdh_curve(b"prime256v1") ++ self.assertRaises(TypeError, ctx.set_ecdh_curve) ++ self.assertRaises(TypeError, ctx.set_ecdh_curve, None) ++ self.assertRaises(ValueError, ctx.set_ecdh_curve, "foo") ++ self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo") ++ ++ @needs_sni ++ def test_sni_callback(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ++ # set_servername_callback expects a callable, or None ++ self.assertRaises(TypeError, ctx.set_servername_callback) ++ self.assertRaises(TypeError, ctx.set_servername_callback, 4) ++ self.assertRaises(TypeError, ctx.set_servername_callback, "") ++ self.assertRaises(TypeError, ctx.set_servername_callback, ctx) ++ ++ def dummycallback(sock, servername, ctx): ++ pass ++ ctx.set_servername_callback(None) ++ ctx.set_servername_callback(dummycallback) ++ ++ @needs_sni ++ def test_sni_callback_refcycle(self): ++ # Reference cycles through the servername callback are detected ++ # and cleared. 
++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ def dummycallback(sock, servername, ctx, cycle=ctx): ++ pass ++ ctx.set_servername_callback(dummycallback) ++ wr = weakref.ref(ctx) ++ del ctx, dummycallback ++ gc.collect() ++ self.assertIs(wr(), None) ++ ++ def test_cert_store_stats(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ self.assertEqual(ctx.cert_store_stats(), ++ {'x509_ca': 0, 'crl': 0, 'x509': 0}) ++ ctx.load_cert_chain(CERTFILE) ++ self.assertEqual(ctx.cert_store_stats(), ++ {'x509_ca': 0, 'crl': 0, 'x509': 0}) ++ ctx.load_verify_locations(CERTFILE) ++ self.assertEqual(ctx.cert_store_stats(), ++ {'x509_ca': 0, 'crl': 0, 'x509': 1}) ++ ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) ++ self.assertEqual(ctx.cert_store_stats(), ++ {'x509_ca': 1, 'crl': 0, 'x509': 2}) ++ ++ def test_get_ca_certs(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ self.assertEqual(ctx.get_ca_certs(), []) ++ # CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE ++ ctx.load_verify_locations(CERTFILE) ++ self.assertEqual(ctx.get_ca_certs(), []) ++ # but SVN_PYTHON_ORG_ROOT_CERT is a CA cert ++ ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) ++ self.assertEqual(ctx.get_ca_certs(), ++ [{'issuer': ((('organizationName', 'Root CA'),), ++ (('organizationalUnitName', 'http://www.cacert.org'),), ++ (('commonName', 'CA Cert Signing Authority'),), ++ (('emailAddress', 'support@cacert.org'),)), ++ 'notAfter': asn1time('Mar 29 12:29:49 2033 GMT'), ++ 'notBefore': asn1time('Mar 30 12:29:49 2003 GMT'), ++ 'serialNumber': '00', ++ 'crlDistributionPoints': ('https://www.cacert.org/revoke.crl',), ++ 'subject': ((('organizationName', 'Root CA'),), ++ (('organizationalUnitName', 'http://www.cacert.org'),), ++ (('commonName', 'CA Cert Signing Authority'),), ++ (('emailAddress', 'support@cacert.org'),)), ++ 'version': 3}]) ++ ++ with open(SVN_PYTHON_ORG_ROOT_CERT) as f: ++ pem = f.read() ++ der = ssl.PEM_cert_to_DER_cert(pem) ++ self.assertEqual(ctx.get_ca_certs(True), [der]) + ++ def test_load_default_certs(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.load_default_certs() ++ ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.load_default_certs(ssl.Purpose.SERVER_AUTH) ++ ctx.load_default_certs() ++ ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.load_default_certs(ssl.Purpose.CLIENT_AUTH) ++ ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ self.assertRaises(TypeError, ctx.load_default_certs, None) ++ self.assertRaises(TypeError, ctx.load_default_certs, 'SERVER_AUTH') ++ ++ def test_create_default_context(self): ++ ctx = ssl.create_default_context() ++ self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) ++ self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) ++ self.assertTrue(ctx.check_hostname) ++ self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2) ++ self.assertEqual( ++ ctx.options & getattr(ssl, "OP_NO_COMPRESSION", 0), ++ getattr(ssl, "OP_NO_COMPRESSION", 0), ++ ) ++ ++ with open(SIGNING_CA) as f: ++ cadata = f.read().decode("ascii") ++ ctx = ssl.create_default_context(cafile=SIGNING_CA, capath=CAPATH, ++ cadata=cadata) ++ self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) ++ self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) ++ self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2) ++ self.assertEqual( ++ ctx.options & getattr(ssl, "OP_NO_COMPRESSION", 0), ++ getattr(ssl, "OP_NO_COMPRESSION", 0), ++ ) ++ ++ ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ++ self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) ++ 
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) ++ self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2) ++ self.assertEqual( ++ ctx.options & getattr(ssl, "OP_NO_COMPRESSION", 0), ++ getattr(ssl, "OP_NO_COMPRESSION", 0), ++ ) ++ self.assertEqual( ++ ctx.options & getattr(ssl, "OP_SINGLE_DH_USE", 0), ++ getattr(ssl, "OP_SINGLE_DH_USE", 0), ++ ) ++ self.assertEqual( ++ ctx.options & getattr(ssl, "OP_SINGLE_ECDH_USE", 0), ++ getattr(ssl, "OP_SINGLE_ECDH_USE", 0), ++ ) ++ ++ def test__create_stdlib_context(self): ++ ctx = ssl._create_stdlib_context() ++ self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) ++ self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) ++ self.assertFalse(ctx.check_hostname) ++ self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2) ++ ++ ctx = ssl._create_stdlib_context(ssl.PROTOCOL_TLSv1) ++ self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1) ++ self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) ++ self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2) ++ ++ ctx = ssl._create_stdlib_context(ssl.PROTOCOL_TLSv1, ++ cert_reqs=ssl.CERT_REQUIRED, ++ check_hostname=True) ++ self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1) ++ self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) ++ self.assertTrue(ctx.check_hostname) ++ self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2) ++ ++ ctx = ssl._create_stdlib_context(purpose=ssl.Purpose.CLIENT_AUTH) ++ self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) ++ self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) ++ self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2) ++ ++ def test_check_hostname(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ self.assertFalse(ctx.check_hostname) ++ ++ # Requires CERT_REQUIRED or CERT_OPTIONAL ++ with self.assertRaises(ValueError): ++ ctx.check_hostname = True ++ ctx.verify_mode = ssl.CERT_REQUIRED ++ self.assertFalse(ctx.check_hostname) ++ ctx.check_hostname = True ++ self.assertTrue(ctx.check_hostname) ++ ++ ctx.verify_mode = ssl.CERT_OPTIONAL ++ ctx.check_hostname = True ++ self.assertTrue(ctx.check_hostname) ++ ++ # Cannot set CERT_NONE with check_hostname enabled ++ with self.assertRaises(ValueError): ++ ctx.verify_mode = ssl.CERT_NONE ++ ctx.check_hostname = False ++ self.assertFalse(ctx.check_hostname) ++ ++ ++class SSLErrorTests(unittest.TestCase): ++ ++ def test_str(self): ++ # The str() of a SSLError doesn't include the errno ++ e = ssl.SSLError(1, "foo") ++ self.assertEqual(str(e), "foo") ++ self.assertEqual(e.errno, 1) ++ # Same for a subclass ++ e = ssl.SSLZeroReturnError(1, "foo") ++ self.assertEqual(str(e), "foo") ++ self.assertEqual(e.errno, 1) ++ ++ def test_lib_reason(self): ++ # Test the library and reason attributes ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ with self.assertRaises(ssl.SSLError) as cm: ++ ctx.load_dh_params(CERTFILE) ++ self.assertEqual(cm.exception.library, 'PEM') ++ self.assertEqual(cm.exception.reason, 'NO_START_LINE') ++ s = str(cm.exception) ++ self.assertTrue(s.startswith("[PEM: NO_START_LINE] no start line"), s) ++ ++ def test_subclass(self): ++ # Check that the appropriate SSLError subclass is raised ++ # (this only tests one of them) ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ with closing(socket.socket()) as s: ++ s.bind(("127.0.0.1", 0)) ++ s.listen(5) ++ c = socket.socket() ++ c.connect(s.getsockname()) ++ c.setblocking(False) ++ with closing(ctx.wrap_socket(c, False, do_handshake_on_connect=False)) as c: ++ with self.assertRaises(ssl.SSLWantReadError) as cm: ++ c.do_handshake() ++ s = 
str(cm.exception) ++ self.assertTrue(s.startswith("The operation did not complete (read)"), s) ++ # For compatibility ++ self.assertEqual(cm.exception.errno, ssl.SSL_ERROR_WANT_READ) ++ ++ ++ + + class NetworkedTests(unittest.TestCase): + + def test_connect(self): +- with test_support.transient_internet("svn.python.org"): ++ with support.transient_internet("svn.python.org"): + s = ssl.wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_NONE) +- s.connect(("svn.python.org", 443)) +- c = s.getpeercert() +- if c: +- self.fail("Peer cert %s shouldn't be here!") +- s.close() +- +- # this should fail because we have no verification certs +- s = ssl.wrap_socket(socket.socket(socket.AF_INET), +- cert_reqs=ssl.CERT_REQUIRED) + try: + s.connect(("svn.python.org", 443)) +- except ssl.SSLError: +- pass ++ self.assertEqual({}, s.getpeercert()) + finally: + s.close() + ++ # this should fail because we have no verification certs ++ s = ssl.wrap_socket(socket.socket(socket.AF_INET), ++ cert_reqs=ssl.CERT_REQUIRED) ++ self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", ++ s.connect, ("svn.python.org", 443)) ++ s.close() ++ + # this should succeed because we specify the root cert + s = ssl.wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED, + ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + try: + s.connect(("svn.python.org", 443)) ++ self.assertTrue(s.getpeercert()) + finally: + s.close() + + def test_connect_ex(self): + # Issue #11326: check connect_ex() implementation +- with test_support.transient_internet("svn.python.org"): ++ with support.transient_internet("svn.python.org"): + s = ssl.wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED, + ca_certs=SVN_PYTHON_ORG_ROOT_CERT) +@@ -272,7 +1206,7 @@ class NetworkedTests(unittest.TestCase): + def test_non_blocking_connect_ex(self): + # Issue #11326: non-blocking connect_ex() should allow handshake + # to proceed after the socket gets ready. +- with test_support.transient_internet("svn.python.org"): ++ with support.transient_internet("svn.python.org"): + s = ssl.wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED, + ca_certs=SVN_PYTHON_ORG_ROOT_CERT, +@@ -289,13 +1223,10 @@ class NetworkedTests(unittest.TestCase): + try: + s.do_handshake() + break +- except ssl.SSLError as err: +- if err.args[0] == ssl.SSL_ERROR_WANT_READ: +- select.select([s], [], [], 5.0) +- elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE: +- select.select([], [s], [], 5.0) +- else: +- raise ++ except ssl.SSLWantReadError: ++ select.select([s], [], [], 5.0) ++ except ssl.SSLWantWriteError: ++ select.select([], [s], [], 5.0) + # SSL established + self.assertTrue(s.getpeercert()) + finally: +@@ -304,7 +1235,7 @@ class NetworkedTests(unittest.TestCase): + def test_timeout_connect_ex(self): + # Issue #12065: on a timeout, connect_ex() should return the original + # errno (mimicking the behaviour of non-SSL sockets). 
+- with test_support.transient_internet("svn.python.org"): ++ with support.transient_internet("svn.python.org"): + s = ssl.wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED, + ca_certs=SVN_PYTHON_ORG_ROOT_CERT, +@@ -319,22 +1250,109 @@ class NetworkedTests(unittest.TestCase): + s.close() + + def test_connect_ex_error(self): +- with test_support.transient_internet("svn.python.org"): ++ with support.transient_internet("svn.python.org"): + s = ssl.wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED, + ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + try: +- self.assertEqual(errno.ECONNREFUSED, +- s.connect_ex(("svn.python.org", 444))) ++ rc = s.connect_ex(("svn.python.org", 444)) ++ # Issue #19919: Windows machines or VMs hosted on Windows ++ # machines sometimes return EWOULDBLOCK. ++ self.assertIn(rc, (errno.ECONNREFUSED, errno.EWOULDBLOCK)) ++ finally: ++ s.close() ++ ++ def test_connect_with_context(self): ++ with support.transient_internet("svn.python.org"): ++ # Same as test_connect, but with a separately created context ++ ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ s = ctx.wrap_socket(socket.socket(socket.AF_INET)) ++ s.connect(("svn.python.org", 443)) ++ try: ++ self.assertEqual({}, s.getpeercert()) + finally: + s.close() ++ # Same with a server hostname ++ s = ctx.wrap_socket(socket.socket(socket.AF_INET), ++ server_hostname="svn.python.org") ++ if ssl.HAS_SNI: ++ s.connect(("svn.python.org", 443)) ++ s.close() ++ else: ++ self.assertRaises(ValueError, s.connect, ("svn.python.org", 443)) ++ # This should fail because we have no verification certs ++ ctx.verify_mode = ssl.CERT_REQUIRED ++ s = ctx.wrap_socket(socket.socket(socket.AF_INET)) ++ self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", ++ s.connect, ("svn.python.org", 443)) ++ s.close() ++ # This should succeed because we specify the root cert ++ ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) ++ s = ctx.wrap_socket(socket.socket(socket.AF_INET)) ++ s.connect(("svn.python.org", 443)) ++ try: ++ cert = s.getpeercert() ++ self.assertTrue(cert) ++ finally: ++ s.close() ++ ++ def test_connect_capath(self): ++ # Verify server certificates using the `capath` argument ++ # NOTE: the subject hashing algorithm has been changed between ++ # OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must ++ # contain both versions of each certificate (same content, different ++ # filename) for this test to be portable across OpenSSL releases. 
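
# A minimal standalone sketch (annotation, not part of the patch) of the
# capath-style loading that test_connect_capath exercises, assuming a
# c_rehash-ed certificate directory; the path and host below are placeholders,
# not values from the test suite.
import socket
import ssl
from contextlib import closing

ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.verify_mode = ssl.CERT_REQUIRED
# `capath` must name a directory whose certificates have been hashed with
# OpenSSL's c_rehash tool (files named <subject-hash>.<n>), per the NOTE above.
ctx.load_verify_locations(capath="/etc/pki/tls/certs")
with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s:
    s.connect(("example.org", 443))
    cert = s.getpeercert()   # non-empty dict once verification succeeds
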
++ with support.transient_internet("svn.python.org"): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ ctx.verify_mode = ssl.CERT_REQUIRED ++ ctx.load_verify_locations(capath=CAPATH) ++ s = ctx.wrap_socket(socket.socket(socket.AF_INET)) ++ s.connect(("svn.python.org", 443)) ++ try: ++ cert = s.getpeercert() ++ self.assertTrue(cert) ++ finally: ++ s.close() ++ # Same with a bytes `capath` argument ++ ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ ctx.verify_mode = ssl.CERT_REQUIRED ++ ctx.load_verify_locations(capath=BYTES_CAPATH) ++ s = ctx.wrap_socket(socket.socket(socket.AF_INET)) ++ s.connect(("svn.python.org", 443)) ++ try: ++ cert = s.getpeercert() ++ self.assertTrue(cert) ++ finally: ++ s.close() ++ ++ def test_connect_cadata(self): ++ with open(CAFILE_CACERT) as f: ++ pem = f.read().decode('ascii') ++ der = ssl.PEM_cert_to_DER_cert(pem) ++ with support.transient_internet("svn.python.org"): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ ctx.verify_mode = ssl.CERT_REQUIRED ++ ctx.load_verify_locations(cadata=pem) ++ with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: ++ s.connect(("svn.python.org", 443)) ++ cert = s.getpeercert() ++ self.assertTrue(cert) ++ ++ # same with DER ++ ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ ctx.verify_mode = ssl.CERT_REQUIRED ++ ctx.load_verify_locations(cadata=der) ++ with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: ++ s.connect(("svn.python.org", 443)) ++ cert = s.getpeercert() ++ self.assertTrue(cert) + + @unittest.skipIf(os.name == "nt", "Can't use a socket as a file under Windows") + def test_makefile_close(self): + # Issue #5238: creating a file-like object with makefile() shouldn't + # delay closing the underlying "real socket" (here tested with its + # file descriptor, hence skipping the test under Windows). +- with test_support.transient_internet("svn.python.org"): ++ with support.transient_internet("svn.python.org"): + ss = ssl.wrap_socket(socket.socket(socket.AF_INET)) + ss.connect(("svn.python.org", 443)) + fd = ss.fileno() +@@ -350,7 +1368,7 @@ class NetworkedTests(unittest.TestCase): + self.assertEqual(e.exception.errno, errno.EBADF) + + def test_non_blocking_handshake(self): +- with test_support.transient_internet("svn.python.org"): ++ with support.transient_internet("svn.python.org"): + s = socket.socket(socket.AF_INET) + s.connect(("svn.python.org", 443)) + s.setblocking(False) +@@ -363,36 +1381,57 @@ class NetworkedTests(unittest.TestCase): + count += 1 + s.do_handshake() + break +- except ssl.SSLError, err: +- if err.args[0] == ssl.SSL_ERROR_WANT_READ: +- select.select([s], [], []) +- elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE: +- select.select([], [s], []) +- else: +- raise ++ except ssl.SSLWantReadError: ++ select.select([s], [], []) ++ except ssl.SSLWantWriteError: ++ select.select([], [s], []) + s.close() +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\nNeeded %d calls to do_handshake() to establish session.\n" % count) + + def test_get_server_certificate(self): +- with test_support.transient_internet("svn.python.org"): +- pem = ssl.get_server_certificate(("svn.python.org", 443)) +- if not pem: +- self.fail("No server certificate on svn.python.org:443!") ++ def _test_get_server_certificate(host, port, cert=None): ++ with support.transient_internet(host): ++ pem = ssl.get_server_certificate((host, port)) ++ if not pem: ++ self.fail("No server certificate on %s:%s!" 
% (host, port)) + +- try: +- pem = ssl.get_server_certificate(("svn.python.org", 443), ca_certs=CERTFILE) +- except ssl.SSLError: +- #should fail +- pass +- else: +- self.fail("Got server certificate %s for svn.python.org!" % pem) ++ try: ++ pem = ssl.get_server_certificate((host, port), ++ ca_certs=CERTFILE) ++ except ssl.SSLError as x: ++ #should fail ++ if support.verbose: ++ sys.stdout.write("%s\n" % x) ++ else: ++ self.fail("Got server certificate %s for %s:%s!" % (pem, host, port)) ++ pem = ssl.get_server_certificate((host, port), ++ ca_certs=cert) ++ if not pem: ++ self.fail("No server certificate on %s:%s!" % (host, port)) ++ if support.verbose: ++ sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port ,pem)) ++ ++ _test_get_server_certificate('svn.python.org', 443, SVN_PYTHON_ORG_ROOT_CERT) ++ if support.IPV6_ENABLED: ++ _test_get_server_certificate('ipv6.google.com', 443) ++ ++ def test_ciphers(self): ++ remote = ("svn.python.org", 443) ++ with support.transient_internet(remote[0]): ++ with closing(ssl.wrap_socket(socket.socket(socket.AF_INET), ++ cert_reqs=ssl.CERT_NONE, ciphers="ALL")) as s: ++ s.connect(remote) ++ with closing(ssl.wrap_socket(socket.socket(socket.AF_INET), ++ cert_reqs=ssl.CERT_NONE, ciphers="DEFAULT")) as s: ++ s.connect(remote) ++ # Error checking can happen at instantiation or when connecting ++ with self.assertRaisesRegexp(ssl.SSLError, "No cipher can be selected"): ++ with closing(socket.socket(socket.AF_INET)) as sock: ++ s = ssl.wrap_socket(sock, ++ cert_reqs=ssl.CERT_NONE, ciphers="^$:,;?*'dorothyx") ++ s.connect(remote) + +- pem = ssl.get_server_certificate(("svn.python.org", 443), ca_certs=SVN_PYTHON_ORG_ROOT_CERT) +- if not pem: +- self.fail("No server certificate on svn.python.org:443!") +- if test_support.verbose: +- sys.stdout.write("\nVerified certificate for svn.python.org:443 is\n%s\n" % pem) + + def test_algorithms(self): + # Issue #8484: all algorithms should be available when verifying a +@@ -400,17 +1439,21 @@ class NetworkedTests(unittest.TestCase): + # SHA256 was added in OpenSSL 0.9.8 + if ssl.OPENSSL_VERSION_INFO < (0, 9, 8, 0, 15): + self.skipTest("SHA256 not available on %r" % ssl.OPENSSL_VERSION) +- self.skipTest("remote host needs SNI, only available on Python 3.2+") +- # NOTE: https://sha2.hboeck.de is another possible test host ++ # sha256.tbs-internet.com needs SNI to use the correct certificate ++ if not ssl.HAS_SNI: ++ self.skipTest("SNI needed for this test") ++ # https://sha2.hboeck.de/ was used until 2011-01-08 (no route to host) + remote = ("sha256.tbs-internet.com", 443) + sha256_cert = os.path.join(os.path.dirname(__file__), "sha256.pem") +- with test_support.transient_internet("sha256.tbs-internet.com"): +- s = ssl.wrap_socket(socket.socket(socket.AF_INET), +- cert_reqs=ssl.CERT_REQUIRED, +- ca_certs=sha256_cert,) ++ with support.transient_internet("sha256.tbs-internet.com"): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx.verify_mode = ssl.CERT_REQUIRED ++ ctx.load_verify_locations(sha256_cert) ++ s = ctx.wrap_socket(socket.socket(socket.AF_INET), ++ server_hostname="sha256.tbs-internet.com") + try: + s.connect(remote) +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\nCipher with %r is %r\n" % + (remote, s.cipher())) + sys.stdout.write("Certificate is:\n%s\n" % +@@ -418,6 +1461,36 @@ class NetworkedTests(unittest.TestCase): + finally: + s.close() + ++ def test_get_ca_certs_capath(self): ++ # capath certs are loaded on request ++ with 
support.transient_internet("svn.python.org"): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ ctx.verify_mode = ssl.CERT_REQUIRED ++ ctx.load_verify_locations(capath=CAPATH) ++ self.assertEqual(ctx.get_ca_certs(), []) ++ s = ctx.wrap_socket(socket.socket(socket.AF_INET)) ++ s.connect(("svn.python.org", 443)) ++ try: ++ cert = s.getpeercert() ++ self.assertTrue(cert) ++ finally: ++ s.close() ++ self.assertEqual(len(ctx.get_ca_certs()), 1) ++ ++ @needs_sni ++ def test_context_setget(self): ++ # Check that the context of a connected socket can be replaced. ++ with support.transient_internet("svn.python.org"): ++ ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx2 = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ s = socket.socket(socket.AF_INET) ++ with closing(ctx1.wrap_socket(s)) as ss: ++ ss.connect(("svn.python.org", 443)) ++ self.assertIs(ss.context, ctx1) ++ self.assertIs(ss._sslobj.context, ctx1) ++ ss.context = ctx2 ++ self.assertIs(ss.context, ctx2) ++ self.assertIs(ss._sslobj.context, ctx2) + + try: + import threading +@@ -426,6 +1499,8 @@ except ImportError: + else: + _have_threads = True + ++ from test.ssl_servers import make_https_server ++ + class ThreadedEchoServer(threading.Thread): + + class ConnectionHandler(threading.Thread): +@@ -434,48 +1509,51 @@ else: + with and without the SSL wrapper around the socket connection, so + that we can test the STARTTLS functionality.""" + +- def __init__(self, server, connsock): ++ def __init__(self, server, connsock, addr): + self.server = server + self.running = False + self.sock = connsock ++ self.addr = addr + self.sock.setblocking(1) + self.sslconn = None + threading.Thread.__init__(self) + self.daemon = True + +- def show_conn_details(self): +- if self.server.certreqs == ssl.CERT_REQUIRED: +- cert = self.sslconn.getpeercert() +- if test_support.verbose and self.server.chatty: +- sys.stdout.write(" client cert is " + pprint.pformat(cert) + "\n") +- cert_binary = self.sslconn.getpeercert(True) +- if test_support.verbose and self.server.chatty: +- sys.stdout.write(" cert binary is " + str(len(cert_binary)) + " bytes\n") +- cipher = self.sslconn.cipher() +- if test_support.verbose and self.server.chatty: +- sys.stdout.write(" server: connection cipher is now " + str(cipher) + "\n") +- + def wrap_conn(self): + try: +- self.sslconn = ssl.wrap_socket(self.sock, server_side=True, +- certfile=self.server.certificate, +- ssl_version=self.server.protocol, +- ca_certs=self.server.cacerts, +- cert_reqs=self.server.certreqs, +- ciphers=self.server.ciphers) +- except ssl.SSLError as e: ++ self.sslconn = self.server.context.wrap_socket( ++ self.sock, server_side=True) ++ self.server.selected_protocols.append(self.sslconn.selected_npn_protocol()) ++ except socket.error as e: ++ # We treat ConnectionResetError as though it were an ++ # SSLError - OpenSSL on Ubuntu abruptly closes the ++ # connection when asked to use an unsupported protocol. ++ # + # XXX Various errors can have happened here, for example + # a mismatching protocol version, an invalid certificate, + # or a low-level bug. This should be made more discriminating. 
++ if not isinstance(e, ssl.SSLError) and e.errno != errno.ECONNRESET: ++ raise + self.server.conn_errors.append(e) + if self.server.chatty: +- handle_error("\n server: bad connection attempt from " + +- str(self.sock.getpeername()) + ":\n") +- self.close() ++ handle_error("\n server: bad connection attempt from " + repr(self.addr) + ":\n") + self.running = False + self.server.stop() ++ self.close() + return False + else: ++ if self.server.context.verify_mode == ssl.CERT_REQUIRED: ++ cert = self.sslconn.getpeercert() ++ if support.verbose and self.server.chatty: ++ sys.stdout.write(" client cert is " + pprint.pformat(cert) + "\n") ++ cert_binary = self.sslconn.getpeercert(True) ++ if support.verbose and self.server.chatty: ++ sys.stdout.write(" cert binary is " + str(len(cert_binary)) + " bytes\n") ++ cipher = self.sslconn.cipher() ++ if support.verbose and self.server.chatty: ++ sys.stdout.write(" server: connection cipher is now " + str(cipher) + "\n") ++ sys.stdout.write(" server: selected protocol is now " ++ + str(self.sslconn.selected_npn_protocol()) + "\n") + return True + + def read(self): +@@ -494,48 +1572,53 @@ else: + if self.sslconn: + self.sslconn.close() + else: +- self.sock._sock.close() ++ self.sock.close() + + def run(self): + self.running = True + if not self.server.starttls_server: +- if isinstance(self.sock, ssl.SSLSocket): +- self.sslconn = self.sock +- elif not self.wrap_conn(): ++ if not self.wrap_conn(): + return +- self.show_conn_details() + while self.running: + try: + msg = self.read() +- if not msg: ++ stripped = msg.strip() ++ if not stripped: + # eof, so quit this handler + self.running = False + self.close() +- elif msg.strip() == 'over': +- if test_support.verbose and self.server.connectionchatty: ++ elif stripped == b'over': ++ if support.verbose and self.server.connectionchatty: + sys.stdout.write(" server: client closed connection\n") + self.close() + return +- elif self.server.starttls_server and msg.strip() == 'STARTTLS': +- if test_support.verbose and self.server.connectionchatty: ++ elif (self.server.starttls_server and ++ stripped == b'STARTTLS'): ++ if support.verbose and self.server.connectionchatty: + sys.stdout.write(" server: read STARTTLS from client, sending OK...\n") +- self.write("OK\n") ++ self.write(b"OK\n") + if not self.wrap_conn(): + return +- elif self.server.starttls_server and self.sslconn and msg.strip() == 'ENDTLS': +- if test_support.verbose and self.server.connectionchatty: ++ elif (self.server.starttls_server and self.sslconn ++ and stripped == b'ENDTLS'): ++ if support.verbose and self.server.connectionchatty: + sys.stdout.write(" server: read ENDTLS from client, sending OK...\n") +- self.write("OK\n") +- self.sslconn.unwrap() ++ self.write(b"OK\n") ++ self.sock = self.sslconn.unwrap() + self.sslconn = None +- if test_support.verbose and self.server.connectionchatty: ++ if support.verbose and self.server.connectionchatty: + sys.stdout.write(" server: connection is now unencrypted...\n") ++ elif stripped == b'CB tls-unique': ++ if support.verbose and self.server.connectionchatty: ++ sys.stdout.write(" server: read CB tls-unique from client, sending our CB data...\n") ++ data = self.sslconn.get_channel_binding("tls-unique") ++ self.write(repr(data).encode("us-ascii") + b"\n") + else: +- if (test_support.verbose and ++ if (support.verbose and + self.server.connectionchatty): + ctype = (self.sslconn and "encrypted") or "unencrypted" +- sys.stdout.write(" server: read %s (%s), sending back %s (%s)...\n" +- % (repr(msg), ctype, 
repr(msg.lower()), ctype)) ++ sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n" ++ % (msg, ctype, msg.lower(), ctype)) + self.write(msg.lower()) + except ssl.SSLError: + if self.server.chatty: +@@ -546,36 +1629,34 @@ else: + # harness, we want to stop the server + self.server.stop() + +- def __init__(self, certificate, ssl_version=None, ++ def __init__(self, certificate=None, ssl_version=None, + certreqs=None, cacerts=None, + chatty=True, connectionchatty=False, starttls_server=False, +- wrap_accepting_socket=False, ciphers=None): +- +- if ssl_version is None: +- ssl_version = ssl.PROTOCOL_TLSv1 +- if certreqs is None: +- certreqs = ssl.CERT_NONE +- self.certificate = certificate +- self.protocol = ssl_version +- self.certreqs = certreqs +- self.cacerts = cacerts +- self.ciphers = ciphers ++ npn_protocols=None, ciphers=None, context=None): ++ if context: ++ self.context = context ++ else: ++ self.context = ssl.SSLContext(ssl_version ++ if ssl_version is not None ++ else ssl.PROTOCOL_TLSv1) ++ self.context.verify_mode = (certreqs if certreqs is not None ++ else ssl.CERT_NONE) ++ if cacerts: ++ self.context.load_verify_locations(cacerts) ++ if certificate: ++ self.context.load_cert_chain(certificate) ++ if npn_protocols: ++ self.context.set_npn_protocols(npn_protocols) ++ if ciphers: ++ self.context.set_ciphers(ciphers) + self.chatty = chatty + self.connectionchatty = connectionchatty + self.starttls_server = starttls_server + self.sock = socket.socket() ++ self.port = support.bind_port(self.sock) + self.flag = None +- if wrap_accepting_socket: +- self.sock = ssl.wrap_socket(self.sock, server_side=True, +- certfile=self.certificate, +- cert_reqs = self.certreqs, +- ca_certs = self.cacerts, +- ssl_version = self.protocol, +- ciphers = self.ciphers) +- if test_support.verbose and self.chatty: +- sys.stdout.write(' server: wrapped server socket as %s\n' % str(self.sock)) +- self.port = test_support.bind_port(self.sock) + self.active = False ++ self.selected_protocols = [] + self.conn_errors = [] + threading.Thread.__init__(self) + self.daemon = True +@@ -603,10 +1684,10 @@ else: + while self.active: + try: + newconn, connaddr = self.sock.accept() +- if test_support.verbose and self.chatty: ++ if support.verbose and self.chatty: + sys.stdout.write(' server: new connection from ' +- + str(connaddr) + '\n') +- handler = self.ConnectionHandler(self, newconn) ++ + repr(connaddr) + '\n') ++ handler = self.ConnectionHandler(self, newconn, connaddr) + handler.start() + handler.join() + except socket.timeout: +@@ -625,11 +1706,12 @@ else: + class ConnectionHandler(asyncore.dispatcher_with_send): + + def __init__(self, conn, certfile): +- asyncore.dispatcher_with_send.__init__(self, conn) + self.socket = ssl.wrap_socket(conn, server_side=True, + certfile=certfile, + do_handshake_on_connect=False) ++ asyncore.dispatcher_with_send.__init__(self, self.socket) + self._ssl_accepting = True ++ self._do_ssl_handshake() + + def readable(self): + if isinstance(self.socket, ssl.SSLSocket): +@@ -640,12 +1722,11 @@ else: + def _do_ssl_handshake(self): + try: + self.socket.do_handshake() +- except ssl.SSLError, err: +- if err.args[0] in (ssl.SSL_ERROR_WANT_READ, +- ssl.SSL_ERROR_WANT_WRITE): +- return +- elif err.args[0] == ssl.SSL_ERROR_EOF: +- return self.handle_close() ++ except (ssl.SSLWantReadError, ssl.SSLWantWriteError): ++ return ++ except ssl.SSLEOFError: ++ return self.handle_close() ++ except ssl.SSLError: + raise + except socket.error, err: + if err.args[0] == errno.ECONNABORTED: +@@ 
-658,12 +1739,16 @@ else: + self._do_ssl_handshake() + else: + data = self.recv(1024) +- if data and data.strip() != 'over': ++ if support.verbose: ++ sys.stdout.write(" server: read %s from client\n" % repr(data)) ++ if not data: ++ self.close() ++ else: + self.send(data.lower()) + + def handle_close(self): + self.close() +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write(" server: closed connection %s\n" % self.socket) + + def handle_error(self): +@@ -671,14 +1756,14 @@ else: + + def __init__(self, certfile): + self.certfile = certfile +- asyncore.dispatcher.__init__(self) +- self.create_socket(socket.AF_INET, socket.SOCK_STREAM) +- self.port = test_support.bind_port(self.socket) ++ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) ++ self.port = support.bind_port(sock, '') ++ asyncore.dispatcher.__init__(self, sock) + self.listen(5) + + def handle_accept(self): + sock_obj, addr = self.accept() +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write(" server: new connection from %s:%s\n" %addr) + self.ConnectionHandler(sock_obj, self.certfile) + +@@ -702,13 +1787,13 @@ else: + return self + + def __exit__(self, *args): +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write(" cleanup: stopping server.\n") + self.stop() +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write(" cleanup: joining server thread.\n") + self.join() +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write(" cleanup: successfully joined.\n") + + def start(self, flag=None): +@@ -720,103 +1805,15 @@ else: + if self.flag: + self.flag.set() + while self.active: +- asyncore.loop(0.05) ++ try: ++ asyncore.loop(1) ++ except: ++ pass + + def stop(self): + self.active = False + self.server.close() + +- class SocketServerHTTPSServer(threading.Thread): +- +- class HTTPSServer(HTTPServer): +- +- def __init__(self, server_address, RequestHandlerClass, certfile): +- HTTPServer.__init__(self, server_address, RequestHandlerClass) +- # we assume the certfile contains both private key and certificate +- self.certfile = certfile +- self.allow_reuse_address = True +- +- def __str__(self): +- return ('<%s %s:%s>' % +- (self.__class__.__name__, +- self.server_name, +- self.server_port)) +- +- def get_request(self): +- # override this to wrap socket with SSL +- sock, addr = self.socket.accept() +- sslconn = ssl.wrap_socket(sock, server_side=True, +- certfile=self.certfile) +- return sslconn, addr +- +- class RootedHTTPRequestHandler(SimpleHTTPRequestHandler): +- # need to override translate_path to get a known root, +- # instead of using os.curdir, since the test could be +- # run from anywhere +- +- server_version = "TestHTTPS/1.0" +- +- root = None +- +- def translate_path(self, path): +- """Translate a /-separated PATH to the local filename syntax. +- +- Components that mean special things to the local file system +- (e.g. drive or directory names) are ignored. (XXX They should +- probably be diagnosed.) 
+- +- """ +- # abandon query parameters +- path = urlparse.urlparse(path)[2] +- path = os.path.normpath(urllib.unquote(path)) +- words = path.split('/') +- words = filter(None, words) +- path = self.root +- for word in words: +- drive, word = os.path.splitdrive(word) +- head, word = os.path.split(word) +- if word in self.root: continue +- path = os.path.join(path, word) +- return path +- +- def log_message(self, format, *args): +- +- # we override this to suppress logging unless "verbose" +- +- if test_support.verbose: +- sys.stdout.write(" server (%s:%d %s):\n [%s] %s\n" % +- (self.server.server_address, +- self.server.server_port, +- self.request.cipher(), +- self.log_date_time_string(), +- format%args)) +- +- +- def __init__(self, certfile): +- self.flag = None +- self.RootedHTTPRequestHandler.root = os.path.split(CERTFILE)[0] +- self.server = self.HTTPSServer( +- (HOST, 0), self.RootedHTTPRequestHandler, certfile) +- self.port = self.server.server_port +- threading.Thread.__init__(self) +- self.daemon = True +- +- def __str__(self): +- return "<%s %s>" % (self.__class__.__name__, self.server) +- +- def start(self, flag=None): +- self.flag = flag +- threading.Thread.start(self) +- +- def run(self): +- if self.flag: +- self.flag.set() +- self.server.serve_forever(0.05) +- +- def stop(self): +- self.server.shutdown() +- +- + def bad_cert_test(certfile): + """ + Launch a server with CERT_REQUIRED, and check that trying to +@@ -824,74 +1821,74 @@ else: + """ + server = ThreadedEchoServer(CERTFILE, + certreqs=ssl.CERT_REQUIRED, +- cacerts=CERTFILE, chatty=False) ++ cacerts=CERTFILE, chatty=False, ++ connectionchatty=False) + with server: + try: +- s = ssl.wrap_socket(socket.socket(), +- certfile=certfile, +- ssl_version=ssl.PROTOCOL_TLSv1) +- s.connect((HOST, server.port)) +- except ssl.SSLError, x: +- if test_support.verbose: +- sys.stdout.write("\nSSLError is %s\n" % x[1]) +- except socket.error, x: +- if test_support.verbose: +- sys.stdout.write("\nsocket.error is %s\n" % x[1]) ++ with closing(socket.socket()) as sock: ++ s = ssl.wrap_socket(sock, ++ certfile=certfile, ++ ssl_version=ssl.PROTOCOL_TLSv1) ++ s.connect((HOST, server.port)) ++ except ssl.SSLError as x: ++ if support.verbose: ++ sys.stdout.write("\nSSLError is %s\n" % x.args[1]) ++ except OSError as x: ++ if support.verbose: ++ sys.stdout.write("\nOSError is %s\n" % x.args[1]) ++ except OSError as x: ++ if x.errno != errno.ENOENT: ++ raise ++ if support.verbose: ++ sys.stdout.write("\OSError is %s\n" % str(x)) + else: + raise AssertionError("Use of invalid cert should have failed!") + +- def server_params_test(certfile, protocol, certreqs, cacertsfile, +- client_certfile, client_protocol=None, indata="FOO\n", +- ciphers=None, chatty=True, connectionchatty=False, +- wrap_accepting_socket=False): ++ def server_params_test(client_context, server_context, indata=b"FOO\n", ++ chatty=True, connectionchatty=False, sni_name=None): + """ + Launch a server, connect a client to it and try various reads + and writes. 
+ """ +- server = ThreadedEchoServer(certfile, +- certreqs=certreqs, +- ssl_version=protocol, +- cacerts=cacertsfile, +- ciphers=ciphers, ++ stats = {} ++ server = ThreadedEchoServer(context=server_context, + chatty=chatty, +- connectionchatty=connectionchatty, +- wrap_accepting_socket=wrap_accepting_socket) ++ connectionchatty=False) + with server: +- # try to connect +- if client_protocol is None: +- client_protocol = protocol +- s = ssl.wrap_socket(socket.socket(), +- certfile=client_certfile, +- ca_certs=cacertsfile, +- ciphers=ciphers, +- cert_reqs=certreqs, +- ssl_version=client_protocol) +- s.connect((HOST, server.port)) +- for arg in [indata, bytearray(indata), memoryview(indata)]: +- if connectionchatty: +- if test_support.verbose: +- sys.stdout.write( +- " client: sending %s...\n" % (repr(arg))) +- s.write(arg) +- outdata = s.read() ++ with closing(client_context.wrap_socket(socket.socket(), ++ server_hostname=sni_name)) as s: ++ s.connect((HOST, server.port)) ++ for arg in [indata, bytearray(indata), memoryview(indata)]: ++ if connectionchatty: ++ if support.verbose: ++ sys.stdout.write( ++ " client: sending %r...\n" % indata) ++ s.write(arg) ++ outdata = s.read() ++ if connectionchatty: ++ if support.verbose: ++ sys.stdout.write(" client: read %r\n" % outdata) ++ if outdata != indata.lower(): ++ raise AssertionError( ++ "bad data <<%r>> (%d) received; expected <<%r>> (%d)\n" ++ % (outdata[:20], len(outdata), ++ indata[:20].lower(), len(indata))) ++ s.write(b"over\n") + if connectionchatty: +- if test_support.verbose: +- sys.stdout.write(" client: read %s\n" % repr(outdata)) +- if outdata != indata.lower(): +- raise AssertionError( +- "bad data <<%s>> (%d) received; expected <<%s>> (%d)\n" +- % (outdata[:min(len(outdata),20)], len(outdata), +- indata[:min(len(indata),20)].lower(), len(indata))) +- s.write("over\n") +- if connectionchatty: +- if test_support.verbose: +- sys.stdout.write(" client: closing connection.\n") +- s.close() ++ if support.verbose: ++ sys.stdout.write(" client: closing connection.\n") ++ stats.update({ ++ 'compression': s.compression(), ++ 'cipher': s.cipher(), ++ 'peercert': s.getpeercert(), ++ 'client_npn_protocol': s.selected_npn_protocol() ++ }) ++ s.close() ++ stats['server_npn_protocols'] = server.selected_protocols ++ return stats + +- def try_protocol_combo(server_protocol, +- client_protocol, +- expect_success, +- certsreqs=None): ++ def try_protocol_combo(server_protocol, client_protocol, expect_success, ++ certsreqs=None, server_options=0, client_options=0): + if certsreqs is None: + certsreqs = ssl.CERT_NONE + certtype = { +@@ -899,19 +1896,30 @@ else: + ssl.CERT_OPTIONAL: "CERT_OPTIONAL", + ssl.CERT_REQUIRED: "CERT_REQUIRED", + }[certsreqs] +- if test_support.verbose: ++ if support.verbose: + formatstr = (expect_success and " %s->%s %s\n") or " {%s->%s} %s\n" + sys.stdout.write(formatstr % + (ssl.get_protocol_name(client_protocol), + ssl.get_protocol_name(server_protocol), + certtype)) ++ client_context = ssl.SSLContext(client_protocol) ++ client_context.options |= client_options ++ server_context = ssl.SSLContext(server_protocol) ++ server_context.options |= server_options ++ ++ # NOTE: we must enable "ALL" ciphers on the client, otherwise an ++ # SSLv23 client will send an SSLv3 hello (rather than SSLv2) ++ # starting from OpenSSL 1.0.0 (see issue #8322). 
++ if client_context.protocol == ssl.PROTOCOL_SSLv23: ++ client_context.set_ciphers("ALL") ++ ++ for ctx in (client_context, server_context): ++ ctx.verify_mode = certsreqs ++ ctx.load_cert_chain(CERTFILE) ++ ctx.load_verify_locations(CERTFILE) + try: +- # NOTE: we must enable "ALL" ciphers, otherwise an SSLv23 client +- # will send an SSLv3 hello (rather than SSLv2) starting from +- # OpenSSL 1.0.0 (see issue #8322). +- server_params_test(CERTFILE, server_protocol, certsreqs, +- CERTFILE, CERTFILE, client_protocol, +- ciphers="ALL", chatty=False) ++ server_params_test(client_context, server_context, ++ chatty=False, connectionchatty=False) + # Protocol mismatch can result in either an SSLError, or a + # "Connection reset by peer" error. + except ssl.SSLError: +@@ -930,75 +1938,38 @@ else: + + class ThreadedTests(unittest.TestCase): + +- def test_rude_shutdown(self): +- """A brutal shutdown of an SSL server should raise an IOError +- in the client when attempting handshake. +- """ +- listener_ready = threading.Event() +- listener_gone = threading.Event() +- +- s = socket.socket() +- port = test_support.bind_port(s, HOST) +- +- # `listener` runs in a thread. It sits in an accept() until +- # the main thread connects. Then it rudely closes the socket, +- # and sets Event `listener_gone` to let the main thread know +- # the socket is gone. +- def listener(): +- s.listen(5) +- listener_ready.set() +- s.accept() +- s.close() +- listener_gone.set() +- +- def connector(): +- listener_ready.wait() +- c = socket.socket() +- c.connect((HOST, port)) +- listener_gone.wait() +- try: +- ssl_sock = ssl.wrap_socket(c) +- except IOError: +- pass +- else: +- self.fail('connecting to closed SSL socket should have failed') +- +- t = threading.Thread(target=listener) +- t.start() +- try: +- connector() +- finally: +- t.join() +- + @skip_if_broken_ubuntu_ssl + def test_echo(self): + """Basic test of an SSL client connecting to a server""" +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n") +- server_params_test(CERTFILE, ssl.PROTOCOL_TLSv1, ssl.CERT_NONE, +- CERTFILE, CERTFILE, ssl.PROTOCOL_TLSv1, +- chatty=True, connectionchatty=True) ++ for protocol in PROTOCOLS: ++ context = ssl.SSLContext(protocol) ++ context.load_cert_chain(CERTFILE) ++ server_params_test(context, context, ++ chatty=True, connectionchatty=True) + + def test_getpeercert(self): +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n") +- s2 = socket.socket() +- server = ThreadedEchoServer(CERTFILE, +- certreqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_SSLv23, +- cacerts=CERTFILE, +- chatty=False) ++ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ context.verify_mode = ssl.CERT_REQUIRED ++ context.load_verify_locations(CERTFILE) ++ context.load_cert_chain(CERTFILE) ++ server = ThreadedEchoServer(context=context, chatty=False) + with server: +- s = ssl.wrap_socket(socket.socket(), +- certfile=CERTFILE, +- ca_certs=CERTFILE, +- cert_reqs=ssl.CERT_REQUIRED, +- ssl_version=ssl.PROTOCOL_SSLv23) ++ s = context.wrap_socket(socket.socket(), ++ do_handshake_on_connect=False) + s.connect((HOST, server.port)) ++ # getpeercert() raise ValueError while the handshake isn't ++ # done. 
++ with self.assertRaises(ValueError): ++ s.getpeercert() ++ s.do_handshake() + cert = s.getpeercert() + self.assertTrue(cert, "Can't get peer certificate.") + cipher = s.cipher() +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write(pprint.pformat(cert) + '\n') + sys.stdout.write("Connection cipher is " + str(cipher) + '.\n') + if 'subject' not in cert: +@@ -1009,8 +1980,94 @@ else: + self.fail( + "Missing or invalid 'organizationName' field in certificate subject; " + "should be 'Python Software Foundation'.") ++ self.assertIn('notBefore', cert) ++ self.assertIn('notAfter', cert) ++ before = ssl.cert_time_to_seconds(cert['notBefore']) ++ after = ssl.cert_time_to_seconds(cert['notAfter']) ++ self.assertLess(before, after) + s.close() + ++ @unittest.skipUnless(have_verify_flags(), ++ "verify_flags need OpenSSL > 0.9.8") ++ def test_crl_check(self): ++ if support.verbose: ++ sys.stdout.write("\n") ++ ++ server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ server_context.load_cert_chain(SIGNED_CERTFILE) ++ ++ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context.verify_mode = ssl.CERT_REQUIRED ++ context.load_verify_locations(SIGNING_CA) ++ self.assertEqual(context.verify_flags, ssl.VERIFY_DEFAULT) ++ ++ # VERIFY_DEFAULT should pass ++ server = ThreadedEchoServer(context=server_context, chatty=True) ++ with server: ++ with closing(context.wrap_socket(socket.socket())) as s: ++ s.connect((HOST, server.port)) ++ cert = s.getpeercert() ++ self.assertTrue(cert, "Can't get peer certificate.") ++ ++ # VERIFY_CRL_CHECK_LEAF without a loaded CRL file fails ++ context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF ++ ++ server = ThreadedEchoServer(context=server_context, chatty=True) ++ with server: ++ with closing(context.wrap_socket(socket.socket())) as s: ++ with self.assertRaisesRegexp(ssl.SSLError, ++ "certificate verify failed"): ++ s.connect((HOST, server.port)) ++ ++ # now load a CRL file. The CRL file is signed by the CA. 
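
# A minimal sketch (annotation, not part of the patch) of the verify_flags/CRL
# combination driven by test_crl_check, written as plain application code.
# File names and the address are placeholders, and it assumes an OpenSSL new
# enough to expose verify_flags (see the have_verify_flags() guard above).
import socket
import ssl
from contextlib import closing

ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.load_verify_locations(cafile="ca.pem")
# Ask OpenSSL to check the leaf certificate against a CRL; until a CRL is
# also loaded via load_verify_locations(), the handshake fails with
# "certificate verify failed".
ctx.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF
ctx.load_verify_locations(cafile="crl.pem")   # a CRL signed by the same CA
with closing(ctx.wrap_socket(socket.socket())) as s:
    s.connect(("127.0.0.1", 4433))
    cert = s.getpeercert()
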
++ context.load_verify_locations(CRLFILE) ++ ++ server = ThreadedEchoServer(context=server_context, chatty=True) ++ with server: ++ with closing(context.wrap_socket(socket.socket())) as s: ++ s.connect((HOST, server.port)) ++ cert = s.getpeercert() ++ self.assertTrue(cert, "Can't get peer certificate.") ++ ++ @needs_sni ++ def test_check_hostname(self): ++ if support.verbose: ++ sys.stdout.write("\n") ++ ++ server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ server_context.load_cert_chain(SIGNED_CERTFILE) ++ ++ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context.verify_mode = ssl.CERT_REQUIRED ++ context.check_hostname = True ++ context.load_verify_locations(SIGNING_CA) ++ ++ # correct hostname should verify ++ server = ThreadedEchoServer(context=server_context, chatty=True) ++ with server: ++ with closing(context.wrap_socket(socket.socket(), ++ server_hostname="localhost")) as s: ++ s.connect((HOST, server.port)) ++ cert = s.getpeercert() ++ self.assertTrue(cert, "Can't get peer certificate.") ++ ++ # incorrect hostname should raise an exception ++ server = ThreadedEchoServer(context=server_context, chatty=True) ++ with server: ++ with closing(context.wrap_socket(socket.socket(), ++ server_hostname="invalid")) as s: ++ with self.assertRaisesRegexp(ssl.CertificateError, ++ "hostname 'invalid' doesn't match u?'localhost'"): ++ s.connect((HOST, server.port)) ++ ++ # missing server_hostname arg should cause an exception, too ++ server = ThreadedEchoServer(context=server_context, chatty=True) ++ with server: ++ with closing(socket.socket()) as s: ++ with self.assertRaisesRegexp(ValueError, ++ "check_hostname requires server_hostname"): ++ context.wrap_socket(s) ++ + def test_empty_cert(self): + """Connecting with an empty cert file""" + bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir, +@@ -1027,26 +2084,83 @@ else: + """Connecting with a badly formatted key (syntax error)""" + bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir, + "badkey.pem")) ++ def test_rude_shutdown(self): ++ """A brutal shutdown of an SSL server should raise an OSError ++ in the client when attempting handshake. ++ """ ++ listener_ready = threading.Event() ++ listener_gone = threading.Event() + +- @skip_if_broken_ubuntu_ssl ++ s = socket.socket() ++ port = support.bind_port(s, HOST) ++ ++ # `listener` runs in a thread. It sits in an accept() until ++ # the main thread connects. Then it rudely closes the socket, ++ # and sets Event `listener_gone` to let the main thread know ++ # the socket is gone. 
++ def listener(): ++ s.listen(5) ++ listener_ready.set() ++ newsock, addr = s.accept() ++ newsock.close() ++ s.close() ++ listener_gone.set() ++ ++ def connector(): ++ listener_ready.wait() ++ with closing(socket.socket()) as c: ++ c.connect((HOST, port)) ++ listener_gone.wait() ++ try: ++ ssl_sock = ssl.wrap_socket(c) ++ except ssl.SSLError: ++ pass ++ else: ++ self.fail('connecting to closed SSL socket should have failed') ++ ++ t = threading.Thread(target=listener) ++ t.start() ++ try: ++ connector() ++ finally: ++ t.join() ++ ++ @unittest.skipUnless(hasattr(ssl, 'PROTOCOL_SSLv2'), ++ "OpenSSL is compiled without SSLv2 support") + def test_protocol_sslv2(self): + """Connecting to an SSLv2 server with various client options""" +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n") +- if not hasattr(ssl, 'PROTOCOL_SSLv2'): +- self.skipTest("PROTOCOL_SSLv2 needed") + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True) + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_OPTIONAL) + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_REQUIRED) + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True) + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv3, False) + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLSv1, False) ++ # SSLv23 client with specific SSL options ++ if no_sslv2_implies_sslv3_hello(): ++ # No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs ++ try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False, ++ client_options=ssl.OP_NO_SSLv2) ++ try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False, ++ client_options=ssl.OP_NO_SSLv3) ++ try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False, ++ client_options=ssl.OP_NO_TLSv1) + + @skip_if_broken_ubuntu_ssl + def test_protocol_sslv23(self): + """Connecting to an SSLv23 server with various client options""" +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n") ++ if hasattr(ssl, 'PROTOCOL_SSLv2'): ++ try: ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv2, True) ++ except socket.error as x: ++ # this fails on some older versions of OpenSSL (0.9.7l, for instance) ++ if support.verbose: ++ sys.stdout.write( ++ " SSL2 client to SSL23 server test unexpectedly failed:\n %s\n" ++ % str(x)) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True) +@@ -1059,22 +2173,38 @@ else: + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED) + ++ # Server with specific SSL options ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ++ server_options=ssl.OP_NO_SSLv3) ++ # Will choose TLSv1 ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ++ server_options=ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3) ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, False, ++ server_options=ssl.OP_NO_TLSv1) ++ ++ + @skip_if_broken_ubuntu_ssl + def test_protocol_sslv3(self): + """Connecting to an SSLv3 server with various client options""" +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n") + try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True) + try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL) + 
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED) + if hasattr(ssl, 'PROTOCOL_SSLv2'): + try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False) ++ try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, False, ++ client_options=ssl.OP_NO_SSLv3) + try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False) ++ if no_sslv2_implies_sslv3_hello(): ++ # No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs ++ try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, True, ++ client_options=ssl.OP_NO_SSLv2) + + @skip_if_broken_ubuntu_ssl + def test_protocol_tlsv1(self): + """Connecting to a TLSv1 server with various client options""" +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n") + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True) + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL) +@@ -1082,10 +2212,55 @@ else: + if hasattr(ssl, 'PROTOCOL_SSLv2'): + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False) + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv23, False, ++ client_options=ssl.OP_NO_TLSv1) ++ ++ @skip_if_broken_ubuntu_ssl ++ @unittest.skipUnless(hasattr(ssl, "PROTOCOL_TLSv1_1"), ++ "TLS version 1.1 not supported.") ++ def test_protocol_tlsv1_1(self): ++ """Connecting to a TLSv1.1 server with various client options. ++ Testing against older TLS versions.""" ++ if support.verbose: ++ sys.stdout.write("\n") ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, True) ++ if hasattr(ssl, 'PROTOCOL_SSLv2'): ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv2, False) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv3, False) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv23, False, ++ client_options=ssl.OP_NO_TLSv1_1) ++ ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, True) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1, False) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1, False) ++ ++ ++ @skip_if_broken_ubuntu_ssl ++ @unittest.skipUnless(hasattr(ssl, "PROTOCOL_TLSv1_2"), ++ "TLS version 1.2 not supported.") ++ def test_protocol_tlsv1_2(self): ++ """Connecting to a TLSv1.2 server with various client options. 
++ Testing against older TLS versions.""" ++ if support.verbose: ++ sys.stdout.write("\n") ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_2, True, ++ server_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2, ++ client_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2,) ++ if hasattr(ssl, 'PROTOCOL_SSLv2'): ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv2, False) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv3, False) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv23, False, ++ client_options=ssl.OP_NO_TLSv1_2) ++ ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_2, True) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1, False) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_2, False) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_1, False) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_2, False) + + def test_starttls(self): + """Switching from clear text to encrypted and back again.""" +- msgs = ("msg 1", "MSG 2", "STARTTLS", "MSG 3", "msg 4", "ENDTLS", "msg 5", "msg 6") ++ msgs = (b"msg 1", b"MSG 2", b"STARTTLS", b"MSG 3", b"msg 4", b"ENDTLS", b"msg 5", b"msg 6") + + server = ThreadedEchoServer(CERTFILE, + ssl_version=ssl.PROTOCOL_TLSv1, +@@ -1097,119 +2272,109 @@ else: + s = socket.socket() + s.setblocking(1) + s.connect((HOST, server.port)) +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n") + for indata in msgs: +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write( +- " client: sending %s...\n" % repr(indata)) ++ " client: sending %r...\n" % indata) + if wrapped: + conn.write(indata) + outdata = conn.read() + else: + s.send(indata) + outdata = s.recv(1024) +- if (indata == "STARTTLS" and +- outdata.strip().lower().startswith("ok")): ++ msg = outdata.strip().lower() ++ if indata == b"STARTTLS" and msg.startswith(b"ok"): + # STARTTLS ok, switch to secure mode +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write( +- " client: read %s from server, starting TLS...\n" +- % repr(outdata)) ++ " client: read %r from server, starting TLS...\n" ++ % msg) + conn = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1) + wrapped = True +- elif (indata == "ENDTLS" and +- outdata.strip().lower().startswith("ok")): ++ elif indata == b"ENDTLS" and msg.startswith(b"ok"): + # ENDTLS ok, switch back to clear text +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write( +- " client: read %s from server, ending TLS...\n" +- % repr(outdata)) ++ " client: read %r from server, ending TLS...\n" ++ % msg) + s = conn.unwrap() + wrapped = False + else: +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write( +- " client: read %s from server\n" % repr(outdata)) +- if test_support.verbose: ++ " client: read %r from server\n" % msg) ++ if support.verbose: + sys.stdout.write(" client: closing connection.\n") + if wrapped: +- conn.write("over\n") ++ conn.write(b"over\n") + else: +- s.send("over\n") +- s.close() ++ s.send(b"over\n") ++ if wrapped: ++ conn.close() ++ else: ++ s.close() + + def test_socketserver(self): + """Using a SocketServer to create and manage SSL connections.""" +- server = SocketServerHTTPSServer(CERTFILE) +- flag = threading.Event() +- server.start(flag) +- # wait for it to start +- flag.wait() ++ server = make_https_server(self, certfile=CERTFILE) + # try to connect ++ if support.verbose: ++ sys.stdout.write('\n') ++ with open(CERTFILE, 'rb') as f: ++ d1 = f.read() ++ d2 = '' ++ # now fetch the same data 
from the HTTPS server ++ url = 'https://%s:%d/%s' % ( ++ HOST, server.port, os.path.split(CERTFILE)[1]) ++ f = urllib.urlopen(url) + try: +- if test_support.verbose: +- sys.stdout.write('\n') +- with open(CERTFILE, 'rb') as f: +- d1 = f.read() +- d2 = '' +- # now fetch the same data from the HTTPS server +- url = 'https://127.0.0.1:%d/%s' % ( +- server.port, os.path.split(CERTFILE)[1]) +- with test_support.check_py3k_warnings(): +- f = urllib.urlopen(url) + dlen = f.info().getheader("content-length") + if dlen and (int(dlen) > 0): + d2 = f.read(int(dlen)) +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write( + " client: read %d bytes from remote server '%s'\n" + % (len(d2), server)) +- f.close() +- self.assertEqual(d1, d2) + finally: +- server.stop() +- server.join() +- +- def test_wrapped_accept(self): +- """Check the accept() method on SSL sockets.""" +- if test_support.verbose: +- sys.stdout.write("\n") +- server_params_test(CERTFILE, ssl.PROTOCOL_SSLv23, ssl.CERT_REQUIRED, +- CERTFILE, CERTFILE, ssl.PROTOCOL_SSLv23, +- chatty=True, connectionchatty=True, +- wrap_accepting_socket=True) ++ f.close() ++ self.assertEqual(d1, d2) + + def test_asyncore_server(self): + """Check the example asyncore integration.""" + indata = "TEST MESSAGE of mixed case\n" + +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n") ++ ++ indata = b"FOO\n" + server = AsyncoreEchoServer(CERTFILE) + with server: + s = ssl.wrap_socket(socket.socket()) + s.connect(('127.0.0.1', server.port)) +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write( +- " client: sending %s...\n" % (repr(indata))) ++ " client: sending %r...\n" % indata) + s.write(indata) + outdata = s.read() +- if test_support.verbose: +- sys.stdout.write(" client: read %s\n" % repr(outdata)) ++ if support.verbose: ++ sys.stdout.write(" client: read %r\n" % outdata) + if outdata != indata.lower(): + self.fail( +- "bad data <<%s>> (%d) received; expected <<%s>> (%d)\n" +- % (outdata[:min(len(outdata),20)], len(outdata), +- indata[:min(len(indata),20)].lower(), len(indata))) +- s.write("over\n") +- if test_support.verbose: ++ "bad data <<%r>> (%d) received; expected <<%r>> (%d)\n" ++ % (outdata[:20], len(outdata), ++ indata[:20].lower(), len(indata))) ++ s.write(b"over\n") ++ if support.verbose: + sys.stdout.write(" client: closing connection.\n") + s.close() ++ if support.verbose: ++ sys.stdout.write(" client: connection closed.\n") + + def test_recv_send(self): + """Test recv(), send() and friends.""" +- if test_support.verbose: ++ if support.verbose: + sys.stdout.write("\n") + + server = ThreadedEchoServer(CERTFILE, +@@ -1228,12 +2393,12 @@ else: + s.connect((HOST, server.port)) + # helper methods for standardising recv* method signatures + def _recv_into(): +- b = bytearray("\0"*100) ++ b = bytearray(b"\0"*100) + count = s.recv_into(b) + return b[:count] + + def _recvfrom_into(): +- b = bytearray("\0"*100) ++ b = bytearray(b"\0"*100) + count, addr = s.recvfrom_into(b) + return b[:count] + +@@ -1252,73 +2417,73 @@ else: + data_prefix = u"PREFIX_" + + for meth_name, send_meth, expect_success, args in send_methods: +- indata = data_prefix + meth_name ++ indata = (data_prefix + meth_name).encode('ascii') + try: +- send_meth(indata.encode('ASCII', 'strict'), *args) ++ send_meth(indata, *args) + outdata = s.read() +- outdata = outdata.decode('ASCII', 'strict') + if outdata != indata.lower(): + self.fail( +- "While sending with <<%s>> bad data " +- "<<%r>> (%d) received; " +- "expected <<%r>> (%d)\n" % ( +- 
meth_name, outdata[:20], len(outdata), +- indata[:20], len(indata) ++ "While sending with <<{name:s}>> bad data " ++ "<<{outdata:r}>> ({nout:d}) received; " ++ "expected <<{indata:r}>> ({nin:d})\n".format( ++ name=meth_name, outdata=outdata[:20], ++ nout=len(outdata), ++ indata=indata[:20], nin=len(indata) + ) + ) + except ValueError as e: + if expect_success: + self.fail( +- "Failed to send with method <<%s>>; " +- "expected to succeed.\n" % (meth_name,) ++ "Failed to send with method <<{name:s}>>; " ++ "expected to succeed.\n".format(name=meth_name) + ) + if not str(e).startswith(meth_name): + self.fail( +- "Method <<%s>> failed with unexpected " +- "exception message: %s\n" % ( +- meth_name, e ++ "Method <<{name:s}>> failed with unexpected " ++ "exception message: {exp:s}\n".format( ++ name=meth_name, exp=e + ) + ) + + for meth_name, recv_meth, expect_success, args in recv_methods: +- indata = data_prefix + meth_name ++ indata = (data_prefix + meth_name).encode('ascii') + try: +- s.send(indata.encode('ASCII', 'strict')) ++ s.send(indata) + outdata = recv_meth(*args) +- outdata = outdata.decode('ASCII', 'strict') + if outdata != indata.lower(): + self.fail( +- "While receiving with <<%s>> bad data " +- "<<%r>> (%d) received; " +- "expected <<%r>> (%d)\n" % ( +- meth_name, outdata[:20], len(outdata), +- indata[:20], len(indata) ++ "While receiving with <<{name:s}>> bad data " ++ "<<{outdata:r}>> ({nout:d}) received; " ++ "expected <<{indata:r}>> ({nin:d})\n".format( ++ name=meth_name, outdata=outdata[:20], ++ nout=len(outdata), ++ indata=indata[:20], nin=len(indata) + ) + ) + except ValueError as e: + if expect_success: + self.fail( +- "Failed to receive with method <<%s>>; " +- "expected to succeed.\n" % (meth_name,) ++ "Failed to receive with method <<{name:s}>>; " ++ "expected to succeed.\n".format(name=meth_name) + ) + if not str(e).startswith(meth_name): + self.fail( +- "Method <<%s>> failed with unexpected " +- "exception message: %s\n" % ( +- meth_name, e ++ "Method <<{name:s}>> failed with unexpected " ++ "exception message: {exp:s}\n".format( ++ name=meth_name, exp=e + ) + ) + # consume data + s.read() + +- s.write("over\n".encode("ASCII", "strict")) ++ s.write(b"over\n") + s.close() + + def test_handshake_timeout(self): + # Issue #5103: SSL handshake must respect the socket timeout + server = socket.socket(socket.AF_INET) + host = "127.0.0.1" +- port = test_support.bind_port(server) ++ port = support.bind_port(server) + started = threading.Event() + finish = False + +@@ -1332,6 +2497,8 @@ else: + # Let the socket hang around rather than having + # it closed by garbage collection. + conns.append(server.accept()[0]) ++ for sock in conns: ++ sock.close() + + t = threading.Thread(target=serve) + t.start() +@@ -1349,8 +2516,8 @@ else: + c.close() + try: + c = socket.socket(socket.AF_INET) +- c.settimeout(0.2) + c = ssl.wrap_socket(c) ++ c.settimeout(0.2) + # Will attempt handshake and time out + self.assertRaisesRegexp(ssl.SSLError, "timed out", + c.connect, (host, port)) +@@ -1361,59 +2528,384 @@ else: + t.join() + server.close() + ++ def test_server_accept(self): ++ # Issue #16357: accept() on a SSLSocket created through ++ # SSLContext.wrap_socket(). 
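
# A minimal sketch (annotation, not part of the patch) of the server-side
# pattern behind test_server_accept: wrap a *listening* socket with
# SSLContext.wrap_socket(server_side=True) so that accept() hands back
# already-wrapped ssl.SSLSocket connections.  The certificate path is a
# placeholder.
import socket
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.load_cert_chain("server-cert.pem")        # placeholder key+cert file
raw = socket.socket(socket.AF_INET)
raw.bind(("127.0.0.1", 0))
listener = ctx.wrap_socket(raw, server_side=True)
listener.listen(5)
# Each accepted connection is an ssl.SSLSocket; on a blocking socket the TLS
# handshake is normally completed as part of accept():
#     conn, addr = listener.accept()
#     conn.recv(1)
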
++        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
++        context.verify_mode = ssl.CERT_REQUIRED
++        context.load_verify_locations(CERTFILE)
++        context.load_cert_chain(CERTFILE)
++        server = socket.socket(socket.AF_INET)
++        host = "127.0.0.1"
++        port = support.bind_port(server)
++        server = context.wrap_socket(server, server_side=True)
++
++        evt = threading.Event()
++        remote = [None]
++        peer = [None]
++        def serve():
++            server.listen(5)
++            # Block on the accept and wait on the connection to close.
++            evt.set()
++            remote[0], peer[0] = server.accept()
++            remote[0].recv(1)
++
++        t = threading.Thread(target=serve)
++        t.start()
++        # Client wait until server setup and perform a connect.
++        evt.wait()
++        client = context.wrap_socket(socket.socket())
++        client.connect((host, port))
++        client_addr = client.getsockname()
++        client.close()
++        t.join()
++        remote[0].close()
++        server.close()
++        # Sanity checks.
++        self.assertIsInstance(remote[0], ssl.SSLSocket)
++        self.assertEqual(peer[0], client_addr)
++
++    def test_getpeercert_enotconn(self):
++        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
++        with closing(context.wrap_socket(socket.socket())) as sock:
++            with self.assertRaises(socket.error) as cm:
++                sock.getpeercert()
++            self.assertEqual(cm.exception.errno, errno.ENOTCONN)
++
++    def test_do_handshake_enotconn(self):
++        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
++        with closing(context.wrap_socket(socket.socket())) as sock:
++            with self.assertRaises(socket.error) as cm:
++                sock.do_handshake()
++            self.assertEqual(cm.exception.errno, errno.ENOTCONN)
++
+     def test_default_ciphers(self):
++        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
++        try:
++            # Force a set of weak ciphers on our client context
++            context.set_ciphers("DES")
++        except ssl.SSLError:
++            self.skipTest("no DES cipher available")
+         with ThreadedEchoServer(CERTFILE,
+                                 ssl_version=ssl.PROTOCOL_SSLv23,
+                                 chatty=False) as server:
+-            sock = socket.socket()
+-            try:
+-                # Force a set of weak ciphers on our client socket
+-                try:
+-                    s = ssl.wrap_socket(sock,
+-                                        ssl_version=ssl.PROTOCOL_SSLv23,
+-                                        ciphers="DES")
+-                except ssl.SSLError:
+-                    self.skipTest("no DES cipher available")
+-                with self.assertRaises((OSError, ssl.SSLError)):
++            with closing(context.wrap_socket(socket.socket())) as s:
++                with self.assertRaises(ssl.SSLError):
+                     s.connect((HOST, server.port))
+-            finally:
+-                sock.close()
+             self.assertIn("no shared cipher", str(server.conn_errors[0]))
+
++    @unittest.skipUnless(ssl.HAS_ECDH, "test requires ECDH-enabled OpenSSL")
++    def test_default_ecdh_curve(self):
++        # Issue #21015: elliptic curve-based Diffie Hellman key exchange
++        # should be enabled by default on SSL contexts.
++        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
++        context.load_cert_chain(CERTFILE)
++        # Prior to OpenSSL 1.0.0, ECDH ciphers have to be enabled
++        # explicitly using the 'ECCdraft' cipher alias.  Otherwise,
++        # our default cipher list should prefer ECDH-based ciphers
++        # automatically.
++        if ssl.OPENSSL_VERSION_INFO < (1, 0, 0):
++            context.set_ciphers("ECCdraft:ECDH")
++        with ThreadedEchoServer(context=context) as server:
++            with closing(context.wrap_socket(socket.socket())) as s:
++                s.connect((HOST, server.port))
++                self.assertIn("ECDH", s.cipher()[0])
++
++    @unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES,
++                         "'tls-unique' channel binding not available")
++    def test_tls_unique_channel_binding(self):
++        """Test tls-unique channel binding."""
++        if support.verbose:
++            sys.stdout.write("\n")
++
++        server = ThreadedEchoServer(CERTFILE,
++                                    certreqs=ssl.CERT_NONE,
++                                    ssl_version=ssl.PROTOCOL_TLSv1,
++                                    cacerts=CERTFILE,
++                                    chatty=True,
++                                    connectionchatty=False)
++        with server:
++            s = ssl.wrap_socket(socket.socket(),
++                                server_side=False,
++                                certfile=CERTFILE,
++                                ca_certs=CERTFILE,
++                                cert_reqs=ssl.CERT_NONE,
++                                ssl_version=ssl.PROTOCOL_TLSv1)
++            s.connect((HOST, server.port))
++            # get the data
++            cb_data = s.get_channel_binding("tls-unique")
++            if support.verbose:
++                sys.stdout.write(" got channel binding data: {0!r}\n"
++                                 .format(cb_data))
++
++            # check if it is sane
++            self.assertIsNotNone(cb_data)
++            self.assertEqual(len(cb_data), 12) # True for TLSv1
++
++            # and compare with the peers version
++            s.write(b"CB tls-unique\n")
++            peer_data_repr = s.read().strip()
++            self.assertEqual(peer_data_repr,
++                             repr(cb_data).encode("us-ascii"))
++            s.close()
++
++            # now, again
++            s = ssl.wrap_socket(socket.socket(),
++                                server_side=False,
++                                certfile=CERTFILE,
++                                ca_certs=CERTFILE,
++                                cert_reqs=ssl.CERT_NONE,
++                                ssl_version=ssl.PROTOCOL_TLSv1)
++            s.connect((HOST, server.port))
++            new_cb_data = s.get_channel_binding("tls-unique")
++            if support.verbose:
++                sys.stdout.write(" got another channel binding data: {0!r}\n"
++                                 .format(new_cb_data))
++            # is it really unique
++            self.assertNotEqual(cb_data, new_cb_data)
++            self.assertIsNotNone(cb_data)
++            self.assertEqual(len(cb_data), 12) # True for TLSv1
++            s.write(b"CB tls-unique\n")
++            peer_data_repr = s.read().strip()
++            self.assertEqual(peer_data_repr,
++                             repr(new_cb_data).encode("us-ascii"))
++            s.close()
++
++    def test_compression(self):
++        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
++        context.load_cert_chain(CERTFILE)
++        stats = server_params_test(context, context,
++                                   chatty=True, connectionchatty=True)
++        if support.verbose:
++            sys.stdout.write(" got compression: {!r}\n".format(stats['compression']))
++        self.assertIn(stats['compression'], { None, 'ZLIB', 'RLE' })
++
++    @unittest.skipUnless(hasattr(ssl, 'OP_NO_COMPRESSION'),
++                         "ssl.OP_NO_COMPRESSION needed for this test")
++    def test_compression_disabled(self):
++        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
++        context.load_cert_chain(CERTFILE)
++        context.options |= ssl.OP_NO_COMPRESSION
++        stats = server_params_test(context, context,
++                                   chatty=True, connectionchatty=True)
++        self.assertIs(stats['compression'], None)
++
++    def test_dh_params(self):
++        # Check we can get a connection with ephemeral Diffie-Hellman
++        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
++        context.load_cert_chain(CERTFILE)
++        context.load_dh_params(DHFILE)
++        context.set_ciphers("kEDH")
++        stats = server_params_test(context, context,
++                                   chatty=True, connectionchatty=True)
++        cipher = stats["cipher"][0]
++        parts = cipher.split("-")
++        if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts:
++            self.fail("Non-DH cipher: " + cipher[0])
++
++    def test_selected_npn_protocol(self):
++        # selected_npn_protocol() is None unless NPN is used
++        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
++        context.load_cert_chain(CERTFILE)
++        stats = server_params_test(context, context,
++                                   chatty=True, connectionchatty=True)
++        self.assertIs(stats['client_npn_protocol'], None)
++
++    @unittest.skipUnless(ssl.HAS_NPN, "NPN support needed for this test")
++    def test_npn_protocols(self):
++        server_protocols = ['http/1.1', 'spdy/2']
++        protocol_tests = [
++            (['http/1.1', 'spdy/2'], 'http/1.1'),
++            (['spdy/2', 'http/1.1'], 'http/1.1'),
++            (['spdy/2', 'test'], 'spdy/2'),
++            (['abc', 'def'], 'abc')
++        ]
++        for client_protocols, expected in protocol_tests:
++            server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
++            server_context.load_cert_chain(CERTFILE)
++            server_context.set_npn_protocols(server_protocols)
++            client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
++            client_context.load_cert_chain(CERTFILE)
++            client_context.set_npn_protocols(client_protocols)
++            stats = server_params_test(client_context, server_context,
++                                       chatty=True, connectionchatty=True)
++
++            msg = "failed trying %s (s) and %s (c).\n" \
++                  "was expecting %s, but got %%s from the %%s" \
++                      % (str(server_protocols), str(client_protocols),
++                         str(expected))
++            client_result = stats['client_npn_protocol']
++            self.assertEqual(client_result, expected, msg % (client_result, "client"))
++            server_result = stats['server_npn_protocols'][-1] \
++                if len(stats['server_npn_protocols']) else 'nothing'
++            self.assertEqual(server_result, expected, msg % (server_result, "server"))
++
++    def sni_contexts(self):
++        server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
++        server_context.load_cert_chain(SIGNED_CERTFILE)
++        other_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
++        other_context.load_cert_chain(SIGNED_CERTFILE2)
++        client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
++        client_context.verify_mode = ssl.CERT_REQUIRED
++        client_context.load_verify_locations(SIGNING_CA)
++        return server_context, other_context, client_context
++
++    def check_common_name(self, stats, name):
++        cert = stats['peercert']
++        self.assertIn((('commonName', name),), cert['subject'])
++
++    @needs_sni
++    def test_sni_callback(self):
++        calls = []
++        server_context, other_context, client_context = self.sni_contexts()
++
++        def servername_cb(ssl_sock, server_name, initial_context):
++            calls.append((server_name, initial_context))
++            if server_name is not None:
++                ssl_sock.context = other_context
++        server_context.set_servername_callback(servername_cb)
++
++        stats = server_params_test(client_context, server_context,
++                                   chatty=True,
++                                   sni_name='supermessage')
++        # The hostname was fetched properly, and the certificate was
++        # changed for the connection.
++        self.assertEqual(calls, [("supermessage", server_context)])
++        # CERTFILE4 was selected
++        self.check_common_name(stats, 'fakehostname')
++
++        calls = []
++        # The callback is called with server_name=None
++        stats = server_params_test(client_context, server_context,
++                                   chatty=True,
++                                   sni_name=None)
++        self.assertEqual(calls, [(None, server_context)])
++        self.check_common_name(stats, 'localhost')
++
++        # Check disabling the callback
++        calls = []
++        server_context.set_servername_callback(None)
++
++        stats = server_params_test(client_context, server_context,
++                                   chatty=True,
++                                   sni_name='notfunny')
++        # Certificate didn't change
++        self.check_common_name(stats, 'localhost')
++        self.assertEqual(calls, [])
++
++    @needs_sni
++    def test_sni_callback_alert(self):
++        # Returning a TLS alert is reflected to the connecting client
++        server_context, other_context, client_context = self.sni_contexts()
++
++        def cb_returning_alert(ssl_sock, server_name, initial_context):
++            return ssl.ALERT_DESCRIPTION_ACCESS_DENIED
++        server_context.set_servername_callback(cb_returning_alert)
++
++        with self.assertRaises(ssl.SSLError) as cm:
++            stats = server_params_test(client_context, server_context,
++                                       chatty=False,
++                                       sni_name='supermessage')
++        self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_ACCESS_DENIED')
++
++    @needs_sni
++    def test_sni_callback_raising(self):
++        # Raising fails the connection with a TLS handshake failure alert.
++        server_context, other_context, client_context = self.sni_contexts()
++
++        def cb_raising(ssl_sock, server_name, initial_context):
++            1/0
++        server_context.set_servername_callback(cb_raising)
++
++        with self.assertRaises(ssl.SSLError) as cm, \
++             support.captured_stderr() as stderr:
++            stats = server_params_test(client_context, server_context,
++                                       chatty=False,
++                                       sni_name='supermessage')
++        self.assertEqual(cm.exception.reason, 'SSLV3_ALERT_HANDSHAKE_FAILURE')
++        self.assertIn("ZeroDivisionError", stderr.getvalue())
++
++    @needs_sni
++    def test_sni_callback_wrong_return_type(self):
++        # Returning the wrong return type terminates the TLS connection
++        # with an internal error alert.
++        server_context, other_context, client_context = self.sni_contexts()
++
++        def cb_wrong_return_type(ssl_sock, server_name, initial_context):
++            return "foo"
++        server_context.set_servername_callback(cb_wrong_return_type)
++
++        with self.assertRaises(ssl.SSLError) as cm, \
++             support.captured_stderr() as stderr:
++            stats = server_params_test(client_context, server_context,
++                                       chatty=False,
++                                       sni_name='supermessage')
++        self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_INTERNAL_ERROR')
++        self.assertIn("TypeError", stderr.getvalue())
++
++    def test_read_write_after_close_raises_valuerror(self):
++        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
++        context.verify_mode = ssl.CERT_REQUIRED
++        context.load_verify_locations(CERTFILE)
++        context.load_cert_chain(CERTFILE)
++        server = ThreadedEchoServer(context=context, chatty=False)
++
++        with server:
++            s = context.wrap_socket(socket.socket())
++            s.connect((HOST, server.port))
++            s.close()
++
++            self.assertRaises(ValueError, s.read, 1024)
++            self.assertRaises(ValueError, s.write, b'hello')
++
+
+ def test_main(verbose=False):
+-    global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, NOKIACERT, NULLBYTECERT
+-    CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir,
+-                            "keycert.pem")
+-    SVN_PYTHON_ORG_ROOT_CERT = os.path.join(
+-        os.path.dirname(__file__) or os.curdir,
+-        "https_svn_python_org_root.pem")
+-    NOKIACERT = os.path.join(os.path.dirname(__file__) or os.curdir,
+-                             "nokia.pem")
+-    NULLBYTECERT = os.path.join(os.path.dirname(__file__) or os.curdir,
+-                                "nullbytecert.pem")
+-
+-    if (not os.path.exists(CERTFILE) or
+-        not os.path.exists(SVN_PYTHON_ORG_ROOT_CERT) or
+-        not os.path.exists(NOKIACERT) or
+-        not os.path.exists(NULLBYTECERT)):
+-        raise test_support.TestFailed("Can't read certificate files!")
++    if support.verbose:
++        plats = {
++            'Linux': platform.linux_distribution,
++            'Mac': platform.mac_ver,
++            'Windows': platform.win32_ver,
++        }
++        for name, func in plats.items():
++            plat = func()
++            if plat and plat[0]:
++                plat = '%s %r' % (name, plat)
++                break
++        else:
++            plat = repr(platform.platform())
++        print("test_ssl: testing with %r %r" %
++            (ssl.OPENSSL_VERSION, ssl.OPENSSL_VERSION_INFO))
++        print("          under %s" % plat)
++        print("          HAS_SNI = %r" % ssl.HAS_SNI)
++        print("          OP_ALL = 0x%8x" % ssl.OP_ALL)
++        try:
++            print("          OP_NO_TLSv1_1 = 0x%8x" % ssl.OP_NO_TLSv1_1)
++        except AttributeError:
++            pass
++
++    for filename in [
++        CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, BYTES_CERTFILE,
++        ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY,
++        SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA,
++        BADCERT, BADKEY, EMPTYCERT]:
++        if not os.path.exists(filename):
++            raise support.TestFailed("Can't read certificate file %r" % filename)
+
+-    tests = [BasicTests, BasicSocketTests]
++    tests = [ContextTests, BasicSocketTests, SSLErrorTests]
+
+-    if test_support.is_resource_enabled('network'):
++    if support.is_resource_enabled('network'):
+         tests.append(NetworkedTests)
+
+     if _have_threads:
+-        thread_info = test_support.threading_setup()
+-        if thread_info and test_support.is_resource_enabled('network'):
++        thread_info = support.threading_setup()
++        if thread_info:
+             tests.append(ThreadedTests)
+
+     try:
+-        test_support.run_unittest(*tests)
++        support.run_unittest(*tests)
+     finally:
+         if _have_threads:
+-            test_support.threading_cleanup(*thread_info)
++            support.threading_cleanup(*thread_info)
+
+ if __name__ == "__main__":
+     test_main()
+diff -up Python-2.7.5/Makefile.pre.in.ssl Python-2.7.5/Makefile.pre.in
+--- Python-2.7.5/Makefile.pre.in.ssl 2015-02-24 11:37:01.544850801
+0100 ++++ Python-2.7.5/Makefile.pre.in 2015-02-24 10:19:57.491864267 +0100 +@@ -931,7 +931,7 @@ PLATMACDIRS= plat-mac plat-mac/Carbon pl + plat-mac/lib-scriptpackages/Terminal + PLATMACPATH=:plat-mac:plat-mac/lib-scriptpackages + LIBSUBDIRS= lib-tk lib-tk/test lib-tk/test/test_tkinter \ +- lib-tk/test/test_ttk site-packages test test/data \ ++ lib-tk/test/test_ttk site-packages test test/data test/capath \ + test/cjkencodings test/decimaltestdata test/xmltestdata test/subprocessdata \ + test/tracedmodules \ + encodings compiler hotshot \ +diff --git a/Modules/_ssl_data.h b/Modules/_ssl_data.h +new file mode 100644 +--- /dev/null ++++ b/Modules/_ssl_data.h +@@ -0,0 +1,1653 @@ ++/* File generated by Tools/ssl/make_ssl_data.py */ ++/* Generated on 2012-05-16T23:56:40.981382 */ ++ ++static struct py_ssl_library_code library_codes[] = { ++ {"PEM", ERR_LIB_PEM}, ++ {"SSL", ERR_LIB_SSL}, ++ {"X509", ERR_LIB_X509}, ++ { NULL } ++}; ++ ++static struct py_ssl_error_code error_codes[] = { ++ #ifdef PEM_R_BAD_BASE64_DECODE ++ {"BAD_BASE64_DECODE", ERR_LIB_PEM, PEM_R_BAD_BASE64_DECODE}, ++ #else ++ {"BAD_BASE64_DECODE", ERR_LIB_PEM, 100}, ++ #endif ++ #ifdef PEM_R_BAD_DECRYPT ++ {"BAD_DECRYPT", ERR_LIB_PEM, PEM_R_BAD_DECRYPT}, ++ #else ++ {"BAD_DECRYPT", ERR_LIB_PEM, 101}, ++ #endif ++ #ifdef PEM_R_BAD_END_LINE ++ {"BAD_END_LINE", ERR_LIB_PEM, PEM_R_BAD_END_LINE}, ++ #else ++ {"BAD_END_LINE", ERR_LIB_PEM, 102}, ++ #endif ++ #ifdef PEM_R_BAD_IV_CHARS ++ {"BAD_IV_CHARS", ERR_LIB_PEM, PEM_R_BAD_IV_CHARS}, ++ #else ++ {"BAD_IV_CHARS", ERR_LIB_PEM, 103}, ++ #endif ++ #ifdef PEM_R_BAD_MAGIC_NUMBER ++ {"BAD_MAGIC_NUMBER", ERR_LIB_PEM, PEM_R_BAD_MAGIC_NUMBER}, ++ #else ++ {"BAD_MAGIC_NUMBER", ERR_LIB_PEM, 116}, ++ #endif ++ #ifdef PEM_R_BAD_PASSWORD_READ ++ {"BAD_PASSWORD_READ", ERR_LIB_PEM, PEM_R_BAD_PASSWORD_READ}, ++ #else ++ {"BAD_PASSWORD_READ", ERR_LIB_PEM, 104}, ++ #endif ++ #ifdef PEM_R_BAD_VERSION_NUMBER ++ {"BAD_VERSION_NUMBER", ERR_LIB_PEM, PEM_R_BAD_VERSION_NUMBER}, ++ #else ++ {"BAD_VERSION_NUMBER", ERR_LIB_PEM, 117}, ++ #endif ++ #ifdef PEM_R_BIO_WRITE_FAILURE ++ {"BIO_WRITE_FAILURE", ERR_LIB_PEM, PEM_R_BIO_WRITE_FAILURE}, ++ #else ++ {"BIO_WRITE_FAILURE", ERR_LIB_PEM, 118}, ++ #endif ++ #ifdef PEM_R_CIPHER_IS_NULL ++ {"CIPHER_IS_NULL", ERR_LIB_PEM, PEM_R_CIPHER_IS_NULL}, ++ #else ++ {"CIPHER_IS_NULL", ERR_LIB_PEM, 127}, ++ #endif ++ #ifdef PEM_R_ERROR_CONVERTING_PRIVATE_KEY ++ {"ERROR_CONVERTING_PRIVATE_KEY", ERR_LIB_PEM, PEM_R_ERROR_CONVERTING_PRIVATE_KEY}, ++ #else ++ {"ERROR_CONVERTING_PRIVATE_KEY", ERR_LIB_PEM, 115}, ++ #endif ++ #ifdef PEM_R_EXPECTING_PRIVATE_KEY_BLOB ++ {"EXPECTING_PRIVATE_KEY_BLOB", ERR_LIB_PEM, PEM_R_EXPECTING_PRIVATE_KEY_BLOB}, ++ #else ++ {"EXPECTING_PRIVATE_KEY_BLOB", ERR_LIB_PEM, 119}, ++ #endif ++ #ifdef PEM_R_EXPECTING_PUBLIC_KEY_BLOB ++ {"EXPECTING_PUBLIC_KEY_BLOB", ERR_LIB_PEM, PEM_R_EXPECTING_PUBLIC_KEY_BLOB}, ++ #else ++ {"EXPECTING_PUBLIC_KEY_BLOB", ERR_LIB_PEM, 120}, ++ #endif ++ #ifdef PEM_R_INCONSISTENT_HEADER ++ {"INCONSISTENT_HEADER", ERR_LIB_PEM, PEM_R_INCONSISTENT_HEADER}, ++ #else ++ {"INCONSISTENT_HEADER", ERR_LIB_PEM, 121}, ++ #endif ++ #ifdef PEM_R_KEYBLOB_HEADER_PARSE_ERROR ++ {"KEYBLOB_HEADER_PARSE_ERROR", ERR_LIB_PEM, PEM_R_KEYBLOB_HEADER_PARSE_ERROR}, ++ #else ++ {"KEYBLOB_HEADER_PARSE_ERROR", ERR_LIB_PEM, 122}, ++ #endif ++ #ifdef PEM_R_KEYBLOB_TOO_SHORT ++ {"KEYBLOB_TOO_SHORT", ERR_LIB_PEM, PEM_R_KEYBLOB_TOO_SHORT}, ++ #else ++ {"KEYBLOB_TOO_SHORT", ERR_LIB_PEM, 123}, ++ #endif ++ #ifdef PEM_R_NOT_DEK_INFO ++ {"NOT_DEK_INFO", ERR_LIB_PEM, 
PEM_R_NOT_DEK_INFO}, ++ #else ++ {"NOT_DEK_INFO", ERR_LIB_PEM, 105}, ++ #endif ++ #ifdef PEM_R_NOT_ENCRYPTED ++ {"NOT_ENCRYPTED", ERR_LIB_PEM, PEM_R_NOT_ENCRYPTED}, ++ #else ++ {"NOT_ENCRYPTED", ERR_LIB_PEM, 106}, ++ #endif ++ #ifdef PEM_R_NOT_PROC_TYPE ++ {"NOT_PROC_TYPE", ERR_LIB_PEM, PEM_R_NOT_PROC_TYPE}, ++ #else ++ {"NOT_PROC_TYPE", ERR_LIB_PEM, 107}, ++ #endif ++ #ifdef PEM_R_NO_START_LINE ++ {"NO_START_LINE", ERR_LIB_PEM, PEM_R_NO_START_LINE}, ++ #else ++ {"NO_START_LINE", ERR_LIB_PEM, 108}, ++ #endif ++ #ifdef PEM_R_PROBLEMS_GETTING_PASSWORD ++ {"PROBLEMS_GETTING_PASSWORD", ERR_LIB_PEM, PEM_R_PROBLEMS_GETTING_PASSWORD}, ++ #else ++ {"PROBLEMS_GETTING_PASSWORD", ERR_LIB_PEM, 109}, ++ #endif ++ #ifdef PEM_R_PUBLIC_KEY_NO_RSA ++ {"PUBLIC_KEY_NO_RSA", ERR_LIB_PEM, PEM_R_PUBLIC_KEY_NO_RSA}, ++ #else ++ {"PUBLIC_KEY_NO_RSA", ERR_LIB_PEM, 110}, ++ #endif ++ #ifdef PEM_R_PVK_DATA_TOO_SHORT ++ {"PVK_DATA_TOO_SHORT", ERR_LIB_PEM, PEM_R_PVK_DATA_TOO_SHORT}, ++ #else ++ {"PVK_DATA_TOO_SHORT", ERR_LIB_PEM, 124}, ++ #endif ++ #ifdef PEM_R_PVK_TOO_SHORT ++ {"PVK_TOO_SHORT", ERR_LIB_PEM, PEM_R_PVK_TOO_SHORT}, ++ #else ++ {"PVK_TOO_SHORT", ERR_LIB_PEM, 125}, ++ #endif ++ #ifdef PEM_R_READ_KEY ++ {"READ_KEY", ERR_LIB_PEM, PEM_R_READ_KEY}, ++ #else ++ {"READ_KEY", ERR_LIB_PEM, 111}, ++ #endif ++ #ifdef PEM_R_SHORT_HEADER ++ {"SHORT_HEADER", ERR_LIB_PEM, PEM_R_SHORT_HEADER}, ++ #else ++ {"SHORT_HEADER", ERR_LIB_PEM, 112}, ++ #endif ++ #ifdef PEM_R_UNSUPPORTED_CIPHER ++ {"UNSUPPORTED_CIPHER", ERR_LIB_PEM, PEM_R_UNSUPPORTED_CIPHER}, ++ #else ++ {"UNSUPPORTED_CIPHER", ERR_LIB_PEM, 113}, ++ #endif ++ #ifdef PEM_R_UNSUPPORTED_ENCRYPTION ++ {"UNSUPPORTED_ENCRYPTION", ERR_LIB_PEM, PEM_R_UNSUPPORTED_ENCRYPTION}, ++ #else ++ {"UNSUPPORTED_ENCRYPTION", ERR_LIB_PEM, 114}, ++ #endif ++ #ifdef PEM_R_UNSUPPORTED_KEY_COMPONENTS ++ {"UNSUPPORTED_KEY_COMPONENTS", ERR_LIB_PEM, PEM_R_UNSUPPORTED_KEY_COMPONENTS}, ++ #else ++ {"UNSUPPORTED_KEY_COMPONENTS", ERR_LIB_PEM, 126}, ++ #endif ++ #ifdef SSL_R_APP_DATA_IN_HANDSHAKE ++ {"APP_DATA_IN_HANDSHAKE", ERR_LIB_SSL, SSL_R_APP_DATA_IN_HANDSHAKE}, ++ #else ++ {"APP_DATA_IN_HANDSHAKE", ERR_LIB_SSL, 100}, ++ #endif ++ #ifdef SSL_R_ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT ++ {"ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT", ERR_LIB_SSL, SSL_R_ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT}, ++ #else ++ {"ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT", ERR_LIB_SSL, 272}, ++ #endif ++ #ifdef SSL_R_BAD_ALERT_RECORD ++ {"BAD_ALERT_RECORD", ERR_LIB_SSL, SSL_R_BAD_ALERT_RECORD}, ++ #else ++ {"BAD_ALERT_RECORD", ERR_LIB_SSL, 101}, ++ #endif ++ #ifdef SSL_R_BAD_AUTHENTICATION_TYPE ++ {"BAD_AUTHENTICATION_TYPE", ERR_LIB_SSL, SSL_R_BAD_AUTHENTICATION_TYPE}, ++ #else ++ {"BAD_AUTHENTICATION_TYPE", ERR_LIB_SSL, 102}, ++ #endif ++ #ifdef SSL_R_BAD_CHANGE_CIPHER_SPEC ++ {"BAD_CHANGE_CIPHER_SPEC", ERR_LIB_SSL, SSL_R_BAD_CHANGE_CIPHER_SPEC}, ++ #else ++ {"BAD_CHANGE_CIPHER_SPEC", ERR_LIB_SSL, 103}, ++ #endif ++ #ifdef SSL_R_BAD_CHECKSUM ++ {"BAD_CHECKSUM", ERR_LIB_SSL, SSL_R_BAD_CHECKSUM}, ++ #else ++ {"BAD_CHECKSUM", ERR_LIB_SSL, 104}, ++ #endif ++ #ifdef SSL_R_BAD_DATA_RETURNED_BY_CALLBACK ++ {"BAD_DATA_RETURNED_BY_CALLBACK", ERR_LIB_SSL, SSL_R_BAD_DATA_RETURNED_BY_CALLBACK}, ++ #else ++ {"BAD_DATA_RETURNED_BY_CALLBACK", ERR_LIB_SSL, 106}, ++ #endif ++ #ifdef SSL_R_BAD_DECOMPRESSION ++ {"BAD_DECOMPRESSION", ERR_LIB_SSL, SSL_R_BAD_DECOMPRESSION}, ++ #else ++ {"BAD_DECOMPRESSION", ERR_LIB_SSL, 107}, ++ #endif ++ #ifdef SSL_R_BAD_DH_G_LENGTH ++ {"BAD_DH_G_LENGTH", ERR_LIB_SSL, 
SSL_R_BAD_DH_G_LENGTH}, ++ #else ++ {"BAD_DH_G_LENGTH", ERR_LIB_SSL, 108}, ++ #endif ++ #ifdef SSL_R_BAD_DH_PUB_KEY_LENGTH ++ {"BAD_DH_PUB_KEY_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DH_PUB_KEY_LENGTH}, ++ #else ++ {"BAD_DH_PUB_KEY_LENGTH", ERR_LIB_SSL, 109}, ++ #endif ++ #ifdef SSL_R_BAD_DH_P_LENGTH ++ {"BAD_DH_P_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DH_P_LENGTH}, ++ #else ++ {"BAD_DH_P_LENGTH", ERR_LIB_SSL, 110}, ++ #endif ++ #ifdef SSL_R_BAD_DIGEST_LENGTH ++ {"BAD_DIGEST_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DIGEST_LENGTH}, ++ #else ++ {"BAD_DIGEST_LENGTH", ERR_LIB_SSL, 111}, ++ #endif ++ #ifdef SSL_R_BAD_DSA_SIGNATURE ++ {"BAD_DSA_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_DSA_SIGNATURE}, ++ #else ++ {"BAD_DSA_SIGNATURE", ERR_LIB_SSL, 112}, ++ #endif ++ #ifdef SSL_R_BAD_ECC_CERT ++ {"BAD_ECC_CERT", ERR_LIB_SSL, SSL_R_BAD_ECC_CERT}, ++ #else ++ {"BAD_ECC_CERT", ERR_LIB_SSL, 304}, ++ #endif ++ #ifdef SSL_R_BAD_ECDSA_SIGNATURE ++ {"BAD_ECDSA_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_ECDSA_SIGNATURE}, ++ #else ++ {"BAD_ECDSA_SIGNATURE", ERR_LIB_SSL, 305}, ++ #endif ++ #ifdef SSL_R_BAD_ECPOINT ++ {"BAD_ECPOINT", ERR_LIB_SSL, SSL_R_BAD_ECPOINT}, ++ #else ++ {"BAD_ECPOINT", ERR_LIB_SSL, 306}, ++ #endif ++ #ifdef SSL_R_BAD_HANDSHAKE_LENGTH ++ {"BAD_HANDSHAKE_LENGTH", ERR_LIB_SSL, SSL_R_BAD_HANDSHAKE_LENGTH}, ++ #else ++ {"BAD_HANDSHAKE_LENGTH", ERR_LIB_SSL, 332}, ++ #endif ++ #ifdef SSL_R_BAD_HELLO_REQUEST ++ {"BAD_HELLO_REQUEST", ERR_LIB_SSL, SSL_R_BAD_HELLO_REQUEST}, ++ #else ++ {"BAD_HELLO_REQUEST", ERR_LIB_SSL, 105}, ++ #endif ++ #ifdef SSL_R_BAD_LENGTH ++ {"BAD_LENGTH", ERR_LIB_SSL, SSL_R_BAD_LENGTH}, ++ #else ++ {"BAD_LENGTH", ERR_LIB_SSL, 271}, ++ #endif ++ #ifdef SSL_R_BAD_MAC_DECODE ++ {"BAD_MAC_DECODE", ERR_LIB_SSL, SSL_R_BAD_MAC_DECODE}, ++ #else ++ {"BAD_MAC_DECODE", ERR_LIB_SSL, 113}, ++ #endif ++ #ifdef SSL_R_BAD_MAC_LENGTH ++ {"BAD_MAC_LENGTH", ERR_LIB_SSL, SSL_R_BAD_MAC_LENGTH}, ++ #else ++ {"BAD_MAC_LENGTH", ERR_LIB_SSL, 333}, ++ #endif ++ #ifdef SSL_R_BAD_MESSAGE_TYPE ++ {"BAD_MESSAGE_TYPE", ERR_LIB_SSL, SSL_R_BAD_MESSAGE_TYPE}, ++ #else ++ {"BAD_MESSAGE_TYPE", ERR_LIB_SSL, 114}, ++ #endif ++ #ifdef SSL_R_BAD_PACKET_LENGTH ++ {"BAD_PACKET_LENGTH", ERR_LIB_SSL, SSL_R_BAD_PACKET_LENGTH}, ++ #else ++ {"BAD_PACKET_LENGTH", ERR_LIB_SSL, 115}, ++ #endif ++ #ifdef SSL_R_BAD_PROTOCOL_VERSION_NUMBER ++ {"BAD_PROTOCOL_VERSION_NUMBER", ERR_LIB_SSL, SSL_R_BAD_PROTOCOL_VERSION_NUMBER}, ++ #else ++ {"BAD_PROTOCOL_VERSION_NUMBER", ERR_LIB_SSL, 116}, ++ #endif ++ #ifdef SSL_R_BAD_PSK_IDENTITY_HINT_LENGTH ++ {"BAD_PSK_IDENTITY_HINT_LENGTH", ERR_LIB_SSL, SSL_R_BAD_PSK_IDENTITY_HINT_LENGTH}, ++ #else ++ {"BAD_PSK_IDENTITY_HINT_LENGTH", ERR_LIB_SSL, 316}, ++ #endif ++ #ifdef SSL_R_BAD_RESPONSE_ARGUMENT ++ {"BAD_RESPONSE_ARGUMENT", ERR_LIB_SSL, SSL_R_BAD_RESPONSE_ARGUMENT}, ++ #else ++ {"BAD_RESPONSE_ARGUMENT", ERR_LIB_SSL, 117}, ++ #endif ++ #ifdef SSL_R_BAD_RSA_DECRYPT ++ {"BAD_RSA_DECRYPT", ERR_LIB_SSL, SSL_R_BAD_RSA_DECRYPT}, ++ #else ++ {"BAD_RSA_DECRYPT", ERR_LIB_SSL, 118}, ++ #endif ++ #ifdef SSL_R_BAD_RSA_ENCRYPT ++ {"BAD_RSA_ENCRYPT", ERR_LIB_SSL, SSL_R_BAD_RSA_ENCRYPT}, ++ #else ++ {"BAD_RSA_ENCRYPT", ERR_LIB_SSL, 119}, ++ #endif ++ #ifdef SSL_R_BAD_RSA_E_LENGTH ++ {"BAD_RSA_E_LENGTH", ERR_LIB_SSL, SSL_R_BAD_RSA_E_LENGTH}, ++ #else ++ {"BAD_RSA_E_LENGTH", ERR_LIB_SSL, 120}, ++ #endif ++ #ifdef SSL_R_BAD_RSA_MODULUS_LENGTH ++ {"BAD_RSA_MODULUS_LENGTH", ERR_LIB_SSL, SSL_R_BAD_RSA_MODULUS_LENGTH}, ++ #else ++ {"BAD_RSA_MODULUS_LENGTH", ERR_LIB_SSL, 121}, ++ #endif ++ #ifdef SSL_R_BAD_RSA_SIGNATURE ++ {"BAD_RSA_SIGNATURE", 
ERR_LIB_SSL, SSL_R_BAD_RSA_SIGNATURE}, ++ #else ++ {"BAD_RSA_SIGNATURE", ERR_LIB_SSL, 122}, ++ #endif ++ #ifdef SSL_R_BAD_SIGNATURE ++ {"BAD_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_SIGNATURE}, ++ #else ++ {"BAD_SIGNATURE", ERR_LIB_SSL, 123}, ++ #endif ++ #ifdef SSL_R_BAD_SSL_FILETYPE ++ {"BAD_SSL_FILETYPE", ERR_LIB_SSL, SSL_R_BAD_SSL_FILETYPE}, ++ #else ++ {"BAD_SSL_FILETYPE", ERR_LIB_SSL, 124}, ++ #endif ++ #ifdef SSL_R_BAD_SSL_SESSION_ID_LENGTH ++ {"BAD_SSL_SESSION_ID_LENGTH", ERR_LIB_SSL, SSL_R_BAD_SSL_SESSION_ID_LENGTH}, ++ #else ++ {"BAD_SSL_SESSION_ID_LENGTH", ERR_LIB_SSL, 125}, ++ #endif ++ #ifdef SSL_R_BAD_STATE ++ {"BAD_STATE", ERR_LIB_SSL, SSL_R_BAD_STATE}, ++ #else ++ {"BAD_STATE", ERR_LIB_SSL, 126}, ++ #endif ++ #ifdef SSL_R_BAD_WRITE_RETRY ++ {"BAD_WRITE_RETRY", ERR_LIB_SSL, SSL_R_BAD_WRITE_RETRY}, ++ #else ++ {"BAD_WRITE_RETRY", ERR_LIB_SSL, 127}, ++ #endif ++ #ifdef SSL_R_BIO_NOT_SET ++ {"BIO_NOT_SET", ERR_LIB_SSL, SSL_R_BIO_NOT_SET}, ++ #else ++ {"BIO_NOT_SET", ERR_LIB_SSL, 128}, ++ #endif ++ #ifdef SSL_R_BLOCK_CIPHER_PAD_IS_WRONG ++ {"BLOCK_CIPHER_PAD_IS_WRONG", ERR_LIB_SSL, SSL_R_BLOCK_CIPHER_PAD_IS_WRONG}, ++ #else ++ {"BLOCK_CIPHER_PAD_IS_WRONG", ERR_LIB_SSL, 129}, ++ #endif ++ #ifdef SSL_R_BN_LIB ++ {"BN_LIB", ERR_LIB_SSL, SSL_R_BN_LIB}, ++ #else ++ {"BN_LIB", ERR_LIB_SSL, 130}, ++ #endif ++ #ifdef SSL_R_CA_DN_LENGTH_MISMATCH ++ {"CA_DN_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_CA_DN_LENGTH_MISMATCH}, ++ #else ++ {"CA_DN_LENGTH_MISMATCH", ERR_LIB_SSL, 131}, ++ #endif ++ #ifdef SSL_R_CA_DN_TOO_LONG ++ {"CA_DN_TOO_LONG", ERR_LIB_SSL, SSL_R_CA_DN_TOO_LONG}, ++ #else ++ {"CA_DN_TOO_LONG", ERR_LIB_SSL, 132}, ++ #endif ++ #ifdef SSL_R_CCS_RECEIVED_EARLY ++ {"CCS_RECEIVED_EARLY", ERR_LIB_SSL, SSL_R_CCS_RECEIVED_EARLY}, ++ #else ++ {"CCS_RECEIVED_EARLY", ERR_LIB_SSL, 133}, ++ #endif ++ #ifdef SSL_R_CERTIFICATE_VERIFY_FAILED ++ {"CERTIFICATE_VERIFY_FAILED", ERR_LIB_SSL, SSL_R_CERTIFICATE_VERIFY_FAILED}, ++ #else ++ {"CERTIFICATE_VERIFY_FAILED", ERR_LIB_SSL, 134}, ++ #endif ++ #ifdef SSL_R_CERT_LENGTH_MISMATCH ++ {"CERT_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_CERT_LENGTH_MISMATCH}, ++ #else ++ {"CERT_LENGTH_MISMATCH", ERR_LIB_SSL, 135}, ++ #endif ++ #ifdef SSL_R_CHALLENGE_IS_DIFFERENT ++ {"CHALLENGE_IS_DIFFERENT", ERR_LIB_SSL, SSL_R_CHALLENGE_IS_DIFFERENT}, ++ #else ++ {"CHALLENGE_IS_DIFFERENT", ERR_LIB_SSL, 136}, ++ #endif ++ #ifdef SSL_R_CIPHER_CODE_WRONG_LENGTH ++ {"CIPHER_CODE_WRONG_LENGTH", ERR_LIB_SSL, SSL_R_CIPHER_CODE_WRONG_LENGTH}, ++ #else ++ {"CIPHER_CODE_WRONG_LENGTH", ERR_LIB_SSL, 137}, ++ #endif ++ #ifdef SSL_R_CIPHER_OR_HASH_UNAVAILABLE ++ {"CIPHER_OR_HASH_UNAVAILABLE", ERR_LIB_SSL, SSL_R_CIPHER_OR_HASH_UNAVAILABLE}, ++ #else ++ {"CIPHER_OR_HASH_UNAVAILABLE", ERR_LIB_SSL, 138}, ++ #endif ++ #ifdef SSL_R_CIPHER_TABLE_SRC_ERROR ++ {"CIPHER_TABLE_SRC_ERROR", ERR_LIB_SSL, SSL_R_CIPHER_TABLE_SRC_ERROR}, ++ #else ++ {"CIPHER_TABLE_SRC_ERROR", ERR_LIB_SSL, 139}, ++ #endif ++ #ifdef SSL_R_CLIENTHELLO_TLSEXT ++ {"CLIENTHELLO_TLSEXT", ERR_LIB_SSL, SSL_R_CLIENTHELLO_TLSEXT}, ++ #else ++ {"CLIENTHELLO_TLSEXT", ERR_LIB_SSL, 226}, ++ #endif ++ #ifdef SSL_R_COMPRESSED_LENGTH_TOO_LONG ++ {"COMPRESSED_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_COMPRESSED_LENGTH_TOO_LONG}, ++ #else ++ {"COMPRESSED_LENGTH_TOO_LONG", ERR_LIB_SSL, 140}, ++ #endif ++ #ifdef SSL_R_COMPRESSION_DISABLED ++ {"COMPRESSION_DISABLED", ERR_LIB_SSL, SSL_R_COMPRESSION_DISABLED}, ++ #else ++ {"COMPRESSION_DISABLED", ERR_LIB_SSL, 343}, ++ #endif ++ #ifdef SSL_R_COMPRESSION_FAILURE ++ {"COMPRESSION_FAILURE", ERR_LIB_SSL, 
SSL_R_COMPRESSION_FAILURE}, ++ #else ++ {"COMPRESSION_FAILURE", ERR_LIB_SSL, 141}, ++ #endif ++ #ifdef SSL_R_COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE ++ {"COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE", ERR_LIB_SSL, SSL_R_COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE}, ++ #else ++ {"COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE", ERR_LIB_SSL, 307}, ++ #endif ++ #ifdef SSL_R_COMPRESSION_LIBRARY_ERROR ++ {"COMPRESSION_LIBRARY_ERROR", ERR_LIB_SSL, SSL_R_COMPRESSION_LIBRARY_ERROR}, ++ #else ++ {"COMPRESSION_LIBRARY_ERROR", ERR_LIB_SSL, 142}, ++ #endif ++ #ifdef SSL_R_CONNECTION_ID_IS_DIFFERENT ++ {"CONNECTION_ID_IS_DIFFERENT", ERR_LIB_SSL, SSL_R_CONNECTION_ID_IS_DIFFERENT}, ++ #else ++ {"CONNECTION_ID_IS_DIFFERENT", ERR_LIB_SSL, 143}, ++ #endif ++ #ifdef SSL_R_CONNECTION_TYPE_NOT_SET ++ {"CONNECTION_TYPE_NOT_SET", ERR_LIB_SSL, SSL_R_CONNECTION_TYPE_NOT_SET}, ++ #else ++ {"CONNECTION_TYPE_NOT_SET", ERR_LIB_SSL, 144}, ++ #endif ++ #ifdef SSL_R_COOKIE_MISMATCH ++ {"COOKIE_MISMATCH", ERR_LIB_SSL, SSL_R_COOKIE_MISMATCH}, ++ #else ++ {"COOKIE_MISMATCH", ERR_LIB_SSL, 308}, ++ #endif ++ #ifdef SSL_R_DATA_BETWEEN_CCS_AND_FINISHED ++ {"DATA_BETWEEN_CCS_AND_FINISHED", ERR_LIB_SSL, SSL_R_DATA_BETWEEN_CCS_AND_FINISHED}, ++ #else ++ {"DATA_BETWEEN_CCS_AND_FINISHED", ERR_LIB_SSL, 145}, ++ #endif ++ #ifdef SSL_R_DATA_LENGTH_TOO_LONG ++ {"DATA_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_DATA_LENGTH_TOO_LONG}, ++ #else ++ {"DATA_LENGTH_TOO_LONG", ERR_LIB_SSL, 146}, ++ #endif ++ #ifdef SSL_R_DECRYPTION_FAILED ++ {"DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_DECRYPTION_FAILED}, ++ #else ++ {"DECRYPTION_FAILED", ERR_LIB_SSL, 147}, ++ #endif ++ #ifdef SSL_R_DECRYPTION_FAILED_OR_BAD_RECORD_MAC ++ {"DECRYPTION_FAILED_OR_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_DECRYPTION_FAILED_OR_BAD_RECORD_MAC}, ++ #else ++ {"DECRYPTION_FAILED_OR_BAD_RECORD_MAC", ERR_LIB_SSL, 281}, ++ #endif ++ #ifdef SSL_R_DH_PUBLIC_VALUE_LENGTH_IS_WRONG ++ {"DH_PUBLIC_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, SSL_R_DH_PUBLIC_VALUE_LENGTH_IS_WRONG}, ++ #else ++ {"DH_PUBLIC_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, 148}, ++ #endif ++ #ifdef SSL_R_DIGEST_CHECK_FAILED ++ {"DIGEST_CHECK_FAILED", ERR_LIB_SSL, SSL_R_DIGEST_CHECK_FAILED}, ++ #else ++ {"DIGEST_CHECK_FAILED", ERR_LIB_SSL, 149}, ++ #endif ++ #ifdef SSL_R_DTLS_MESSAGE_TOO_BIG ++ {"DTLS_MESSAGE_TOO_BIG", ERR_LIB_SSL, SSL_R_DTLS_MESSAGE_TOO_BIG}, ++ #else ++ {"DTLS_MESSAGE_TOO_BIG", ERR_LIB_SSL, 334}, ++ #endif ++ #ifdef SSL_R_DUPLICATE_COMPRESSION_ID ++ {"DUPLICATE_COMPRESSION_ID", ERR_LIB_SSL, SSL_R_DUPLICATE_COMPRESSION_ID}, ++ #else ++ {"DUPLICATE_COMPRESSION_ID", ERR_LIB_SSL, 309}, ++ #endif ++ #ifdef SSL_R_ECC_CERT_NOT_FOR_KEY_AGREEMENT ++ {"ECC_CERT_NOT_FOR_KEY_AGREEMENT", ERR_LIB_SSL, SSL_R_ECC_CERT_NOT_FOR_KEY_AGREEMENT}, ++ #else ++ {"ECC_CERT_NOT_FOR_KEY_AGREEMENT", ERR_LIB_SSL, 317}, ++ #endif ++ #ifdef SSL_R_ECC_CERT_NOT_FOR_SIGNING ++ {"ECC_CERT_NOT_FOR_SIGNING", ERR_LIB_SSL, SSL_R_ECC_CERT_NOT_FOR_SIGNING}, ++ #else ++ {"ECC_CERT_NOT_FOR_SIGNING", ERR_LIB_SSL, 318}, ++ #endif ++ #ifdef SSL_R_ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE ++ {"ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE", ERR_LIB_SSL, SSL_R_ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE}, ++ #else ++ {"ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE", ERR_LIB_SSL, 322}, ++ #endif ++ #ifdef SSL_R_ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE ++ {"ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE", ERR_LIB_SSL, SSL_R_ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE}, ++ #else ++ {"ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE", ERR_LIB_SSL, 323}, ++ #endif ++ #ifdef SSL_R_ECGROUP_TOO_LARGE_FOR_CIPHER ++ {"ECGROUP_TOO_LARGE_FOR_CIPHER", ERR_LIB_SSL, 
SSL_R_ECGROUP_TOO_LARGE_FOR_CIPHER}, ++ #else ++ {"ECGROUP_TOO_LARGE_FOR_CIPHER", ERR_LIB_SSL, 310}, ++ #endif ++ #ifdef SSL_R_ENCRYPTED_LENGTH_TOO_LONG ++ {"ENCRYPTED_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_ENCRYPTED_LENGTH_TOO_LONG}, ++ #else ++ {"ENCRYPTED_LENGTH_TOO_LONG", ERR_LIB_SSL, 150}, ++ #endif ++ #ifdef SSL_R_ERROR_GENERATING_TMP_RSA_KEY ++ {"ERROR_GENERATING_TMP_RSA_KEY", ERR_LIB_SSL, SSL_R_ERROR_GENERATING_TMP_RSA_KEY}, ++ #else ++ {"ERROR_GENERATING_TMP_RSA_KEY", ERR_LIB_SSL, 282}, ++ #endif ++ #ifdef SSL_R_ERROR_IN_RECEIVED_CIPHER_LIST ++ {"ERROR_IN_RECEIVED_CIPHER_LIST", ERR_LIB_SSL, SSL_R_ERROR_IN_RECEIVED_CIPHER_LIST}, ++ #else ++ {"ERROR_IN_RECEIVED_CIPHER_LIST", ERR_LIB_SSL, 151}, ++ #endif ++ #ifdef SSL_R_EXCESSIVE_MESSAGE_SIZE ++ {"EXCESSIVE_MESSAGE_SIZE", ERR_LIB_SSL, SSL_R_EXCESSIVE_MESSAGE_SIZE}, ++ #else ++ {"EXCESSIVE_MESSAGE_SIZE", ERR_LIB_SSL, 152}, ++ #endif ++ #ifdef SSL_R_EXTRA_DATA_IN_MESSAGE ++ {"EXTRA_DATA_IN_MESSAGE", ERR_LIB_SSL, SSL_R_EXTRA_DATA_IN_MESSAGE}, ++ #else ++ {"EXTRA_DATA_IN_MESSAGE", ERR_LIB_SSL, 153}, ++ #endif ++ #ifdef SSL_R_GOT_A_FIN_BEFORE_A_CCS ++ {"GOT_A_FIN_BEFORE_A_CCS", ERR_LIB_SSL, SSL_R_GOT_A_FIN_BEFORE_A_CCS}, ++ #else ++ {"GOT_A_FIN_BEFORE_A_CCS", ERR_LIB_SSL, 154}, ++ #endif ++ #ifdef SSL_R_HTTPS_PROXY_REQUEST ++ {"HTTPS_PROXY_REQUEST", ERR_LIB_SSL, SSL_R_HTTPS_PROXY_REQUEST}, ++ #else ++ {"HTTPS_PROXY_REQUEST", ERR_LIB_SSL, 155}, ++ #endif ++ #ifdef SSL_R_HTTP_REQUEST ++ {"HTTP_REQUEST", ERR_LIB_SSL, SSL_R_HTTP_REQUEST}, ++ #else ++ {"HTTP_REQUEST", ERR_LIB_SSL, 156}, ++ #endif ++ #ifdef SSL_R_ILLEGAL_PADDING ++ {"ILLEGAL_PADDING", ERR_LIB_SSL, SSL_R_ILLEGAL_PADDING}, ++ #else ++ {"ILLEGAL_PADDING", ERR_LIB_SSL, 283}, ++ #endif ++ #ifdef SSL_R_INCONSISTENT_COMPRESSION ++ {"INCONSISTENT_COMPRESSION", ERR_LIB_SSL, SSL_R_INCONSISTENT_COMPRESSION}, ++ #else ++ {"INCONSISTENT_COMPRESSION", ERR_LIB_SSL, 340}, ++ #endif ++ #ifdef SSL_R_INVALID_CHALLENGE_LENGTH ++ {"INVALID_CHALLENGE_LENGTH", ERR_LIB_SSL, SSL_R_INVALID_CHALLENGE_LENGTH}, ++ #else ++ {"INVALID_CHALLENGE_LENGTH", ERR_LIB_SSL, 158}, ++ #endif ++ #ifdef SSL_R_INVALID_COMMAND ++ {"INVALID_COMMAND", ERR_LIB_SSL, SSL_R_INVALID_COMMAND}, ++ #else ++ {"INVALID_COMMAND", ERR_LIB_SSL, 280}, ++ #endif ++ #ifdef SSL_R_INVALID_COMPRESSION_ALGORITHM ++ {"INVALID_COMPRESSION_ALGORITHM", ERR_LIB_SSL, SSL_R_INVALID_COMPRESSION_ALGORITHM}, ++ #else ++ {"INVALID_COMPRESSION_ALGORITHM", ERR_LIB_SSL, 341}, ++ #endif ++ #ifdef SSL_R_INVALID_PURPOSE ++ {"INVALID_PURPOSE", ERR_LIB_SSL, SSL_R_INVALID_PURPOSE}, ++ #else ++ {"INVALID_PURPOSE", ERR_LIB_SSL, 278}, ++ #endif ++ #ifdef SSL_R_INVALID_STATUS_RESPONSE ++ {"INVALID_STATUS_RESPONSE", ERR_LIB_SSL, SSL_R_INVALID_STATUS_RESPONSE}, ++ #else ++ {"INVALID_STATUS_RESPONSE", ERR_LIB_SSL, 328}, ++ #endif ++ #ifdef SSL_R_INVALID_TICKET_KEYS_LENGTH ++ {"INVALID_TICKET_KEYS_LENGTH", ERR_LIB_SSL, SSL_R_INVALID_TICKET_KEYS_LENGTH}, ++ #else ++ {"INVALID_TICKET_KEYS_LENGTH", ERR_LIB_SSL, 325}, ++ #endif ++ #ifdef SSL_R_INVALID_TRUST ++ {"INVALID_TRUST", ERR_LIB_SSL, SSL_R_INVALID_TRUST}, ++ #else ++ {"INVALID_TRUST", ERR_LIB_SSL, 279}, ++ #endif ++ #ifdef SSL_R_KEY_ARG_TOO_LONG ++ {"KEY_ARG_TOO_LONG", ERR_LIB_SSL, SSL_R_KEY_ARG_TOO_LONG}, ++ #else ++ {"KEY_ARG_TOO_LONG", ERR_LIB_SSL, 284}, ++ #endif ++ #ifdef SSL_R_KRB5 ++ {"KRB5", ERR_LIB_SSL, SSL_R_KRB5}, ++ #else ++ {"KRB5", ERR_LIB_SSL, 285}, ++ #endif ++ #ifdef SSL_R_KRB5_C_CC_PRINC ++ {"KRB5_C_CC_PRINC", ERR_LIB_SSL, SSL_R_KRB5_C_CC_PRINC}, ++ #else ++ {"KRB5_C_CC_PRINC", ERR_LIB_SSL, 286}, ++ 
#endif ++ #ifdef SSL_R_KRB5_C_GET_CRED ++ {"KRB5_C_GET_CRED", ERR_LIB_SSL, SSL_R_KRB5_C_GET_CRED}, ++ #else ++ {"KRB5_C_GET_CRED", ERR_LIB_SSL, 287}, ++ #endif ++ #ifdef SSL_R_KRB5_C_INIT ++ {"KRB5_C_INIT", ERR_LIB_SSL, SSL_R_KRB5_C_INIT}, ++ #else ++ {"KRB5_C_INIT", ERR_LIB_SSL, 288}, ++ #endif ++ #ifdef SSL_R_KRB5_C_MK_REQ ++ {"KRB5_C_MK_REQ", ERR_LIB_SSL, SSL_R_KRB5_C_MK_REQ}, ++ #else ++ {"KRB5_C_MK_REQ", ERR_LIB_SSL, 289}, ++ #endif ++ #ifdef SSL_R_KRB5_S_BAD_TICKET ++ {"KRB5_S_BAD_TICKET", ERR_LIB_SSL, SSL_R_KRB5_S_BAD_TICKET}, ++ #else ++ {"KRB5_S_BAD_TICKET", ERR_LIB_SSL, 290}, ++ #endif ++ #ifdef SSL_R_KRB5_S_INIT ++ {"KRB5_S_INIT", ERR_LIB_SSL, SSL_R_KRB5_S_INIT}, ++ #else ++ {"KRB5_S_INIT", ERR_LIB_SSL, 291}, ++ #endif ++ #ifdef SSL_R_KRB5_S_RD_REQ ++ {"KRB5_S_RD_REQ", ERR_LIB_SSL, SSL_R_KRB5_S_RD_REQ}, ++ #else ++ {"KRB5_S_RD_REQ", ERR_LIB_SSL, 292}, ++ #endif ++ #ifdef SSL_R_KRB5_S_TKT_EXPIRED ++ {"KRB5_S_TKT_EXPIRED", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_EXPIRED}, ++ #else ++ {"KRB5_S_TKT_EXPIRED", ERR_LIB_SSL, 293}, ++ #endif ++ #ifdef SSL_R_KRB5_S_TKT_NYV ++ {"KRB5_S_TKT_NYV", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_NYV}, ++ #else ++ {"KRB5_S_TKT_NYV", ERR_LIB_SSL, 294}, ++ #endif ++ #ifdef SSL_R_KRB5_S_TKT_SKEW ++ {"KRB5_S_TKT_SKEW", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_SKEW}, ++ #else ++ {"KRB5_S_TKT_SKEW", ERR_LIB_SSL, 295}, ++ #endif ++ #ifdef SSL_R_LENGTH_MISMATCH ++ {"LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_LENGTH_MISMATCH}, ++ #else ++ {"LENGTH_MISMATCH", ERR_LIB_SSL, 159}, ++ #endif ++ #ifdef SSL_R_LENGTH_TOO_SHORT ++ {"LENGTH_TOO_SHORT", ERR_LIB_SSL, SSL_R_LENGTH_TOO_SHORT}, ++ #else ++ {"LENGTH_TOO_SHORT", ERR_LIB_SSL, 160}, ++ #endif ++ #ifdef SSL_R_LIBRARY_BUG ++ {"LIBRARY_BUG", ERR_LIB_SSL, SSL_R_LIBRARY_BUG}, ++ #else ++ {"LIBRARY_BUG", ERR_LIB_SSL, 274}, ++ #endif ++ #ifdef SSL_R_LIBRARY_HAS_NO_CIPHERS ++ {"LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, SSL_R_LIBRARY_HAS_NO_CIPHERS}, ++ #else ++ {"LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, 161}, ++ #endif ++ #ifdef SSL_R_MESSAGE_TOO_LONG ++ {"MESSAGE_TOO_LONG", ERR_LIB_SSL, SSL_R_MESSAGE_TOO_LONG}, ++ #else ++ {"MESSAGE_TOO_LONG", ERR_LIB_SSL, 296}, ++ #endif ++ #ifdef SSL_R_MISSING_DH_DSA_CERT ++ {"MISSING_DH_DSA_CERT", ERR_LIB_SSL, SSL_R_MISSING_DH_DSA_CERT}, ++ #else ++ {"MISSING_DH_DSA_CERT", ERR_LIB_SSL, 162}, ++ #endif ++ #ifdef SSL_R_MISSING_DH_KEY ++ {"MISSING_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_DH_KEY}, ++ #else ++ {"MISSING_DH_KEY", ERR_LIB_SSL, 163}, ++ #endif ++ #ifdef SSL_R_MISSING_DH_RSA_CERT ++ {"MISSING_DH_RSA_CERT", ERR_LIB_SSL, SSL_R_MISSING_DH_RSA_CERT}, ++ #else ++ {"MISSING_DH_RSA_CERT", ERR_LIB_SSL, 164}, ++ #endif ++ #ifdef SSL_R_MISSING_DSA_SIGNING_CERT ++ {"MISSING_DSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_DSA_SIGNING_CERT}, ++ #else ++ {"MISSING_DSA_SIGNING_CERT", ERR_LIB_SSL, 165}, ++ #endif ++ #ifdef SSL_R_MISSING_EXPORT_TMP_DH_KEY ++ {"MISSING_EXPORT_TMP_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_EXPORT_TMP_DH_KEY}, ++ #else ++ {"MISSING_EXPORT_TMP_DH_KEY", ERR_LIB_SSL, 166}, ++ #endif ++ #ifdef SSL_R_MISSING_EXPORT_TMP_RSA_KEY ++ {"MISSING_EXPORT_TMP_RSA_KEY", ERR_LIB_SSL, SSL_R_MISSING_EXPORT_TMP_RSA_KEY}, ++ #else ++ {"MISSING_EXPORT_TMP_RSA_KEY", ERR_LIB_SSL, 167}, ++ #endif ++ #ifdef SSL_R_MISSING_RSA_CERTIFICATE ++ {"MISSING_RSA_CERTIFICATE", ERR_LIB_SSL, SSL_R_MISSING_RSA_CERTIFICATE}, ++ #else ++ {"MISSING_RSA_CERTIFICATE", ERR_LIB_SSL, 168}, ++ #endif ++ #ifdef SSL_R_MISSING_RSA_ENCRYPTING_CERT ++ {"MISSING_RSA_ENCRYPTING_CERT", ERR_LIB_SSL, SSL_R_MISSING_RSA_ENCRYPTING_CERT}, ++ #else ++ 
{"MISSING_RSA_ENCRYPTING_CERT", ERR_LIB_SSL, 169}, ++ #endif ++ #ifdef SSL_R_MISSING_RSA_SIGNING_CERT ++ {"MISSING_RSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_RSA_SIGNING_CERT}, ++ #else ++ {"MISSING_RSA_SIGNING_CERT", ERR_LIB_SSL, 170}, ++ #endif ++ #ifdef SSL_R_MISSING_TMP_DH_KEY ++ {"MISSING_TMP_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_DH_KEY}, ++ #else ++ {"MISSING_TMP_DH_KEY", ERR_LIB_SSL, 171}, ++ #endif ++ #ifdef SSL_R_MISSING_TMP_ECDH_KEY ++ {"MISSING_TMP_ECDH_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_ECDH_KEY}, ++ #else ++ {"MISSING_TMP_ECDH_KEY", ERR_LIB_SSL, 311}, ++ #endif ++ #ifdef SSL_R_MISSING_TMP_RSA_KEY ++ {"MISSING_TMP_RSA_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_RSA_KEY}, ++ #else ++ {"MISSING_TMP_RSA_KEY", ERR_LIB_SSL, 172}, ++ #endif ++ #ifdef SSL_R_MISSING_TMP_RSA_PKEY ++ {"MISSING_TMP_RSA_PKEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_RSA_PKEY}, ++ #else ++ {"MISSING_TMP_RSA_PKEY", ERR_LIB_SSL, 173}, ++ #endif ++ #ifdef SSL_R_MISSING_VERIFY_MESSAGE ++ {"MISSING_VERIFY_MESSAGE", ERR_LIB_SSL, SSL_R_MISSING_VERIFY_MESSAGE}, ++ #else ++ {"MISSING_VERIFY_MESSAGE", ERR_LIB_SSL, 174}, ++ #endif ++ #ifdef SSL_R_NON_SSLV2_INITIAL_PACKET ++ {"NON_SSLV2_INITIAL_PACKET", ERR_LIB_SSL, SSL_R_NON_SSLV2_INITIAL_PACKET}, ++ #else ++ {"NON_SSLV2_INITIAL_PACKET", ERR_LIB_SSL, 175}, ++ #endif ++ #ifdef SSL_R_NO_CERTIFICATES_RETURNED ++ {"NO_CERTIFICATES_RETURNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATES_RETURNED}, ++ #else ++ {"NO_CERTIFICATES_RETURNED", ERR_LIB_SSL, 176}, ++ #endif ++ #ifdef SSL_R_NO_CERTIFICATE_ASSIGNED ++ {"NO_CERTIFICATE_ASSIGNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_ASSIGNED}, ++ #else ++ {"NO_CERTIFICATE_ASSIGNED", ERR_LIB_SSL, 177}, ++ #endif ++ #ifdef SSL_R_NO_CERTIFICATE_RETURNED ++ {"NO_CERTIFICATE_RETURNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_RETURNED}, ++ #else ++ {"NO_CERTIFICATE_RETURNED", ERR_LIB_SSL, 178}, ++ #endif ++ #ifdef SSL_R_NO_CERTIFICATE_SET ++ {"NO_CERTIFICATE_SET", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_SET}, ++ #else ++ {"NO_CERTIFICATE_SET", ERR_LIB_SSL, 179}, ++ #endif ++ #ifdef SSL_R_NO_CERTIFICATE_SPECIFIED ++ {"NO_CERTIFICATE_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_SPECIFIED}, ++ #else ++ {"NO_CERTIFICATE_SPECIFIED", ERR_LIB_SSL, 180}, ++ #endif ++ #ifdef SSL_R_NO_CIPHERS_AVAILABLE ++ {"NO_CIPHERS_AVAILABLE", ERR_LIB_SSL, SSL_R_NO_CIPHERS_AVAILABLE}, ++ #else ++ {"NO_CIPHERS_AVAILABLE", ERR_LIB_SSL, 181}, ++ #endif ++ #ifdef SSL_R_NO_CIPHERS_PASSED ++ {"NO_CIPHERS_PASSED", ERR_LIB_SSL, SSL_R_NO_CIPHERS_PASSED}, ++ #else ++ {"NO_CIPHERS_PASSED", ERR_LIB_SSL, 182}, ++ #endif ++ #ifdef SSL_R_NO_CIPHERS_SPECIFIED ++ {"NO_CIPHERS_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_CIPHERS_SPECIFIED}, ++ #else ++ {"NO_CIPHERS_SPECIFIED", ERR_LIB_SSL, 183}, ++ #endif ++ #ifdef SSL_R_NO_CIPHER_LIST ++ {"NO_CIPHER_LIST", ERR_LIB_SSL, SSL_R_NO_CIPHER_LIST}, ++ #else ++ {"NO_CIPHER_LIST", ERR_LIB_SSL, 184}, ++ #endif ++ #ifdef SSL_R_NO_CIPHER_MATCH ++ {"NO_CIPHER_MATCH", ERR_LIB_SSL, SSL_R_NO_CIPHER_MATCH}, ++ #else ++ {"NO_CIPHER_MATCH", ERR_LIB_SSL, 185}, ++ #endif ++ #ifdef SSL_R_NO_CLIENT_CERT_METHOD ++ {"NO_CLIENT_CERT_METHOD", ERR_LIB_SSL, SSL_R_NO_CLIENT_CERT_METHOD}, ++ #else ++ {"NO_CLIENT_CERT_METHOD", ERR_LIB_SSL, 331}, ++ #endif ++ #ifdef SSL_R_NO_CLIENT_CERT_RECEIVED ++ {"NO_CLIENT_CERT_RECEIVED", ERR_LIB_SSL, SSL_R_NO_CLIENT_CERT_RECEIVED}, ++ #else ++ {"NO_CLIENT_CERT_RECEIVED", ERR_LIB_SSL, 186}, ++ #endif ++ #ifdef SSL_R_NO_COMPRESSION_SPECIFIED ++ {"NO_COMPRESSION_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_COMPRESSION_SPECIFIED}, ++ #else ++ {"NO_COMPRESSION_SPECIFIED", 
ERR_LIB_SSL, 187}, ++ #endif ++ #ifdef SSL_R_NO_GOST_CERTIFICATE_SENT_BY_PEER ++ {"NO_GOST_CERTIFICATE_SENT_BY_PEER", ERR_LIB_SSL, SSL_R_NO_GOST_CERTIFICATE_SENT_BY_PEER}, ++ #else ++ {"NO_GOST_CERTIFICATE_SENT_BY_PEER", ERR_LIB_SSL, 330}, ++ #endif ++ #ifdef SSL_R_NO_METHOD_SPECIFIED ++ {"NO_METHOD_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_METHOD_SPECIFIED}, ++ #else ++ {"NO_METHOD_SPECIFIED", ERR_LIB_SSL, 188}, ++ #endif ++ #ifdef SSL_R_NO_PRIVATEKEY ++ {"NO_PRIVATEKEY", ERR_LIB_SSL, SSL_R_NO_PRIVATEKEY}, ++ #else ++ {"NO_PRIVATEKEY", ERR_LIB_SSL, 189}, ++ #endif ++ #ifdef SSL_R_NO_PRIVATE_KEY_ASSIGNED ++ {"NO_PRIVATE_KEY_ASSIGNED", ERR_LIB_SSL, SSL_R_NO_PRIVATE_KEY_ASSIGNED}, ++ #else ++ {"NO_PRIVATE_KEY_ASSIGNED", ERR_LIB_SSL, 190}, ++ #endif ++ #ifdef SSL_R_NO_PROTOCOLS_AVAILABLE ++ {"NO_PROTOCOLS_AVAILABLE", ERR_LIB_SSL, SSL_R_NO_PROTOCOLS_AVAILABLE}, ++ #else ++ {"NO_PROTOCOLS_AVAILABLE", ERR_LIB_SSL, 191}, ++ #endif ++ #ifdef SSL_R_NO_PUBLICKEY ++ {"NO_PUBLICKEY", ERR_LIB_SSL, SSL_R_NO_PUBLICKEY}, ++ #else ++ {"NO_PUBLICKEY", ERR_LIB_SSL, 192}, ++ #endif ++ #ifdef SSL_R_NO_RENEGOTIATION ++ {"NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_NO_RENEGOTIATION}, ++ #else ++ {"NO_RENEGOTIATION", ERR_LIB_SSL, 339}, ++ #endif ++ #ifdef SSL_R_NO_REQUIRED_DIGEST ++ {"NO_REQUIRED_DIGEST", ERR_LIB_SSL, SSL_R_NO_REQUIRED_DIGEST}, ++ #else ++ {"NO_REQUIRED_DIGEST", ERR_LIB_SSL, 324}, ++ #endif ++ #ifdef SSL_R_NO_SHARED_CIPHER ++ {"NO_SHARED_CIPHER", ERR_LIB_SSL, SSL_R_NO_SHARED_CIPHER}, ++ #else ++ {"NO_SHARED_CIPHER", ERR_LIB_SSL, 193}, ++ #endif ++ #ifdef SSL_R_NO_VERIFY_CALLBACK ++ {"NO_VERIFY_CALLBACK", ERR_LIB_SSL, SSL_R_NO_VERIFY_CALLBACK}, ++ #else ++ {"NO_VERIFY_CALLBACK", ERR_LIB_SSL, 194}, ++ #endif ++ #ifdef SSL_R_NULL_SSL_CTX ++ {"NULL_SSL_CTX", ERR_LIB_SSL, SSL_R_NULL_SSL_CTX}, ++ #else ++ {"NULL_SSL_CTX", ERR_LIB_SSL, 195}, ++ #endif ++ #ifdef SSL_R_NULL_SSL_METHOD_PASSED ++ {"NULL_SSL_METHOD_PASSED", ERR_LIB_SSL, SSL_R_NULL_SSL_METHOD_PASSED}, ++ #else ++ {"NULL_SSL_METHOD_PASSED", ERR_LIB_SSL, 196}, ++ #endif ++ #ifdef SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED ++ {"OLD_SESSION_CIPHER_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED}, ++ #else ++ {"OLD_SESSION_CIPHER_NOT_RETURNED", ERR_LIB_SSL, 197}, ++ #endif ++ #ifdef SSL_R_OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED ++ {"OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED}, ++ #else ++ {"OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED", ERR_LIB_SSL, 344}, ++ #endif ++ #ifdef SSL_R_ONLY_TLS_ALLOWED_IN_FIPS_MODE ++ {"ONLY_TLS_ALLOWED_IN_FIPS_MODE", ERR_LIB_SSL, SSL_R_ONLY_TLS_ALLOWED_IN_FIPS_MODE}, ++ #else ++ {"ONLY_TLS_ALLOWED_IN_FIPS_MODE", ERR_LIB_SSL, 297}, ++ #endif ++ #ifdef SSL_R_OPAQUE_PRF_INPUT_TOO_LONG ++ {"OPAQUE_PRF_INPUT_TOO_LONG", ERR_LIB_SSL, SSL_R_OPAQUE_PRF_INPUT_TOO_LONG}, ++ #else ++ {"OPAQUE_PRF_INPUT_TOO_LONG", ERR_LIB_SSL, 327}, ++ #endif ++ #ifdef SSL_R_PACKET_LENGTH_TOO_LONG ++ {"PACKET_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_PACKET_LENGTH_TOO_LONG}, ++ #else ++ {"PACKET_LENGTH_TOO_LONG", ERR_LIB_SSL, 198}, ++ #endif ++ #ifdef SSL_R_PARSE_TLSEXT ++ {"PARSE_TLSEXT", ERR_LIB_SSL, SSL_R_PARSE_TLSEXT}, ++ #else ++ {"PARSE_TLSEXT", ERR_LIB_SSL, 227}, ++ #endif ++ #ifdef SSL_R_PATH_TOO_LONG ++ {"PATH_TOO_LONG", ERR_LIB_SSL, SSL_R_PATH_TOO_LONG}, ++ #else ++ {"PATH_TOO_LONG", ERR_LIB_SSL, 270}, ++ #endif ++ #ifdef SSL_R_PEER_DID_NOT_RETURN_A_CERTIFICATE ++ {"PEER_DID_NOT_RETURN_A_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_DID_NOT_RETURN_A_CERTIFICATE}, ++ #else 
++ {"PEER_DID_NOT_RETURN_A_CERTIFICATE", ERR_LIB_SSL, 199}, ++ #endif ++ #ifdef SSL_R_PEER_ERROR ++ {"PEER_ERROR", ERR_LIB_SSL, SSL_R_PEER_ERROR}, ++ #else ++ {"PEER_ERROR", ERR_LIB_SSL, 200}, ++ #endif ++ #ifdef SSL_R_PEER_ERROR_CERTIFICATE ++ {"PEER_ERROR_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_ERROR_CERTIFICATE}, ++ #else ++ {"PEER_ERROR_CERTIFICATE", ERR_LIB_SSL, 201}, ++ #endif ++ #ifdef SSL_R_PEER_ERROR_NO_CERTIFICATE ++ {"PEER_ERROR_NO_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_ERROR_NO_CERTIFICATE}, ++ #else ++ {"PEER_ERROR_NO_CERTIFICATE", ERR_LIB_SSL, 202}, ++ #endif ++ #ifdef SSL_R_PEER_ERROR_NO_CIPHER ++ {"PEER_ERROR_NO_CIPHER", ERR_LIB_SSL, SSL_R_PEER_ERROR_NO_CIPHER}, ++ #else ++ {"PEER_ERROR_NO_CIPHER", ERR_LIB_SSL, 203}, ++ #endif ++ #ifdef SSL_R_PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE ++ {"PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE", ERR_LIB_SSL, SSL_R_PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE}, ++ #else ++ {"PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE", ERR_LIB_SSL, 204}, ++ #endif ++ #ifdef SSL_R_PRE_MAC_LENGTH_TOO_LONG ++ {"PRE_MAC_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_PRE_MAC_LENGTH_TOO_LONG}, ++ #else ++ {"PRE_MAC_LENGTH_TOO_LONG", ERR_LIB_SSL, 205}, ++ #endif ++ #ifdef SSL_R_PROBLEMS_MAPPING_CIPHER_FUNCTIONS ++ {"PROBLEMS_MAPPING_CIPHER_FUNCTIONS", ERR_LIB_SSL, SSL_R_PROBLEMS_MAPPING_CIPHER_FUNCTIONS}, ++ #else ++ {"PROBLEMS_MAPPING_CIPHER_FUNCTIONS", ERR_LIB_SSL, 206}, ++ #endif ++ #ifdef SSL_R_PROTOCOL_IS_SHUTDOWN ++ {"PROTOCOL_IS_SHUTDOWN", ERR_LIB_SSL, SSL_R_PROTOCOL_IS_SHUTDOWN}, ++ #else ++ {"PROTOCOL_IS_SHUTDOWN", ERR_LIB_SSL, 207}, ++ #endif ++ #ifdef SSL_R_PSK_IDENTITY_NOT_FOUND ++ {"PSK_IDENTITY_NOT_FOUND", ERR_LIB_SSL, SSL_R_PSK_IDENTITY_NOT_FOUND}, ++ #else ++ {"PSK_IDENTITY_NOT_FOUND", ERR_LIB_SSL, 223}, ++ #endif ++ #ifdef SSL_R_PSK_NO_CLIENT_CB ++ {"PSK_NO_CLIENT_CB", ERR_LIB_SSL, SSL_R_PSK_NO_CLIENT_CB}, ++ #else ++ {"PSK_NO_CLIENT_CB", ERR_LIB_SSL, 224}, ++ #endif ++ #ifdef SSL_R_PSK_NO_SERVER_CB ++ {"PSK_NO_SERVER_CB", ERR_LIB_SSL, SSL_R_PSK_NO_SERVER_CB}, ++ #else ++ {"PSK_NO_SERVER_CB", ERR_LIB_SSL, 225}, ++ #endif ++ #ifdef SSL_R_PUBLIC_KEY_ENCRYPT_ERROR ++ {"PUBLIC_KEY_ENCRYPT_ERROR", ERR_LIB_SSL, SSL_R_PUBLIC_KEY_ENCRYPT_ERROR}, ++ #else ++ {"PUBLIC_KEY_ENCRYPT_ERROR", ERR_LIB_SSL, 208}, ++ #endif ++ #ifdef SSL_R_PUBLIC_KEY_IS_NOT_RSA ++ {"PUBLIC_KEY_IS_NOT_RSA", ERR_LIB_SSL, SSL_R_PUBLIC_KEY_IS_NOT_RSA}, ++ #else ++ {"PUBLIC_KEY_IS_NOT_RSA", ERR_LIB_SSL, 209}, ++ #endif ++ #ifdef SSL_R_PUBLIC_KEY_NOT_RSA ++ {"PUBLIC_KEY_NOT_RSA", ERR_LIB_SSL, SSL_R_PUBLIC_KEY_NOT_RSA}, ++ #else ++ {"PUBLIC_KEY_NOT_RSA", ERR_LIB_SSL, 210}, ++ #endif ++ #ifdef SSL_R_READ_BIO_NOT_SET ++ {"READ_BIO_NOT_SET", ERR_LIB_SSL, SSL_R_READ_BIO_NOT_SET}, ++ #else ++ {"READ_BIO_NOT_SET", ERR_LIB_SSL, 211}, ++ #endif ++ #ifdef SSL_R_READ_TIMEOUT_EXPIRED ++ {"READ_TIMEOUT_EXPIRED", ERR_LIB_SSL, SSL_R_READ_TIMEOUT_EXPIRED}, ++ #else ++ {"READ_TIMEOUT_EXPIRED", ERR_LIB_SSL, 312}, ++ #endif ++ #ifdef SSL_R_READ_WRONG_PACKET_TYPE ++ {"READ_WRONG_PACKET_TYPE", ERR_LIB_SSL, SSL_R_READ_WRONG_PACKET_TYPE}, ++ #else ++ {"READ_WRONG_PACKET_TYPE", ERR_LIB_SSL, 212}, ++ #endif ++ #ifdef SSL_R_RECORD_LENGTH_MISMATCH ++ {"RECORD_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_RECORD_LENGTH_MISMATCH}, ++ #else ++ {"RECORD_LENGTH_MISMATCH", ERR_LIB_SSL, 213}, ++ #endif ++ #ifdef SSL_R_RECORD_TOO_LARGE ++ {"RECORD_TOO_LARGE", ERR_LIB_SSL, SSL_R_RECORD_TOO_LARGE}, ++ #else ++ {"RECORD_TOO_LARGE", ERR_LIB_SSL, 214}, ++ #endif ++ #ifdef SSL_R_RECORD_TOO_SMALL ++ {"RECORD_TOO_SMALL", ERR_LIB_SSL, SSL_R_RECORD_TOO_SMALL}, ++ 
#else ++ {"RECORD_TOO_SMALL", ERR_LIB_SSL, 298}, ++ #endif ++ #ifdef SSL_R_RENEGOTIATE_EXT_TOO_LONG ++ {"RENEGOTIATE_EXT_TOO_LONG", ERR_LIB_SSL, SSL_R_RENEGOTIATE_EXT_TOO_LONG}, ++ #else ++ {"RENEGOTIATE_EXT_TOO_LONG", ERR_LIB_SSL, 335}, ++ #endif ++ #ifdef SSL_R_RENEGOTIATION_ENCODING_ERR ++ {"RENEGOTIATION_ENCODING_ERR", ERR_LIB_SSL, SSL_R_RENEGOTIATION_ENCODING_ERR}, ++ #else ++ {"RENEGOTIATION_ENCODING_ERR", ERR_LIB_SSL, 336}, ++ #endif ++ #ifdef SSL_R_RENEGOTIATION_MISMATCH ++ {"RENEGOTIATION_MISMATCH", ERR_LIB_SSL, SSL_R_RENEGOTIATION_MISMATCH}, ++ #else ++ {"RENEGOTIATION_MISMATCH", ERR_LIB_SSL, 337}, ++ #endif ++ #ifdef SSL_R_REQUIRED_CIPHER_MISSING ++ {"REQUIRED_CIPHER_MISSING", ERR_LIB_SSL, SSL_R_REQUIRED_CIPHER_MISSING}, ++ #else ++ {"REQUIRED_CIPHER_MISSING", ERR_LIB_SSL, 215}, ++ #endif ++ #ifdef SSL_R_REQUIRED_COMPRESSSION_ALGORITHM_MISSING ++ {"REQUIRED_COMPRESSSION_ALGORITHM_MISSING", ERR_LIB_SSL, SSL_R_REQUIRED_COMPRESSSION_ALGORITHM_MISSING}, ++ #else ++ {"REQUIRED_COMPRESSSION_ALGORITHM_MISSING", ERR_LIB_SSL, 342}, ++ #endif ++ #ifdef SSL_R_REUSE_CERT_LENGTH_NOT_ZERO ++ {"REUSE_CERT_LENGTH_NOT_ZERO", ERR_LIB_SSL, SSL_R_REUSE_CERT_LENGTH_NOT_ZERO}, ++ #else ++ {"REUSE_CERT_LENGTH_NOT_ZERO", ERR_LIB_SSL, 216}, ++ #endif ++ #ifdef SSL_R_REUSE_CERT_TYPE_NOT_ZERO ++ {"REUSE_CERT_TYPE_NOT_ZERO", ERR_LIB_SSL, SSL_R_REUSE_CERT_TYPE_NOT_ZERO}, ++ #else ++ {"REUSE_CERT_TYPE_NOT_ZERO", ERR_LIB_SSL, 217}, ++ #endif ++ #ifdef SSL_R_REUSE_CIPHER_LIST_NOT_ZERO ++ {"REUSE_CIPHER_LIST_NOT_ZERO", ERR_LIB_SSL, SSL_R_REUSE_CIPHER_LIST_NOT_ZERO}, ++ #else ++ {"REUSE_CIPHER_LIST_NOT_ZERO", ERR_LIB_SSL, 218}, ++ #endif ++ #ifdef SSL_R_SCSV_RECEIVED_WHEN_RENEGOTIATING ++ {"SCSV_RECEIVED_WHEN_RENEGOTIATING", ERR_LIB_SSL, SSL_R_SCSV_RECEIVED_WHEN_RENEGOTIATING}, ++ #else ++ {"SCSV_RECEIVED_WHEN_RENEGOTIATING", ERR_LIB_SSL, 345}, ++ #endif ++ #ifdef SSL_R_SERVERHELLO_TLSEXT ++ {"SERVERHELLO_TLSEXT", ERR_LIB_SSL, SSL_R_SERVERHELLO_TLSEXT}, ++ #else ++ {"SERVERHELLO_TLSEXT", ERR_LIB_SSL, 275}, ++ #endif ++ #ifdef SSL_R_SESSION_ID_CONTEXT_UNINITIALIZED ++ {"SESSION_ID_CONTEXT_UNINITIALIZED", ERR_LIB_SSL, SSL_R_SESSION_ID_CONTEXT_UNINITIALIZED}, ++ #else ++ {"SESSION_ID_CONTEXT_UNINITIALIZED", ERR_LIB_SSL, 277}, ++ #endif ++ #ifdef SSL_R_SHORT_READ ++ {"SHORT_READ", ERR_LIB_SSL, SSL_R_SHORT_READ}, ++ #else ++ {"SHORT_READ", ERR_LIB_SSL, 219}, ++ #endif ++ #ifdef SSL_R_SIGNATURE_FOR_NON_SIGNING_CERTIFICATE ++ {"SIGNATURE_FOR_NON_SIGNING_CERTIFICATE", ERR_LIB_SSL, SSL_R_SIGNATURE_FOR_NON_SIGNING_CERTIFICATE}, ++ #else ++ {"SIGNATURE_FOR_NON_SIGNING_CERTIFICATE", ERR_LIB_SSL, 220}, ++ #endif ++ #ifdef SSL_R_SSL23_DOING_SESSION_ID_REUSE ++ {"SSL23_DOING_SESSION_ID_REUSE", ERR_LIB_SSL, SSL_R_SSL23_DOING_SESSION_ID_REUSE}, ++ #else ++ {"SSL23_DOING_SESSION_ID_REUSE", ERR_LIB_SSL, 221}, ++ #endif ++ #ifdef SSL_R_SSL2_CONNECTION_ID_TOO_LONG ++ {"SSL2_CONNECTION_ID_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL2_CONNECTION_ID_TOO_LONG}, ++ #else ++ {"SSL2_CONNECTION_ID_TOO_LONG", ERR_LIB_SSL, 299}, ++ #endif ++ #ifdef SSL_R_SSL3_EXT_INVALID_ECPOINTFORMAT ++ {"SSL3_EXT_INVALID_ECPOINTFORMAT", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_ECPOINTFORMAT}, ++ #else ++ {"SSL3_EXT_INVALID_ECPOINTFORMAT", ERR_LIB_SSL, 321}, ++ #endif ++ #ifdef SSL_R_SSL3_EXT_INVALID_SERVERNAME ++ {"SSL3_EXT_INVALID_SERVERNAME", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_SERVERNAME}, ++ #else ++ {"SSL3_EXT_INVALID_SERVERNAME", ERR_LIB_SSL, 319}, ++ #endif ++ #ifdef SSL_R_SSL3_EXT_INVALID_SERVERNAME_TYPE ++ {"SSL3_EXT_INVALID_SERVERNAME_TYPE", ERR_LIB_SSL, 
SSL_R_SSL3_EXT_INVALID_SERVERNAME_TYPE}, ++ #else ++ {"SSL3_EXT_INVALID_SERVERNAME_TYPE", ERR_LIB_SSL, 320}, ++ #endif ++ #ifdef SSL_R_SSL3_SESSION_ID_TOO_LONG ++ {"SSL3_SESSION_ID_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL3_SESSION_ID_TOO_LONG}, ++ #else ++ {"SSL3_SESSION_ID_TOO_LONG", ERR_LIB_SSL, 300}, ++ #endif ++ #ifdef SSL_R_SSL3_SESSION_ID_TOO_SHORT ++ {"SSL3_SESSION_ID_TOO_SHORT", ERR_LIB_SSL, SSL_R_SSL3_SESSION_ID_TOO_SHORT}, ++ #else ++ {"SSL3_SESSION_ID_TOO_SHORT", ERR_LIB_SSL, 222}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_BAD_CERTIFICATE ++ {"SSLV3_ALERT_BAD_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_CERTIFICATE}, ++ #else ++ {"SSLV3_ALERT_BAD_CERTIFICATE", ERR_LIB_SSL, 1042}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_BAD_RECORD_MAC ++ {"SSLV3_ALERT_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_RECORD_MAC}, ++ #else ++ {"SSLV3_ALERT_BAD_RECORD_MAC", ERR_LIB_SSL, 1020}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED ++ {"SSLV3_ALERT_CERTIFICATE_EXPIRED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED}, ++ #else ++ {"SSLV3_ALERT_CERTIFICATE_EXPIRED", ERR_LIB_SSL, 1045}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED ++ {"SSLV3_ALERT_CERTIFICATE_REVOKED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED}, ++ #else ++ {"SSLV3_ALERT_CERTIFICATE_REVOKED", ERR_LIB_SSL, 1044}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN ++ {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN}, ++ #else ++ {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", ERR_LIB_SSL, 1046}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE ++ {"SSLV3_ALERT_DECOMPRESSION_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE}, ++ #else ++ {"SSLV3_ALERT_DECOMPRESSION_FAILURE", ERR_LIB_SSL, 1030}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE ++ {"SSLV3_ALERT_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE}, ++ #else ++ {"SSLV3_ALERT_HANDSHAKE_FAILURE", ERR_LIB_SSL, 1040}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER ++ {"SSLV3_ALERT_ILLEGAL_PARAMETER", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER}, ++ #else ++ {"SSLV3_ALERT_ILLEGAL_PARAMETER", ERR_LIB_SSL, 1047}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_NO_CERTIFICATE ++ {"SSLV3_ALERT_NO_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_NO_CERTIFICATE}, ++ #else ++ {"SSLV3_ALERT_NO_CERTIFICATE", ERR_LIB_SSL, 1041}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE ++ {"SSLV3_ALERT_UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE}, ++ #else ++ {"SSLV3_ALERT_UNEXPECTED_MESSAGE", ERR_LIB_SSL, 1010}, ++ #endif ++ #ifdef SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE ++ {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE}, ++ #else ++ {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", ERR_LIB_SSL, 1043}, ++ #endif ++ #ifdef SSL_R_SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION ++ {"SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION", ERR_LIB_SSL, SSL_R_SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION}, ++ #else ++ {"SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION", ERR_LIB_SSL, 228}, ++ #endif ++ #ifdef SSL_R_SSL_HANDSHAKE_FAILURE ++ {"SSL_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSL_HANDSHAKE_FAILURE}, ++ #else ++ {"SSL_HANDSHAKE_FAILURE", ERR_LIB_SSL, 229}, ++ #endif ++ #ifdef SSL_R_SSL_LIBRARY_HAS_NO_CIPHERS ++ {"SSL_LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, SSL_R_SSL_LIBRARY_HAS_NO_CIPHERS}, ++ #else ++ {"SSL_LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, 230}, ++ #endif ++ #ifdef SSL_R_SSL_SESSION_ID_CALLBACK_FAILED ++ {"SSL_SESSION_ID_CALLBACK_FAILED", ERR_LIB_SSL, 
SSL_R_SSL_SESSION_ID_CALLBACK_FAILED}, ++ #else ++ {"SSL_SESSION_ID_CALLBACK_FAILED", ERR_LIB_SSL, 301}, ++ #endif ++ #ifdef SSL_R_SSL_SESSION_ID_CONFLICT ++ {"SSL_SESSION_ID_CONFLICT", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CONFLICT}, ++ #else ++ {"SSL_SESSION_ID_CONFLICT", ERR_LIB_SSL, 302}, ++ #endif ++ #ifdef SSL_R_SSL_SESSION_ID_CONTEXT_TOO_LONG ++ {"SSL_SESSION_ID_CONTEXT_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CONTEXT_TOO_LONG}, ++ #else ++ {"SSL_SESSION_ID_CONTEXT_TOO_LONG", ERR_LIB_SSL, 273}, ++ #endif ++ #ifdef SSL_R_SSL_SESSION_ID_HAS_BAD_LENGTH ++ {"SSL_SESSION_ID_HAS_BAD_LENGTH", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_HAS_BAD_LENGTH}, ++ #else ++ {"SSL_SESSION_ID_HAS_BAD_LENGTH", ERR_LIB_SSL, 303}, ++ #endif ++ #ifdef SSL_R_SSL_SESSION_ID_IS_DIFFERENT ++ {"SSL_SESSION_ID_IS_DIFFERENT", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_IS_DIFFERENT}, ++ #else ++ {"SSL_SESSION_ID_IS_DIFFERENT", ERR_LIB_SSL, 231}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_ACCESS_DENIED ++ {"TLSV1_ALERT_ACCESS_DENIED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_ACCESS_DENIED}, ++ #else ++ {"TLSV1_ALERT_ACCESS_DENIED", ERR_LIB_SSL, 1049}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_DECODE_ERROR ++ {"TLSV1_ALERT_DECODE_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECODE_ERROR}, ++ #else ++ {"TLSV1_ALERT_DECODE_ERROR", ERR_LIB_SSL, 1050}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_DECRYPTION_FAILED ++ {"TLSV1_ALERT_DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPTION_FAILED}, ++ #else ++ {"TLSV1_ALERT_DECRYPTION_FAILED", ERR_LIB_SSL, 1021}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_DECRYPT_ERROR ++ {"TLSV1_ALERT_DECRYPT_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPT_ERROR}, ++ #else ++ {"TLSV1_ALERT_DECRYPT_ERROR", ERR_LIB_SSL, 1051}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION ++ {"TLSV1_ALERT_EXPORT_RESTRICTION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION}, ++ #else ++ {"TLSV1_ALERT_EXPORT_RESTRICTION", ERR_LIB_SSL, 1060}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY ++ {"TLSV1_ALERT_INSUFFICIENT_SECURITY", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY}, ++ #else ++ {"TLSV1_ALERT_INSUFFICIENT_SECURITY", ERR_LIB_SSL, 1071}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_INTERNAL_ERROR ++ {"TLSV1_ALERT_INTERNAL_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INTERNAL_ERROR}, ++ #else ++ {"TLSV1_ALERT_INTERNAL_ERROR", ERR_LIB_SSL, 1080}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_NO_RENEGOTIATION ++ {"TLSV1_ALERT_NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_NO_RENEGOTIATION}, ++ #else ++ {"TLSV1_ALERT_NO_RENEGOTIATION", ERR_LIB_SSL, 1100}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_PROTOCOL_VERSION ++ {"TLSV1_ALERT_PROTOCOL_VERSION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_PROTOCOL_VERSION}, ++ #else ++ {"TLSV1_ALERT_PROTOCOL_VERSION", ERR_LIB_SSL, 1070}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_RECORD_OVERFLOW ++ {"TLSV1_ALERT_RECORD_OVERFLOW", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_RECORD_OVERFLOW}, ++ #else ++ {"TLSV1_ALERT_RECORD_OVERFLOW", ERR_LIB_SSL, 1022}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_UNKNOWN_CA ++ {"TLSV1_ALERT_UNKNOWN_CA", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_UNKNOWN_CA}, ++ #else ++ {"TLSV1_ALERT_UNKNOWN_CA", ERR_LIB_SSL, 1048}, ++ #endif ++ #ifdef SSL_R_TLSV1_ALERT_USER_CANCELLED ++ {"TLSV1_ALERT_USER_CANCELLED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_USER_CANCELLED}, ++ #else ++ {"TLSV1_ALERT_USER_CANCELLED", ERR_LIB_SSL, 1090}, ++ #endif ++ #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE ++ {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE}, ++ #else ++ {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", ERR_LIB_SSL, 
1114}, ++ #endif ++ #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE ++ {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE}, ++ #else ++ {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", ERR_LIB_SSL, 1113}, ++ #endif ++ #ifdef SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE ++ {"TLSV1_CERTIFICATE_UNOBTAINABLE", ERR_LIB_SSL, SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE}, ++ #else ++ {"TLSV1_CERTIFICATE_UNOBTAINABLE", ERR_LIB_SSL, 1111}, ++ #endif ++ #ifdef SSL_R_TLSV1_UNRECOGNIZED_NAME ++ {"TLSV1_UNRECOGNIZED_NAME", ERR_LIB_SSL, SSL_R_TLSV1_UNRECOGNIZED_NAME}, ++ #else ++ {"TLSV1_UNRECOGNIZED_NAME", ERR_LIB_SSL, 1112}, ++ #endif ++ #ifdef SSL_R_TLSV1_UNSUPPORTED_EXTENSION ++ {"TLSV1_UNSUPPORTED_EXTENSION", ERR_LIB_SSL, SSL_R_TLSV1_UNSUPPORTED_EXTENSION}, ++ #else ++ {"TLSV1_UNSUPPORTED_EXTENSION", ERR_LIB_SSL, 1110}, ++ #endif ++ #ifdef SSL_R_TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER ++ {"TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER", ERR_LIB_SSL, SSL_R_TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER}, ++ #else ++ {"TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER", ERR_LIB_SSL, 232}, ++ #endif ++ #ifdef SSL_R_TLS_INVALID_ECPOINTFORMAT_LIST ++ {"TLS_INVALID_ECPOINTFORMAT_LIST", ERR_LIB_SSL, SSL_R_TLS_INVALID_ECPOINTFORMAT_LIST}, ++ #else ++ {"TLS_INVALID_ECPOINTFORMAT_LIST", ERR_LIB_SSL, 157}, ++ #endif ++ #ifdef SSL_R_TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST ++ {"TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST", ERR_LIB_SSL, SSL_R_TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST}, ++ #else ++ {"TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST", ERR_LIB_SSL, 233}, ++ #endif ++ #ifdef SSL_R_TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG ++ {"TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, SSL_R_TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG}, ++ #else ++ {"TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, 234}, ++ #endif ++ #ifdef SSL_R_TRIED_TO_USE_UNSUPPORTED_CIPHER ++ {"TRIED_TO_USE_UNSUPPORTED_CIPHER", ERR_LIB_SSL, SSL_R_TRIED_TO_USE_UNSUPPORTED_CIPHER}, ++ #else ++ {"TRIED_TO_USE_UNSUPPORTED_CIPHER", ERR_LIB_SSL, 235}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_DECODE_DH_CERTS ++ {"UNABLE_TO_DECODE_DH_CERTS", ERR_LIB_SSL, SSL_R_UNABLE_TO_DECODE_DH_CERTS}, ++ #else ++ {"UNABLE_TO_DECODE_DH_CERTS", ERR_LIB_SSL, 236}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_DECODE_ECDH_CERTS ++ {"UNABLE_TO_DECODE_ECDH_CERTS", ERR_LIB_SSL, SSL_R_UNABLE_TO_DECODE_ECDH_CERTS}, ++ #else ++ {"UNABLE_TO_DECODE_ECDH_CERTS", ERR_LIB_SSL, 313}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_EXTRACT_PUBLIC_KEY ++ {"UNABLE_TO_EXTRACT_PUBLIC_KEY", ERR_LIB_SSL, SSL_R_UNABLE_TO_EXTRACT_PUBLIC_KEY}, ++ #else ++ {"UNABLE_TO_EXTRACT_PUBLIC_KEY", ERR_LIB_SSL, 237}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_FIND_DH_PARAMETERS ++ {"UNABLE_TO_FIND_DH_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_DH_PARAMETERS}, ++ #else ++ {"UNABLE_TO_FIND_DH_PARAMETERS", ERR_LIB_SSL, 238}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_FIND_ECDH_PARAMETERS ++ {"UNABLE_TO_FIND_ECDH_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_ECDH_PARAMETERS}, ++ #else ++ {"UNABLE_TO_FIND_ECDH_PARAMETERS", ERR_LIB_SSL, 314}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS ++ {"UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS}, ++ #else ++ {"UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS", ERR_LIB_SSL, 239}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_FIND_SSL_METHOD ++ {"UNABLE_TO_FIND_SSL_METHOD", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_SSL_METHOD}, ++ #else ++ {"UNABLE_TO_FIND_SSL_METHOD", ERR_LIB_SSL, 240}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_LOAD_SSL2_MD5_ROUTINES ++ 
{"UNABLE_TO_LOAD_SSL2_MD5_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL2_MD5_ROUTINES}, ++ #else ++ {"UNABLE_TO_LOAD_SSL2_MD5_ROUTINES", ERR_LIB_SSL, 241}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_LOAD_SSL3_MD5_ROUTINES ++ {"UNABLE_TO_LOAD_SSL3_MD5_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL3_MD5_ROUTINES}, ++ #else ++ {"UNABLE_TO_LOAD_SSL3_MD5_ROUTINES", ERR_LIB_SSL, 242}, ++ #endif ++ #ifdef SSL_R_UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES ++ {"UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES}, ++ #else ++ {"UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES", ERR_LIB_SSL, 243}, ++ #endif ++ #ifdef SSL_R_UNEXPECTED_MESSAGE ++ {"UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_UNEXPECTED_MESSAGE}, ++ #else ++ {"UNEXPECTED_MESSAGE", ERR_LIB_SSL, 244}, ++ #endif ++ #ifdef SSL_R_UNEXPECTED_RECORD ++ {"UNEXPECTED_RECORD", ERR_LIB_SSL, SSL_R_UNEXPECTED_RECORD}, ++ #else ++ {"UNEXPECTED_RECORD", ERR_LIB_SSL, 245}, ++ #endif ++ #ifdef SSL_R_UNINITIALIZED ++ {"UNINITIALIZED", ERR_LIB_SSL, SSL_R_UNINITIALIZED}, ++ #else ++ {"UNINITIALIZED", ERR_LIB_SSL, 276}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_ALERT_TYPE ++ {"UNKNOWN_ALERT_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_ALERT_TYPE}, ++ #else ++ {"UNKNOWN_ALERT_TYPE", ERR_LIB_SSL, 246}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_CERTIFICATE_TYPE ++ {"UNKNOWN_CERTIFICATE_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_CERTIFICATE_TYPE}, ++ #else ++ {"UNKNOWN_CERTIFICATE_TYPE", ERR_LIB_SSL, 247}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_CIPHER_RETURNED ++ {"UNKNOWN_CIPHER_RETURNED", ERR_LIB_SSL, SSL_R_UNKNOWN_CIPHER_RETURNED}, ++ #else ++ {"UNKNOWN_CIPHER_RETURNED", ERR_LIB_SSL, 248}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_CIPHER_TYPE ++ {"UNKNOWN_CIPHER_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_CIPHER_TYPE}, ++ #else ++ {"UNKNOWN_CIPHER_TYPE", ERR_LIB_SSL, 249}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_KEY_EXCHANGE_TYPE ++ {"UNKNOWN_KEY_EXCHANGE_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_KEY_EXCHANGE_TYPE}, ++ #else ++ {"UNKNOWN_KEY_EXCHANGE_TYPE", ERR_LIB_SSL, 250}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_PKEY_TYPE ++ {"UNKNOWN_PKEY_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_PKEY_TYPE}, ++ #else ++ {"UNKNOWN_PKEY_TYPE", ERR_LIB_SSL, 251}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_PROTOCOL ++ {"UNKNOWN_PROTOCOL", ERR_LIB_SSL, SSL_R_UNKNOWN_PROTOCOL}, ++ #else ++ {"UNKNOWN_PROTOCOL", ERR_LIB_SSL, 252}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_REMOTE_ERROR_TYPE ++ {"UNKNOWN_REMOTE_ERROR_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_REMOTE_ERROR_TYPE}, ++ #else ++ {"UNKNOWN_REMOTE_ERROR_TYPE", ERR_LIB_SSL, 253}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_SSL_VERSION ++ {"UNKNOWN_SSL_VERSION", ERR_LIB_SSL, SSL_R_UNKNOWN_SSL_VERSION}, ++ #else ++ {"UNKNOWN_SSL_VERSION", ERR_LIB_SSL, 254}, ++ #endif ++ #ifdef SSL_R_UNKNOWN_STATE ++ {"UNKNOWN_STATE", ERR_LIB_SSL, SSL_R_UNKNOWN_STATE}, ++ #else ++ {"UNKNOWN_STATE", ERR_LIB_SSL, 255}, ++ #endif ++ #ifdef SSL_R_UNSAFE_LEGACY_RENEGOTIATION_DISABLED ++ {"UNSAFE_LEGACY_RENEGOTIATION_DISABLED", ERR_LIB_SSL, SSL_R_UNSAFE_LEGACY_RENEGOTIATION_DISABLED}, ++ #else ++ {"UNSAFE_LEGACY_RENEGOTIATION_DISABLED", ERR_LIB_SSL, 338}, ++ #endif ++ #ifdef SSL_R_UNSUPPORTED_CIPHER ++ {"UNSUPPORTED_CIPHER", ERR_LIB_SSL, SSL_R_UNSUPPORTED_CIPHER}, ++ #else ++ {"UNSUPPORTED_CIPHER", ERR_LIB_SSL, 256}, ++ #endif ++ #ifdef SSL_R_UNSUPPORTED_COMPRESSION_ALGORITHM ++ {"UNSUPPORTED_COMPRESSION_ALGORITHM", ERR_LIB_SSL, SSL_R_UNSUPPORTED_COMPRESSION_ALGORITHM}, ++ #else ++ {"UNSUPPORTED_COMPRESSION_ALGORITHM", ERR_LIB_SSL, 257}, ++ #endif ++ #ifdef SSL_R_UNSUPPORTED_DIGEST_TYPE ++ {"UNSUPPORTED_DIGEST_TYPE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_DIGEST_TYPE}, 
++ #else ++ {"UNSUPPORTED_DIGEST_TYPE", ERR_LIB_SSL, 326}, ++ #endif ++ #ifdef SSL_R_UNSUPPORTED_ELLIPTIC_CURVE ++ {"UNSUPPORTED_ELLIPTIC_CURVE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_ELLIPTIC_CURVE}, ++ #else ++ {"UNSUPPORTED_ELLIPTIC_CURVE", ERR_LIB_SSL, 315}, ++ #endif ++ #ifdef SSL_R_UNSUPPORTED_PROTOCOL ++ {"UNSUPPORTED_PROTOCOL", ERR_LIB_SSL, SSL_R_UNSUPPORTED_PROTOCOL}, ++ #else ++ {"UNSUPPORTED_PROTOCOL", ERR_LIB_SSL, 258}, ++ #endif ++ #ifdef SSL_R_UNSUPPORTED_SSL_VERSION ++ {"UNSUPPORTED_SSL_VERSION", ERR_LIB_SSL, SSL_R_UNSUPPORTED_SSL_VERSION}, ++ #else ++ {"UNSUPPORTED_SSL_VERSION", ERR_LIB_SSL, 259}, ++ #endif ++ #ifdef SSL_R_UNSUPPORTED_STATUS_TYPE ++ {"UNSUPPORTED_STATUS_TYPE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_STATUS_TYPE}, ++ #else ++ {"UNSUPPORTED_STATUS_TYPE", ERR_LIB_SSL, 329}, ++ #endif ++ #ifdef SSL_R_WRITE_BIO_NOT_SET ++ {"WRITE_BIO_NOT_SET", ERR_LIB_SSL, SSL_R_WRITE_BIO_NOT_SET}, ++ #else ++ {"WRITE_BIO_NOT_SET", ERR_LIB_SSL, 260}, ++ #endif ++ #ifdef SSL_R_WRONG_CIPHER_RETURNED ++ {"WRONG_CIPHER_RETURNED", ERR_LIB_SSL, SSL_R_WRONG_CIPHER_RETURNED}, ++ #else ++ {"WRONG_CIPHER_RETURNED", ERR_LIB_SSL, 261}, ++ #endif ++ #ifdef SSL_R_WRONG_MESSAGE_TYPE ++ {"WRONG_MESSAGE_TYPE", ERR_LIB_SSL, SSL_R_WRONG_MESSAGE_TYPE}, ++ #else ++ {"WRONG_MESSAGE_TYPE", ERR_LIB_SSL, 262}, ++ #endif ++ #ifdef SSL_R_WRONG_NUMBER_OF_KEY_BITS ++ {"WRONG_NUMBER_OF_KEY_BITS", ERR_LIB_SSL, SSL_R_WRONG_NUMBER_OF_KEY_BITS}, ++ #else ++ {"WRONG_NUMBER_OF_KEY_BITS", ERR_LIB_SSL, 263}, ++ #endif ++ #ifdef SSL_R_WRONG_SIGNATURE_LENGTH ++ {"WRONG_SIGNATURE_LENGTH", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_LENGTH}, ++ #else ++ {"WRONG_SIGNATURE_LENGTH", ERR_LIB_SSL, 264}, ++ #endif ++ #ifdef SSL_R_WRONG_SIGNATURE_SIZE ++ {"WRONG_SIGNATURE_SIZE", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_SIZE}, ++ #else ++ {"WRONG_SIGNATURE_SIZE", ERR_LIB_SSL, 265}, ++ #endif ++ #ifdef SSL_R_WRONG_SSL_VERSION ++ {"WRONG_SSL_VERSION", ERR_LIB_SSL, SSL_R_WRONG_SSL_VERSION}, ++ #else ++ {"WRONG_SSL_VERSION", ERR_LIB_SSL, 266}, ++ #endif ++ #ifdef SSL_R_WRONG_VERSION_NUMBER ++ {"WRONG_VERSION_NUMBER", ERR_LIB_SSL, SSL_R_WRONG_VERSION_NUMBER}, ++ #else ++ {"WRONG_VERSION_NUMBER", ERR_LIB_SSL, 267}, ++ #endif ++ #ifdef SSL_R_X509_LIB ++ {"X509_LIB", ERR_LIB_SSL, SSL_R_X509_LIB}, ++ #else ++ {"X509_LIB", ERR_LIB_SSL, 268}, ++ #endif ++ #ifdef SSL_R_X509_VERIFICATION_SETUP_PROBLEMS ++ {"X509_VERIFICATION_SETUP_PROBLEMS", ERR_LIB_SSL, SSL_R_X509_VERIFICATION_SETUP_PROBLEMS}, ++ #else ++ {"X509_VERIFICATION_SETUP_PROBLEMS", ERR_LIB_SSL, 269}, ++ #endif ++ #ifdef X509_R_BAD_X509_FILETYPE ++ {"BAD_X509_FILETYPE", ERR_LIB_X509, X509_R_BAD_X509_FILETYPE}, ++ #else ++ {"BAD_X509_FILETYPE", ERR_LIB_X509, 100}, ++ #endif ++ #ifdef X509_R_BASE64_DECODE_ERROR ++ {"BASE64_DECODE_ERROR", ERR_LIB_X509, X509_R_BASE64_DECODE_ERROR}, ++ #else ++ {"BASE64_DECODE_ERROR", ERR_LIB_X509, 118}, ++ #endif ++ #ifdef X509_R_CANT_CHECK_DH_KEY ++ {"CANT_CHECK_DH_KEY", ERR_LIB_X509, X509_R_CANT_CHECK_DH_KEY}, ++ #else ++ {"CANT_CHECK_DH_KEY", ERR_LIB_X509, 114}, ++ #endif ++ #ifdef X509_R_CERT_ALREADY_IN_HASH_TABLE ++ {"CERT_ALREADY_IN_HASH_TABLE", ERR_LIB_X509, X509_R_CERT_ALREADY_IN_HASH_TABLE}, ++ #else ++ {"CERT_ALREADY_IN_HASH_TABLE", ERR_LIB_X509, 101}, ++ #endif ++ #ifdef X509_R_ERR_ASN1_LIB ++ {"ERR_ASN1_LIB", ERR_LIB_X509, X509_R_ERR_ASN1_LIB}, ++ #else ++ {"ERR_ASN1_LIB", ERR_LIB_X509, 102}, ++ #endif ++ #ifdef X509_R_INVALID_DIRECTORY ++ {"INVALID_DIRECTORY", ERR_LIB_X509, X509_R_INVALID_DIRECTORY}, ++ #else ++ {"INVALID_DIRECTORY", ERR_LIB_X509, 113}, ++ #endif ++ 
#ifdef X509_R_INVALID_FIELD_NAME ++ {"INVALID_FIELD_NAME", ERR_LIB_X509, X509_R_INVALID_FIELD_NAME}, ++ #else ++ {"INVALID_FIELD_NAME", ERR_LIB_X509, 119}, ++ #endif ++ #ifdef X509_R_INVALID_TRUST ++ {"INVALID_TRUST", ERR_LIB_X509, X509_R_INVALID_TRUST}, ++ #else ++ {"INVALID_TRUST", ERR_LIB_X509, 123}, ++ #endif ++ #ifdef X509_R_KEY_TYPE_MISMATCH ++ {"KEY_TYPE_MISMATCH", ERR_LIB_X509, X509_R_KEY_TYPE_MISMATCH}, ++ #else ++ {"KEY_TYPE_MISMATCH", ERR_LIB_X509, 115}, ++ #endif ++ #ifdef X509_R_KEY_VALUES_MISMATCH ++ {"KEY_VALUES_MISMATCH", ERR_LIB_X509, X509_R_KEY_VALUES_MISMATCH}, ++ #else ++ {"KEY_VALUES_MISMATCH", ERR_LIB_X509, 116}, ++ #endif ++ #ifdef X509_R_LOADING_CERT_DIR ++ {"LOADING_CERT_DIR", ERR_LIB_X509, X509_R_LOADING_CERT_DIR}, ++ #else ++ {"LOADING_CERT_DIR", ERR_LIB_X509, 103}, ++ #endif ++ #ifdef X509_R_LOADING_DEFAULTS ++ {"LOADING_DEFAULTS", ERR_LIB_X509, X509_R_LOADING_DEFAULTS}, ++ #else ++ {"LOADING_DEFAULTS", ERR_LIB_X509, 104}, ++ #endif ++ #ifdef X509_R_METHOD_NOT_SUPPORTED ++ {"METHOD_NOT_SUPPORTED", ERR_LIB_X509, X509_R_METHOD_NOT_SUPPORTED}, ++ #else ++ {"METHOD_NOT_SUPPORTED", ERR_LIB_X509, 124}, ++ #endif ++ #ifdef X509_R_NO_CERT_SET_FOR_US_TO_VERIFY ++ {"NO_CERT_SET_FOR_US_TO_VERIFY", ERR_LIB_X509, X509_R_NO_CERT_SET_FOR_US_TO_VERIFY}, ++ #else ++ {"NO_CERT_SET_FOR_US_TO_VERIFY", ERR_LIB_X509, 105}, ++ #endif ++ #ifdef X509_R_PUBLIC_KEY_DECODE_ERROR ++ {"PUBLIC_KEY_DECODE_ERROR", ERR_LIB_X509, X509_R_PUBLIC_KEY_DECODE_ERROR}, ++ #else ++ {"PUBLIC_KEY_DECODE_ERROR", ERR_LIB_X509, 125}, ++ #endif ++ #ifdef X509_R_PUBLIC_KEY_ENCODE_ERROR ++ {"PUBLIC_KEY_ENCODE_ERROR", ERR_LIB_X509, X509_R_PUBLIC_KEY_ENCODE_ERROR}, ++ #else ++ {"PUBLIC_KEY_ENCODE_ERROR", ERR_LIB_X509, 126}, ++ #endif ++ #ifdef X509_R_SHOULD_RETRY ++ {"SHOULD_RETRY", ERR_LIB_X509, X509_R_SHOULD_RETRY}, ++ #else ++ {"SHOULD_RETRY", ERR_LIB_X509, 106}, ++ #endif ++ #ifdef X509_R_UNABLE_TO_FIND_PARAMETERS_IN_CHAIN ++ {"UNABLE_TO_FIND_PARAMETERS_IN_CHAIN", ERR_LIB_X509, X509_R_UNABLE_TO_FIND_PARAMETERS_IN_CHAIN}, ++ #else ++ {"UNABLE_TO_FIND_PARAMETERS_IN_CHAIN", ERR_LIB_X509, 107}, ++ #endif ++ #ifdef X509_R_UNABLE_TO_GET_CERTS_PUBLIC_KEY ++ {"UNABLE_TO_GET_CERTS_PUBLIC_KEY", ERR_LIB_X509, X509_R_UNABLE_TO_GET_CERTS_PUBLIC_KEY}, ++ #else ++ {"UNABLE_TO_GET_CERTS_PUBLIC_KEY", ERR_LIB_X509, 108}, ++ #endif ++ #ifdef X509_R_UNKNOWN_KEY_TYPE ++ {"UNKNOWN_KEY_TYPE", ERR_LIB_X509, X509_R_UNKNOWN_KEY_TYPE}, ++ #else ++ {"UNKNOWN_KEY_TYPE", ERR_LIB_X509, 117}, ++ #endif ++ #ifdef X509_R_UNKNOWN_NID ++ {"UNKNOWN_NID", ERR_LIB_X509, X509_R_UNKNOWN_NID}, ++ #else ++ {"UNKNOWN_NID", ERR_LIB_X509, 109}, ++ #endif ++ #ifdef X509_R_UNKNOWN_PURPOSE_ID ++ {"UNKNOWN_PURPOSE_ID", ERR_LIB_X509, X509_R_UNKNOWN_PURPOSE_ID}, ++ #else ++ {"UNKNOWN_PURPOSE_ID", ERR_LIB_X509, 121}, ++ #endif ++ #ifdef X509_R_UNKNOWN_TRUST_ID ++ {"UNKNOWN_TRUST_ID", ERR_LIB_X509, X509_R_UNKNOWN_TRUST_ID}, ++ #else ++ {"UNKNOWN_TRUST_ID", ERR_LIB_X509, 120}, ++ #endif ++ #ifdef X509_R_UNSUPPORTED_ALGORITHM ++ {"UNSUPPORTED_ALGORITHM", ERR_LIB_X509, X509_R_UNSUPPORTED_ALGORITHM}, ++ #else ++ {"UNSUPPORTED_ALGORITHM", ERR_LIB_X509, 111}, ++ #endif ++ #ifdef X509_R_WRONG_LOOKUP_TYPE ++ {"WRONG_LOOKUP_TYPE", ERR_LIB_X509, X509_R_WRONG_LOOKUP_TYPE}, ++ #else ++ {"WRONG_LOOKUP_TYPE", ERR_LIB_X509, 112}, ++ #endif ++ #ifdef X509_R_WRONG_TYPE ++ {"WRONG_TYPE", ERR_LIB_X509, X509_R_WRONG_TYPE}, ++ #else ++ {"WRONG_TYPE", ERR_LIB_X509, 122}, ++ #endif ++ { NULL } ++}; +diff -up Python-2.7.5/Lib/test/test_support.py.ssl 
Python-2.7.5/Lib/test/test_support.py +--- Python-2.7.5/Lib/test/test_support.py.ssl 2015-02-24 12:12:20.587171888 +0100 ++++ Python-2.7.5/Lib/test/test_support.py 2015-02-24 12:12:59.495502649 +0100 +@@ -290,7 +290,8 @@ def requires(resource, msg=None): + msg = "Use of the `%s' resource not enabled" % resource + raise ResourceDenied(msg) + +-HOST = 'localhost' ++HOST = "127.0.0.1" ++HOSTv6 = "::1" + + def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): + """Returns an unused port that should be suitable for binding. This is diff --git a/SOURCES/00215-pep466-reflect-openssl-settings-ssltests.patch b/SOURCES/00215-pep466-reflect-openssl-settings-ssltests.patch new file mode 100644 index 0000000..34fd3ba --- /dev/null +++ b/SOURCES/00215-pep466-reflect-openssl-settings-ssltests.patch @@ -0,0 +1,26 @@ +diff -up Python-2.7.5/Lib/test/test_ssl.py.ssl2 Python-2.7.5/Lib/test/test_ssl.py +--- Python-2.7.5/Lib/test/test_ssl.py.ssl2 2015-03-04 12:19:26.345387741 +0100 ++++ Python-2.7.5/Lib/test/test_ssl.py 2015-03-04 12:32:43.485702679 +0100 +@@ -689,7 +689,8 @@ class ContextTests(unittest.TestCase): + @skip_if_broken_ubuntu_ssl + def test_options(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- # OP_ALL | OP_NO_SSLv2 is the default value ++ self.assertEqual(ssl.OP_ALL, ctx.options) ++ ctx.options |= ssl.OP_NO_SSLv2 + self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2, + ctx.options) + ctx.options |= ssl.OP_NO_SSLv3 +@@ -2142,9 +2143,9 @@ else: + # No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False, + client_options=ssl.OP_NO_SSLv2) +- try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False, ++ try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True, + client_options=ssl.OP_NO_SSLv3) +- try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False, ++ try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True, + client_options=ssl.OP_NO_TLSv1) + + @skip_if_broken_ubuntu_ssl + diff --git a/SOURCES/00216-pep466-fix-load-verify-locs-unicode.patch b/SOURCES/00216-pep466-fix-load-verify-locs-unicode.patch new file mode 100644 index 0000000..87ee80b --- /dev/null +++ b/SOURCES/00216-pep466-fix-load-verify-locs-unicode.patch @@ -0,0 +1,72 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1409232801 14400 +# Node ID 97081a80f487841d81aeed55d398a1dba1faca00 +# Parent 3ae399c6ecf685086ebf07e17717955f21e14cb8 +fix load_verify_locations on unicode paths (closes #22244) + +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -850,11 +850,14 @@ class ContextTests(unittest.TestCase): + ctx.load_verify_locations(cafile=CERTFILE, capath=None) + ctx.load_verify_locations(BYTES_CERTFILE) + ctx.load_verify_locations(cafile=BYTES_CERTFILE, capath=None) ++ ctx.load_verify_locations(cafile=BYTES_CERTFILE.decode('utf-8')) + self.assertRaises(TypeError, ctx.load_verify_locations) + self.assertRaises(TypeError, ctx.load_verify_locations, None, None, None) + with self.assertRaises(IOError) as cm: + ctx.load_verify_locations(WRONGCERT) + self.assertEqual(cm.exception.errno, errno.ENOENT) ++ with self.assertRaises(IOError): ++ ctx.load_verify_locations(u'') + with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"): + ctx.load_verify_locations(BADCERT) + ctx.load_verify_locations(CERTFILE, CAPATH) +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -2628,17 +2628,33 @@ 
load_verify_locations(PySSLContext *self + } + + if (cafile) { +- cafile_bytes = PyString_AsEncodedObject( +- cafile, Py_FileSystemDefaultEncoding, "strict"); +- if (!cafile_bytes) { +- goto error; ++ if (PyString_Check(cafile)) { ++ Py_INCREF(cafile); ++ cafile_bytes = cafile; ++ } else { ++ PyObject *u = PyUnicode_FromObject(cafile); ++ if (!u) ++ goto error; ++ cafile_bytes = PyUnicode_AsEncodedString( ++ u, Py_FileSystemDefaultEncoding, NULL); ++ Py_DECREF(u); ++ if (!cafile_bytes) ++ goto error; + } + } + if (capath) { +- capath_bytes = PyString_AsEncodedObject( +- capath, Py_FileSystemDefaultEncoding, "strict"); +- if (!capath_bytes) { +- goto error; ++ if (PyString_Check(capath)) { ++ Py_INCREF(capath); ++ capath_bytes = capath; ++ } else { ++ PyObject *u = PyUnicode_FromObject(capath); ++ if (!u) ++ goto error; ++ capath_bytes = PyUnicode_AsEncodedString( ++ u, Py_FileSystemDefaultEncoding, NULL); ++ Py_DECREF(u); ++ if (!capath_bytes) ++ goto error; + } + } + + diff --git a/SOURCES/00217-pep466-backport-hashlib-algorithm-consts.patch b/SOURCES/00217-pep466-backport-hashlib-algorithm-consts.patch new file mode 100644 index 0000000..eb7866c --- /dev/null +++ b/SOURCES/00217-pep466-backport-hashlib-algorithm-consts.patch @@ -0,0 +1,189 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1409233289 14400 +# Node ID 3f73c44b1fd1d442d6841493328e9756fb5e7ef5 +# Parent 97081a80f487841d81aeed55d398a1dba1faca00 +PEP 466: backport hashlib algorithm constants (closes #21307) + +diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst +--- a/Doc/library/hashlib.rst ++++ b/Doc/library/hashlib.rst +@@ -88,6 +88,24 @@ This module provides the following const + + .. versionadded:: 2.7 + ++.. data:: algorithms_guaranteed ++ ++ A set containing the names of the hash algorithms guaranteed to be supported ++ by this module on all platforms. ++ ++ .. versionadded:: 2.7.9 ++ ++.. data:: algorithms_available ++ ++ A set containing the names of the hash algorithms that are available in the ++ running Python interpreter. These names will be recognized when passed to ++ :func:`new`. :attr:`algorithms_guaranteed` will always be a subset. The ++ same algorithm may appear multiple times in this set under different names ++ (thanks to OpenSSL). ++ ++ .. versionadded:: 2.7.9 ++ ++ + The following values are provided as constant attributes of the hash objects + returned by the constructors: + +diff -up Python-2.7.5/Lib/hashlib.py.hash Python-2.7.5/Lib/hashlib.py +--- Python-2.7.5/Lib/hashlib.py.hash 2015-03-04 17:05:57.496598686 +0100 ++++ Python-2.7.5/Lib/hashlib.py 2015-03-04 17:11:34.872739103 +0100 +@@ -18,8 +18,9 @@ than using new(): + + md5(), sha1(), sha224(), sha256(), sha384(), and sha512() + +-More algorithms may be available on your platform but the above are +-guaranteed to exist. ++More algorithms may be available on your platform but the above are guaranteed ++to exist. See the algorithms_guaranteed and algorithms_available attributes ++to find out what algorithm names can be passed to new(). + + NOTE: If you want the adler32 or crc32 hash functions they are available in + the zlib module. +@@ -75,9 +76,14 @@ More condensed: + # always available algorithm is added. 
+ __always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') + ++algorithms_guaranteed = set(__always_supported) ++algorithms_available = set(__always_supported) ++ + algorithms = __always_supported + +-__all__ = __always_supported + ('new', 'algorithms', 'pbkdf2_hmac') ++__all__ = __always_supported + ('new', 'algorithms_guaranteed', ++ 'algorithms_available', 'algorithms', ++ 'pbkdf2_hmac') + + + def __get_openssl_constructor(name): +@@ -110,6 +116,8 @@ try: + import _hashlib + new = __hash_new + __get_hash = __get_openssl_constructor ++ algorithms_available = algorithms_available.union( ++ _hashlib.openssl_md_meth_names) + except ImportError: + # We don't build the legacy modules + raise +diff -up Python-2.7.5/Modules/_hashopenssl.c.hash Python-2.7.5/Modules/_hashopenssl.c +--- Python-2.7.5/Modules/_hashopenssl.c.hash 2015-03-04 17:06:18.246791837 +0100 ++++ Python-2.7.5/Modules/_hashopenssl.c 2015-03-04 17:16:17.696369000 +0100 +@@ -784,6 +784,61 @@ pbkdf2_hmac(PyObject *self, PyObject *ar + + #endif + ++/* State for our callback function so that it can accumulate a result. */ ++typedef struct _internal_name_mapper_state { ++ PyObject *set; ++ int error; ++} _InternalNameMapperState; ++ ++ ++/* A callback function to pass to OpenSSL's OBJ_NAME_do_all(...) */ ++static void ++_openssl_hash_name_mapper(const OBJ_NAME *openssl_obj_name, void *arg) ++{ ++ _InternalNameMapperState *state = (_InternalNameMapperState *)arg; ++ PyObject *py_name; ++ ++ assert(state != NULL); ++ if (openssl_obj_name == NULL) ++ return; ++ /* Ignore aliased names, they pollute the list and OpenSSL appears to ++ * have a its own definition of alias as the resulting list still ++ * contains duplicate and alternate names for several algorithms. */ ++ if (openssl_obj_name->alias) ++ return; ++ ++ py_name = PyString_FromString(openssl_obj_name->name); ++ if (py_name == NULL) { ++ state->error = 1; ++ } else { ++ if (PySet_Add(state->set, py_name) != 0) { ++ state->error = 1; ++ } ++ Py_DECREF(py_name); ++ } ++} ++ ++ ++/* Ask OpenSSL for a list of supported ciphers, filling in a Python set. */ ++static PyObject* ++generate_hash_name_list(void) ++{ ++ _InternalNameMapperState state; ++ state.set = PyFrozenSet_New(NULL); ++ if (state.set == NULL) ++ return NULL; ++ state.error = 0; ++ ++ OBJ_NAME_do_all(OBJ_NAME_TYPE_MD_METH, &_openssl_hash_name_mapper, &state); ++ ++ if (state.error) { ++ Py_DECREF(state.set); ++ return NULL; ++ } ++ return state.set; ++} ++ ++ + /* + * This macro and function generates a family of constructor function + * definitions for specific hash algorithms. 
These constructors are much +@@ -924,11 +979,11 @@ static struct PyMethodDef EVP_functions[ + PyMODINIT_FUNC + init_hashlib(void) + { +- PyObject *m; ++ PyObject *m, *openssl_md_meth_names; + + SSL_load_error_strings(); + SSL_library_init(); +- OpenSSL_add_all_digests(); ++ ERR_load_crypto_strings(); + + Py_TYPE(&EVPtype) = &PyType_Type; + if (PyType_Ready(&EVPtype) < 0) +@@ -938,6 +993,14 @@ init_hashlib(void) + if (m == NULL) + return; + ++ openssl_md_meth_names = generate_hash_name_list(); ++ if (openssl_md_meth_names == NULL) { ++ return; ++ } ++ if (PyModule_AddObject(m, "openssl_md_meth_names", openssl_md_meth_names)) { ++ return; ++ } ++ + #if HASH_OBJ_CONSTRUCTOR + Py_INCREF(&EVPtype); + PyModule_AddObject(m, "HASH", (PyObject *)&EVPtype); +diff -up Python-2.7.5/Lib/test/test_hashlib.py.hash Python-2.7.5/Lib/test/test_hashlib.py +--- Python-2.7.5/Lib/test/test_hashlib.py.hash 2015-03-04 18:04:57.823553474 +0100 ++++ Python-2.7.5/Lib/test/test_hashlib.py 2015-03-04 18:06:39.395499123 +0100 +@@ -107,6 +107,15 @@ class HashLibTestCase(unittest.TestCase) + tuple([_algo for _algo in self.supported_hash_names if + _algo.islower()])) + ++ def test_algorithms_guaranteed(self): ++ self.assertEqual(hashlib.algorithms_guaranteed, ++ set(_algo for _algo in self.supported_hash_names ++ if _algo.islower())) ++ ++ def test_algorithms_available(self): ++ self.assertTrue(set(hashlib.algorithms_guaranteed). ++ issubset(hashlib.algorithms_available)) ++ + def test_unknown_hash(self): + self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam') + self.assertRaises(TypeError, hashlib.new, 1) diff --git a/SOURCES/00218-pep466-backport-urandom-pers-fd.patch b/SOURCES/00218-pep466-backport-urandom-pers-fd.patch new file mode 100644 index 0000000..8a97bdc --- /dev/null +++ b/SOURCES/00218-pep466-backport-urandom-pers-fd.patch @@ -0,0 +1,165 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1409243400 14400 +# Node ID 3e7f8855078855a9409bc2c1372de89cb021d6c8 +# Parent 3f73c44b1fd1d442d6841493328e9756fb5e7ef5 +PEP 466: backport persistent urandom fd (closes #21305) + +Patch from Alex Gaynor. + +diff --git a/Python/pythonrun.c b/Python/pythonrun.c +--- a/Python/pythonrun.c ++++ b/Python/pythonrun.c +@@ -536,6 +536,7 @@ Py_Finalize(void) + PyInt_Fini(); + PyFloat_Fini(); + PyDict_Fini(); ++ _PyRandom_Fini(); + + #ifdef Py_USING_UNICODE + /* Cleanup Unicode implementation */ +diff -up Python-2.7.5/Include/pythonrun.h.urandom Python-2.7.5/Include/pythonrun.h +--- Python-2.7.5/Include/pythonrun.h.urandom 2015-03-06 08:16:47.638584015 +0100 ++++ Python-2.7.5/Include/pythonrun.h 2015-03-06 08:21:48.009485462 +0100 +@@ -145,6 +145,7 @@ PyAPI_FUNC(void) PyInt_Fini(void); + PyAPI_FUNC(void) PyFloat_Fini(void); + PyAPI_FUNC(void) PyOS_FiniInterrupts(void); + PyAPI_FUNC(void) PyByteArray_Fini(void); ++PyAPI_FUNC(void) _PyRandom_Fini(void); + + /* Stuff with no proper home (yet) */ + PyAPI_FUNC(char *) PyOS_Readline(FILE *, FILE *, char *); +diff -up Python-2.7.5/Python/random.c.urandom Python-2.7.5/Python/random.c +--- Python-2.7.5/Python/random.c.urandom 2015-03-06 08:22:10.244699950 +0100 ++++ Python-2.7.5/Python/random.c 2015-03-06 08:24:57.907317272 +0100 +@@ -118,10 +118,16 @@ vms_urandom(unsigned char *buffer, Py_ss + + #if !defined(MS_WINDOWS) && !defined(__VMS) + ++static struct { ++ int fd; ++ dev_t st_dev; ++ ino_t st_ino; ++} urandom_cache = { -1 }; ++ + /* Read size bytes from /dev/urandom into buffer. + Call Py_FatalError() on error. 
*/ + static void +-dev_urandom_noraise(char *buffer, Py_ssize_t size) ++dev_urandom_noraise(unsigned char *buffer, Py_ssize_t size) + { + int fd; + Py_ssize_t n; +@@ -156,18 +162,56 @@ dev_urandom_python(char *buffer, Py_ssiz + { + int fd; + Py_ssize_t n; ++ struct stat st; + + if (size <= 0) + return 0; + +- Py_BEGIN_ALLOW_THREADS +- fd = open("/dev/urandom", O_RDONLY); +- Py_END_ALLOW_THREADS +- if (fd < 0) +- { +- PyErr_SetString(PyExc_NotImplementedError, +- "/dev/urandom (or equivalent) not found"); +- return -1; ++ if (urandom_cache.fd >= 0) { ++ /* Does the fd point to the same thing as before? (issue #21207) */ ++ if (fstat(urandom_cache.fd, &st) ++ || st.st_dev != urandom_cache.st_dev ++ || st.st_ino != urandom_cache.st_ino) { ++ /* Something changed: forget the cached fd (but don't close it, ++ since it probably points to something important for some ++ third-party code). */ ++ urandom_cache.fd = -1; ++ } ++ } ++ if (urandom_cache.fd >= 0) ++ fd = urandom_cache.fd; ++ else { ++ Py_BEGIN_ALLOW_THREADS ++ fd = open("/dev/urandom", O_RDONLY); ++ Py_END_ALLOW_THREADS ++ if (fd < 0) ++ { ++ if (errno == ENOENT || errno == ENXIO || ++ errno == ENODEV || errno == EACCES) ++ PyErr_SetString(PyExc_NotImplementedError, ++ "/dev/urandom (or equivalent) not found"); ++ else ++ PyErr_SetFromErrno(PyExc_OSError); ++ return -1; ++ } ++ if (urandom_cache.fd >= 0) { ++ /* urandom_fd was initialized by another thread while we were ++ not holding the GIL, keep it. */ ++ close(fd); ++ fd = urandom_cache.fd; ++ } ++ else { ++ if (fstat(fd, &st)) { ++ PyErr_SetFromErrno(PyExc_OSError); ++ close(fd); ++ return -1; ++ } ++ else { ++ urandom_cache.fd = fd; ++ urandom_cache.st_dev = st.st_dev; ++ urandom_cache.st_ino = st.st_ino; ++ } ++ } + } + + Py_BEGIN_ALLOW_THREADS +@@ -191,12 +235,21 @@ dev_urandom_python(char *buffer, Py_ssiz + PyErr_Format(PyExc_RuntimeError, + "Failed to read %zi bytes from /dev/urandom", + size); +- close(fd); + return -1; + } +- close(fd); + return 0; + } ++ ++static void ++dev_urandom_close(void) ++{ ++ if (urandom_cache.fd >= 0) { ++ close(urandom_cache.fd); ++ urandom_cache.fd = -1; ++ } ++} ++ ++ + #endif /* !defined(MS_WINDOWS) && !defined(__VMS) */ + + /* Fill buffer with pseudo-random bytes generated by a linear congruent +@@ -300,8 +353,21 @@ _PyRandom_Init(void) + # ifdef __VMS + vms_urandom((unsigned char *)secret, secret_size, 0); + # else +- dev_urandom_noraise((char*)secret, secret_size); ++ dev_urandom_noraise((unsigned char*)secret, secret_size); + # endif + #endif + } + } ++ ++void ++_PyRandom_Fini(void) ++{ ++#ifdef MS_WINDOWS ++ if (hCryptProv) { ++ CryptReleaseContext(hCryptProv, 0); ++ hCryptProv = 0; ++ } ++#else ++ dev_urandom_close(); ++#endif ++} diff --git a/SOURCES/00219-pep466-fix-referenced-sslwrap.patch b/SOURCES/00219-pep466-fix-referenced-sslwrap.patch new file mode 100644 index 0000000..f8ccc90 --- /dev/null +++ b/SOURCES/00219-pep466-fix-referenced-sslwrap.patch @@ -0,0 +1,91 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1412221981 14400 +# Node ID 1a36d4e8cf4edfdc4c7d59a40075b8cf00e3ad3c +# Parent 222e0faa5fa9567f657f13fc78a60069142e09ae +fix sslwrap_simple (closes #22523) + +Thanks Alex Gaynor. + +diff --git a/Lib/ssl.py b/Lib/ssl.py +--- a/Lib/ssl.py ++++ b/Lib/ssl.py +@@ -969,16 +969,16 @@ def get_protocol_name(protocol_code): + # a replacement for the old socket.ssl function + + def sslwrap_simple(sock, keyfile=None, certfile=None): +- + """A replacement for the old socket.ssl function. 
Designed + for compability with Python 2.5 and earlier. Will disappear in + Python 3.0.""" +- + if hasattr(sock, "_sock"): + sock = sock._sock + +- ssl_sock = _ssl.sslwrap(sock, 0, keyfile, certfile, CERT_NONE, +- PROTOCOL_SSLv23, None) ++ ctx = SSLContext(PROTOCOL_SSLv23) ++ if keyfile or certfile: ++ ctx.load_cert_chain(certfile, keyfile) ++ ssl_sock = ctx._wrap_socket(sock, server_side=False) + try: + sock.getpeername() + except socket_error: +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -94,6 +94,8 @@ class BasicTests(unittest.TestCase): + pass + else: + raise ++ ++ + def can_clear_options(): + # 0.9.8m or higher + return ssl._OPENSSL_API_VERSION >= (0, 9, 8, 13, 15) +@@ -2944,7 +2946,7 @@ def test_main(verbose=False): + if not os.path.exists(filename): + raise support.TestFailed("Can't read certificate file %r" % filename) + +- tests = [ContextTests, BasicSocketTests, SSLErrorTests] ++ tests = [ContextTests, BasicTests, BasicSocketTests, SSLErrorTests] + + if support.is_resource_enabled('network'): + tests.append(NetworkedTests) +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -517,10 +517,12 @@ newPySSLSocket(PySSLContext *sslctx, PyS + self->socket_type = socket_type; + self->Socket = sock; + Py_INCREF(self->Socket); +- self->ssl_sock = PyWeakref_NewRef(ssl_sock, NULL); +- if (self->ssl_sock == NULL) { +- Py_DECREF(self); +- return NULL; ++ if (ssl_sock != Py_None) { ++ self->ssl_sock = PyWeakref_NewRef(ssl_sock, NULL); ++ if (self->ssl_sock == NULL) { ++ Py_DECREF(self); ++ return NULL; ++ } + } + return self; + } +@@ -2931,8 +2933,12 @@ static int + + ssl = SSL_get_app_data(s); + assert(PySSLSocket_Check(ssl)); +- ssl_socket = PyWeakref_GetObject(ssl->ssl_sock); +- Py_INCREF(ssl_socket); ++ if (ssl->ssl_sock == NULL) { ++ ssl_socket = Py_None; ++ } else { ++ ssl_socket = PyWeakref_GetObject(ssl->ssl_sock); ++ Py_INCREF(ssl_socket); ++ } + if (ssl_socket == Py_None) { + goto error; + } + diff --git a/SOURCES/00220-pep466-allow-passing-ssl-urrlib-httplib.patch b/SOURCES/00220-pep466-allow-passing-ssl-urrlib-httplib.patch new file mode 100644 index 0000000..77bdfe5 --- /dev/null +++ b/SOURCES/00220-pep466-allow-passing-ssl-urrlib-httplib.patch @@ -0,0 +1,673 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1416764565 21600 +# Node ID 1882157b298a164291d2b3a8b9525eb0902895f6 +# Parent 588ebc8fd3daf7307961cd614c4da9525bb67313 +allow passing cert/ssl information to urllib2.urlopen and httplib.HTTPSConnection + +This is basically a backport of issues #9003 and #22366. + +diff --git a/Doc/library/httplib.rst b/Doc/library/httplib.rst +--- a/Doc/library/httplib.rst ++++ b/Doc/library/httplib.rst +@@ -70,12 +70,25 @@ The module provides the following classe + *source_address* was added. + + +-.. class:: HTTPSConnection(host[, port[, key_file[, cert_file[, strict[, timeout[, source_address]]]]]]) ++.. class:: HTTPSConnection(host[, port[, key_file[, cert_file[, strict[, timeout[, source_address, context, check_hostname]]]]]]) + + A subclass of :class:`HTTPConnection` that uses SSL for communication with +- secure servers. Default port is ``443``. *key_file* is the name of a PEM +- formatted file that contains your private key. *cert_file* is a PEM formatted +- certificate chain file. ++ secure servers. Default port is ``443``. If *context* is specified, it must ++ be a :class:`ssl.SSLContext` instance describing the various SSL options. 
++ ++ *key_file* and *cert_file* are deprecated, please use ++ :meth:`ssl.SSLContext.load_cert_chain` instead, or let ++ :func:`ssl.create_default_context` select the system's trusted CA ++ certificates for you. ++ ++ Please read :ref:`ssl-security` for more information on best practices. ++ ++ .. note:: ++ If *context* is specified and has a :attr:`~ssl.SSLContext.verify_mode` ++ of either :data:`~ssl.CERT_OPTIONAL` or :data:`~ssl.CERT_REQUIRED`, then ++ by default *host* is matched against the host name(s) allowed by the ++ server's certificate. If you want to change that behaviour, you can ++ explicitly set *check_hostname* to False. + + .. warning:: + This does not do any verification of the server's certificate. +@@ -88,6 +101,9 @@ The module provides the following classe + .. versionchanged:: 2.7 + *source_address* was added. + ++ .. versionchanged:: 2.7.9 ++ *context* and *check_hostname* was added. ++ + + .. class:: HTTPResponse(sock, debuglevel=0, strict=0) + +diff --git a/Lib/test/keycert2.pem b/Lib/test/keycert2.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/keycert2.pem +@@ -0,0 +1,31 @@ ++-----BEGIN PRIVATE KEY----- ++MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBANcLaMB7T/Wi9DBc ++PltGzgt8cxsv55m7PQPHMZvn6Ke8xmNqcmEzib8opRwKGrCV6TltKeFlNSg8dwQK ++Tl4ktyTkGCVweRQJ37AkBayvEBml5s+QD4vlhqkJPsL/Nsd+fnqngOGc5+59+C6r ++s3XpiLlF5ah/z8q92Mnw54nypw1JAgMBAAECgYBE3t2Mj7GbDLZB6rj5yKJioVfI ++BD6bSJEQ7bGgqdQkLFwpKMU7BiN+ekjuwvmrRkesYZ7BFgXBPiQrwhU5J28Tpj5B ++EOMYSIOHfzdalhxDGM1q2oK9LDFiCotTaSdEzMYadel5rmKXJ0zcK2Jho0PCuECf ++tf/ghRxK+h1Hm0tKgQJBAO6MdGDSmGKYX6/5kPDje7we/lSLorSDkYmV0tmVShsc ++JxgaGaapazceA/sHL3Myx7Eenkip+yPYDXEDFvAKNDECQQDmxsT9NOp6mo7ISvky ++GFr2vVHsJ745BMWoma4rFjPBVnS8RkgK+b2EpDCdZSrQ9zw2r8sKTgrEyrDiGTEg ++wJyZAkA8OOc0flYMJg2aHnYR6kwVjPmGHI5h5gk648EMPx0rROs1sXkiUwkHLCOz ++HvhCq+Iv+9vX2lnVjbiu/CmxRdIxAkA1YEfzoKeTD+hyXxTgB04Sv5sRGegfXAEz ++i8gC4zG5R/vcCA1lrHmvEiLEZL/QcT6WD3bQvVg0SAU9ZkI8pxARAkA7yqMSvP1l ++gJXy44R+rzpLYb1/PtiLkIkaKG3x9TUfPnfD2jY09fPkZlfsRU3/uS09IkhSwimV ++d5rWoljEfdou ++-----END PRIVATE KEY----- ++-----BEGIN CERTIFICATE----- ++MIICXTCCAcagAwIBAgIJALVQzebTtrXFMA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV ++BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u ++IFNvZnR3YXJlIEZvdW5kYXRpb24xFTATBgNVBAMMDGZha2Vob3N0bmFtZTAeFw0x ++NDExMjMxNzAwMDdaFw0yNDExMjAxNzAwMDdaMGIxCzAJBgNVBAYTAlhZMRcwFQYD ++VQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9uIFNvZnR3YXJlIEZv ++dW5kYXRpb24xFTATBgNVBAMMDGZha2Vob3N0bmFtZTCBnzANBgkqhkiG9w0BAQEF ++AAOBjQAwgYkCgYEA1wtowHtP9aL0MFw+W0bOC3xzGy/nmbs9A8cxm+fop7zGY2py ++YTOJvyilHAoasJXpOW0p4WU1KDx3BApOXiS3JOQYJXB5FAnfsCQFrK8QGaXmz5AP ++i+WGqQk+wv82x35+eqeA4Zzn7n34LquzdemIuUXlqH/Pyr3YyfDnifKnDUkCAwEA ++AaMbMBkwFwYDVR0RBBAwDoIMZmFrZWhvc3RuYW1lMA0GCSqGSIb3DQEBBQUAA4GB ++AKuay3vDKfWzt5+ch/HHBsert84ISot4fUjzXDA/oOgTOEjVcSShHxqNShMOW1oA ++QYBpBB/5Kx5RkD/w6imhucxt2WQPRgjX4x4bwMipVH/HvFDp03mG51/Cpi1TyZ74 ++El7qa/Pd4lHhOLzMKBA6503fpeYSFUIBxZbGLqylqRK7 ++-----END CERTIFICATE----- +diff --git a/Lib/test/selfsigned_pythontestdotnet.pem b/Lib/test/selfsigned_pythontestdotnet.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/selfsigned_pythontestdotnet.pem +@@ -0,0 +1,16 @@ ++-----BEGIN CERTIFICATE----- ++MIIChzCCAfCgAwIBAgIJAKGU95wKR8pSMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV ++BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u ++IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv ++bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG ++A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo 
++b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 ++aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ ++Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm ++Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv ++EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjKTAnMCUGA1UdEQQeMByCGnNl ++bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MA0GCSqGSIb3DQEBBQUAA4GBAIOXmdtM ++eG9qzP9TiXW/Gc/zI4cBfdCpC+Y4gOfC9bQUC7hefix4iO3+iZjgy3X/FaRxUUoV ++HKiXcXIaWqTSUWp45cSh0MbwZXudp6JIAptzdAhvvCrPKeC9i9GvxsPD4LtDAL97 ++vSaxQBezA7hdxZd90/EeyMgVZgAnTCnvAWX9 ++-----END CERTIFICATE----- +diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py +--- a/Lib/test/test_urllib2.py ++++ b/Lib/test/test_urllib2.py +@@ -8,6 +8,11 @@ import StringIO + import urllib2 + from urllib2 import Request, OpenerDirector + ++try: ++ import ssl ++except ImportError: ++ ssl = None ++ + # XXX + # Request + # CacheFTPHandler (hard to write) +@@ -47,6 +52,14 @@ class TrivialTests(unittest.TestCase): + for string, list in tests: + self.assertEqual(urllib2.parse_http_list(string), list) + ++ @unittest.skipUnless(ssl, "ssl module required") ++ def test_cafile_and_context(self): ++ context = ssl.create_default_context() ++ with self.assertRaises(ValueError): ++ urllib2.urlopen( ++ "https://localhost", cafile="/nonexistent/path", context=context ++ ) ++ + + def test_request_headers_dict(): + """ +diff --git a/Lib/urllib2.py b/Lib/urllib2.py +--- a/Lib/urllib2.py ++++ b/Lib/urllib2.py +@@ -109,6 +109,14 @@ try: + except ImportError: + from StringIO import StringIO + ++# check for SSL ++try: ++ import ssl ++except ImportError: ++ _have_ssl = False ++else: ++ _have_ssl = True ++ + from urllib import (unwrap, unquote, splittype, splithost, quote, + addinfourl, splitport, splittag, toBytes, + splitattr, ftpwrapper, splituser, splitpasswd, splitvalue) +@@ -120,11 +128,30 @@ from urllib import localhost, url2pathna + __version__ = sys.version[:3] + + _opener = None +-def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): ++def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, ++ cafile=None, capath=None, cadefault=False, context=None): + global _opener +- if _opener is None: +- _opener = build_opener() +- return _opener.open(url, data, timeout) ++ if cafile or capath or cadefault: ++ if context is not None: ++ raise ValueError( ++ "You can't pass both context and any of cafile, capath, and " ++ "cadefault" ++ ) ++ if not _have_ssl: ++ raise ValueError('SSL support not available') ++ context = ssl._create_stdlib_context(cert_reqs=ssl.CERT_REQUIRED, ++ cafile=cafile, ++ capath=capath) ++ https_handler = HTTPSHandler(context=context, check_hostname=True) ++ opener = build_opener(https_handler) ++ elif context: ++ https_handler = HTTPSHandler(context=context) ++ opener = build_opener(https_handler) ++ elif _opener is None: ++ _opener = opener = build_opener() ++ else: ++ opener = _opener ++ return opener.open(url, data, timeout) + + def install_opener(opener): + global _opener +@@ -1121,7 +1148,7 @@ class AbstractHTTPHandler(BaseHandler): + + return request + +- def do_open(self, http_class, req): ++ def do_open(self, http_class, req, **http_conn_args): + """Return an addinfourl object for the request, using http_class. + + http_class must implement the HTTPConnection API from httplib. 
+@@ -1135,7 +1162,8 @@ class AbstractHTTPHandler(BaseHandler): + if not host: + raise URLError('no host given') + +- h = http_class(host, timeout=req.timeout) # will parse host:port ++ # will parse host:port ++ h = http_class(host, timeout=req.timeout, **http_conn_args) + h.set_debuglevel(self._debuglevel) + + headers = dict(req.unredirected_hdrs) +@@ -1203,8 +1231,14 @@ class HTTPHandler(AbstractHTTPHandler): + if hasattr(httplib, 'HTTPS'): + class HTTPSHandler(AbstractHTTPHandler): + ++ def __init__(self, debuglevel=0, context=None, check_hostname=None): ++ AbstractHTTPHandler.__init__(self, debuglevel) ++ self._context = context ++ self._check_hostname = check_hostname ++ + def https_open(self, req): +- return self.do_open(httplib.HTTPSConnection, req) ++ return self.do_open(httplib.HTTPSConnection, req, ++ context=self._context, check_hostname=self._check_hostname) + + https_request = AbstractHTTPHandler.do_request_ + +diff -up Python-2.7.5/Lib/test/test_urllib2_localnet.py.ctx Python-2.7.5/Lib/test/test_urllib2_localnet.py +--- Python-2.7.5/Lib/test/test_urllib2_localnet.py.ctx 2015-03-30 10:13:48.351310552 +0200 ++++ Python-2.7.5/Lib/test/test_urllib2_localnet.py 2015-03-30 10:14:54.715713679 +0200 +@@ -1,5 +1,6 @@ + #!/usr/bin/env python + ++import os + import urlparse + import urllib2 + import BaseHTTPServer +@@ -11,6 +12,17 @@ from test import test_support + mimetools = test_support.import_module('mimetools', deprecated=True) + threading = test_support.import_module('threading') + ++try: ++ import ssl ++except ImportError: ++ ssl = None ++ ++here = os.path.dirname(__file__) ++# Self-signed cert file for 'localhost' ++CERT_localhost = os.path.join(here, 'keycert.pem') ++# Self-signed cert file for 'fakehostname' ++CERT_fakehostname = os.path.join(here, 'keycert2.pem') ++ + # Loopback http server infrastructure + + class LoopbackHttpServer(BaseHTTPServer.HTTPServer): +@@ -25,7 +37,7 @@ class LoopbackHttpServer(BaseHTTPServer. + + # Set the timeout of our listening socket really low so + # that we can stop the server easily. +- self.socket.settimeout(1.0) ++ self.socket.settimeout(0.1) + + def get_request(self): + """BaseHTTPServer method, overridden.""" +@@ -354,6 +366,19 @@ class TestUrlopen(BaseTestCase): + urllib2.install_opener(opener) + super(TestUrlopen, self).setUp() + ++ def urlopen(self, url, data=None, **kwargs): ++ l = [] ++ f = urllib2.urlopen(url, data, **kwargs) ++ try: ++ # Exercise various methods ++ l.extend(f.readlines(200)) ++ l.append(f.readline()) ++ l.append(f.read(1024)) ++ l.append(f.read()) ++ finally: ++ f.close() ++ return b"".join(l) ++ + def start_server(self, responses): + handler = GetRequestHandler(responses) + +@@ -364,6 +389,16 @@ class TestUrlopen(BaseTestCase): + handler.port = port + return handler + ++ def start_https_server(self, responses=None, **kwargs): ++ if not hasattr(urllib2, 'HTTPSHandler'): ++ self.skipTest('ssl support required') ++ from test.ssl_servers import make_https_server ++ if responses is None: ++ responses = [(200, [], b"we care a bit")] ++ handler = GetRequestHandler(responses) ++ server = make_https_server(self, handler_class=handler, **kwargs) ++ handler.port = server.port ++ return handler + + def test_redirection(self): + expected_response = 'We got here...' 
+@@ -434,6 +469,28 @@ class TestUrlopen(BaseTestCase): + finally: + self.server.stop() + ++ def test_https(self): ++ handler = self.start_https_server() ++ context = ssl.create_default_context(cafile=CERT_localhost) ++ data = self.urlopen("https://localhost:%s/bizarre" % handler.port, context=context) ++ self.assertEqual(data, b"we care a bit") ++ ++ def test_https_with_cafile(self): ++ handler = self.start_https_server(certfile=CERT_localhost) ++ import ssl ++ # Good cert ++ data = self.urlopen("https://localhost:%s/bizarre" % handler.port, ++ cafile=CERT_localhost) ++ self.assertEqual(data, b"we care a bit") ++ # Bad cert ++ with self.assertRaises(urllib2.URLError) as cm: ++ self.urlopen("https://localhost:%s/bizarre" % handler.port, ++ cafile=CERT_fakehostname) ++ # Good cert, but mismatching hostname ++ handler = self.start_https_server(certfile=CERT_fakehostname) ++ with self.assertRaises(ssl.CertificateError) as cm: ++ self.urlopen("https://localhost:%s/bizarre" % handler.port, ++ cafile=CERT_fakehostname) + + def test_sending_headers(self): + handler = self.start_server([(200, [], "we don't care")]) +diff -up Python-2.7.5/Doc/library/urllib2.rst.ctx Python-2.7.5/Doc/library/urllib2.rst +--- Python-2.7.5/Doc/library/urllib2.rst.ctx 2015-03-30 10:20:15.958747076 +0200 ++++ Python-2.7.5/Doc/library/urllib2.rst 2015-03-30 10:30:46.172779366 +0200 +@@ -22,13 +22,10 @@ redirections, cookies and more. + The :mod:`urllib2` module defines the following functions: + + +-.. function:: urlopen(url[, data][, timeout]) ++.. function:: urlopen(url[, data[, timeout[, cafile[, capath[, cadefault[, context]]]]]) + + Open the URL *url*, which can be either a string or a :class:`Request` object. + +- .. warning:: +- HTTPS requests do not do any verification of the server's certificate. +- + *data* may be a string specifying additional data to send to the server, or + ``None`` if no such data is needed. Currently HTTP requests are the only ones + that use *data*; the HTTP request will be a POST instead of a GET when the +@@ -41,7 +38,19 @@ The :mod:`urllib2` module defines the fo + The optional *timeout* parameter specifies a timeout in seconds for blocking + operations like the connection attempt (if not specified, the global default + timeout setting will be used). This actually only works for HTTP, HTTPS and +- FTP connections. ++ FTP connections. ++ ++ If *context* is specified, it must be a :class:`ssl.SSLContext` instance ++ describing the various SSL options. See :class:`~httplib.HTTPSConnection` for ++ more details. ++ ++ The optional *cafile* and *capath* parameters specify a set of trusted CA ++ certificates for HTTPS requests. *cafile* should point to a single file ++ containing a bundle of CA certificates, whereas *capath* should point to a ++ directory of hashed certificate files. More information can be found in ++ :meth:`ssl.SSLContext.load_verify_locations`. ++ ++ The *cadefault* parameter is ignored. + + This function returns a file-like object with two additional methods: + +@@ -66,7 +75,10 @@ The :mod:`urllib2` module defines the fo + handled through the proxy. + + .. versionchanged:: 2.6 +- *timeout* was added. ++ *timeout* was added. ++ ++ .. versionchanged:: 2.7.9 ++ *cafile*, *capath*, *cadefault*, and *context* were added. + + + .. function:: install_opener(opener) +@@ -280,9 +292,13 @@ The following classes are provided: + A class to handle opening of HTTP URLs. + + +-.. class:: HTTPSHandler() ++.. 
class:: HTTPSHandler([debuglevel[, context[, check_hostname]]]) ++ ++ A class to handle opening of HTTPS URLs. *context* and *check_hostname* have ++ the same meaning as for :class:`httplib.HTTPSConnection`. + +- A class to handle opening of HTTPS URLs. ++ .. versionchanged:: 2.7.9 ++ *context* and *check_hostname* were added. + + + .. class:: FileHandler() +diff -up Python-2.7.5/Lib/httplib.py.ctx Python-2.7.5/Lib/httplib.py +--- Python-2.7.5/Lib/httplib.py.ctx 2015-03-30 10:19:52.551521393 +0200 ++++ Python-2.7.5/Lib/httplib.py 2015-03-30 10:30:05.045386751 +0200 +@@ -1159,21 +1159,44 @@ else: + + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, +- source_address=None): ++ source_address=None, context=None, check_hostname=None): + HTTPConnection.__init__(self, host, port, strict, timeout, + source_address) + self.key_file = key_file + self.cert_file = cert_file ++ if context is None: ++ context = ssl.create_default_context() ++ will_verify = context.verify_mode != ssl.CERT_NONE ++ if check_hostname is None: ++ check_hostname = will_verify ++ elif check_hostname and not will_verify: ++ raise ValueError("check_hostname needs a SSL context with " ++ "either CERT_OPTIONAL or CERT_REQUIRED") ++ if key_file or cert_file: ++ context.load_cert_chain(cert_file, key_file) ++ self._context = context ++ self._check_hostname = check_hostname + + def connect(self): + "Connect to a host on a given (SSL) port." + +- sock = socket.create_connection((self.host, self.port), +- self.timeout, self.source_address) ++ HTTPConnection.connect(self) ++ + if self._tunnel_host: +- self.sock = sock +- self._tunnel() +- self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file) ++ server_hostname = self._tunnel_host ++ else: ++ server_hostname = self.host ++ sni_hostname = server_hostname if ssl.HAS_SNI else None ++ ++ self.sock = self._context.wrap_socket(self.sock, ++ server_hostname=sni_hostname) ++ if not self._context.check_hostname and self._check_hostname: ++ try: ++ ssl.match_hostname(self.sock.getpeercert(), server_hostname) ++ except Exception: ++ self.sock.shutdown(socket.SHUT_RDWR) ++ self.sock.close() ++ raise + + __all__.append("HTTPSConnection") + +diff -up Python-2.7.5/Lib/test/test_httplib.py.ctx Python-2.7.5/Lib/test/test_httplib.py +--- Python-2.7.5/Lib/test/test_httplib.py.ctx 2015-03-30 10:19:12.905139139 +0200 ++++ Python-2.7.5/Lib/test/test_httplib.py 2015-03-30 10:27:41.822017804 +0200 +@@ -1,6 +1,7 @@ + import httplib + import array + import httplib ++import os + import StringIO + import socket + import errno +@@ -10,6 +11,14 @@ TestCase = unittest.TestCase + + from test import test_support + ++here = os.path.dirname(__file__) ++# Self-signed cert file for 'localhost' ++CERT_localhost = os.path.join(here, 'keycert.pem') ++# Self-signed cert file for 'fakehostname' ++CERT_fakehostname = os.path.join(here, 'keycert2.pem') ++# Self-signed cert file for self-signed.pythontest.net ++CERT_selfsigned_pythontestdotnet = os.path.join(here, 'selfsigned_pythontestdotnet.pem') ++ + HOST = test_support.HOST + + class FakeSocket: +@@ -493,40 +502,147 @@ class TimeoutTest(TestCase): + httpConn.close() + + +-class HTTPSTimeoutTest(TestCase): ++class HTTPSTest(TestCase): + # XXX Here should be tests for HTTPS, there isn't any right now! 
++ def setUp(self): ++ if not hasattr(httplib, 'HTTPSConnection'): ++ self.skipTest('ssl support required') ++ ++ def make_server(self, certfile): ++ from test.ssl_servers import make_https_server ++ return make_https_server(self, certfile=certfile) + + def test_attributes(self): +- # simple test to check it's storing it +- if hasattr(httplib, 'HTTPSConnection'): +- h = httplib.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30) +- self.assertEqual(h.timeout, 30) ++ # simple test to check it's storing the timeout ++ h = httplib.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30) ++ self.assertEqual(h.timeout, 30) ++ ++ def test_networked(self): ++ # Default settings: requires a valid cert from a trusted CA ++ import ssl ++ test_support.requires('network') ++ with test_support.transient_internet('self-signed.pythontest.net'): ++ h = httplib.HTTPSConnection('self-signed.pythontest.net', 443) ++ with self.assertRaises(ssl.SSLError) as exc_info: ++ h.request('GET', '/') ++ self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED') ++ ++ def test_networked_noverification(self): ++ # Switch off cert verification ++ import ssl ++ test_support.requires('network') ++ with test_support.transient_internet('self-signed.pythontest.net'): ++ context = ssl._create_stdlib_context() ++ h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, ++ context=context) ++ h.request('GET', '/') ++ resp = h.getresponse() ++ self.assertIn('nginx', resp.getheader('server')) ++ ++ def test_networked_trusted_by_default_cert(self): ++ # Default settings: requires a valid cert from a trusted CA ++ test_support.requires('network') ++ with test_support.transient_internet('www.python.org'): ++ h = httplib.HTTPSConnection('www.python.org', 443) ++ h.request('GET', '/') ++ resp = h.getresponse() ++ content_type = resp.getheader('content-type') ++ self.assertIn('text/html', content_type) ++ ++ def test_networked_good_cert(self): ++ # We feed the server's cert as a validating cert ++ import ssl ++ test_support.requires('network') ++ with test_support.transient_internet('self-signed.pythontest.net'): ++ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context.verify_mode = ssl.CERT_REQUIRED ++ context.load_verify_locations(CERT_selfsigned_pythontestdotnet) ++ h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, context=context) ++ h.request('GET', '/') ++ resp = h.getresponse() ++ server_string = resp.getheader('server') ++ self.assertIn('nginx', server_string) ++ ++ def test_networked_bad_cert(self): ++ # We feed a "CA" cert that is unrelated to the server's cert ++ import ssl ++ test_support.requires('network') ++ with test_support.transient_internet('self-signed.pythontest.net'): ++ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context.verify_mode = ssl.CERT_REQUIRED ++ context.load_verify_locations(CERT_localhost) ++ h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, context=context) ++ with self.assertRaises(ssl.SSLError) as exc_info: ++ h.request('GET', '/') ++ self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED') ++ ++ def test_local_unknown_cert(self): ++ # The custom cert isn't known to the default trust bundle ++ import ssl ++ server = self.make_server(CERT_localhost) ++ h = httplib.HTTPSConnection('localhost', server.port) ++ with self.assertRaises(ssl.SSLError) as exc_info: ++ h.request('GET', '/') ++ self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED') ++ ++ def test_local_good_hostname(self): ++ # The (valid) cert validates the HTTP 
hostname ++ import ssl ++ server = self.make_server(CERT_localhost) ++ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context.verify_mode = ssl.CERT_REQUIRED ++ context.load_verify_locations(CERT_localhost) ++ h = httplib.HTTPSConnection('localhost', server.port, context=context) ++ h.request('GET', '/nonexistent') ++ resp = h.getresponse() ++ self.assertEqual(resp.status, 404) ++ ++ def test_local_bad_hostname(self): ++ # The (valid) cert doesn't validate the HTTP hostname ++ import ssl ++ server = self.make_server(CERT_fakehostname) ++ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context.verify_mode = ssl.CERT_REQUIRED ++ context.load_verify_locations(CERT_fakehostname) ++ h = httplib.HTTPSConnection('localhost', server.port, context=context) ++ with self.assertRaises(ssl.CertificateError): ++ h.request('GET', '/') ++ # Same with explicit check_hostname=True ++ h = httplib.HTTPSConnection('localhost', server.port, context=context, ++ check_hostname=True) ++ with self.assertRaises(ssl.CertificateError): ++ h.request('GET', '/') ++ # With check_hostname=False, the mismatching is ignored ++ h = httplib.HTTPSConnection('localhost', server.port, context=context, ++ check_hostname=False) ++ h.request('GET', '/nonexistent') ++ resp = h.getresponse() ++ self.assertEqual(resp.status, 404) + +- @unittest.skipIf(not hasattr(httplib, 'HTTPS'), 'httplib.HTTPS not available') + def test_host_port(self): + # Check invalid host_port + +- # Note that httplib does not accept user:password@ in the host-port. + for hp in ("www.python.org:abc", "user:password@www.python.org"): +- self.assertRaises(httplib.InvalidURL, httplib.HTTP, hp) ++ self.assertRaises(httplib.InvalidURL, httplib.HTTPSConnection, hp) + +- for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000", "fe80::207:e9ff:fe9b", +- 8000), +- ("pypi.python.org:443", "pypi.python.org", 443), +- ("pypi.python.org", "pypi.python.org", 443), +- ("pypi.python.org:", "pypi.python.org", 443), +- ("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 443)): +- http = httplib.HTTPS(hp) +- c = http._conn +- if h != c.host: +- self.fail("Host incorrectly parsed: %s != %s" % (h, c.host)) +- if p != c.port: +- self.fail("Port incorrectly parsed: %s != %s" % (p, c.host)) ++ for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000", ++ "fe80::207:e9ff:fe9b", 8000), ++ ("www.python.org:443", "www.python.org", 443), ++ ("www.python.org:", "www.python.org", 443), ++ ("www.python.org", "www.python.org", 443), ++ ("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 443), ++ ("[fe80::207:e9ff:fe9b]:", "fe80::207:e9ff:fe9b", ++ 443)): ++ c = httplib.HTTPSConnection(hp) ++ self.assertEqual(h, c.host) ++ self.assertEqual(p, c.port) ++ + + ++@test_support.reap_threads + def test_main(verbose=None): + test_support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest, +- HTTPSTimeoutTest, SourceAddressTest) ++ HTTPSTest, SourceAddressTest) + + if __name__ == '__main__': + test_main() +diff -up Python-2.7.5/Lib/test/test_ssl.py.ctx Python-2.7.5/Lib/test/test_ssl.py +--- Python-2.7.5/Lib/test/test_ssl.py.ctx 2015-03-30 10:18:55.677973042 +0200 ++++ Python-2.7.5/Lib/test/test_ssl.py 2015-03-30 10:22:02.323772604 +0200 +@@ -14,7 +14,7 @@ import os + import errno + import pprint + import tempfile +-import urllib ++import urllib2 + import traceback + import weakref + import platform +@@ -2332,9 +2332,10 @@ else: + d1 = f.read() + d2 = '' + # now fetch the same data from the HTTPS server +- url = 'https://%s:%d/%s' % ( +- HOST, server.port, os.path.split(CERTFILE)[1]) +- f = urllib.urlopen(url) ++ url 
= 'https://localhost:%d/%s' % ( ++ server.port, os.path.split(CERTFILE)[1]) ++ context = ssl.create_default_context(cafile=CERTFILE) ++ f = urllib2.urlopen(url, context=context) + try: + dlen = f.info().getheader("content-length") + if dlen and (int(dlen) > 0): diff --git a/SOURCES/00222-add-2014-bit-dh-key.patch b/SOURCES/00222-add-2014-bit-dh-key.patch new file mode 100644 index 0000000..6a7838e --- /dev/null +++ b/SOURCES/00222-add-2014-bit-dh-key.patch @@ -0,0 +1,49 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1427947446 14400 +# Node ID 4f2391e866434a94ca6d87dff5ea01fcab91d08a +# Parent 5d88f653187203d85f4cfd4877f093af3919035b +replace 512 bit dh key with a 2014 bit one (closes #23844) + +Patch by Cédric Krier. + +diff --git a/Lib/test/dh1024.pem b/Lib/test/dh1024.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/dh1024.pem +@@ -0,0 +1,7 @@ ++-----BEGIN DH PARAMETERS----- ++MIGHAoGBAIbzw1s9CT8SV5yv6L7esdAdZYZjPi3qWFs61CYTFFQnf2s/d09NYaJt ++rrvJhIzWavqnue71qXCf83/J3nz3FEwUU/L0mGyheVbsSHiI64wUo3u50wK5Igo0 ++RNs/LD0irs7m0icZ//hijafTU+JOBiuA8zMI+oZfU7BGuc9XrUprAgEC ++-----END DH PARAMETERS----- ++ ++Generated with: openssl dhparam -out dh1024.pem 1024 +diff --git a/Lib/test/dh512.pem b/Lib/test/dh512.pem +deleted file mode 100644 +--- a/Lib/test/dh512.pem ++++ /dev/null +@@ -1,9 +0,0 @@ +------BEGIN DH PARAMETERS----- +-MEYCQQD1Kv884bEpQBgRjXyEpwpy1obEAxnIByl6ypUM2Zafq9AKUJsCRtMIPWak +-XUGfnHy9iUsiGSa6q6Jew1XpKgVfAgEC +------END DH PARAMETERS----- +- +-These are the 512 bit DH parameters from "Assigned Number for SKIP Protocols" +-(http://www.skip-vpn.org/spec/numbers.html). +-See there for how they were generated. +-Note that g is not a generator, but this is not a problem since p is a safe prime. +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -66,7 +66,7 @@ BADKEY = data_file("badkey.pem") + NOKIACERT = data_file("nokia.pem") + NULLBYTECERT = data_file("nullbytecert.pem") + +-DHFILE = data_file("dh512.pem") ++DHFILE = data_file("dh1024.pem") + BYTES_DHFILE = DHFILE.encode(sys.getfilesystemencoding()) + + + diff --git a/SOURCES/00223-pep476-verify-certs-by-default.patch b/SOURCES/00223-pep476-verify-certs-by-default.patch new file mode 100644 index 0000000..d62703f --- /dev/null +++ b/SOURCES/00223-pep476-verify-certs-by-default.patch @@ -0,0 +1,153 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1416798122 21600 +# Node ID fb83916c3ea12899569e88a7505469a90ab1f674 +# Parent c84f36a5f556867c2ec50453dc879a500032d377 +pep 476: verify certificates by default (#22417) + +diff --git a/Doc/library/httplib.rst b/Doc/library/httplib.rst +--- a/Doc/library/httplib.rst ++++ b/Doc/library/httplib.rst +@@ -90,9 +90,6 @@ The module provides the following classe + server's certificate. If you want to change that behaviour, you can + explicitly set *check_hostname* to False. + +- .. warning:: +- This does not do any verification of the server's certificate. +- + .. versionadded:: 2.0 + + .. versionchanged:: 2.6 +@@ -104,6 +101,11 @@ The module provides the following classe + .. versionchanged:: 2.7.9 + *context* and *check_hostname* was added. + ++ This class now performs all the necessary certificate and hostname checks ++ by default. To revert to the previous, unverified, behavior ++ :func:`ssl._create_unverified_context` can be passed to the *context* ++ parameter. ++ + + .. 
class:: HTTPResponse(sock, debuglevel=0, strict=0) + +diff --git a/Lib/httplib.py b/Lib/httplib.py +--- a/Lib/httplib.py ++++ b/Lib/httplib.py +@@ -1193,7 +1193,7 @@ else: + self.key_file = key_file + self.cert_file = cert_file + if context is None: +- context = ssl.create_default_context() ++ context = ssl._create_default_https_context() + will_verify = context.verify_mode != ssl.CERT_NONE + if check_hostname is None: + check_hostname = will_verify +diff --git a/Lib/ssl.py b/Lib/ssl.py +--- a/Lib/ssl.py ++++ b/Lib/ssl.py +@@ -427,8 +427,7 @@ def create_default_context(purpose=Purpo + context.load_default_certs(purpose) + return context + +- +-def _create_stdlib_context(protocol=PROTOCOL_SSLv23, cert_reqs=None, ++def _create_unverified_context(protocol=PROTOCOL_SSLv23, cert_reqs=None, + check_hostname=False, purpose=Purpose.SERVER_AUTH, + certfile=None, keyfile=None, + cafile=None, capath=None, cadata=None): +@@ -469,6 +468,14 @@ def _create_stdlib_context(protocol=PROT + + return context + ++# Used by http.client if no context is explicitly passed. ++_create_default_https_context = create_default_context ++ ++ ++# Backwards compatibility alias, even though it's not a public name. ++_create_stdlib_context = _create_unverified_context ++ ++ + class SSLSocket(socket): + """This class implements a subtype of socket.socket that wraps + the underlying OS socket in an SSL context when necessary, and +diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py +--- a/Lib/test/test_httplib.py ++++ b/Lib/test/test_httplib.py +@@ -1,10 +1,9 @@ + import httplib + import array +-import httplib +-import os + import StringIO + import socket + import errno ++import os + + import unittest + TestCase = unittest.TestCase +diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py +--- a/Lib/test/test_urllib2_localnet.py ++++ b/Lib/test/test_urllib2_localnet.py +@@ -5,6 +5,7 @@ import urllib2 + import BaseHTTPServer + import unittest + import hashlib ++import ssl + + from test import test_support + +@@ -562,15 +563,37 @@ class TestUrlopen(BaseTestCase): + cafile=CERT_localhost) + self.assertEqual(data, b"we care a bit") + # Bad cert +- with self.assertRaises(urllib2.URLError) as cm: ++ with self.assertRaises(urllib2.URLError): + self.urlopen("https://localhost:%s/bizarre" % handler.port, + cafile=CERT_fakehostname) + # Good cert, but mismatching hostname + handler = self.start_https_server(certfile=CERT_fakehostname) +- with self.assertRaises(ssl.CertificateError) as cm: ++ with self.assertRaises(ssl.CertificateError): + self.urlopen("https://localhost:%s/bizarre" % handler.port, + cafile=CERT_fakehostname) + ++ def test_https_with_cadefault(self): ++ handler = self.start_https_server(certfile=CERT_localhost) ++ # Self-signed cert should fail verification with system certificate store ++ with self.assertRaises(urllib2.URLError): ++ self.urlopen("https://localhost:%s/bizarre" % handler.port, ++ cadefault=True) ++ ++ def test_https_sni(self): ++ if ssl is None: ++ self.skipTest("ssl module required") ++ if not ssl.HAS_SNI: ++ self.skipTest("SNI support required in OpenSSL") ++ sni_name = [None] ++ def cb_sni(ssl_sock, server_name, initial_context): ++ sni_name[0] = server_name ++ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context.set_servername_callback(cb_sni) ++ handler = self.start_https_server(context=context, certfile=CERT_localhost) ++ context = ssl.create_default_context(cafile=CERT_localhost) ++ self.urlopen("https://localhost:%s" % handler.port, context=context) ++ 
self.assertEqual(sni_name[0], "localhost") ++ + def test_sending_headers(self): + handler = self.start_server([(200, [], "we don't care")]) + +diff -up Python-2.7.5/Doc/library/xmlrpclib.rst.ver Python-2.7.5/Doc/library/xmlrpclib.rst +--- Python-2.7.5/Doc/library/xmlrpclib.rst.ver 2015-03-30 13:59:29.243493601 +0200 ++++ Python-2.7.5/Doc/library/xmlrpclib.rst 2015-03-30 14:03:40.509532180 +0200 +@@ -34,6 +34,10 @@ between conformable Python objects and X + constructed data. If you need to parse untrusted or unauthenticated data see + :ref:`xml-vulnerabilities`. + ++.. versionchanged:: 2.7.9 ++ ++ For https URIs, :mod:`xmlrpclib` now performs all the necessary certificate ++ and hostname checks by default + + .. class:: ServerProxy(uri[, transport[, encoding[, verbose[, allow_none[, use_datetime]]]]]) + diff --git a/SOURCES/00224-pep476-add-toggle-for-cert-verify.patch b/SOURCES/00224-pep476-add-toggle-for-cert-verify.patch new file mode 100644 index 0000000..1b68ac0 --- /dev/null +++ b/SOURCES/00224-pep476-add-toggle-for-cert-verify.patch @@ -0,0 +1,103 @@ +diff -up Python-2.7.5/Lib/ssl.py.cert Python-2.7.5/Lib/ssl.py +--- Python-2.7.5/Lib/ssl.py.cert 2015-03-30 14:52:12.172241615 +0200 ++++ Python-2.7.5/Lib/ssl.py 2015-03-30 15:16:49.168185354 +0200 +@@ -466,8 +466,27 @@ def _create_unverified_context(protocol= + + return context + ++_cert_verification_config = '/etc/python/cert-verification.cfg' ++ ++def _get_verify_status(protocol): ++ context_factory = { ++ 'platform_default': _create_unverified_context, ++ 'enable': create_default_context, ++ 'disable': _create_unverified_context ++ } ++ import ConfigParser ++ try: ++ config = ConfigParser.RawConfigParser() ++ config.read(_cert_verification_config) ++ status = config.get(protocol, 'verify') ++ except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): ++ status = 'platform_default' ++ default = context_factory.get('platform_default') ++ return context_factory.get(status, default) ++ ++ + # Used by http.client if no context is explicitly passed. +-_create_default_https_context = create_default_context ++_create_default_https_context = _get_verify_status('https') + + + # Backwards compatibility alias, even though it's not a public name. 
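The /etc/python/cert-verification.cfg lookup added to Lib/ssl.py above decides whether the default HTTPS context verifies certificates: only "verify = enable" selects create_default_context, while "disable" or a missing file/section falls back to the unverified context. A minimal standalone sketch of that behaviour follows; it assumes an ssl module that already provides create_default_context and _create_unverified_context (2.7.9, or 2.7.5 with the 00223 patch applied), and the sample config shown at the end is illustrative, not shipped by the patch.

    # Sketch of the RHEL verification toggle from 00224 (Python 2 syntax).
    import ConfigParser
    import ssl

    _cert_verification_config = '/etc/python/cert-verification.cfg'

    def _get_verify_status(protocol):
        # 'enable' is the only value that turns verification on; both
        # 'disable' and the implicit 'platform_default' keep the old,
        # unverified behaviour.
        context_factory = {
            'platform_default': ssl._create_unverified_context,
            'enable': ssl.create_default_context,
            'disable': ssl._create_unverified_context,
        }
        try:
            config = ConfigParser.RawConfigParser()
            config.read(_cert_verification_config)
            status = config.get(protocol, 'verify')
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            status = 'platform_default'
        return context_factory.get(status, context_factory['platform_default'])

    # A config file that opts back in to PEP 476 verification would read:
    #
    #   [https]
    #   verify = enable

Because httplib (per the 00223 hunk) calls ssl._create_default_https_context() when no context argument is given, whichever factory this lookup returns becomes the default for HTTPSConnection objects.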
+diff -up Python-2.7.5/Lib/test/test_httplib.py.cert Python-2.7.5/Lib/test/test_httplib.py +--- Python-2.7.5/Lib/test/test_httplib.py.cert 2015-03-30 16:45:30.738794461 +0200 ++++ Python-2.7.5/Lib/test/test_httplib.py 2015-03-30 16:54:48.065062351 +0200 +@@ -516,12 +516,24 @@ class HTTPSTest(TestCase): + h = httplib.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30) + self.assertEqual(h.timeout, 30) + ++ def test_networked_default(self): ++ # specific to RHEL ++ # Default settings: doesnt requires a valid cert from a trusted CA ++ test_support.requires('network') ++ with test_support.transient_internet('self-signed.pythontest.net'): ++ h = httplib.HTTPSConnection('self-signed.pythontest.net', 443) ++ h.request('GET', '/') ++ resp = h.getresponse() ++ self.assertIn('nginx', resp.getheader('server')) ++ ++ # We have to pass safe context to test cert verification ++ # RHEL by default disable cert verification + def test_networked(self): +- # Default settings: requires a valid cert from a trusted CA + import ssl + test_support.requires('network') + with test_support.transient_internet('self-signed.pythontest.net'): +- h = httplib.HTTPSConnection('self-signed.pythontest.net', 443) ++ context = ssl.create_default_context() ++ h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, context=context) + with self.assertRaises(ssl.SSLError) as exc_info: + h.request('GET', '/') + self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED') +@@ -542,8 +554,10 @@ class HTTPSTest(TestCase): + def test_networked_trusted_by_default_cert(self): + # Default settings: requires a valid cert from a trusted CA + test_support.requires('network') ++ import ssl + with test_support.transient_internet('www.python.org'): +- h = httplib.HTTPSConnection('www.python.org', 443) ++ context = ssl.create_default_context() ++ h = httplib.HTTPSConnection('www.python.org', 443, context=context) + h.request('GET', '/') + resp = h.getresponse() + content_type = resp.getheader('content-type') +@@ -579,7 +592,8 @@ class HTTPSTest(TestCase): + # The custom cert isn't known to the default trust bundle + import ssl + server = self.make_server(CERT_localhost) +- h = httplib.HTTPSConnection('localhost', server.port) ++ context = ssl.create_default_context() ++ h = httplib.HTTPSConnection('localhost', server.port, context=context) + with self.assertRaises(ssl.SSLError) as exc_info: + h.request('GET', '/') + self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED') +@@ -624,6 +638,9 @@ class HTTPSTest(TestCase): + for hp in ("www.python.org:abc", "user:password@www.python.org"): + self.assertRaises(httplib.InvalidURL, httplib.HTTPSConnection, hp) + ++ import ssl ++ context = ssl.create_default_context() ++ + for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000", + "fe80::207:e9ff:fe9b", 8000), + ("www.python.org:443", "www.python.org", 443), +@@ -632,7 +648,7 @@ class HTTPSTest(TestCase): + ("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 443), + ("[fe80::207:e9ff:fe9b]:", "fe80::207:e9ff:fe9b", + 443)): +- c = httplib.HTTPSConnection(hp) ++ c = httplib.HTTPSConnection(hp, context=context) + self.assertEqual(h, c.host) + self.assertEqual(p, c.port) + diff --git a/SOURCES/00225-cprofile-sort-option.patch b/SOURCES/00225-cprofile-sort-option.patch new file mode 100644 index 0000000..13943bf --- /dev/null +++ b/SOURCES/00225-cprofile-sort-option.patch @@ -0,0 +1,25 @@ +diff -up Python-2.7.5/Lib/cProfile.py.sort Python-2.7.5/Lib/cProfile.py +--- Python-2.7.5/Lib/cProfile.py.sort 2015-02-09 12:45:46.525657065 +0100 
++++ Python-2.7.5/Lib/cProfile.py 2015-02-09 12:45:05.266316315 +0100 +@@ -161,7 +161,7 @@ def label(code): + # ____________________________________________________________ + + def main(): +- import os, sys ++ import os, pstats, sys + from optparse import OptionParser + usage = "cProfile.py [-o output_file_path] [-s sort] scriptfile [arg] ..." + parser = OptionParser(usage=usage) +@@ -169,8 +169,9 @@ def main(): + parser.add_option('-o', '--outfile', dest="outfile", + help="Save stats to ", default=None) + parser.add_option('-s', '--sort', dest="sort", +- help="Sort order when printing to stdout, based on pstats.Stats class", +- default=-1) ++ help="Sort order when printing to stdout, based on pstats.Stats class", ++ default=-1, ++ choices=pstats.Stats.sort_arg_dict_default.keys()) + + if not sys.argv[1:]: + parser.print_usage() + diff --git a/SOURCES/00227-accept-none-keyfile-loadcertchain.patch b/SOURCES/00227-accept-none-keyfile-loadcertchain.patch new file mode 100644 index 0000000..39d9073 --- /dev/null +++ b/SOURCES/00227-accept-none-keyfile-loadcertchain.patch @@ -0,0 +1,78 @@ +From c1f4979e7019f6c1ce9e5a02c2e3f8ca146645bc Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Mon, 11 Jul 2016 14:20:01 +0200 +Subject: [PATCH] Allow the keyfile argument of SSLContext.load_cert_chain to + be set to None + +--- + Modules/_ssl.c | 30 +++++++++++++++++++++++------- + 1 file changed, 23 insertions(+), 7 deletions(-) + +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +index 38eba1d..1786afd 100644 +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -2445,8 +2445,8 @@ static PyObject * + load_cert_chain(PySSLContext *self, PyObject *args, PyObject *kwds) + { + char *kwlist[] = {"certfile", "keyfile", "password", NULL}; +- PyObject *password = NULL; +- char *certfile_bytes = NULL, *keyfile_bytes = NULL; ++ PyObject *keyfile = NULL, *keyfile_bytes = NULL, *password = NULL; ++ char *certfile_bytes = NULL; + pem_password_cb *orig_passwd_cb = self->ctx->default_passwd_callback; + void *orig_passwd_userdata = self->ctx->default_passwd_callback_userdata; + _PySSLPasswordInfo pw_info = { NULL, NULL, NULL, 0, 0 }; +@@ -2455,11 +2455,27 @@ load_cert_chain(PySSLContext *self, PyObject *args, PyObject *kwds) + errno = 0; + ERR_clear_error(); + if (!PyArg_ParseTupleAndKeywords(args, kwds, +- "et|etO:load_cert_chain", kwlist, ++ "et|OO:load_cert_chain", kwlist, + Py_FileSystemDefaultEncoding, &certfile_bytes, +- Py_FileSystemDefaultEncoding, &keyfile_bytes, +- &password)) ++ &keyfile, &password)) + return NULL; ++ ++ if (keyfile && keyfile != Py_None) { ++ if (PyString_Check(keyfile)) { ++ Py_INCREF(keyfile); ++ keyfile_bytes = keyfile; ++ } else { ++ PyObject *u = PyUnicode_FromObject(keyfile); ++ if (!u) ++ goto error; ++ keyfile_bytes = PyUnicode_AsEncodedString( ++ u, Py_FileSystemDefaultEncoding, NULL); ++ Py_DECREF(u); ++ if (!keyfile_bytes) ++ goto error; ++ } ++ } ++ + if (password && password != Py_None) { + if (PyCallable_Check(password)) { + pw_info.callable = password; +@@ -2489,7 +2505,7 @@ load_cert_chain(PySSLContext *self, PyObject *args, PyObject *kwds) + } + PySSL_BEGIN_ALLOW_THREADS_S(pw_info.thread_state); + r = SSL_CTX_use_PrivateKey_file(self->ctx, +- keyfile_bytes ? keyfile_bytes : certfile_bytes, ++ keyfile_bytes ? 
PyBytes_AS_STRING(keyfile_bytes) : certfile_bytes, + SSL_FILETYPE_PEM); + PySSL_END_ALLOW_THREADS_S(pw_info.thread_state); + if (r != 1) { +@@ -2521,8 +2537,8 @@ load_cert_chain(PySSLContext *self, PyObject *args, PyObject *kwds) + error: + SSL_CTX_set_default_passwd_cb(self->ctx, orig_passwd_cb); + SSL_CTX_set_default_passwd_cb_userdata(self->ctx, orig_passwd_userdata); ++ Py_XDECREF(keyfile_bytes); + PyMem_Free(pw_info.password); +- PyMem_Free(keyfile_bytes); + PyMem_Free(certfile_bytes); + return NULL; + } +-- +2.7.4 + diff --git a/SOURCES/00228-backport-ssl-version.patch b/SOURCES/00228-backport-ssl-version.patch new file mode 100644 index 0000000..40ac04c --- /dev/null +++ b/SOURCES/00228-backport-ssl-version.patch @@ -0,0 +1,260 @@ + +# HG changeset patch +# User Alex Gaynor +# Date 1409862802 25200 +# Node ID 16c86a6bdbe2a545dd2de02dc9f347c2b3ae7220 +# Parent f17ab9fed3b03191df975ecdde2cc07cee915319 +Issue #20421: Add a .version() method to SSL sockets exposing the actual protocol version in use. + +Backport from default. + +diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst +--- a/Doc/library/ssl.rst ++++ b/Doc/library/ssl.rst +@@ -867,10 +867,10 @@ SSL sockets also have the following addi + + .. method:: SSLSocket.selected_npn_protocol() + +- Returns the protocol that was selected during the TLS/SSL handshake. If +- :meth:`SSLContext.set_npn_protocols` was not called, or if the other party +- does not support NPN, or if the handshake has not yet happened, this will +- return ``None``. ++ Returns the higher-level protocol that was selected during the TLS/SSL ++ handshake. If :meth:`SSLContext.set_npn_protocols` was not called, or ++ if the other party does not support NPN, or if the handshake has not yet ++ happened, this will return ``None``. + + .. versionadded:: 2.7.9 + +@@ -882,6 +882,16 @@ SSL sockets also have the following addi + returned socket should always be used for further communication with the + other side of the connection, rather than the original socket. + ++.. method:: SSLSocket.version() ++ ++ Return the actual SSL protocol version negotiated by the connection ++ as a string, or ``None`` is no secure connection is established. ++ As of this writing, possible return values include ``"SSLv2"``, ++ ``"SSLv3"``, ``"TLSv1"``, ``"TLSv1.1"`` and ``"TLSv1.2"``. ++ Recent OpenSSL versions may define more return values. ++ ++ .. versionadded:: 3.5 ++ + .. attribute:: SSLSocket.context + + The :class:`SSLContext` object this SSL socket is tied to. If the SSL +diff --git a/Lib/ssl.py b/Lib/ssl.py +--- a/Lib/ssl.py ++++ b/Lib/ssl.py +@@ -862,6 +862,15 @@ class SSLSocket(socket): + return None + return self._sslobj.tls_unique_cb() + ++ def version(self): ++ """ ++ Return a string identifying the protocol version used by the ++ current SSL channel, or None if there is no established channel. 
++ """ ++ if self._sslobj is None: ++ return None ++ return self._sslobj.version() ++ + + def wrap_socket(sock, keyfile=None, certfile=None, + server_side=False, cert_reqs=CERT_NONE, +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -1904,7 +1904,8 @@ else: + 'compression': s.compression(), + 'cipher': s.cipher(), + 'peercert': s.getpeercert(), +- 'client_npn_protocol': s.selected_npn_protocol() ++ 'client_npn_protocol': s.selected_npn_protocol(), ++ 'version': s.version(), + }) + s.close() + stats['server_npn_protocols'] = server.selected_protocols +@@ -1912,6 +1913,13 @@ else: + + def try_protocol_combo(server_protocol, client_protocol, expect_success, + certsreqs=None, server_options=0, client_options=0): ++ """ ++ Try to SSL-connect using *client_protocol* to *server_protocol*. ++ If *expect_success* is true, assert that the connection succeeds, ++ if it's false, assert that the connection fails. ++ Also, if *expect_success* is a string, assert that it is the protocol ++ version actually used by the connection. ++ """ + if certsreqs is None: + certsreqs = ssl.CERT_NONE + certtype = { +@@ -1941,8 +1949,8 @@ else: + ctx.load_cert_chain(CERTFILE) + ctx.load_verify_locations(CERTFILE) + try: +- server_params_test(client_context, server_context, +- chatty=False, connectionchatty=False) ++ stats = server_params_test(client_context, server_context, ++ chatty=False, connectionchatty=False) + # Protocol mismatch can result in either an SSLError, or a + # "Connection reset by peer" error. + except ssl.SSLError: +@@ -1957,6 +1965,10 @@ else: + "Client protocol %s succeeded with server protocol %s!" + % (ssl.get_protocol_name(client_protocol), + ssl.get_protocol_name(server_protocol))) ++ elif (expect_success is not True ++ and expect_success != stats['version']): ++ raise AssertionError("version mismatch: expected %r, got %r" ++ % (expect_success, stats['version'])) + + + class ThreadedTests(unittest.TestCase): +@@ -2186,17 +2198,17 @@ else: + sys.stdout.write( + " SSL2 client to SSL23 server test unexpectedly failed:\n %s\n" + % str(x)) +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True) ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, 'SSLv3') + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True) +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True) ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1') + +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL) ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, 'SSLv3', ssl.CERT_OPTIONAL) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_OPTIONAL) +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL) ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) + +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED) ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, 'SSLv3', ssl.CERT_REQUIRED) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED) +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED) ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) + + # Server with specific SSL options + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, +@@ -2213,9 +2225,9 @@ else: + """Connecting to an SSLv3 server 
with various client options""" + if support.verbose: + sys.stdout.write("\n") +- try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True) +- try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL) +- try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED) ++ try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, 'SSLv3') ++ try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, 'SSLv3', ssl.CERT_OPTIONAL) ++ try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, 'SSLv3', ssl.CERT_REQUIRED) + if hasattr(ssl, 'PROTOCOL_SSLv2'): + try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False) + try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, False, +@@ -2223,7 +2235,7 @@ else: + try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False) + if no_sslv2_implies_sslv3_hello(): + # No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs +- try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, True, ++ try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, 'SSLv3', + client_options=ssl.OP_NO_SSLv2) + + @skip_if_broken_ubuntu_ssl +@@ -2231,9 +2243,9 @@ else: + """Connecting to a TLSv1 server with various client options""" + if support.verbose: + sys.stdout.write("\n") +- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True) +- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL) +- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1') ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) + if hasattr(ssl, 'PROTOCOL_SSLv2'): + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False) + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False) +@@ -2248,14 +2260,14 @@ else: + Testing against older TLS versions.""" + if support.verbose: + sys.stdout.write("\n") +- try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, True) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') + if hasattr(ssl, 'PROTOCOL_SSLv2'): + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv2, False) + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv3, False) + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv23, False, + client_options=ssl.OP_NO_TLSv1_1) + +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, True) ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1, False) + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1, False) + +@@ -2268,7 +2280,7 @@ else: + Testing against older TLS versions.""" + if support.verbose: + sys.stdout.write("\n") +- try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_2, True, ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_2, 'TLSv1.2', + server_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2, + client_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2,) + if hasattr(ssl, 'PROTOCOL_SSLv2'): +@@ -2277,7 +2289,7 @@ else: + try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv23, False, + client_options=ssl.OP_NO_TLSv1_2) + +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_2, True) ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_2, 'TLSv1.2') + try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1, 
False) + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_2, False) + try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_1, False) +@@ -2619,6 +2631,21 @@ else: + s.connect((HOST, server.port)) + self.assertIn("no shared cipher", str(server.conn_errors[0])) + ++ def test_version_basic(self): ++ """ ++ Basic tests for SSLSocket.version(). ++ More tests are done in the test_protocol_*() methods. ++ """ ++ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ with ThreadedEchoServer(CERTFILE, ++ ssl_version=ssl.PROTOCOL_TLSv1, ++ chatty=False) as server: ++ with closing(context.wrap_socket(socket.socket())) as s: ++ self.assertIs(s.version(), None) ++ s.connect((HOST, server.port)) ++ self.assertEqual(s.version(), "TLSv1") ++ self.assertIs(s.version(), None) ++ + @unittest.skipUnless(ssl.HAS_ECDH, "test requires ECDH-enabled OpenSSL") + def test_default_ecdh_curve(self): + # Issue #21015: elliptic curve-based Diffie Hellman key exchange +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -1384,6 +1384,18 @@ static PyObject *PySSL_cipher (PySSLSock + return NULL; + } + ++static PyObject *PySSL_version(PySSLSocket *self) ++{ ++ const char *version; ++ ++ if (self->ssl == NULL) ++ Py_RETURN_NONE; ++ version = SSL_get_version(self->ssl); ++ if (!strcmp(version, "unknown")) ++ Py_RETURN_NONE; ++ return PyUnicode_FromString(version); ++} ++ + #ifdef OPENSSL_NPN_NEGOTIATED + static PyObject *PySSL_selected_npn_protocol(PySSLSocket *self) { + const unsigned char *out; +@@ -1907,6 +1919,7 @@ static PyMethodDef PySSLMethods[] = { + {"peer_certificate", (PyCFunction)PySSL_peercert, METH_VARARGS, + PySSL_peercert_doc}, + {"cipher", (PyCFunction)PySSL_cipher, METH_NOARGS}, ++ {"version", (PyCFunction)PySSL_version, METH_NOARGS}, + #ifdef OPENSSL_NPN_NEGOTIATED + {"selected_npn_protocol", (PyCFunction)PySSL_selected_npn_protocol, METH_NOARGS}, + #endif + diff --git a/SOURCES/00229-Expect-a-failure-when-trying-to-connect-with-SSLv2-c.patch b/SOURCES/00229-Expect-a-failure-when-trying-to-connect-with-SSLv2-c.patch new file mode 100644 index 0000000..2fee0c0 --- /dev/null +++ b/SOURCES/00229-Expect-a-failure-when-trying-to-connect-with-SSLv2-c.patch @@ -0,0 +1,48 @@ +From a1d7acf899fccd0eda10e011e2d11d1d81c2d9e6 Mon Sep 17 00:00:00 2001 +From: Robert Kuska +Date: Wed, 9 Mar 2016 20:16:17 +0100 +Subject: [PATCH] Expect a failure when trying to connect with SSLv2 client to + SSLv23 server. 
Default value of options in tests enchanced to reflect SSLv2 + being disabled + +--- + Lib/test/test_ssl.py | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index 11122db..b2ba186 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -691,7 +691,7 @@ class ContextTests(unittest.TestCase): + @skip_if_broken_ubuntu_ssl + def test_options(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- self.assertEqual(ssl.OP_ALL, ctx.options) ++ self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2, ctx.options) + ctx.options |= ssl.OP_NO_SSLv2 + self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2, + ctx.options) +@@ -2152,17 +2152,17 @@ else: + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True) + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_OPTIONAL) + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_REQUIRED) +- try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True) ++ try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False) + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv3, False) + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLSv1, False) + # SSLv23 client with specific SSL options + if no_sslv2_implies_sslv3_hello(): + # No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs + try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False, + client_options=ssl.OP_NO_SSLv2) +- try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True, ++ try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False, + client_options=ssl.OP_NO_SSLv3) +- try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True, ++ try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False, + client_options=ssl.OP_NO_TLSv1) + + @skip_if_broken_ubuntu_ssl +-- +2.5.0 + diff --git a/SOURCES/00230-force-all-child-threads-to-terminate-in-TestForkInThread.patch b/SOURCES/00230-force-all-child-threads-to-terminate-in-TestForkInThread.patch new file mode 100644 index 0000000..b6b52b3 --- /dev/null +++ b/SOURCES/00230-force-all-child-threads-to-terminate-in-TestForkInThread.patch @@ -0,0 +1,18 @@ +diff --git a/Lib/test/test_thread.py b/Lib/test/test_thread.py +index 413889a..fb312aa 100644 +--- a/Lib/test/test_thread.py ++++ b/Lib/test/test_thread.py +@@ -237,7 +237,12 @@ class TestForkInThread(unittest.TestCase): + if pid == 0: # child + os.close(self.read_fd) + os.write(self.write_fd, "OK") +- sys.exit(0) ++ # Exiting the thread normally in the child process can leave ++ # any additional threads (such as the one started by ++ # importing _tkinter) still running, and this can prevent ++ # the half-zombie child process from being cleaned up. See ++ # Issue #26456. 
++ os._exit(0) + else: # parent + os.close(self.write_fd) + diff --git a/SOURCES/00231-Initialize-OpenSSL_add_all_digests-in-_hashlib.patch b/SOURCES/00231-Initialize-OpenSSL_add_all_digests-in-_hashlib.patch new file mode 100644 index 0000000..3355ce2 --- /dev/null +++ b/SOURCES/00231-Initialize-OpenSSL_add_all_digests-in-_hashlib.patch @@ -0,0 +1,11 @@ +diff -up Python-2.7.5/Modules/_hashopenssl.c.digest Python-2.7.5/Modules/_hashopenssl.c +--- Python-2.7.5/Modules/_hashopenssl.c.digest 2016-01-05 10:53:02.947312688 +0100 ++++ Python-2.7.5/Modules/_hashopenssl.c 2016-01-05 10:53:15.504431960 +0100 +@@ -984,6 +984,7 @@ init_hashlib(void) + SSL_load_error_strings(); + SSL_library_init(); + ERR_load_crypto_strings(); ++ OpenSSL_add_all_digests(); + + Py_TYPE(&EVPtype) = &PyType_Type; + if (PyType_Ready(&EVPtype) < 0) diff --git a/SOURCES/00232-man-page-date-macro-removal.patch b/SOURCES/00232-man-page-date-macro-removal.patch new file mode 100644 index 0000000..c79aed6 --- /dev/null +++ b/SOURCES/00232-man-page-date-macro-removal.patch @@ -0,0 +1,9 @@ +diff -r 62e3b7af0697 -r db8d7f376d24 Misc/python.man +--- a/Misc/python.man Mon Mar 21 10:38:58 2016 +0100 ++++ b/Misc/python.man Mon Mar 21 13:54:28 2016 +0100 +@@ -1,4 +1,4 @@ +-.TH PYTHON "1" "$Date$" ++.TH PYTHON "1" + + .\" To view this file while editing, run it through groff: + .\" groff -Tascii -man python.man | less diff --git a/SOURCES/00233-Computed-Goto-dispatch.patch b/SOURCES/00233-Computed-Goto-dispatch.patch new file mode 100644 index 0000000..7b4c5fe --- /dev/null +++ b/SOURCES/00233-Computed-Goto-dispatch.patch @@ -0,0 +1,2274 @@ +From 66818f47df1e37b105fd42d6cbaa756c4d72393c Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Wed, 30 Mar 2016 15:54:29 +0200 +Subject: [PATCH] Computed Goto dispatch for Python2 + +--- + Include/opcode.h | 9 + + Makefile.pre.in | 15 + + Python/ceval.c | 770 +++++++++++++++++++++++++++++++------------- + Python/makeopcodetargets.py | 45 +++ + Python/opcode_targets.h | 258 +++++++++++++++ + configure | 81 +++++ + configure.ac | 51 +++ + pyconfig.h.in | 6 + + 8 files changed, 1005 insertions(+), 230 deletions(-) + +diff --git a/Include/opcode.h b/Include/opcode.h +index 9764109..9ed5487 100644 +--- a/Include/opcode.h ++++ b/Include/opcode.h +@@ -37,12 +37,21 @@ extern "C" { + + #define SLICE 30 + /* Also uses 31-33 */ ++#define SLICE_1 31 ++#define SLICE_2 32 ++#define SLICE_3 33 + + #define STORE_SLICE 40 + /* Also uses 41-43 */ ++#define STORE_SLICE_1 41 ++#define STORE_SLICE_2 42 ++#define STORE_SLICE_3 43 + + #define DELETE_SLICE 50 + /* Also uses 51-53 */ ++#define DELETE_SLICE_1 51 ++#define DELETE_SLICE_2 52 ++#define DELETE_SLICE_3 53 + + #define STORE_MAP 54 + #define INPLACE_ADD 55 +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 4ee61f6..611f63a 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -299,6 +299,16 @@ ASDLGEN= $(srcdir)/Parser/asdl_c.py + + ########################################################################## + # Python ++ ++OPCODETARGETS_H= \ ++ $(srcdir)/Python/opcode_targets.h ++ ++OPCODETARGETGEN= \ ++ $(srcdir)/Python/makeopcodetargets.py ++ ++OPCODETARGETGEN_FILES= \ ++ $(OPCODETARGETGEN) $(srcdir)/Lib/opcode.py ++ + PYTHON_OBJS= \ + Python/_warnings.o \ + Python/Python-ast.o \ +@@ -648,6 +658,11 @@ Objects/bytearrayobject.o: $(srcdir)/Objects/bytearrayobject.c \ + Objects/stringobject.o: $(srcdir)/Objects/stringobject.c \ + $(STRINGLIB_HEADERS) + ++$(OPCODETARGETS_H): $(OPCODETARGETGEN_FILES) ++ $(OPCODETARGETGEN) $(OPCODETARGETS_H) ++ 
++Python/ceval.o: $(OPCODETARGETS_H) ++ + Python/formatter_unicode.o: $(srcdir)/Python/formatter_unicode.c \ + $(STRINGLIB_HEADERS) + +diff --git a/Python/ceval.c b/Python/ceval.c +index 8c6f8d7..67d1576 100644 +--- a/Python/ceval.c ++++ b/Python/ceval.c +@@ -757,6 +757,99 @@ PyEval_EvalFrame(PyFrameObject *f) { + PyObject * + PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + { ++#ifdef DYNAMIC_EXECUTION_PROFILE ++ #undef USE_COMPUTED_GOTOS ++#endif ++#ifdef HAVE_COMPUTED_GOTOS ++ #ifndef USE_COMPUTED_GOTOS ++ #define USE_COMPUTED_GOTOS 1 ++ #endif ++#else ++ #if defined(USE_COMPUTED_GOTOS) && USE_COMPUTED_GOTOS ++ #error "Computed gotos are not supported on this compiler." ++ #endif ++ #undef USE_COMPUTED_GOTOS ++ #define USE_COMPUTED_GOTOS 0 ++#endif ++#if USE_COMPUTED_GOTOS ++/* Import the static jump table */ ++#include "opcode_targets.h" ++ ++ /* This macro is used when several opcodes defer to the same implementation ++ (e.g. SETUP_LOOP, SETUP_FINALLY) */ ++#define TARGET_WITH_IMPL(op, impl) \ ++ TARGET_##op: \ ++ opcode = op; \ ++ oparg = NEXTARG(); \ ++ case op: \ ++ goto impl; \ ++ ++#define TARGET_WITH_IMPL_NOARG(op, impl) \ ++ TARGET_##op: \ ++ opcode = op; \ ++ case op: \ ++ goto impl; \ ++ ++#define TARGET_NOARG(op) \ ++ TARGET_##op: \ ++ opcode = op; \ ++ case op: \ ++ ++#define TARGET(op) \ ++ TARGET_##op: \ ++ opcode = op; \ ++ oparg = NEXTARG(); \ ++ case op:\ ++ ++#define DISPATCH() \ ++ { \ ++ int _tick = _Py_Ticker - 1; \ ++ _Py_Ticker = _tick; \ ++ if (_tick >= 0) { \ ++ FAST_DISPATCH(); \ ++ } \ ++ continue; \ ++ } ++ ++#ifdef LLTRACE ++#define FAST_DISPATCH() \ ++ { \ ++ if (!lltrace && !_Py_TracingPossible) { \ ++ f->f_lasti = INSTR_OFFSET(); \ ++ goto *opcode_targets[*next_instr++]; \ ++ } \ ++ goto fast_next_opcode; \ ++ } ++#else ++#define FAST_DISPATCH() { \ ++ if (!_Py_TracingPossible) { \ ++ f->f_lasti = INSTR_OFFSET(); \ ++ goto *opcode_targets[*next_instr++]; \ ++ } \ ++ goto fast_next_opcode;\ ++} ++#endif ++ ++#else ++#define TARGET(op) \ ++ case op: ++#define TARGET_WITH_IMPL(op, impl) \ ++ /* silence compiler warnings about `impl` unused */ \ ++ if (0) goto impl; ++ case op:\ ++ ++#define TARGET_NOARG(op) \ ++ case op:\ ++ ++#define TARGET_WITH_IMPL_NOARG(op, impl) \ ++ if (0) goto impl; \ ++ case op:\ ++ ++#define DISPATCH() continue ++#define FAST_DISPATCH() goto fast_next_opcode ++#endif ++ ++ + #ifdef DXPAIRS + int lastopcode = 0; + #endif +@@ -874,14 +967,17 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + counter updates for both opcodes. + */ + +-#ifdef DYNAMIC_EXECUTION_PROFILE ++// Next opcode prediction is also enabled for Computed Gotos as well. ++#if defined(DYNAMIC_EXECUTION_PROFILE) || USE_COMPUTED_GOTOS + #define PREDICT(op) if (0) goto PRED_##op ++#define PREDICTED(op) PRED_##op: ++#define PREDICTED_WITH_ARG(op) PRED_##op: + #else + #define PREDICT(op) if (*next_instr == op) goto PRED_##op +-#endif +- + #define PREDICTED(op) PRED_##op: next_instr++ + #define PREDICTED_WITH_ARG(op) PRED_##op: oparg = PEEKARG(); next_instr += 3 ++#endif ++ + + /* Stack manipulation macros */ + +@@ -1179,55 +1275,71 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + + /* case STOP_CODE: this is an error! 
*/ + +- case NOP: +- goto fast_next_opcode; ++ TARGET_NOARG(NOP) ++ { ++ FAST_DISPATCH(); ++ } ++ TARGET(LOAD_FAST) ++ { + +- case LOAD_FAST: + x = GETLOCAL(oparg); + if (x != NULL) { + Py_INCREF(x); + PUSH(x); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + format_exc_check_arg(PyExc_UnboundLocalError, + UNBOUNDLOCAL_ERROR_MSG, + PyTuple_GetItem(co->co_varnames, oparg)); + break; + +- case LOAD_CONST: ++ } ++ ++ TARGET(LOAD_CONST) ++ { + x = GETITEM(consts, oparg); + Py_INCREF(x); + PUSH(x); +- goto fast_next_opcode; ++ FAST_DISPATCH(); ++ } + + PREDICTED_WITH_ARG(STORE_FAST); +- case STORE_FAST: ++ TARGET(STORE_FAST) ++ { + v = POP(); + SETLOCAL(oparg, v); +- goto fast_next_opcode; ++ FAST_DISPATCH(); ++ } + +- case POP_TOP: ++ TARGET_NOARG(POP_TOP) ++ { + v = POP(); + Py_DECREF(v); +- goto fast_next_opcode; ++ FAST_DISPATCH(); ++ } + +- case ROT_TWO: ++ TARGET_NOARG(ROT_TWO) ++ { + v = TOP(); + w = SECOND(); + SET_TOP(w); + SET_SECOND(v); +- goto fast_next_opcode; ++ FAST_DISPATCH(); ++ } + +- case ROT_THREE: ++ TARGET_NOARG(ROT_THREE) ++ { + v = TOP(); + w = SECOND(); + x = THIRD(); + SET_TOP(w); + SET_SECOND(x); + SET_THIRD(v); +- goto fast_next_opcode; ++ FAST_DISPATCH(); ++ } + +- case ROT_FOUR: ++ TARGET_NOARG(ROT_FOUR) ++ { + u = TOP(); + v = SECOND(); + w = THIRD(); +@@ -1236,15 +1348,19 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + SET_SECOND(w); + SET_THIRD(x); + SET_FOURTH(u); +- goto fast_next_opcode; ++ FAST_DISPATCH(); ++ } + +- case DUP_TOP: ++ TARGET_NOARG(DUP_TOP) ++ { + v = TOP(); + Py_INCREF(v); + PUSH(v); +- goto fast_next_opcode; ++ FAST_DISPATCH(); ++ } + +- case DUP_TOPX: ++ TARGET(DUP_TOPX) ++ { + if (oparg == 2) { + x = TOP(); + Py_INCREF(x); +@@ -1253,7 +1369,7 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + STACKADJ(2); + SET_TOP(x); + SET_SECOND(w); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } else if (oparg == 3) { + x = TOP(); + Py_INCREF(x); +@@ -1265,84 +1381,100 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + SET_TOP(x); + SET_SECOND(w); + SET_THIRD(v); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + Py_FatalError("invalid argument to DUP_TOPX" + " (bytecode corruption?)"); + /* Never returns, so don't bother to set why. 
*/ + break; ++ } + +- case UNARY_POSITIVE: ++ TARGET_NOARG(UNARY_POSITIVE) ++ { + v = TOP(); + x = PyNumber_Positive(v); + Py_DECREF(v); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case UNARY_NEGATIVE: ++ TARGET_NOARG( UNARY_NEGATIVE) ++ { + v = TOP(); + x = PyNumber_Negative(v); + Py_DECREF(v); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case UNARY_NOT: ++ TARGET_NOARG(UNARY_NOT) ++ { + v = TOP(); + err = PyObject_IsTrue(v); + Py_DECREF(v); + if (err == 0) { + Py_INCREF(Py_True); + SET_TOP(Py_True); +- continue; ++ DISPATCH(); + } + else if (err > 0) { + Py_INCREF(Py_False); + SET_TOP(Py_False); + err = 0; +- continue; ++ DISPATCH(); + } + STACKADJ(-1); + break; ++ } + +- case UNARY_CONVERT: ++ TARGET_NOARG(UNARY_CONVERT) ++ { + v = TOP(); + x = PyObject_Repr(v); + Py_DECREF(v); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case UNARY_INVERT: ++ TARGET_NOARG(UNARY_INVERT) ++ { + v = TOP(); + x = PyNumber_Invert(v); + Py_DECREF(v); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_POWER: ++ TARGET_NOARG(BINARY_POWER) ++ { + w = POP(); + v = TOP(); + x = PyNumber_Power(v, w, Py_None); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_MULTIPLY: ++ TARGET_NOARG(BINARY_MULTIPLY) ++ { + w = POP(); + v = TOP(); + x = PyNumber_Multiply(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if(x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_DIVIDE: ++ TARGET_NOARG(BINARY_DIVIDE) ++ { + if (!_Py_QnewFlag) { + w = POP(); + v = TOP(); +@@ -1350,32 +1482,37 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; + } +- /* -Qnew is in effect: fall through to +- BINARY_TRUE_DIVIDE */ +- case BINARY_TRUE_DIVIDE: ++ } ++ /* -Qnew is in effect: fall through to BINARY_TRUE_DIVIDE */ ++ TARGET_NOARG(BINARY_TRUE_DIVIDE) ++ { + w = POP(); + v = TOP(); + x = PyNumber_TrueDivide(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_FLOOR_DIVIDE: ++ TARGET_NOARG(BINARY_FLOOR_DIVIDE) ++ { + w = POP(); + v = TOP(); + x = PyNumber_FloorDivide(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_MODULO: ++ TARGET_NOARG(BINARY_MODULO) ++ { + w = POP(); + v = TOP(); + if (PyString_CheckExact(v)) +@@ -1385,10 +1522,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_ADD: ++ TARGET_NOARG(BINARY_ADD) ++ { + w = POP(); + v = TOP(); + if (PyInt_CheckExact(v) && PyInt_CheckExact(w)) { +@@ -1417,10 +1556,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + skip_decref_vx: + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_SUBTRACT: ++ TARGET_NOARG(BINARY_SUBTRACT) ++ { + w = POP(); + v = TOP(); + if (PyInt_CheckExact(v) && PyInt_CheckExact(w)) { +@@ -1442,10 +1583,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + 
break; ++ } + +- case BINARY_SUBSCR: ++ TARGET_NOARG(BINARY_SUBSCR) ++ { + w = POP(); + v = TOP(); + if (PyList_CheckExact(v) && PyInt_CheckExact(w)) { +@@ -1466,102 +1609,122 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_LSHIFT: ++ TARGET_NOARG(BINARY_LSHIFT) ++ { + w = POP(); + v = TOP(); + x = PyNumber_Lshift(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_RSHIFT: ++ TARGET_NOARG(BINARY_RSHIFT) ++ { + w = POP(); + v = TOP(); + x = PyNumber_Rshift(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_AND: ++ TARGET_NOARG(BINARY_AND) ++ { + w = POP(); + v = TOP(); + x = PyNumber_And(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_XOR: ++ TARGET_NOARG(BINARY_XOR) ++ { + w = POP(); + v = TOP(); + x = PyNumber_Xor(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case BINARY_OR: ++ TARGET_NOARG(BINARY_OR) ++ { + w = POP(); + v = TOP(); + x = PyNumber_Or(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case LIST_APPEND: ++ TARGET(LIST_APPEND) ++ { + w = POP(); + v = PEEK(oparg); + err = PyList_Append(v, w); + Py_DECREF(w); + if (err == 0) { + PREDICT(JUMP_ABSOLUTE); +- continue; ++ DISPATCH(); + } + break; ++ } + +- case SET_ADD: ++ TARGET(SET_ADD) ++ { + w = POP(); + v = stack_pointer[-oparg]; + err = PySet_Add(v, w); + Py_DECREF(w); + if (err == 0) { + PREDICT(JUMP_ABSOLUTE); +- continue; ++ DISPATCH(); + } + break; ++ } + +- case INPLACE_POWER: ++ TARGET_NOARG(INPLACE_POWER) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlacePower(v, w, Py_None); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_MULTIPLY: ++ TARGET_NOARG(INPLACE_MULTIPLY) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlaceMultiply(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_DIVIDE: ++ TARGET_NOARG(INPLACE_DIVIDE) ++ { + if (!_Py_QnewFlag) { + w = POP(); + v = TOP(); +@@ -1569,42 +1732,50 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; + } ++ } + /* -Qnew is in effect: fall through to + INPLACE_TRUE_DIVIDE */ +- case INPLACE_TRUE_DIVIDE: ++ TARGET_NOARG(INPLACE_TRUE_DIVIDE) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlaceTrueDivide(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_FLOOR_DIVIDE: ++ TARGET_NOARG(INPLACE_FLOOR_DIVIDE) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlaceFloorDivide(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_MODULO: ++ TARGET_NOARG(INPLACE_MODULO) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlaceRemainder(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; 
++ } + +- case INPLACE_ADD: ++ TARGET_NOARG(INPLACE_ADD) ++ { + w = POP(); + v = TOP(); + if (PyInt_CheckExact(v) && PyInt_CheckExact(w)) { +@@ -1631,10 +1802,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + skip_decref_v: + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_SUBTRACT: ++ TARGET_NOARG(INPLACE_SUBTRACT) ++ { + w = POP(); + v = TOP(); + if (PyInt_CheckExact(v) && PyInt_CheckExact(w)) { +@@ -1654,63 +1827,78 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_LSHIFT: ++ TARGET_NOARG(INPLACE_LSHIFT) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlaceLshift(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_RSHIFT: ++ TARGET_NOARG(INPLACE_RSHIFT) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlaceRshift(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_AND: ++ TARGET_NOARG(INPLACE_AND) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlaceAnd(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_XOR: ++ TARGET_NOARG(INPLACE_XOR) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlaceXor(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case INPLACE_OR: ++ TARGET_NOARG(INPLACE_OR) ++ { + w = POP(); + v = TOP(); + x = PyNumber_InPlaceOr(v, w); + Py_DECREF(v); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } ++ + +- case SLICE+0: +- case SLICE+1: +- case SLICE+2: +- case SLICE+3: ++ ++ TARGET_WITH_IMPL_NOARG(SLICE, _slice) ++ TARGET_WITH_IMPL_NOARG(SLICE_1, _slice) ++ TARGET_WITH_IMPL_NOARG(SLICE_2, _slice) ++ TARGET_WITH_IMPL_NOARG(SLICE_3, _slice) ++ _slice: ++ { + if ((opcode-SLICE) & 2) + w = POP(); + else +@@ -1725,13 +1913,17 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_XDECREF(v); + Py_XDECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } ++ + +- case STORE_SLICE+0: +- case STORE_SLICE+1: +- case STORE_SLICE+2: +- case STORE_SLICE+3: ++ TARGET_WITH_IMPL_NOARG(STORE_SLICE, _store_slice) ++ TARGET_WITH_IMPL_NOARG(STORE_SLICE_1, _store_slice) ++ TARGET_WITH_IMPL_NOARG(STORE_SLICE_2, _store_slice) ++ TARGET_WITH_IMPL_NOARG(STORE_SLICE_3, _store_slice) ++ _store_slice: ++ { + if ((opcode-STORE_SLICE) & 2) + w = POP(); + else +@@ -1747,13 +1939,17 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(u); + Py_XDECREF(v); + Py_XDECREF(w); +- if (err == 0) continue; ++ if (err == 0) DISPATCH(); + break; ++ } ++ + +- case DELETE_SLICE+0: +- case DELETE_SLICE+1: +- case DELETE_SLICE+2: +- case DELETE_SLICE+3: ++ TARGET_WITH_IMPL_NOARG(DELETE_SLICE, _delete_slice) ++ TARGET_WITH_IMPL_NOARG(DELETE_SLICE_1, _delete_slice) ++ TARGET_WITH_IMPL_NOARG(DELETE_SLICE_2, _delete_slice) ++ TARGET_WITH_IMPL_NOARG(DELETE_SLICE_3, _delete_slice) ++ _delete_slice: ++ { + if ((opcode-DELETE_SLICE) & 2) + w = POP(); + else +@@ -1768,10 +1964,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(u); + Py_XDECREF(v); + Py_XDECREF(w); +- if (err == 0) continue; ++ if (err == 0) DISPATCH(); + break; ++ } + 
+- case STORE_SUBSCR: ++ TARGET_NOARG(STORE_SUBSCR) ++ { + w = TOP(); + v = SECOND(); + u = THIRD(); +@@ -1781,10 +1979,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(u); + Py_DECREF(v); + Py_DECREF(w); +- if (err == 0) continue; ++ if (err == 0) DISPATCH(); + break; ++ } + +- case DELETE_SUBSCR: ++ TARGET_NOARG(DELETE_SUBSCR) ++ { + w = TOP(); + v = SECOND(); + STACKADJ(-2); +@@ -1792,10 +1992,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + err = PyObject_DelItem(v, w); + Py_DECREF(v); + Py_DECREF(w); +- if (err == 0) continue; ++ if (err == 0) DISPATCH(); + break; ++ } + +- case PRINT_EXPR: ++ TARGET_NOARG(PRINT_EXPR) ++ { + v = POP(); + w = PySys_GetObject("displayhook"); + if (w == NULL) { +@@ -1818,12 +2020,16 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_XDECREF(x); + break; ++ } + +- case PRINT_ITEM_TO: ++ TARGET_NOARG(PRINT_ITEM_TO) ++ { + w = stream = POP(); + /* fall through to PRINT_ITEM */ ++ } + +- case PRINT_ITEM: ++ TARGET_NOARG(PRINT_ITEM) ++ { + v = POP(); + if (stream == NULL || stream == Py_None) { + w = PySys_GetObject("stdout"); +@@ -1869,16 +2075,20 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_XDECREF(stream); + stream = NULL; +- if (err == 0) +- continue; ++ if (err == 0) DISPATCH(); + break; ++ } + +- case PRINT_NEWLINE_TO: ++ TARGET_NOARG(PRINT_NEWLINE_TO) ++ { + w = stream = POP(); + /* fall through to PRINT_NEWLINE */ ++ } + +- case PRINT_NEWLINE: +- if (stream == NULL || stream == Py_None) { ++ TARGET_NOARG(PRINT_NEWLINE) ++ { ++ if (stream == NULL || stream == Py_None) ++ { + w = PySys_GetObject("stdout"); + if (w == NULL) { + PyErr_SetString(PyExc_RuntimeError, +@@ -1899,11 +2109,13 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + stream = NULL; + break; + +- ++ } + #ifdef CASE_TOO_BIG + default: switch (opcode) { + #endif +- case RAISE_VARARGS: ++ ++ TARGET(RAISE_VARARGS) ++ { + u = v = w = NULL; + switch (oparg) { + case 3: +@@ -1924,28 +2136,37 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + break; + } + break; ++ } + +- case LOAD_LOCALS: +- if ((x = f->f_locals) != NULL) { ++ TARGET_NOARG(LOAD_LOCALS) ++ { ++ if ((x = f->f_locals) != NULL) ++ { + Py_INCREF(x); + PUSH(x); +- continue; ++ DISPATCH(); + } + PyErr_SetString(PyExc_SystemError, "no locals"); + break; ++ } + +- case RETURN_VALUE: ++ TARGET_NOARG(RETURN_VALUE) ++ { + retval = POP(); + why = WHY_RETURN; + goto fast_block_end; ++ } + +- case YIELD_VALUE: ++ TARGET_NOARG(YIELD_VALUE) ++ { + retval = POP(); + f->f_stacktop = stack_pointer; + why = WHY_YIELD; + goto fast_yield; ++ } + +- case EXEC_STMT: ++ TARGET_NOARG(EXEC_STMT) ++ { + w = TOP(); + v = SECOND(); + u = THIRD(); +@@ -1957,8 +2178,10 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_DECREF(w); + break; ++ } + +- case POP_BLOCK: ++ TARGET_NOARG(POP_BLOCK) ++ { + { + PyTryBlock *b = PyFrame_BlockPop(f); + while (STACK_LEVEL() > b->b_level) { +@@ -1966,10 +2189,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + } + } +- continue; ++ DISPATCH(); ++ } + + PREDICTED(END_FINALLY); +- case END_FINALLY: ++ TARGET_NOARG(END_FINALLY) ++ { + v = POP(); + if (PyInt_Check(v)) { + why = (enum why_code) PyInt_AS_LONG(v); +@@ -1993,8 +2218,10 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + } + Py_DECREF(v); + break; ++ } + +- case BUILD_CLASS: ++ TARGET_NOARG(BUILD_CLASS) ++ { + u = TOP(); + v = SECOND(); + w = THIRD(); +@@ -2005,8 +2232,10 @@ PyEval_EvalFrameEx(PyFrameObject *f, int 
throwflag) + Py_DECREF(v); + Py_DECREF(w); + break; ++ } + +- case STORE_NAME: ++ TARGET(STORE_NAME) ++ { + w = GETITEM(names, oparg); + v = POP(); + if ((x = f->f_locals) != NULL) { +@@ -2015,15 +2244,17 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + else + err = PyObject_SetItem(x, w, v); + Py_DECREF(v); +- if (err == 0) continue; ++ if (err == 0) DISPATCH(); + break; + } + PyErr_Format(PyExc_SystemError, + "no locals found when storing %s", + PyObject_REPR(w)); + break; ++ } + +- case DELETE_NAME: ++ TARGET(DELETE_NAME) ++ { + w = GETITEM(names, oparg); + if ((x = f->f_locals) != NULL) { + if ((err = PyObject_DelItem(x, w)) != 0) +@@ -2036,9 +2267,11 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + "no locals when deleting %s", + PyObject_REPR(w)); + break; ++ } + + PREDICTED_WITH_ARG(UNPACK_SEQUENCE); +- case UNPACK_SEQUENCE: ++ TARGET(UNPACK_SEQUENCE) ++ { + v = POP(); + if (PyTuple_CheckExact(v) && + PyTuple_GET_SIZE(v) == oparg) { +@@ -2050,7 +2283,7 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + PUSH(w); + } + Py_DECREF(v); +- continue; ++ DISPATCH(); + } else if (PyList_CheckExact(v) && + PyList_GET_SIZE(v) == oparg) { + PyObject **items = \ +@@ -2069,8 +2302,10 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + } + Py_DECREF(v); + break; ++ } + +- case STORE_ATTR: ++ TARGET(STORE_ATTR) ++ { + w = GETITEM(names, oparg); + v = TOP(); + u = SECOND(); +@@ -2078,33 +2313,41 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + err = PyObject_SetAttr(v, w, u); /* v.w = u */ + Py_DECREF(v); + Py_DECREF(u); +- if (err == 0) continue; ++ if (err == 0) DISPATCH(); + break; ++ } + +- case DELETE_ATTR: ++ TARGET(DELETE_ATTR) ++ { + w = GETITEM(names, oparg); + v = POP(); + err = PyObject_SetAttr(v, w, (PyObject *)NULL); + /* del v.w */ + Py_DECREF(v); + break; ++ } + +- case STORE_GLOBAL: ++ TARGET(STORE_GLOBAL) ++ { + w = GETITEM(names, oparg); + v = POP(); + err = PyDict_SetItem(f->f_globals, w, v); + Py_DECREF(v); +- if (err == 0) continue; ++ if (err == 0) DISPATCH(); + break; ++ } + +- case DELETE_GLOBAL: ++ TARGET(DELETE_GLOBAL) ++ { + w = GETITEM(names, oparg); + if ((err = PyDict_DelItem(f->f_globals, w)) != 0) + format_exc_check_arg( + PyExc_NameError, GLOBAL_NAME_ERROR_MSG, w); + break; ++ } + +- case LOAD_NAME: ++ TARGET(LOAD_NAME) ++ { + w = GETITEM(names, oparg); + if ((v = f->f_locals) == NULL) { + PyErr_Format(PyExc_SystemError, +@@ -2140,9 +2383,11 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_INCREF(x); + } + PUSH(x); +- continue; ++ DISPATCH(); ++ } + +- case LOAD_GLOBAL: ++ TARGET(LOAD_GLOBAL) ++ { + w = GETITEM(names, oparg); + if (PyString_CheckExact(w)) { + /* Inline the PyDict_GetItem() calls. 
+@@ -2162,7 +2407,7 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + if (x != NULL) { + Py_INCREF(x); + PUSH(x); +- continue; ++ DISPATCH(); + } + d = (PyDictObject *)(f->f_builtins); + e = d->ma_lookup(d, w, hash); +@@ -2174,7 +2419,7 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + if (x != NULL) { + Py_INCREF(x); + PUSH(x); +- continue; ++ DISPATCH(); + } + goto load_global_error; + } +@@ -2193,13 +2438,15 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + } + Py_INCREF(x); + PUSH(x); +- continue; ++ DISPATCH(); ++ } + +- case DELETE_FAST: ++ TARGET(DELETE_FAST) ++ { + x = GETLOCAL(oparg); + if (x != NULL) { + SETLOCAL(oparg, NULL); +- continue; ++ DISPATCH(); + } + format_exc_check_arg( + PyExc_UnboundLocalError, +@@ -2207,20 +2454,24 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + PyTuple_GetItem(co->co_varnames, oparg) + ); + break; ++ } + +- case LOAD_CLOSURE: ++ TARGET(LOAD_CLOSURE) ++ { + x = freevars[oparg]; + Py_INCREF(x); + PUSH(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case LOAD_DEREF: ++ TARGET(LOAD_DEREF) ++ { + x = freevars[oparg]; + w = PyCell_Get(x); + if (w != NULL) { + PUSH(w); +- continue; ++ DISPATCH(); + } + err = -1; + /* Don't stomp existing exception */ +@@ -2240,15 +2491,19 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + UNBOUNDFREE_ERROR_MSG, v); + } + break; ++ } + +- case STORE_DEREF: ++ TARGET(STORE_DEREF) ++ { + w = POP(); + x = freevars[oparg]; + PyCell_Set(x, w); + Py_DECREF(w); +- continue; ++ DISPATCH(); ++ } + +- case BUILD_TUPLE: ++ TARGET(BUILD_TUPLE) ++ { + x = PyTuple_New(oparg); + if (x != NULL) { + for (; --oparg >= 0;) { +@@ -2256,11 +2511,13 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + PyTuple_SET_ITEM(x, oparg, w); + } + PUSH(x); +- continue; ++ DISPATCH(); + } + break; ++ } + +- case BUILD_LIST: ++ TARGET(BUILD_LIST) ++ { + x = PyList_New(oparg); + if (x != NULL) { + for (; --oparg >= 0;) { +@@ -2268,11 +2525,13 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + PyList_SET_ITEM(x, oparg, w); + } + PUSH(x); +- continue; ++ DISPATCH(); + } + break; ++ } + +- case BUILD_SET: ++ TARGET(BUILD_SET) ++ { + x = PySet_New(NULL); + if (x != NULL) { + for (; --oparg >= 0;) { +@@ -2286,18 +2545,21 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + break; + } + PUSH(x); +- continue; ++ DISPATCH(); + } + break; ++ } + +- +- case BUILD_MAP: ++ TARGET(BUILD_MAP) ++ { + x = _PyDict_NewPresized((Py_ssize_t)oparg); + PUSH(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case STORE_MAP: ++ TARGET_NOARG(STORE_MAP) ++ { + w = TOP(); /* key */ + u = SECOND(); /* value */ + v = THIRD(); /* dict */ +@@ -2306,10 +2568,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + err = PyDict_SetItem(v, w, u); /* v[w] = u */ + Py_DECREF(u); + Py_DECREF(w); +- if (err == 0) continue; ++ if (err == 0) DISPATCH(); + break; ++ } + +- case MAP_ADD: ++ TARGET(MAP_ADD) ++ { + w = TOP(); /* key */ + u = SECOND(); /* value */ + STACKADJ(-2); +@@ -2320,20 +2584,24 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(w); + if (err == 0) { + PREDICT(JUMP_ABSOLUTE); +- continue; ++ DISPATCH(); + } + break; ++ } + +- case LOAD_ATTR: ++ TARGET(LOAD_ATTR) ++ { + w = GETITEM(names, oparg); + v = TOP(); + x = PyObject_GetAttr(v, w); + Py_DECREF(v); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case COMPARE_OP: ++ TARGET(COMPARE_OP) ++ { + w = POP(); + v = TOP(); + if (PyInt_CheckExact(w) && 
PyInt_CheckExact(v)) { +@@ -2366,9 +2634,11 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + if (x == NULL) break; + PREDICT(POP_JUMP_IF_FALSE); + PREDICT(POP_JUMP_IF_TRUE); +- continue; ++ DISPATCH(); ++ } + +- case IMPORT_NAME: ++ TARGET(IMPORT_NAME) ++ { + w = GETITEM(names, oparg); + x = PyDict_GetItemString(f->f_builtins, "__import__"); + if (x == NULL) { +@@ -2409,10 +2679,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + READ_TIMESTAMP(intr1); + Py_DECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case IMPORT_STAR: ++ TARGET_NOARG(IMPORT_STAR) ++ { + v = POP(); + PyFrame_FastToLocals(f); + if ((x = f->f_locals) == NULL) { +@@ -2425,34 +2697,40 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + READ_TIMESTAMP(intr1); + PyFrame_LocalsToFast(f, 0); + Py_DECREF(v); +- if (err == 0) continue; ++ if (err == 0) DISPATCH(); + break; ++ } + +- case IMPORT_FROM: ++ TARGET(IMPORT_FROM) ++ { + w = GETITEM(names, oparg); + v = TOP(); + READ_TIMESTAMP(intr0); + x = import_from(v, w); + READ_TIMESTAMP(intr1); + PUSH(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case JUMP_FORWARD: ++ TARGET(JUMP_FORWARD) ++ { + JUMPBY(oparg); +- goto fast_next_opcode; ++ FAST_DISPATCH(); ++ } + + PREDICTED_WITH_ARG(POP_JUMP_IF_FALSE); +- case POP_JUMP_IF_FALSE: ++ TARGET(POP_JUMP_IF_FALSE) ++ { + w = POP(); + if (w == Py_True) { + Py_DECREF(w); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + if (w == Py_False) { + Py_DECREF(w); + JUMPTO(oparg); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + err = PyObject_IsTrue(w); + Py_DECREF(w); +@@ -2462,19 +2740,20 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + JUMPTO(oparg); + else + break; +- continue; +- ++ DISPATCH(); ++ } + PREDICTED_WITH_ARG(POP_JUMP_IF_TRUE); +- case POP_JUMP_IF_TRUE: ++ TARGET(POP_JUMP_IF_TRUE) ++ { + w = POP(); + if (w == Py_False) { + Py_DECREF(w); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + if (w == Py_True) { + Py_DECREF(w); + JUMPTO(oparg); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + err = PyObject_IsTrue(w); + Py_DECREF(w); +@@ -2486,18 +2765,20 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + ; + else + break; +- continue; ++ DISPATCH(); ++ } + +- case JUMP_IF_FALSE_OR_POP: ++ TARGET(JUMP_IF_FALSE_OR_POP) ++ { + w = TOP(); + if (w == Py_True) { + STACKADJ(-1); + Py_DECREF(w); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + if (w == Py_False) { + JUMPTO(oparg); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + err = PyObject_IsTrue(w); + if (err > 0) { +@@ -2509,18 +2790,20 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + JUMPTO(oparg); + else + break; +- continue; ++ DISPATCH(); ++ } + +- case JUMP_IF_TRUE_OR_POP: ++ TARGET(JUMP_IF_TRUE_OR_POP) ++ { + w = TOP(); + if (w == Py_False) { + STACKADJ(-1); + Py_DECREF(w); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + if (w == Py_True) { + JUMPTO(oparg); +- goto fast_next_opcode; ++ FAST_DISPATCH(); + } + err = PyObject_IsTrue(w); + if (err > 0) { +@@ -2533,10 +2816,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + } + else + break; +- continue; ++ DISPATCH(); ++ } + + PREDICTED_WITH_ARG(JUMP_ABSOLUTE); +- case JUMP_ABSOLUTE: ++ TARGET(JUMP_ABSOLUTE) ++ { + JUMPTO(oparg); + #if FAST_LOOPS + /* Enabling this path speeds-up all while and for-loops by bypassing +@@ -2548,10 +2833,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + */ + goto fast_next_opcode; + #else +- continue; ++ DISPATCH(); + #endif ++ } + +- case 
GET_ITER: ++ TARGET_NOARG(GET_ITER) ++ { + /* before: [obj]; after [getiter(obj)] */ + v = TOP(); + x = PyObject_GetIter(v); +@@ -2559,13 +2846,15 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + if (x != NULL) { + SET_TOP(x); + PREDICT(FOR_ITER); +- continue; ++ DISPATCH(); + } + STACKADJ(-1); + break; ++ } + + PREDICTED_WITH_ARG(FOR_ITER); +- case FOR_ITER: ++ TARGET(FOR_ITER) ++ { + /* before: [iter]; after: [iter, iter()] *or* [] */ + v = TOP(); + x = (*v->ob_type->tp_iternext)(v); +@@ -2573,7 +2862,7 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + PUSH(x); + PREDICT(STORE_FAST); + PREDICT(UNPACK_SEQUENCE); +- continue; ++ DISPATCH(); + } + if (PyErr_Occurred()) { + if (!PyErr_ExceptionMatches( +@@ -2585,13 +2874,17 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + x = v = POP(); + Py_DECREF(v); + JUMPBY(oparg); +- continue; ++ DISPATCH(); ++ } + +- case BREAK_LOOP: ++ TARGET_NOARG(BREAK_LOOP) ++ { + why = WHY_BREAK; + goto fast_block_end; ++ } + +- case CONTINUE_LOOP: ++ TARGET(CONTINUE_LOOP) ++ { + retval = PyInt_FromLong(oparg); + if (!retval) { + x = NULL; +@@ -2599,10 +2892,13 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + } + why = WHY_CONTINUE; + goto fast_block_end; ++ } + +- case SETUP_LOOP: +- case SETUP_EXCEPT: +- case SETUP_FINALLY: ++ TARGET_WITH_IMPL(SETUP_LOOP, _setup_finally) ++ TARGET_WITH_IMPL(SETUP_EXCEPT, _setup_finally) ++ TARGET(SETUP_FINALLY) ++ _setup_finally: ++ { + /* NOTE: If you add any new block-setup opcodes that + are not try/except/finally handlers, you may need + to update the PyGen_NeedsFinalizing() function. +@@ -2610,9 +2906,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + + PyFrame_BlockSetup(f, opcode, INSTR_OFFSET() + oparg, + STACK_LEVEL()); +- continue; ++ DISPATCH(); ++ } + +- case SETUP_WITH: ++ ++ TARGET(SETUP_WITH) ++ { + { + static PyObject *exit, *enter; + w = TOP(); +@@ -2638,10 +2937,11 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + STACK_LEVEL()); + + PUSH(x); +- continue; ++ DISPATCH(); ++ } + } + +- case WITH_CLEANUP: ++ TARGET_NOARG(WITH_CLEANUP) + { + /* At the top of the stack are 1-3 values indicating + how/why we entered the finally clause: +@@ -2729,7 +3029,7 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + break; + } + +- case CALL_FUNCTION: ++ TARGET(CALL_FUNCTION) + { + PyObject **sp; + PCALL(PCALL_ALL); +@@ -2741,14 +3041,14 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + #endif + stack_pointer = sp; + PUSH(x); +- if (x != NULL) +- continue; ++ if (x != NULL) DISPATCH(); + break; + } + +- case CALL_FUNCTION_VAR: +- case CALL_FUNCTION_KW: +- case CALL_FUNCTION_VAR_KW: ++ TARGET_WITH_IMPL(CALL_FUNCTION_VAR, _call_function_var_kw) ++ TARGET_WITH_IMPL(CALL_FUNCTION_KW, _call_function_var_kw) ++ TARGET(CALL_FUNCTION_VAR_KW) ++ _call_function_var_kw: + { + int na = oparg & 0xff; + int nk = (oparg>>8) & 0xff; +@@ -2786,12 +3086,12 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(w); + } + PUSH(x); +- if (x != NULL) +- continue; ++ if (x != NULL) DISPATCH(); + break; + } + +- case MAKE_FUNCTION: ++ TARGET(MAKE_FUNCTION) ++ { + v = POP(); /* code object */ + x = PyFunction_New(v, f->f_globals); + Py_DECREF(v); +@@ -2812,8 +3112,9 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + } + PUSH(x); + break; ++ } + +- case MAKE_CLOSURE: ++ TARGET(MAKE_CLOSURE) + { + v = POP(); /* code object */ + x = PyFunction_New(v, f->f_globals); +@@ -2848,7 +3149,8 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + break; + } + +- case BUILD_SLICE: ++ 
TARGET(BUILD_SLICE) ++ { + if (oparg == 3) + w = POP(); + else +@@ -2860,14 +3162,22 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) + Py_DECREF(v); + Py_XDECREF(w); + SET_TOP(x); +- if (x != NULL) continue; ++ if (x != NULL) DISPATCH(); + break; ++ } + +- case EXTENDED_ARG: ++ TARGET(EXTENDED_ARG) ++ { + opcode = NEXTOP(); + oparg = oparg<<16 | NEXTARG(); + goto dispatch_opcode; + ++ } ++ ++ ++#if USE_COMPUTED_GOTOS ++ _unknown_opcode: ++#endif + default: + fprintf(stderr, + "XXX lineno: %d, opcode: %d\n", +diff --git a/Python/makeopcodetargets.py b/Python/makeopcodetargets.py +index e69de29..703e5f2 100644 +--- a/Python/makeopcodetargets.py ++++ b/Python/makeopcodetargets.py +@@ -0,0 +1,45 @@ ++#! /usr/bin/env python ++"""Generate C code for the jump table of the threaded code interpreter ++(for compilers supporting computed gotos or "labels-as-values", such as gcc). ++""" ++ ++# This code should stay compatible with Python 2.3, at least while ++# some of the buildbots have Python 2.3 as their system Python. ++ ++import imp ++import os ++ ++ ++def find_module(modname): ++ """Finds and returns a module in the local dist/checkout. ++ """ ++ modpath = os.path.join( ++ os.path.dirname(os.path.dirname(__file__)), "Lib") ++ return imp.load_module(modname, *imp.find_module(modname, [modpath])) ++ ++def write_contents(f): ++ """Write C code contents to the target file object. ++ """ ++ opcode = find_module("opcode") ++ targets = ['_unknown_opcode'] * 256 ++ for opname, op in opcode.opmap.items(): ++ if opname == "STOP_CODE": ++ continue ++ targets[op] = "TARGET_%s" % opname.replace("+0", " ").replace("+", "_") ++ f.write("static void *opcode_targets[256] = {\n") ++ f.write(",\n".join([" &&%s" % s for s in targets])) ++ f.write("\n};\n") ++ ++ ++if __name__ == "__main__": ++ import sys ++ assert len(sys.argv) < 3, "Too many arguments" ++ if len(sys.argv) == 2: ++ target = sys.argv[1] ++ else: ++ target = "Python/opcode_targets.h" ++ f = open(target, "w") ++ try: ++ write_contents(f) ++ finally: ++ f.close() +\ No newline at end of file +diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h +index e69de29..2203566 100644 +--- a/Python/opcode_targets.h ++++ b/Python/opcode_targets.h +@@ -0,0 +1,258 @@ ++static void *opcode_targets[256] = { ++ &&_unknown_opcode, ++ &&TARGET_POP_TOP, ++ &&TARGET_ROT_TWO, ++ &&TARGET_ROT_THREE, ++ &&TARGET_DUP_TOP, ++ &&TARGET_ROT_FOUR, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&TARGET_NOP, ++ &&TARGET_UNARY_POSITIVE, ++ &&TARGET_UNARY_NEGATIVE, ++ &&TARGET_UNARY_NOT, ++ &&TARGET_UNARY_CONVERT, ++ &&_unknown_opcode, ++ &&TARGET_UNARY_INVERT, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&TARGET_BINARY_POWER, ++ &&TARGET_BINARY_MULTIPLY, ++ &&TARGET_BINARY_DIVIDE, ++ &&TARGET_BINARY_MODULO, ++ &&TARGET_BINARY_ADD, ++ &&TARGET_BINARY_SUBTRACT, ++ &&TARGET_BINARY_SUBSCR, ++ &&TARGET_BINARY_FLOOR_DIVIDE, ++ &&TARGET_BINARY_TRUE_DIVIDE, ++ &&TARGET_INPLACE_FLOOR_DIVIDE, ++ &&TARGET_INPLACE_TRUE_DIVIDE, ++ &&TARGET_SLICE , ++ &&TARGET_SLICE_1, ++ &&TARGET_SLICE_2, ++ &&TARGET_SLICE_3, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&TARGET_STORE_SLICE , ++ &&TARGET_STORE_SLICE_1, ++ &&TARGET_STORE_SLICE_2, ++ &&TARGET_STORE_SLICE_3, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&TARGET_DELETE_SLICE , ++ &&TARGET_DELETE_SLICE_1, ++ 
&&TARGET_DELETE_SLICE_2, ++ &&TARGET_DELETE_SLICE_3, ++ &&TARGET_STORE_MAP, ++ &&TARGET_INPLACE_ADD, ++ &&TARGET_INPLACE_SUBTRACT, ++ &&TARGET_INPLACE_MULTIPLY, ++ &&TARGET_INPLACE_DIVIDE, ++ &&TARGET_INPLACE_MODULO, ++ &&TARGET_STORE_SUBSCR, ++ &&TARGET_DELETE_SUBSCR, ++ &&TARGET_BINARY_LSHIFT, ++ &&TARGET_BINARY_RSHIFT, ++ &&TARGET_BINARY_AND, ++ &&TARGET_BINARY_XOR, ++ &&TARGET_BINARY_OR, ++ &&TARGET_INPLACE_POWER, ++ &&TARGET_GET_ITER, ++ &&_unknown_opcode, ++ &&TARGET_PRINT_EXPR, ++ &&TARGET_PRINT_ITEM, ++ &&TARGET_PRINT_NEWLINE, ++ &&TARGET_PRINT_ITEM_TO, ++ &&TARGET_PRINT_NEWLINE_TO, ++ &&TARGET_INPLACE_LSHIFT, ++ &&TARGET_INPLACE_RSHIFT, ++ &&TARGET_INPLACE_AND, ++ &&TARGET_INPLACE_XOR, ++ &&TARGET_INPLACE_OR, ++ &&TARGET_BREAK_LOOP, ++ &&TARGET_WITH_CLEANUP, ++ &&TARGET_LOAD_LOCALS, ++ &&TARGET_RETURN_VALUE, ++ &&TARGET_IMPORT_STAR, ++ &&TARGET_EXEC_STMT, ++ &&TARGET_YIELD_VALUE, ++ &&TARGET_POP_BLOCK, ++ &&TARGET_END_FINALLY, ++ &&TARGET_BUILD_CLASS, ++ &&TARGET_STORE_NAME, ++ &&TARGET_DELETE_NAME, ++ &&TARGET_UNPACK_SEQUENCE, ++ &&TARGET_FOR_ITER, ++ &&TARGET_LIST_APPEND, ++ &&TARGET_STORE_ATTR, ++ &&TARGET_DELETE_ATTR, ++ &&TARGET_STORE_GLOBAL, ++ &&TARGET_DELETE_GLOBAL, ++ &&TARGET_DUP_TOPX, ++ &&TARGET_LOAD_CONST, ++ &&TARGET_LOAD_NAME, ++ &&TARGET_BUILD_TUPLE, ++ &&TARGET_BUILD_LIST, ++ &&TARGET_BUILD_SET, ++ &&TARGET_BUILD_MAP, ++ &&TARGET_LOAD_ATTR, ++ &&TARGET_COMPARE_OP, ++ &&TARGET_IMPORT_NAME, ++ &&TARGET_IMPORT_FROM, ++ &&TARGET_JUMP_FORWARD, ++ &&TARGET_JUMP_IF_FALSE_OR_POP, ++ &&TARGET_JUMP_IF_TRUE_OR_POP, ++ &&TARGET_JUMP_ABSOLUTE, ++ &&TARGET_POP_JUMP_IF_FALSE, ++ &&TARGET_POP_JUMP_IF_TRUE, ++ &&TARGET_LOAD_GLOBAL, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&TARGET_CONTINUE_LOOP, ++ &&TARGET_SETUP_LOOP, ++ &&TARGET_SETUP_EXCEPT, ++ &&TARGET_SETUP_FINALLY, ++ &&_unknown_opcode, ++ &&TARGET_LOAD_FAST, ++ &&TARGET_STORE_FAST, ++ &&TARGET_DELETE_FAST, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&TARGET_RAISE_VARARGS, ++ &&TARGET_CALL_FUNCTION, ++ &&TARGET_MAKE_FUNCTION, ++ &&TARGET_BUILD_SLICE, ++ &&TARGET_MAKE_CLOSURE, ++ &&TARGET_LOAD_CLOSURE, ++ &&TARGET_LOAD_DEREF, ++ &&TARGET_STORE_DEREF, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&TARGET_CALL_FUNCTION_VAR, ++ &&TARGET_CALL_FUNCTION_KW, ++ &&TARGET_CALL_FUNCTION_VAR_KW, ++ &&TARGET_SETUP_WITH, ++ &&_unknown_opcode, ++ &&TARGET_EXTENDED_ARG, ++ &&TARGET_SET_ADD, ++ &&TARGET_MAP_ADD, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ 
&&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode, ++ &&_unknown_opcode ++}; +\ No newline at end of file +diff --git a/configure b/configure +index 6a47e1a..60a3335 100755 +--- a/configure ++++ b/configure +@@ -810,6 +810,7 @@ with_libm + with_libc + enable_big_digits + enable_unicode ++with_computed_gotos + ' + ac_precious_vars='build_alias + host_alias +@@ -1488,6 +1489,9 @@ Optional Packages: + --with-fpectl enable SIGFPE catching + --with-libm=STRING math library + --with-libc=STRING C library ++ --with(out)-computed-gotos ++ Use computed gotos in evaluation loop (enabled by ++ default on supported compilers) + + Some influential environment variables: + CC C compiler command +@@ -14608,6 +14612,83 @@ for dir in $SRCDIRS; do + mkdir $dir + fi + done ++ ++ BEGIN_COMPUTED_GOTO ++# Check for --with-computed-gotos ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-computed-gotos" >&5 ++$as_echo_n "checking for --with-computed-gotos... " >&6; } ++ ++# Check whether --with-computed-gotos was given. ++if test "${with_computed_gotos+set}" = set; then : ++ withval=$with_computed_gotos; ++if test "$withval" = yes ++then ++ ++$as_echo "#define USE_COMPUTED_GOTOS 1" >>confdefs.h ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++fi ++if test "$withval" = no ++then ++ ++$as_echo "#define USE_COMPUTED_GOTOS 0" >>confdefs.h ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no value specified" >&5 ++$as_echo "no value specified" >&6; } ++fi ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC supports computed gotos" >&5 ++$as_echo_n "checking whether $CC supports computed gotos... " >&6; } ++if ${ac_cv_computed_gotos+:} false; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test "$cross_compiling" = yes; then : ++ if test "${with_computed_gotos+set}" = set; then ++ ac_cv_computed_gotos="$with_computed_gotos -- configured --with(out)-computed-gotos" ++ else ++ ac_cv_computed_gotos=no ++ fi ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++int main(int argc, char **argv) ++{ ++ static void *targets[1] = { &&LABEL1 }; ++ goto LABEL2; ++LABEL1: ++ return 0; ++LABEL2: ++ goto *targets[0]; ++ return 1; ++} ++_ACEOF ++if ac_fn_c_try_run "$LINENO"; then : ++ ac_cv_computed_gotos=yes ++else ++ ac_cv_computed_gotos=no ++fi ++rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ ++ conftest.$ac_objext conftest.beam conftest.$ac_ext ++fi ++ ++fi ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_computed_gotos" >&5 ++$as_echo "$ac_cv_computed_gotos" >&6; } ++case "$ac_cv_computed_gotos" in yes*) ++ ++$as_echo "#define HAVE_COMPUTED_GOTOS 1" >>confdefs.h ++ ++esac ++# END_COMPUTED_GOTO ++ + { $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5 + $as_echo "done" >&6; } + +diff --git a/configure.ac b/configure.ac +index 48eccdd..74bb514 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -4561,6 +4561,57 @@ for dir in $SRCDIRS; do + mkdir $dir + fi + done ++ ++# BEGIN_COMPUTED_GOTO ++# Check for --with-computed-gotos ++AC_MSG_CHECKING(for --with-computed-gotos) ++AC_ARG_WITH(computed-gotos, ++ AS_HELP_STRING([--with(out)-computed-gotos], ++ [Use computed gotos in evaluation loop (enabled by default on supported compilers)]), ++[ ++if test "$withval" = yes ++then ++ AC_DEFINE(USE_COMPUTED_GOTOS, 1, ++ [Define if you want to use computed gotos in ceval.c.]) ++ AC_MSG_RESULT(yes) ++fi ++if test "$withval" = no ++then ++ AC_DEFINE(USE_COMPUTED_GOTOS, 0, ++ [Define if you want to use computed gotos in ceval.c.]) ++ AC_MSG_RESULT(no) ++fi ++], ++[AC_MSG_RESULT(no value specified)]) ++ ++AC_MSG_CHECKING(whether $CC supports computed gotos) ++AC_CACHE_VAL(ac_cv_computed_gotos, ++AC_RUN_IFELSE([AC_LANG_SOURCE([[[ ++int main(int argc, char **argv) ++{ ++ static void *targets[1] = { &&LABEL1 }; ++ goto LABEL2; ++LABEL1: ++ return 0; ++LABEL2: ++ goto *targets[0]; ++ return 1; ++} ++]]])], ++[ac_cv_computed_gotos=yes], ++[ac_cv_computed_gotos=no], ++[if test "${with_computed_gotos+set}" = set; then ++ ac_cv_computed_gotos="$with_computed_gotos -- configured --with(out)-computed-gotos" ++ else ++ ac_cv_computed_gotos=no ++ fi])) ++AC_MSG_RESULT($ac_cv_computed_gotos) ++case "$ac_cv_computed_gotos" in yes*) ++ AC_DEFINE(HAVE_COMPUTED_GOTOS, 1, ++ [Define if the C compiler supports computed gotos.]) ++esac ++# END_COMPUTED_GOTO ++ + AC_MSG_RESULT(done) + + # generate output files +diff --git a/pyconfig.h.in b/pyconfig.h.in +index eace285..ac85729 100644 +--- a/pyconfig.h.in ++++ b/pyconfig.h.in +@@ -121,6 +121,9 @@ + /* Define to 1 if you have the `clock' function. */ + #undef HAVE_CLOCK + ++/* Define if the C compiler supports computed gotos. */ ++#undef HAVE_COMPUTED_GOTOS ++ + /* Define to 1 if you have the `confstr' function. */ + #undef HAVE_CONFSTR + +@@ -1060,6 +1063,9 @@ + /* Define to 1 if your declares `struct tm'. */ + #undef TM_IN_SYS_TIME + ++/* Define if you want to use computed gotos in ceval.c. */ ++#undef USE_COMPUTED_GOTOS ++ + /* Enable extensions on AIX 3, Interix. 
*/ + #ifndef _ALL_SOURCE + # undef _ALL_SOURCE +-- +2.5.5 + diff --git a/SOURCES/00234-PEP493-updated-implementation.patch b/SOURCES/00234-PEP493-updated-implementation.patch new file mode 100644 index 0000000..275fc53 --- /dev/null +++ b/SOURCES/00234-PEP493-updated-implementation.patch @@ -0,0 +1,137 @@ +@@ -, +, @@ +--- + Lib/ssl.py | 53 ++++++++++++++++++++++++++++++++++++++--------------- + 1 file changed, 38 insertions(+), 15 deletions(-) +--- a/Lib/ssl.py ++++ a/Lib/ssl.py +@@ -466,24 +466,47 @@ def _create_unverified_context(protocol=PROTOCOL_SSLv23, cert_reqs=None, + + return context + ++_https_verify_envvar = 'PYTHONHTTPSVERIFY' + _cert_verification_config = '/etc/python/cert-verification.cfg' + +-def _get_verify_status(protocol): +- context_factory = { +- 'platform_default': _create_unverified_context, +- 'enable': create_default_context, +- 'disable': _create_unverified_context +- } +- import ConfigParser +- try: +- config = ConfigParser.RawConfigParser() +- config.read(_cert_verification_config) +- status = config.get(protocol, 'verify') +- except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): +- status = 'platform_default' +- default = context_factory.get('platform_default') +- return context_factory.get(status, default) ++# To provide same function name as specified in PEP493 with keeping ++# the old name as defined in our previous patch ++_get_https_context_factory = lambda: _get_verify_status('https') + ++def _get_verify_status(protocol): ++ # See https://www.python.org/dev/peps/pep-0493/#recommendation-for-combined-feature-backports ++ # Check for an environmental override of the default behaviour ++ if not sys.flags.ignore_environment: ++ config_setting = os.environ.get(_https_verify_envvar) ++ if config_setting is not None: ++ if config_setting == '0': ++ return _create_unverified_context ++ return create_default_context ++ ++ # Check for a system-wide override of the default behaviour ++ context_factory = { ++ 'platform_default': create_default_context, ++ 'enable': create_default_context, ++ 'disable': _create_unverified_context ++ } ++ import ConfigParser ++ try: ++ config = ConfigParser.RawConfigParser() ++ config.read(_cert_verification_config) ++ status = config.get(protocol, 'verify') ++ except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): ++ status = 'platform_default' ++ default = context_factory.get('platform_default') ++ return context_factory.get(status, default) ++ ++# See https://www.python.org/dev/peps/pep-0493/#feature-configuration-api ++def _https_verify_certificates(enable=True): ++ """Verify server HTTPS certificates by default?""" ++ global _create_default_https_context ++ if enable: ++ _create_default_https_context = create_default_context ++ else: ++ _create_default_https_context = _create_unverified_context + + # Used by http.client if no context is explicitly passed. 
+ _create_default_https_context = _get_verify_status('https') +--- a/Lib/test/test_ssl.py Thu Jan 14 21:57:57 2016 -0800 ++++ a/Lib/test/test_ssl.py Fri Jan 15 17:41:37 2016 +1000 +@@ -4,6 +4,7 @@ + import sys + import unittest + from test import test_support as support ++from test.script_helper import assert_python_ok + import asyncore + import socket + import select +@@ -1149,6 +1149,57 @@ + self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) + self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2) + ++ def test__https_verify_certificates(self): ++ # Unit test to check the contect factory mapping ++ # The factories themselves are tested above ++ # This test will fail by design if run under PYTHONHTTPSVERIFY=0 ++ # (as will various test_httplib tests) ++ ++ # Uses a fresh SSL module to avoid affecting the real one ++ local_ssl = support.import_fresh_module("ssl") ++ # Certificate verification is enabled by default ++ self.assertIs(local_ssl._create_default_https_context, ++ local_ssl.create_default_context) ++ # Turn default verification off ++ local_ssl._https_verify_certificates(enable=False) ++ self.assertIs(local_ssl._create_default_https_context, ++ local_ssl._create_unverified_context) ++ # And back on ++ local_ssl._https_verify_certificates(enable=True) ++ self.assertIs(local_ssl._create_default_https_context, ++ local_ssl.create_default_context) ++ # The default behaviour is to enable ++ local_ssl._https_verify_certificates(enable=False) ++ local_ssl._https_verify_certificates() ++ self.assertIs(local_ssl._create_default_https_context, ++ local_ssl.create_default_context) ++ ++ def test__https_verify_envvar(self): ++ # Unit test to check the PYTHONHTTPSVERIFY handling ++ # Need to use a subprocess so it can still be run under -E ++ https_is_verified = """import ssl, sys; \ ++ status = "Error: _create_default_https_context does not verify certs" \ ++ if ssl._create_default_https_context is \ ++ ssl._create_unverified_context \ ++ else None; \ ++ sys.exit(status)""" ++ https_is_not_verified = """import ssl, sys; \ ++ status = "Error: _create_default_https_context verifies certs" \ ++ if ssl._create_default_https_context is \ ++ ssl.create_default_context \ ++ else None; \ ++ sys.exit(status)""" ++ extra_env = {} ++ # Omitting it leaves verification on ++ assert_python_ok("-c", https_is_verified, **extra_env) ++ # Setting it to zero turns verification off ++ extra_env[ssl._https_verify_envvar] = "0" ++ assert_python_ok("-c", https_is_not_verified, **extra_env) ++ # Any other value should also leave it on ++ for setting in ("", "1", "enabled", "foo"): ++ extra_env[ssl._https_verify_envvar] = setting ++ assert_python_ok("-c", https_is_verified, **extra_env) ++ + def test_check_hostname(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + self.assertFalse(ctx.check_hostname) diff --git a/SOURCES/00235-JSON-decoder-lone-surrogates-fix.patch b/SOURCES/00235-JSON-decoder-lone-surrogates-fix.patch new file mode 100644 index 0000000..d78e8e7 --- /dev/null +++ b/SOURCES/00235-JSON-decoder-lone-surrogates-fix.patch @@ -0,0 +1,226 @@ +From 90986ef48c0df602ab38aa831a24e99e9ed61e7e Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Mon, 4 Apr 2016 15:55:28 +0200 +Subject: [PATCH] JSON decoder now accepts lone surrogates + +--- + Lib/json/decoder.py | 35 ++++++++++++------------ + Lib/json/tests/test_scanstring.py | 56 ++++++++++++++++++++++++++++++++++++--- + Modules/_json.c | 49 +++++++++------------------------- + 3 files changed, 83 insertions(+), 57 deletions(-) + +diff --git 
a/Lib/json/decoder.py b/Lib/json/decoder.py +index dfcc628..1b43238 100644 +--- a/Lib/json/decoder.py ++++ b/Lib/json/decoder.py +@@ -62,6 +62,16 @@ BACKSLASH = { + + DEFAULT_ENCODING = "utf-8" + ++def _decode_uXXXX(s, pos): ++ esc = s[pos + 1:pos + 5] ++ if len(esc) == 4 and esc[1] not in 'xX': ++ try: ++ return int(esc, 16) ++ except ValueError: ++ pass ++ msg = "Invalid \\uXXXX escape" ++ raise ValueError(errmsg(msg, s, pos)) ++ + def py_scanstring(s, end, encoding=None, strict=True, + _b=BACKSLASH, _m=STRINGCHUNK.match): + """Scan the string s for a JSON string. End is the index of the +@@ -116,25 +126,16 @@ def py_scanstring(s, end, encoding=None, strict=True, + end += 1 + else: + # Unicode escape sequence +- esc = s[end + 1:end + 5] +- next_end = end + 5 +- if len(esc) != 4: +- msg = "Invalid \\uXXXX escape" +- raise ValueError(errmsg(msg, s, end)) +- uni = int(esc, 16) ++ uni = _decode_uXXXX(s, end) ++ end += 5 + # Check for surrogate pair on UCS-4 systems +- if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535: +- msg = "Invalid \\uXXXX\\uXXXX surrogate pair" +- if not s[end + 5:end + 7] == '\\u': +- raise ValueError(errmsg(msg, s, end)) +- esc2 = s[end + 7:end + 11] +- if len(esc2) != 4: +- raise ValueError(errmsg(msg, s, end)) +- uni2 = int(esc2, 16) +- uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00)) +- next_end += 6 ++ if sys.maxunicode > 65535 and \ ++ 0xd800 <= uni <= 0xdbff and s[end:end + 2] == '\\u': ++ uni2 = _decode_uXXXX(s, end + 1) ++ if 0xdc00 <= uni2 <= 0xdfff: ++ uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00)) ++ end += 6 + char = unichr(uni) +- end = next_end + # Append the unescaped character + _append(char) + return u''.join(chunks), end +diff --git a/Lib/json/tests/test_scanstring.py b/Lib/json/tests/test_scanstring.py +index 4fef8cb..ed80a41 100644 +--- a/Lib/json/tests/test_scanstring.py ++++ b/Lib/json/tests/test_scanstring.py +@@ -5,10 +5,6 @@ from json.tests import PyTest, CTest + class TestScanstring(object): + def test_scanstring(self): + scanstring = self.json.decoder.scanstring +- self.assertEqual( +- scanstring('"z\\ud834\\udd20x"', 1, None, True), +- (u'z\U0001d120x', 16)) +- + if sys.maxunicode == 65535: + self.assertEqual( + scanstring(u'"z\U0001d120x"', 1, None, True), +@@ -94,6 +90,58 @@ class TestScanstring(object): + scanstring('["Bad value", truth]', 2, None, True), + (u'Bad value', 12)) + ++ def test_surrogates(self): ++ scanstring = self.json.decoder.scanstring ++ def assertScan(given, expect): ++ self.assertEqual(scanstring(given, 1, None, True), ++ (expect, len(given))) ++ if not isinstance(given, unicode): ++ given = unicode(given) ++ self.assertEqual(scanstring(given, 1, None, True), ++ (expect, len(given))) ++ ++ surrogates = unichr(0xd834) + unichr(0xdd20) ++ assertScan('"z\\ud834\\u0079x"', u'z\ud834yx') ++ assertScan('"z\\ud834\\udd20x"', u'z\U0001d120x') ++ assertScan('"z\\ud834\\ud834\\udd20x"', u'z\ud834\U0001d120x') ++ assertScan('"z\\ud834x"', u'z\ud834x') ++ assertScan(u'"z\\ud834\udd20x12345"', u'z%sx12345' % surrogates) ++ assertScan('"z\\udd20x"', u'z\udd20x') ++ assertScan(u'"z\ud834\udd20x"', u'z\ud834\udd20x') ++ assertScan(u'"z\ud834\\udd20x"', u'z%sx' % surrogates) ++ assertScan(u'"z\ud834x"', u'z\ud834x') ++ ++ def test_bad_escapes(self): ++ scanstring = self.json.decoder.scanstring ++ bad_escapes = [ ++ '"\\"', ++ '"\\x"', ++ '"\\u"', ++ '"\\u0"', ++ '"\\u01"', ++ '"\\u012"', ++ '"\\uz012"', ++ '"\\u0z12"', ++ '"\\u01z2"', ++ '"\\u012z"', ++ '"\\u0x12"', ++ '"\\u0X12"', ++ '"\\ud834\\"', ++ 
'"\\ud834\\u"', ++ '"\\ud834\\ud"', ++ '"\\ud834\\udd"', ++ '"\\ud834\\udd2"', ++ '"\\ud834\\uzdd2"', ++ '"\\ud834\\udzd2"', ++ '"\\ud834\\uddz2"', ++ '"\\ud834\\udd2z"', ++ '"\\ud834\\u0x20"', ++ '"\\ud834\\u0X20"', ++ ] ++ for s in bad_escapes: ++ with self.assertRaises(ValueError): ++ scanstring(s, 1, None, True) ++ + def test_issue3623(self): + self.assertRaises(ValueError, self.json.decoder.scanstring, b"xxx", 1, + "xxx") +diff --git a/Modules/_json.c b/Modules/_json.c +index 7c925fd..56d9ee4 100644 +--- a/Modules/_json.c ++++ b/Modules/_json.c +@@ -524,16 +524,10 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s + } + #ifdef Py_UNICODE_WIDE + /* Surrogate pair */ +- if ((c & 0xfc00) == 0xd800) { ++ if ((c & 0xfc00) == 0xd800 && end + 6 < len && ++ buf[next++] == '\\' && ++ buf[next++] == 'u') { + Py_UNICODE c2 = 0; +- if (end + 6 >= len) { +- raise_errmsg("Unpaired high surrogate", pystr, end - 5); +- goto bail; +- } +- if (buf[next++] != '\\' || buf[next++] != 'u') { +- raise_errmsg("Unpaired high surrogate", pystr, end - 5); +- goto bail; +- } + end += 6; + /* Decode 4 hex digits */ + for (; next < end; next++) { +@@ -554,15 +548,10 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s + goto bail; + } + } +- if ((c2 & 0xfc00) != 0xdc00) { +- raise_errmsg("Unpaired high surrogate", pystr, end - 5); +- goto bail; +- } +- c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00)); +- } +- else if ((c & 0xfc00) == 0xdc00) { +- raise_errmsg("Unpaired low surrogate", pystr, end - 5); +- goto bail; ++ if ((c2 & 0xfc00) == 0xdc00) ++ c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00)); ++ else ++ end -= 6; + } + #endif + } +@@ -703,16 +692,9 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next + } + #ifdef Py_UNICODE_WIDE + /* Surrogate pair */ +- if ((c & 0xfc00) == 0xd800) { ++ if ((c & 0xfc00) == 0xd800 && end + 6 < len && ++ buf[next++] == '\\' && buf[next++] == 'u') { + Py_UNICODE c2 = 0; +- if (end + 6 >= len) { +- raise_errmsg("Unpaired high surrogate", pystr, end - 5); +- goto bail; +- } +- if (buf[next++] != '\\' || buf[next++] != 'u') { +- raise_errmsg("Unpaired high surrogate", pystr, end - 5); +- goto bail; +- } + end += 6; + /* Decode 4 hex digits */ + for (; next < end; next++) { +@@ -733,15 +715,10 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next + goto bail; + } + } +- if ((c2 & 0xfc00) != 0xdc00) { +- raise_errmsg("Unpaired high surrogate", pystr, end - 5); +- goto bail; +- } +- c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00)); +- } +- else if ((c & 0xfc00) == 0xdc00) { +- raise_errmsg("Unpaired low surrogate", pystr, end - 5); +- goto bail; ++ if ((c2 & 0xfc00) == 0xdc00) ++ c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00)); ++ else ++ end -= 6; + } + #endif + } +-- +2.5.5 + diff --git a/SOURCES/00236-use-Py_ssize_t-for-file-offset-and-length-computations-in-iteration.patch b/SOURCES/00236-use-Py_ssize_t-for-file-offset-and-length-computations-in-iteration.patch new file mode 100644 index 0000000..7522dd8 --- /dev/null +++ b/SOURCES/00236-use-Py_ssize_t-for-file-offset-and-length-computations-in-iteration.patch @@ -0,0 +1,101 @@ +From 6bebd55022c82829c0b15d24d2ca99edd22562df Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Wed, 4 May 2016 10:39:40 +0200 +Subject: [PATCH] use Py_ssize_t for file offset and length computations in + iteration + +--- + Lib/test/test_file2k.py | 16 +++++++++++++++- + Objects/fileobject.c | 15 
+++++++-------- + 2 files changed, 22 insertions(+), 9 deletions(-) + +diff --git a/Lib/test/test_file2k.py b/Lib/test/test_file2k.py +index 5a3c354..8f94cee 100644 +--- a/Lib/test/test_file2k.py ++++ b/Lib/test/test_file2k.py +@@ -14,7 +14,7 @@ except ImportError: + threading = None + + from test import test_support +-from test.test_support import TESTFN, run_unittest ++from test.test_support import TESTFN, run_unittest, requires + from UserList import UserList + + class AutoFileTests(unittest.TestCase): +@@ -416,6 +416,20 @@ class OtherFileTests(unittest.TestCase): + finally: + os.unlink(TESTFN) + ++ @test_support.precisionbigmemtest(2**31, 2.5, dry_run=False) ++ def test_very_long_line(self, size): ++ # Issue #22526 ++ requires('largefile') ++ with open(TESTFN, "wb") as fp: ++ fp.seek(size - 1) ++ fp.write("\0") ++ with open(TESTFN, "rb") as fp: ++ for l in fp: ++ pass ++ self.assertEqual(len(l), size) ++ self.assertEqual(l.count("\0"), size) ++ l = None ++ + class FileSubclassTests(unittest.TestCase): + + def testExit(self): +diff --git a/Objects/fileobject.c b/Objects/fileobject.c +index 76cdf74..fabe207 100644 +--- a/Objects/fileobject.c ++++ b/Objects/fileobject.c +@@ -2230,7 +2230,7 @@ drop_readahead(PyFileObject *f) + (unless at EOF) and no more than bufsize. Returns negative value on + error, will set MemoryError if bufsize bytes cannot be allocated. */ + static int +-readahead(PyFileObject *f, int bufsize) ++readahead(PyFileObject *f, Py_ssize_t bufsize) + { + Py_ssize_t chunksize; + +@@ -2268,7 +2268,7 @@ readahead(PyFileObject *f, int bufsize) + logarithmic buffer growth to about 50 even when reading a 1gb line. */ + + static PyStringObject * +-readahead_get_line_skip(PyFileObject *f, int skip, int bufsize) ++readahead_get_line_skip(PyFileObject *f, Py_ssize_t skip, Py_ssize_t bufsize) + { + PyStringObject* s; + char *bufptr; +@@ -2288,10 +2288,10 @@ readahead_get_line_skip(PyFileObject *f, int skip, int bufsize) + bufptr++; /* Count the '\n' */ + len = bufptr - f->f_bufptr; + s = (PyStringObject *) +- PyString_FromStringAndSize(NULL, skip+len); ++ PyString_FromStringAndSize(NULL, skip + len); + if (s == NULL) + return NULL; +- memcpy(PyString_AS_STRING(s)+skip, f->f_bufptr, len); ++ memcpy(PyString_AS_STRING(s) + skip, f->f_bufptr, len); + f->f_bufptr = bufptr; + if (bufptr == f->f_bufend) + drop_readahead(f); +@@ -2299,14 +2299,13 @@ readahead_get_line_skip(PyFileObject *f, int skip, int bufsize) + bufptr = f->f_bufptr; + buf = f->f_buf; + f->f_buf = NULL; /* Force new readahead buffer */ +- assert(skip+len < INT_MAX); +- s = readahead_get_line_skip( +- f, (int)(skip+len), bufsize + (bufsize>>2) ); ++ assert(len <= PY_SSIZE_T_MAX - skip); ++ s = readahead_get_line_skip(f, skip + len, bufsize + (bufsize>>2)); + if (s == NULL) { + PyMem_Free(buf); + return NULL; + } +- memcpy(PyString_AS_STRING(s)+skip, bufptr, len); ++ memcpy(PyString_AS_STRING(s) + skip, bufptr, len); + PyMem_Free(buf); + } + return s; +-- +2.5.5 + diff --git a/SOURCES/00237-CVE-2016-0772-smtplib.patch b/SOURCES/00237-CVE-2016-0772-smtplib.patch new file mode 100644 index 0000000..b11fac1 --- /dev/null +++ b/SOURCES/00237-CVE-2016-0772-smtplib.patch @@ -0,0 +1,34 @@ +From 5b67aca6fb4246e84981d6361ba218bd52e73ac2 Mon Sep 17 00:00:00 2001 +From: Tomas Orsava +Date: Tue, 21 Jun 2016 15:52:03 +0200 +Subject: [PATCH] Raise an error when STARTTLS fails + +CVE-2016-0772 python: smtplib StartTLS stripping attack +rhbz#1303647: https://bugzilla.redhat.com/show_bug.cgi?id=1303647 + +Based on an upstream change by 
Benjamin Peterson +- in changeset 101886:b3ce713fb9be 2.7 +- https://hg.python.org/cpython/rev/b3ce713fb9be +--- + Lib/smtplib.py | 5 +++++ + 1 file changed, 5 insertions(+) + +diff --git a/Lib/smtplib.py b/Lib/smtplib.py +index dc16ef6..8bc806b 100755 +--- a/Lib/smtplib.py ++++ b/Lib/smtplib.py +@@ -655,6 +655,11 @@ class SMTP: + self.ehlo_resp = None + self.esmtp_features = {} + self.does_esmtp = 0 ++ else: ++ # RFC 3207: ++ # 501 Syntax error (no parameters allowed) ++ # 454 TLS not available due to temporary reason ++ raise SMTPResponseException(resp, reply) + return (resp, reply) + + def sendmail(self, from_addr, to_addrs, msg, mail_options=[], +-- +2.5.5 + diff --git a/SOURCES/00238-CVE-2016-5699-httplib.patch b/SOURCES/00238-CVE-2016-5699-httplib.patch new file mode 100644 index 0000000..e8cf7a4 --- /dev/null +++ b/SOURCES/00238-CVE-2016-5699-httplib.patch @@ -0,0 +1,158 @@ +From e91c46c7a1a904eba04e29cdf896c99cb546d989 Mon Sep 17 00:00:00 2001 +From: Tomas Orsava +Date: Wed, 22 Jun 2016 17:06:01 +0200 +Subject: [PATCH] Disabled HTTP header injections in httplib + +CVE-2016-5699 python: http protocol steam injection attack +rhbz#1303699 : https://bugzilla.redhat.com/show_bug.cgi?id=1303699 + +Based on an upstream change by Demian Brecht and Serhiy Storchaka +- in changeset 94951:1c45047c5102 2.7 +- https://hg.python.org/cpython/rev/1c45047c5102 +--- + Lib/httplib.py | 40 +++++++++++++++++++++++++++++++++- + Lib/test/test_httplib.py | 56 ++++++++++++++++++++++++++++++++++++++++++++++++ + 2 files changed, 95 insertions(+), 1 deletion(-) + +diff --git a/Lib/httplib.py b/Lib/httplib.py +index 8e02ac3..592ee57 100644 +--- a/Lib/httplib.py ++++ b/Lib/httplib.py +@@ -68,6 +68,7 @@ Req-sent-unread-response _CS_REQ_SENT + + from array import array + import os ++import re + import socket + from sys import py3kwarning + from urlparse import urlsplit +@@ -218,6 +219,34 @@ _MAXLINE = 65536 + # maximum amount of headers accepted + _MAXHEADERS = 100 + ++# Header name/value ABNF (http://tools.ietf.org/html/rfc7230#section-3.2) ++# ++# VCHAR = %x21-7E ++# obs-text = %x80-FF ++# header-field = field-name ":" OWS field-value OWS ++# field-name = token ++# field-value = *( field-content / obs-fold ) ++# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] ++# field-vchar = VCHAR / obs-text ++# ++# obs-fold = CRLF 1*( SP / HTAB ) ++# ; obsolete line folding ++# ; see Section 3.2.4 ++ ++# token = 1*tchar ++# ++# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" ++# / "+" / "-" / "." 
/ "^" / "_" / "`" / "|" / "~" ++# / DIGIT / ALPHA ++# ; any VCHAR, except delimiters ++# ++# VCHAR defined in http://tools.ietf.org/html/rfc5234#appendix-B.1 ++ ++# the patterns for both name and value are more leniant than RFC ++# definitions to allow for backwards compatibility ++_is_legal_header_name = re.compile(r'\A[^:\s][^:\r\n]*\Z').match ++_is_illegal_header_value = re.compile(r'\n(?![ \t])|\r(?![ \t\n])').search ++ + + class HTTPMessage(mimetools.Message): + +@@ -956,7 +985,16 @@ class HTTPConnection: + if self.__state != _CS_REQ_STARTED: + raise CannotSendHeader() + +- hdr = '%s: %s' % (header, '\r\n\t'.join([str(v) for v in values])) ++ header = '%s' % header ++ if not _is_legal_header_name(header): ++ raise ValueError('Invalid header name %r' % (header,)) ++ ++ values = [str(v) for v in values] ++ for one_value in values: ++ if _is_illegal_header_value(one_value): ++ raise ValueError('Invalid header value %r' % (one_value,)) ++ ++ hdr = '%s: %s' % (header, '\r\n\t'.join(values)) + self._output(hdr) + + def endheaders(self, message_body=None): +diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py +index c29aceb..29af589 100644 +--- a/Lib/test/test_httplib.py ++++ b/Lib/test/test_httplib.py +@@ -133,6 +133,33 @@ class HeaderTests(TestCase): + conn.putheader('Content-length',42) + self.assertTrue('Content-length: 42' in conn._buffer) + ++ conn.putheader('Foo', ' bar ') ++ self.assertIn(b'Foo: bar ', conn._buffer) ++ conn.putheader('Bar', '\tbaz\t') ++ self.assertIn(b'Bar: \tbaz\t', conn._buffer) ++ conn.putheader('Authorization', 'Bearer mytoken') ++ self.assertIn(b'Authorization: Bearer mytoken', conn._buffer) ++ conn.putheader('IterHeader', 'IterA', 'IterB') ++ self.assertIn(b'IterHeader: IterA\r\n\tIterB', conn._buffer) ++ conn.putheader('LatinHeader', b'\xFF') ++ self.assertIn(b'LatinHeader: \xFF', conn._buffer) ++ conn.putheader('Utf8Header', b'\xc3\x80') ++ self.assertIn(b'Utf8Header: \xc3\x80', conn._buffer) ++ conn.putheader('C1-Control', b'next\x85line') ++ self.assertIn(b'C1-Control: next\x85line', conn._buffer) ++ conn.putheader('Embedded-Fold-Space', 'is\r\n allowed') ++ self.assertIn(b'Embedded-Fold-Space: is\r\n allowed', conn._buffer) ++ conn.putheader('Embedded-Fold-Tab', 'is\r\n\tallowed') ++ self.assertIn(b'Embedded-Fold-Tab: is\r\n\tallowed', conn._buffer) ++ conn.putheader('Key Space', 'value') ++ self.assertIn(b'Key Space: value', conn._buffer) ++ conn.putheader('KeySpace ', 'value') ++ self.assertIn(b'KeySpace : value', conn._buffer) ++ conn.putheader(b'Nonbreak\xa0Space', 'value') ++ self.assertIn(b'Nonbreak\xa0Space: value', conn._buffer) ++ conn.putheader(b'\xa0NonbreakSpace', 'value') ++ self.assertIn(b'\xa0NonbreakSpace: value', conn._buffer) ++ + def test_ipv6host_header(self): + # Default host header on IPv6 transaction should wrapped by [] if + # its actual IPv6 address +@@ -152,6 +179,35 @@ class HeaderTests(TestCase): + conn.request('GET', '/foo') + self.assertTrue(sock.data.startswith(expected)) + ++ def test_invalid_headers(self): ++ conn = httplib.HTTPConnection('example.com') ++ conn.sock = FakeSocket('') ++ conn.putrequest('GET', '/') ++ ++ # http://tools.ietf.org/html/rfc7230#section-3.2.4, whitespace is no ++ # longer allowed in header names ++ cases = ( ++ (b'Invalid\r\nName', b'ValidValue'), ++ (b'Invalid\rName', b'ValidValue'), ++ (b'Invalid\nName', b'ValidValue'), ++ (b'\r\nInvalidName', b'ValidValue'), ++ (b'\rInvalidName', b'ValidValue'), ++ (b'\nInvalidName', b'ValidValue'), ++ (b' InvalidName', b'ValidValue'), ++ 
(b'\tInvalidName', b'ValidValue'), ++ (b'Invalid:Name', b'ValidValue'), ++ (b':InvalidName', b'ValidValue'), ++ (b'ValidName', b'Invalid\r\nValue'), ++ (b'ValidName', b'Invalid\rValue'), ++ (b'ValidName', b'Invalid\nValue'), ++ (b'ValidName', b'InvalidValue\r\n'), ++ (b'ValidName', b'InvalidValue\r'), ++ (b'ValidName', b'InvalidValue\n'), ++ ) ++ for name, value in cases: ++ with self.assertRaisesRegexp(ValueError, 'Invalid header'): ++ conn.putheader(name, value) ++ + + class BasicTest(TestCase): + def test_status_lines(self): +-- +2.9.0 + diff --git a/SOURCES/00240-increase-smtplib-tests-timeouts.patch b/SOURCES/00240-increase-smtplib-tests-timeouts.patch new file mode 100644 index 0000000..ae61b18 --- /dev/null +++ b/SOURCES/00240-increase-smtplib-tests-timeouts.patch @@ -0,0 +1,66 @@ +diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py +index 14d0060d8ec..1bb66901880 100644 +--- a/Lib/test/test_smtplib.py ++++ b/Lib/test/test_smtplib.py +@@ -179,31 +179,31 @@ def tearDown(self): + + def testBasic(self): + # connect +- smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3) ++ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + smtp.quit() + + def testNOOP(self): +- smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3) ++ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + expected = (250, 'Ok') + self.assertEqual(smtp.noop(), expected) + smtp.quit() + + def testRSET(self): +- smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3) ++ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + expected = (250, 'Ok') + self.assertEqual(smtp.rset(), expected) + smtp.quit() + + def testNotImplemented(self): + # EHLO isn't implemented in DebuggingServer +- smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3) ++ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + expected = (502, 'Error: command "EHLO" not implemented') + self.assertEqual(smtp.ehlo(), expected) + smtp.quit() + + def testVRFY(self): + # VRFY isn't implemented in DebuggingServer +- smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3) ++ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + expected = (502, 'Error: command "VRFY" not implemented') + self.assertEqual(smtp.vrfy('nobody@nowhere.com'), expected) + self.assertEqual(smtp.verify('nobody@nowhere.com'), expected) +@@ -212,21 +212,21 @@ def testVRFY(self): + def testSecondHELO(self): + # check that a second HELO returns a message that it's a duplicate + # (this behavior is specific to smtpd.SMTPChannel) +- smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3) ++ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + smtp.helo() + expected = (503, 'Duplicate HELO/EHLO') + self.assertEqual(smtp.helo(), expected) + smtp.quit() + + def testHELP(self): +- smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3) ++ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + self.assertEqual(smtp.help(), 'Error: command "HELP" not implemented') + smtp.quit() + + def testSend(self): + # connect and send mail + m = 'A test message' +- smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3) ++ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + smtp.sendmail('John', 'Sally', m) + # XXX(nnorwitz): this test is 
flaky and dies with a bad file descriptor + # in asyncore. This sleep might help, but should really be fixed diff --git a/SOURCES/00241-CVE-2016-5636-buffer-overflow-in-zipimport-module-fix.patch b/SOURCES/00241-CVE-2016-5636-buffer-overflow-in-zipimport-module-fix.patch new file mode 100644 index 0000000..f166d01 --- /dev/null +++ b/SOURCES/00241-CVE-2016-5636-buffer-overflow-in-zipimport-module-fix.patch @@ -0,0 +1,39 @@ +From 0f12cb75c708978f9201c1dd3464d2a8572b4544 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Fri, 8 Jul 2016 20:24:10 +0200 +Subject: [PATCH] CVE-2016-5636 fix + +--- + Modules/zipimport.c | 9 +++++++++ + 1 file changed, 9 insertions(+) + +diff --git a/Modules/zipimport.c b/Modules/zipimport.c +index 7240cb4..2e6a61f 100644 +--- a/Modules/zipimport.c ++++ b/Modules/zipimport.c +@@ -861,6 +861,10 @@ get_data(char *archive, PyObject *toc_entry) + &date, &crc)) { + return NULL; + } ++ if (data_size < 0) { ++ PyErr_Format(ZipImportError, "negative data size"); ++ return NULL; ++ } + + fp = fopen(archive, "rb"); + if (!fp) { +@@ -895,6 +899,11 @@ get_data(char *archive, PyObject *toc_entry) + PyMarshal_ReadShortFromFile(fp); /* local header size */ + file_offset += l; /* Start of file data */ + ++ if (data_size > LONG_MAX - 1) { ++ fclose(fp); ++ PyErr_NoMemory(); ++ return NULL; ++ } + raw_data = PyString_FromStringAndSize((char *)NULL, compress == 0 ? + data_size : data_size + 1); + if (raw_data == NULL) { +-- +2.7.4 + diff --git a/SOURCES/00242-CVE-2016-1000110-httpoxy.patch b/SOURCES/00242-CVE-2016-1000110-httpoxy.patch new file mode 100644 index 0000000..ada467c --- /dev/null +++ b/SOURCES/00242-CVE-2016-1000110-httpoxy.patch @@ -0,0 +1,121 @@ +From 3a184cc875709d4324d234a4b939d614a2c9bb0f Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Mon, 1 Aug 2016 11:20:11 +0200 +Subject: [PATCH] =?UTF-8?q?Fix=20for=20CVE-2016-1000110=20http://bugs.pyth?= + =?UTF-8?q?on.org/issue27568=20Based=20on=20the=20patch=20by=20R=C3=A9mi?= + =?UTF-8?q?=20Rampin?= +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +--- + Doc/howto/urllib2.rst | 5 +++++ + Doc/library/urllib.rst | 10 ++++++++++ + Doc/library/urllib2.rst | 4 ++++ + Lib/test/test_urllib.py | 12 ++++++++++++ + Lib/urllib.py | 9 +++++++++ + 5 files changed, 40 insertions(+) + +diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst +index f84bf08..6542846 100644 +--- a/Doc/howto/urllib2.rst ++++ b/Doc/howto/urllib2.rst +@@ -523,6 +523,11 @@ setting up a `Basic Authentication`_ handler : :: + through a proxy. However, this can be enabled by extending urllib2 as + shown in the recipe [#]_. + ++.. note:: ++ ++ ``HTTP_PROXY`` will be ignored if a variable ``REQUEST_METHOD`` is set; see ++ the documentation on :func:`~urllib.getproxies`. ++ + + Sockets and Layers + ================== +diff --git a/Doc/library/urllib.rst b/Doc/library/urllib.rst +index c7d200d..3b9876e 100644 +--- a/Doc/library/urllib.rst ++++ b/Doc/library/urllib.rst +@@ -280,6 +280,16 @@ Utility functions + find it, looks for proxy information from Mac OSX System Configuration for + Mac OS X and Windows Systems Registry for Windows. + ++ .. note:: ++ ++ If the environment variable ``REQUEST_METHOD`` is set, which usually ++ indicates your script is running in a CGI environment, the environment ++ variable ``HTTP_PROXY`` (uppercase ``_PROXY``) will be ignored. This is ++ because that variable can be injected by a client using the "Proxy:" ++ HTTP header. 
If you need to use an HTTP proxy in a CGI environment, ++ either use ``ProxyHandler`` explicitly, or make sure the variable name ++ is in lowercase (or at least the ``_proxy`` suffix). ++ + .. note:: + urllib also exposes certain utility functions like splittype, splithost and + others parsing url into various components. But it is recommended to use +diff --git a/Doc/library/urllib2.rst b/Doc/library/urllib2.rst +index 24deeb4..46fce59 100644 +--- a/Doc/library/urllib2.rst ++++ b/Doc/library/urllib2.rst +@@ -224,6 +224,10 @@ The following classes are provided: + + To disable autodetected proxy pass an empty dictionary. + ++ .. note:: ++ ++ ``HTTP_PROXY`` will be ignored if a variable ``REQUEST_METHOD`` is set; ++ see the documentation on :func:`~urllib.getproxies`. + + .. class:: HTTPPasswordMgr() + +diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py +index 3a273f8..3845012 100644 +--- a/Lib/test/test_urllib.py ++++ b/Lib/test/test_urllib.py +@@ -161,6 +161,18 @@ class ProxyTests(unittest.TestCase): + self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com') + self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com')) + ++ def test_proxy_cgi_ignore(self): ++ try: ++ self.env.set('HTTP_PROXY', 'http://somewhere:3128') ++ proxies = urllib.getproxies_environment() ++ self.assertEqual('http://somewhere:3128', proxies['http']) ++ self.env.set('REQUEST_METHOD', 'GET') ++ proxies = urllib.getproxies_environment() ++ self.assertNotIn('http', proxies) ++ finally: ++ self.env.unset('REQUEST_METHOD') ++ self.env.unset('HTTP_PROXY') ++ + + class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin): + """Test urlopen() opening a fake http connection.""" +diff --git a/Lib/urllib.py b/Lib/urllib.py +index f9655f9..9b31df1 100644 +--- a/Lib/urllib.py ++++ b/Lib/urllib.py +@@ -1361,11 +1361,20 @@ def getproxies_environment(): + [Fancy]URLopener constructor. + + """ ++ # Get all variables + proxies = {} + for name, value in os.environ.items(): + name = name.lower() + if value and name[-6:] == '_proxy': + proxies[name[:-6]] = value ++ ++ # CVE-2016-1000110 - If we are running as CGI script, forget HTTP_PROXY ++ # (non-all-lowercase) as it may be set from the web server by a "Proxy:" ++ # header from the client ++ # If "proxy" is lowercase, it will still be used thanks to the next block ++ if 'REQUEST_METHOD' in os.environ: ++ proxies.pop('http', None) ++ + return proxies + + def proxy_bypass_environment(host): +-- +2.7.4 + diff --git a/SOURCES/00255-Fix-ssl-module-parsing-of-GEN_RID-subject-alternative-name-fields-in-X.509-certs.patch b/SOURCES/00255-Fix-ssl-module-parsing-of-GEN_RID-subject-alternative-name-fields-in-X.509-certs.patch new file mode 100644 index 0000000..e28bb04 --- /dev/null +++ b/SOURCES/00255-Fix-ssl-module-parsing-of-GEN_RID-subject-alternative-name-fields-in-X.509-certs.patch @@ -0,0 +1,235 @@ + +# HG changeset patch +# User Christian Heimes +# Date 1473197135 -7200 +# Node ID 74805fd9e7343649372d0b9c76b4490b2975a674 +# Parent 6f4f19217d9be12be7a9c86cf1e118b140564b4f +Issue #27691: Fix ssl module's parsing of GEN_RID subject alternative name fields in X.509 certs. 
+ +diff --git a/Lib/test/allsans.pem b/Lib/test/allsans.pem +new file mode 100644 +--- /dev/null ++++ b/Lib/test/allsans.pem +@@ -0,0 +1,37 @@ ++-----BEGIN PRIVATE KEY----- ++MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAOoy7/QOtTjQ0niE ++6uDcTwtkC0R2Tvy1AjVnXohCntZfdzbTGDoYTgXSOLsP8A697jUiJ8VCePGH50xG ++Z4DKnAF3a9O3a9nr2pLXb0iY3XOMv+YEBii7CfI+3oxFYgCl0sMgHzDD2ZTVYAsm ++DWgLUVsE2gHEccRwrM2tPf2EgR+FAgMBAAECgYEA3qyfyYVSeTrTYxO93x6ZaVMu ++A2IZp9zSxMQL9bKiI2GRj+cV2ebSCGbg2btFnD6qBor7FWsmYz+8g6FNN/9sY4az ++61rMqMtQvLBe+7L8w70FeTze4qQ4Y1oQri0qD6tBWhDVlpnbI5Py9bkZKD67yVUk ++elcEA/5x4PrYXkuqsAECQQD80NjT0mDvaY0JOOaQFSEpMv6QiUA8GGX8Xli7IoKb ++tAolPG8rQBa+qSpcWfDMTrWw/aWHuMEEQoP/bVDH9W4FAkEA7SYQbBAKnojZ5A3G ++kOHdV7aeivRQxQk/JN8Fb8oKB9Csvpv/BsuGxPKXHdhFa6CBTTsNRtHQw/szPo4l ++xMIjgQJAPoMxqibR+0EBM6+TKzteSL6oPXsCnBl4Vk/J5vPgkbmR7KUl4+7j8N8J ++b2554TrxKEN/w7CGYZRE6UrRd7ATNQJAWD7Yz41sli+wfPdPU2xo1BHljyl4wMk/ ++EPZYbI/PCbdyAH/F935WyQTIjNeEhZc1Zkq6FwdOWw8ns3hrv3rKgQJAHXv1BqUa ++czGPIFxX2TNoqtcl6/En4vrxVB1wzsfzkkDAg98kBl7qsF+S3qujSzKikjeaVbI2 ++/CyWR2P3yLtOmA== ++-----END PRIVATE KEY----- ++-----BEGIN CERTIFICATE----- ++MIIDcjCCAtugAwIBAgIJAN5dc9TOWjB7MA0GCSqGSIb3DQEBCwUAMF0xCzAJBgNV ++BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u ++IFNvZnR3YXJlIEZvdW5kYXRpb24xEDAOBgNVBAMMB2FsbHNhbnMwHhcNMTYwODA1 ++MTAyMTExWhcNMjYwODAzMTAyMTExWjBdMQswCQYDVQQGEwJYWTEXMBUGA1UEBwwO ++Q2FzdGxlIEFudGhyYXgxIzAhBgNVBAoMGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0 ++aW9uMRAwDgYDVQQDDAdhbGxzYW5zMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB ++gQDqMu/0DrU40NJ4hOrg3E8LZAtEdk78tQI1Z16IQp7WX3c20xg6GE4F0ji7D/AO ++ve41IifFQnjxh+dMRmeAypwBd2vTt2vZ69qS129ImN1zjL/mBAYouwnyPt6MRWIA ++pdLDIB8ww9mU1WALJg1oC1FbBNoBxHHEcKzNrT39hIEfhQIDAQABo4IBODCCATQw ++ggEwBgNVHREEggEnMIIBI4IHYWxsc2Fuc6AeBgMqAwSgFwwVc29tZSBvdGhlciBp ++ZGVudGlmaWVyoDUGBisGAQUCAqArMCmgEBsOS0VSQkVST1MuUkVBTE2hFTAToAMC ++AQGhDDAKGwh1c2VybmFtZYEQdXNlckBleGFtcGxlLm9yZ4IPd3d3LmV4YW1wbGUu ++b3JnpGcwZTELMAkGA1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMw ++IQYDVQQKDBpQeXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEYMBYGA1UEAwwPZGly ++bmFtZSBleGFtcGxlhhdodHRwczovL3d3dy5weXRob24ub3JnL4cEfwAAAYcQAAAA ++AAAAAAAAAAAAAAAAAYgEKgMEBTANBgkqhkiG9w0BAQsFAAOBgQAy16h+F+nOmeiT ++VWR0fc8F/j6FcadbLseAUaogcC15OGxCl4UYpLV88HBkABOoGCpP155qwWTwOrdG ++iYPGJSusf1OnJEbvzFejZf6u078bPd9/ZL4VWLjv+FPGkjd+N+/OaqMvgj8Lu99f ++3Y/C4S7YbHxxwff6C6l2Xli+q6gnuQ== ++-----END CERTIFICATE----- +diff --git a/Lib/test/make_ssl_certs.py b/Lib/test/make_ssl_certs.py +--- a/Lib/test/make_ssl_certs.py ++++ b/Lib/test/make_ssl_certs.py +@@ -20,7 +20,28 @@ req_template = """ + CN = {hostname} + + [req_x509_extensions] +- subjectAltName = DNS:{hostname} ++ subjectAltName = @san ++ ++ [san] ++ DNS.1 = {hostname} ++ {extra_san} ++ ++ [dir_sect] ++ C = XY ++ L = Castle Anthrax ++ O = Python Software Foundation ++ CN = dirname example ++ ++ [princ_name] ++ realm = EXP:0, GeneralString:KERBEROS.REALM ++ principal_name = EXP:1, SEQUENCE:principal_seq ++ ++ [principal_seq] ++ name_type = EXP:0, INTEGER:1 ++ name_string = EXP:1, SEQUENCE:principals ++ ++ [principals] ++ princ1 = GeneralString:username + + [ ca ] + default_ca = CA_default +@@ -67,7 +88,7 @@ req_template = """ + + here = os.path.abspath(os.path.dirname(__file__)) + +-def make_cert_key(hostname, sign=False): ++def make_cert_key(hostname, sign=False, extra_san=''): + print("creating cert for " + hostname) + tempnames = [] + for i in range(3): +@@ -75,8 +96,9 @@ def make_cert_key(hostname, sign=False): + tempnames.append(f.name) + req_file, cert_file, key_file = tempnames + try: ++ req = 
req_template.format(hostname=hostname, extra_san=extra_san) + with open(req_file, 'w') as f: +- f.write(req_template.format(hostname=hostname)) ++ f.write(req) + args = ['req', '-new', '-days', '3650', '-nodes', + '-newkey', 'rsa:1024', '-keyout', key_file, + '-config', req_file] +@@ -120,7 +142,7 @@ def make_ca(): + f.write('unique_subject = no') + + with tempfile.NamedTemporaryFile("w") as t: +- t.write(req_template.format(hostname='our-ca-server')) ++ t.write(req_template.format(hostname='our-ca-server', extra_san='')) + t.flush() + with tempfile.NamedTemporaryFile() as f: + args = ['req', '-new', '-days', '3650', '-extensions', 'v3_ca', '-nodes', +@@ -171,6 +193,25 @@ if __name__ == '__main__': + f.write(key) + f.write(cert) + ++ extra_san = [ ++ 'otherName.1 = 1.2.3.4;UTF8:some other identifier', ++ 'otherName.2 = 1.3.6.1.5.2.2;SEQUENCE:princ_name', ++ 'email.1 = user@example.org', ++ 'DNS.2 = www.example.org', ++ # GEN_X400 ++ 'dirName.1 = dir_sect', ++ # GEN_EDIPARTY ++ 'URI.1 = https://www.python.org/', ++ 'IP.1 = 127.0.0.1', ++ 'IP.2 = ::1', ++ 'RID.1 = 1.2.3.4.5', ++ ] ++ ++ cert, key = make_cert_key('allsans', extra_san='\n'.join(extra_san)) ++ with open('allsans.pem', 'w') as f: ++ f.write(key) ++ f.write(cert) ++ + unmake_ca() + print("\n\nPlease change the values in test_ssl.py, test_parse_cert function related to notAfter,notBefore and serialNumber") + check_call(['openssl','x509','-in','keycert.pem','-dates','-serial','-noout']) +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index fa59641..9d5816b 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -57,6 +57,8 @@ CRLFILE = data_file("revocation.crl") + SIGNED_CERTFILE = data_file("keycert3.pem") + SIGNED_CERTFILE2 = data_file("keycert4.pem") + SIGNING_CA = data_file("pycacert.pem") ++# cert with all kinds of subject alt names ++ALLSANFILE = data_file("allsans.pem") + + SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem") + +@@ -236,6 +238,28 @@ class BasicSocketTests(unittest.TestCase): + ('IP Address', '2001:DB8:0:0:0:0:0:1\n')) + ) + ++ def test_parse_all_sans(self): ++ p = ssl._ssl._test_decode_cert(ALLSANFILE) ++ self.assertEqual(p['subjectAltName'], ++ ( ++ ('DNS', 'allsans'), ++ ('othername', ''), ++ ('othername', ''), ++ ('email', 'user@example.org'), ++ ('DNS', 'www.example.org'), ++ ('DirName', ++ ((('countryName', 'XY'),), ++ (('localityName', 'Castle Anthrax'),), ++ (('organizationName', 'Python Software Foundation'),), ++ (('commonName', 'dirname example'),))), ++ ('URI', 'https://www.python.org/'), ++ ('IP Address', '127.0.0.1'), ++ ('IP Address', '0:0:0:0:0:0:0:1\n'), ++ ('Registered ID', '1.2.3.4.5') ++ ) ++ ) ++ ++ + def test_DER_to_PEM(self): + with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: + pem = f.read() +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -953,6 +953,35 @@ static PyObject * + PyTuple_SET_ITEM(t, 1, v); + break; + ++ case GEN_RID: ++ t = PyTuple_New(2); ++ if (t == NULL) ++ goto fail; ++ ++ v = PyUnicode_FromString("Registered ID"); ++ if (v == NULL) { ++ Py_DECREF(t); ++ goto fail; ++ } ++ PyTuple_SET_ITEM(t, 0, v); ++ ++ len = i2t_ASN1_OBJECT(buf, sizeof(buf)-1, name->d.rid); ++ if (len < 0) { ++ Py_DECREF(t); ++ _setSSLError(NULL, 0, __FILE__, __LINE__); ++ goto fail; ++ } else if (len >= (int)sizeof(buf)) { ++ v = PyUnicode_FromString(""); ++ } else { ++ v = PyUnicode_FromStringAndSize(buf, len); ++ } ++ if (v == NULL) { ++ Py_DECREF(t); ++ 
goto fail; ++ } ++ PyTuple_SET_ITEM(t, 1, v); ++ break; ++ + default: + /* for everything else, we use the OpenSSL print form */ + switch (gntype) { +@@ -978,8 +1007,12 @@ static PyObject * + goto fail; + } + vptr = strchr(buf, ':'); +- if (vptr == NULL) ++ if (vptr == NULL) { ++ PyErr_Format(PyExc_ValueError, ++ "Invalid value %.200s", ++ buf); + goto fail; ++ } + t = PyTuple_New(2); + if (t == NULL) + goto fail; + diff --git a/SOURCES/00256-fix-incorrect-parsing-of-regular-expressions.patch b/SOURCES/00256-fix-incorrect-parsing-of-regular-expressions.patch new file mode 100644 index 0000000..9c467b8 --- /dev/null +++ b/SOURCES/00256-fix-incorrect-parsing-of-regular-expressions.patch @@ -0,0 +1,80 @@ +diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py +index 7cda2b6..15d2324 100644 +--- a/Lib/sre_compile.py ++++ b/Lib/sre_compile.py +@@ -355,8 +355,6 @@ def _optimize_unicode(charset, fixup): + def _simple(av): + # check if av is a "simple" operator + lo, hi = av[2].getwidth() +- if lo == 0 and hi == MAXREPEAT: +- raise error, "nothing to repeat" + return lo == hi == 1 and av[2][0][0] != SUBPATTERN + + def _compile_info(code, pattern, flags): +diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py +index 75f8c96..644441d 100644 +--- a/Lib/sre_parse.py ++++ b/Lib/sre_parse.py +@@ -147,7 +147,7 @@ class SubPattern: + REPEATCODES = (MIN_REPEAT, MAX_REPEAT) + for op, av in self.data: + if op is BRANCH: +- i = sys.maxint ++ i = MAXREPEAT - 1 + j = 0 + for av in av[1]: + l, h = av.getwidth() +@@ -165,14 +165,14 @@ class SubPattern: + hi = hi + j + elif op in REPEATCODES: + i, j = av[2].getwidth() +- lo = lo + long(i) * av[0] +- hi = hi + long(j) * av[1] ++ lo = lo + i * av[0] ++ hi = hi + j * av[1] + elif op in UNITCODES: + lo = lo + 1 + hi = hi + 1 + elif op == SUCCESS: + break +- self.width = int(min(lo, sys.maxint)), int(min(hi, sys.maxint)) ++ self.width = min(lo, MAXREPEAT - 1), min(hi, MAXREPEAT) + return self.width + + class Tokenizer: +diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py +index 18a81a2..f0827d8 100644 +--- a/Lib/test/test_re.py ++++ b/Lib/test/test_re.py +@@ -897,6 +897,17 @@ class ReTests(unittest.TestCase): + with self.assertRaisesRegexp(sre_constants.error, '\?foo'): + re.compile('(?P)') + ++ def test_bug_2537(self): ++ # issue 2537: empty submatches ++ for outer_op in ('{0,}', '*', '+', '{1,187}'): ++ for inner_op in ('{0,}', '*', '?'): ++ r = re.compile("^((x|y)%s)%s" % (inner_op, outer_op)) ++ m = r.match("xyyzy") ++ self.assertEqual(m.group(0), "xyy") ++ self.assertEqual(m.group(1), "") ++ self.assertEqual(m.group(2), "y") ++ ++ + + def run_re_tests(): + from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR +diff --git a/Lib/doctest.py b/Lib/doctest.py +index 90bcca1..0ee40a2 100644 +--- a/Lib/doctest.py ++++ b/Lib/doctest.py +@@ -564,7 +564,7 @@ class DocTestParser: + # Want consists of any non-blank lines that do not start with PS1. + (?P (?:(?![ ]*$) # Not a blank line + (?![ ]*>>>) # Not a line starting with PS1 +- .*$\n? # But any other line ++ .+$\n? 
# But any other line + )*) + ''', re.MULTILINE | re.VERBOSE) + + diff --git a/SOURCES/00257-threading-wait-clamp-remaining-time.patch b/SOURCES/00257-threading-wait-clamp-remaining-time.patch new file mode 100644 index 0000000..526d941 --- /dev/null +++ b/SOURCES/00257-threading-wait-clamp-remaining-time.patch @@ -0,0 +1,20 @@ +diff --git a/Lib/threading.py b/Lib/threading.py +index e4c7f35..91b3849 100644 +--- a/Lib/threading.py ++++ b/Lib/threading.py +@@ -351,13 +351,14 @@ class _Condition(_Verbose): + gotit = waiter.acquire(0) + if gotit: + break +- remaining = endtime - _time() ++ remaining = min(endtime - _time(), timeout) + if remaining <= 0: + break + if balancing: + delay = min(delay * 2, remaining, 0.05) + else: + delay = remaining ++ endtime = _time() + remaining + _sleep(delay) + if not gotit: + if __debug__: diff --git a/SOURCES/00263-fix-ssl-reference-leaks.patch b/SOURCES/00263-fix-ssl-reference-leaks.patch new file mode 100644 index 0000000..34d22c6 --- /dev/null +++ b/SOURCES/00263-fix-ssl-reference-leaks.patch @@ -0,0 +1,14 @@ +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +index a3ea254..d0a3830 100644 +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -2564,7 +2564,9 @@ load_cert_chain(PySSLContext *self, PyObject *args, PyObject *kwds) + } + SSL_CTX_set_default_passwd_cb(self->ctx, orig_passwd_cb); + SSL_CTX_set_default_passwd_cb_userdata(self->ctx, orig_passwd_userdata); ++ Py_XDECREF(keyfile_bytes); + PyMem_Free(pw_info.password); ++ PyMem_Free(certfile_bytes); + Py_RETURN_NONE; + + error: diff --git a/SOURCES/00265-protect-key-list-during-fork.patch b/SOURCES/00265-protect-key-list-during-fork.patch new file mode 100644 index 0000000..1f820f8 --- /dev/null +++ b/SOURCES/00265-protect-key-list-during-fork.patch @@ -0,0 +1,114 @@ +diff --git a/Include/pythread.h b/Include/pythread.h +index dfd6157..f3e6259 100644 +--- a/Include/pythread.h ++++ b/Include/pythread.h +@@ -30,6 +30,8 @@ PyAPI_FUNC(void) PyThread_delete_key(int); + PyAPI_FUNC(int) PyThread_set_key_value(int, void *); + PyAPI_FUNC(void *) PyThread_get_key_value(int); + PyAPI_FUNC(void) PyThread_delete_key_value(int key); ++PyAPI_FUNC(int) _PyThread_AcquireKeyLock(void); ++PyAPI_FUNC(void) _PyThread_ReleaseKeyLock(void); + + /* Cleanup after a fork */ + PyAPI_FUNC(void) PyThread_ReInitTLS(void); +diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c +index 022d7aa..8f6cbb2 100644 +--- a/Modules/posixmodule.c ++++ b/Modules/posixmodule.c +@@ -65,6 +65,10 @@ corresponding Unix manual entries for more information on calls."); + #include "osdefs.h" + #endif + ++#ifdef WITH_THREAD ++#include "pythread.h" ++#endif ++ + #ifdef HAVE_SYS_TYPES_H + #include + #endif /* HAVE_SYS_TYPES_H */ +@@ -3796,7 +3800,18 @@ posix_fork1(PyObject *self, PyObject *noargs) + pid_t pid; + int result = 0; + _PyImport_AcquireLock(); ++#ifdef WITH_THREAD ++ if (_PyThread_AcquireKeyLock() == 0) { ++ _PyImport_ReleaseLock(); ++ PyErr_SetString(PyExc_RuntimeError, ++ "could not acquire thread key lock"); ++ return NULL; ++ } ++#endif + pid = fork1(); ++#ifdef WITH_THREAD ++ _PyThread_ReleaseKeyLock(); ++#endif + if (pid == 0) { + /* child: this clobbers and resets the import lock. 
*/ + PyOS_AfterFork(); +@@ -3829,7 +3844,18 @@ posix_fork(PyObject *self, PyObject *noargs) + pid_t pid; + int result = 0; + _PyImport_AcquireLock(); ++#ifdef WITH_THREAD ++ if (_PyThread_AcquireKeyLock() == 0) { ++ _PyImport_ReleaseLock(); ++ PyErr_SetString(PyExc_RuntimeError, ++ "could not acquire thread key lock"); ++ return NULL; ++ } ++#endif + pid = fork(); ++#ifdef WITH_THREAD ++ _PyThread_ReleaseKeyLock(); ++#endif + if (pid == 0) { + /* child: this clobbers and resets the import lock. */ + PyOS_AfterFork(); +@@ -3955,7 +3981,18 @@ posix_forkpty(PyObject *self, PyObject *noargs) + pid_t pid; + + _PyImport_AcquireLock(); ++#ifdef WITH_THREAD ++ if (_PyThread_AcquireKeyLock() == 0) { ++ _PyImport_ReleaseLock(); ++ PyErr_SetString(PyExc_RuntimeError, ++ "could not acquire thread key lock"); ++ return NULL; ++ } ++#endif + pid = forkpty(&master_fd, NULL, NULL, NULL); ++#ifdef WITH_THREAD ++ _PyThread_ReleaseKeyLock(); ++#endif + if (pid == 0) { + /* child: this clobbers and resets the import lock. */ + PyOS_AfterFork(); +diff --git a/Python/thread.c b/Python/thread.c +index dd333e8..957739e 100644 +--- a/Python/thread.c ++++ b/Python/thread.c +@@ -387,6 +387,24 @@ PyThread_delete_key_value(int key) + PyThread_release_lock(keymutex); + } + ++int ++_PyThread_AcquireKeyLock(void) ++{ ++ if (keymutex == NULL) { ++ keymutex = PyThread_allocate_lock(); ++ } ++ if (keymutex == NULL) { ++ return 0; ++ } ++ return PyThread_acquire_lock(keymutex, 1); ++} ++ ++void ++_PyThread_ReleaseKeyLock(void) ++{ ++ PyThread_release_lock(keymutex); ++} ++ + /* Forget everything not associated with the current thread id. + * This function is called from PyOS_AfterFork(). It is necessary + * because other thread ids which were in use at the time of the fork diff --git a/SOURCES/00266-fix-shutil.make_archive-ignoring-empty-dirs.patch b/SOURCES/00266-fix-shutil.make_archive-ignoring-empty-dirs.patch new file mode 100644 index 0000000..aa2d6ef --- /dev/null +++ b/SOURCES/00266-fix-shutil.make_archive-ignoring-empty-dirs.patch @@ -0,0 +1,376 @@ +diff --git a/Lib/shutil.py b/Lib/shutil.py +index 420802f..d0ff2ef 100644 +--- a/Lib/shutil.py ++++ b/Lib/shutil.py +@@ -446,17 +446,24 @@ def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): + zip_filename, base_dir) + + if not dry_run: +- zip = zipfile.ZipFile(zip_filename, "w", +- compression=zipfile.ZIP_DEFLATED) +- +- for dirpath, dirnames, filenames in os.walk(base_dir): +- for name in filenames: +- path = os.path.normpath(os.path.join(dirpath, name)) +- if os.path.isfile(path): +- zip.write(path, path) ++ with zipfile.ZipFile(zip_filename, "w", ++ compression=zipfile.ZIP_DEFLATED) as zf: ++ path = os.path.normpath(base_dir) ++ zf.write(path, path) ++ if logger is not None: ++ logger.info("adding '%s'", path) ++ for dirpath, dirnames, filenames in os.walk(base_dir): ++ for name in sorted(dirnames): ++ path = os.path.normpath(os.path.join(dirpath, name)) ++ zf.write(path, path) + if logger is not None: + logger.info("adding '%s'", path) +- zip.close() ++ for name in filenames: ++ path = os.path.normpath(os.path.join(dirpath, name)) ++ if os.path.isfile(path): ++ zf.write(path, path) ++ if logger is not None: ++ logger.info("adding '%s'", path) + + return zip_filename + +diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py +index 9bdb724..9238489 100644 +--- a/Lib/test/test_shutil.py ++++ b/Lib/test/test_shutil.py +@@ -10,13 +10,13 @@ import os.path + import errno + from os.path import splitdrive + from distutils.spawn import 
find_executable, spawn +-from shutil import (_make_tarball, _make_zipfile, make_archive, ++from shutil import (make_archive, + register_archive_format, unregister_archive_format, + get_archive_formats) + import tarfile + import warnings + +-from test import test_support ++from test import test_support as support + from test.test_support import TESTFN, check_warnings, captured_stdout + + TESTFN2 = TESTFN + "2" +@@ -372,139 +372,135 @@ class TestShutil(unittest.TestCase): + @unittest.skipUnless(zlib, "requires zlib") + def test_make_tarball(self): + # creating something to tar +- tmpdir = self.mkdtemp() +- self.write_file([tmpdir, 'file1'], 'xxx') +- self.write_file([tmpdir, 'file2'], 'xxx') +- os.mkdir(os.path.join(tmpdir, 'sub')) +- self.write_file([tmpdir, 'sub', 'file3'], 'xxx') ++ root_dir, base_dir = self._create_files('') + + tmpdir2 = self.mkdtemp() + # force shutil to create the directory + os.rmdir(tmpdir2) +- unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], ++ unittest.skipUnless(splitdrive(root_dir)[0] == splitdrive(tmpdir2)[0], + "source and target should be on same drive") + + base_name = os.path.join(tmpdir2, 'archive') + + # working with relative paths to avoid tar warnings +- old_dir = os.getcwd() +- os.chdir(tmpdir) +- try: +- _make_tarball(splitdrive(base_name)[1], '.') +- finally: +- os.chdir(old_dir) ++ make_archive(splitdrive(base_name)[1], 'gztar', root_dir, '.') + + # check if the compressed tarball was created + tarball = base_name + '.tar.gz' +- self.assertTrue(os.path.exists(tarball)) ++ self.assertTrue(os.path.isfile(tarball)) ++ self.assertTrue(tarfile.is_tarfile(tarball)) ++ with tarfile.open(tarball, 'r:gz') as tf: ++ self.assertEqual(sorted(tf.getnames()), ++ ['.', './file1', './file2', ++ './sub', './sub/file3', './sub2']) + + # trying an uncompressed one + base_name = os.path.join(tmpdir2, 'archive') +- old_dir = os.getcwd() +- os.chdir(tmpdir) +- try: +- _make_tarball(splitdrive(base_name)[1], '.', compress=None) +- finally: +- os.chdir(old_dir) ++ make_archive(splitdrive(base_name)[1], 'tar', root_dir, '.') + tarball = base_name + '.tar' +- self.assertTrue(os.path.exists(tarball)) ++ self.assertTrue(os.path.isfile(tarball)) ++ self.assertTrue(tarfile.is_tarfile(tarball)) ++ with tarfile.open(tarball, 'r') as tf: ++ self.assertEqual(sorted(tf.getnames()), ++ ['.', './file1', './file2', ++ './sub', './sub/file3', './sub2']) + + def _tarinfo(self, path): +- tar = tarfile.open(path) +- try: ++ with tarfile.open(path) as tar: + names = tar.getnames() + names.sort() + return tuple(names) +- finally: +- tar.close() + +- def _create_files(self): ++ def _create_files(self, base_dir='dist'): + # creating something to tar +- tmpdir = self.mkdtemp() +- dist = os.path.join(tmpdir, 'dist') +- os.mkdir(dist) +- self.write_file([dist, 'file1'], 'xxx') +- self.write_file([dist, 'file2'], 'xxx') ++ root_dir = self.mkdtemp() ++ dist = os.path.join(root_dir, base_dir) ++ if not os.path.isdir(dist): ++ os.makedirs(dist) ++ self.write_file((dist, 'file1'), 'xxx') ++ self.write_file((dist, 'file2'), 'xxx') + os.mkdir(os.path.join(dist, 'sub')) +- self.write_file([dist, 'sub', 'file3'], 'xxx') ++ self.write_file((dist, 'sub', 'file3'), 'xxx') + os.mkdir(os.path.join(dist, 'sub2')) +- tmpdir2 = self.mkdtemp() +- base_name = os.path.join(tmpdir2, 'archive') +- return tmpdir, tmpdir2, base_name ++ if base_dir: ++ self.write_file((root_dir, 'outer'), 'xxx') ++ return root_dir, base_dir + + @unittest.skipUnless(zlib, "Requires zlib") +- 
@unittest.skipUnless(find_executable('tar') and find_executable('gzip'), ++ @unittest.skipUnless(find_executable('tar'), + 'Need the tar command to run') + def test_tarfile_vs_tar(self): +- tmpdir, tmpdir2, base_name = self._create_files() +- old_dir = os.getcwd() +- os.chdir(tmpdir) +- try: +- _make_tarball(base_name, 'dist') +- finally: +- os.chdir(old_dir) ++ root_dir, base_dir = self._create_files() ++ base_name = os.path.join(self.mkdtemp(), 'archive') ++ make_archive(base_name, 'gztar', root_dir, base_dir) + + # check if the compressed tarball was created + tarball = base_name + '.tar.gz' +- self.assertTrue(os.path.exists(tarball)) ++ self.assertTrue(os.path.isfile(tarball)) + + # now create another tarball using `tar` +- tarball2 = os.path.join(tmpdir, 'archive2.tar.gz') +- tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist'] +- gzip_cmd = ['gzip', '-f9', 'archive2.tar'] +- old_dir = os.getcwd() +- os.chdir(tmpdir) +- try: +- with captured_stdout() as s: +- spawn(tar_cmd) +- spawn(gzip_cmd) +- finally: +- os.chdir(old_dir) ++ tarball2 = os.path.join(root_dir, 'archive2.tar') ++ tar_cmd = ['tar', '-cf', 'archive2.tar', base_dir] ++ with support.change_cwd(root_dir), captured_stdout(): ++ spawn(tar_cmd) + +- self.assertTrue(os.path.exists(tarball2)) ++ self.assertTrue(os.path.isfile(tarball2)) + # let's compare both tarballs + self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2)) + + # trying an uncompressed one +- base_name = os.path.join(tmpdir2, 'archive') +- old_dir = os.getcwd() +- os.chdir(tmpdir) +- try: +- _make_tarball(base_name, 'dist', compress=None) +- finally: +- os.chdir(old_dir) ++ make_archive(base_name, 'tar', root_dir, base_dir) + tarball = base_name + '.tar' +- self.assertTrue(os.path.exists(tarball)) ++ self.assertTrue(os.path.isfile(tarball)) + + # now for a dry_run +- base_name = os.path.join(tmpdir2, 'archive') +- old_dir = os.getcwd() +- os.chdir(tmpdir) +- try: +- _make_tarball(base_name, 'dist', compress=None, dry_run=True) +- finally: +- os.chdir(old_dir) ++ make_archive(base_name, 'tar', root_dir, base_dir, dry_run=True) + tarball = base_name + '.tar' +- self.assertTrue(os.path.exists(tarball)) ++ self.assertTrue(os.path.isfile(tarball)) + + @unittest.skipUnless(zlib, "Requires zlib") + @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') + def test_make_zipfile(self): +- # creating something to tar +- tmpdir = self.mkdtemp() +- self.write_file([tmpdir, 'file1'], 'xxx') +- self.write_file([tmpdir, 'file2'], 'xxx') ++ # creating something to zip ++ root_dir, base_dir = self._create_files() ++ base_name = os.path.join(self.mkdtemp(), 'archive') + +- tmpdir2 = self.mkdtemp() +- # force shutil to create the directory +- os.rmdir(tmpdir2) +- base_name = os.path.join(tmpdir2, 'archive') +- _make_zipfile(base_name, tmpdir) ++ res = make_archive(base_name, 'zip', root_dir, base_dir) + +- # check if the compressed tarball was created +- tarball = base_name + '.zip' +- self.assertTrue(os.path.exists(tarball)) ++ self.assertEqual(res, base_name + '.zip') ++ self.assertTrue(os.path.isfile(res)) ++ self.assertTrue(zipfile.is_zipfile(res)) ++ with zipfile.ZipFile(res) as zf: ++ self.assertEqual(sorted(zf.namelist()), ++ ['dist/', 'dist/file1', 'dist/file2', ++ 'dist/sub/', 'dist/sub/file3', 'dist/sub2/']) + ++ @unittest.skipUnless(zlib, "Requires zlib") ++ @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') ++ @unittest.skipUnless(find_executable('zip'), ++ 'Need the zip command to run') ++ def test_zipfile_vs_zip(self): ++ root_dir, base_dir = 
self._create_files() ++ base_name = os.path.join(self.mkdtemp(), 'archive') ++ archive = make_archive(base_name, 'zip', root_dir, base_dir) ++ ++ # check if ZIP file was created ++ self.assertEqual(archive, base_name + '.zip') ++ self.assertTrue(os.path.isfile(archive)) ++ ++ # now create another ZIP file using `zip` ++ archive2 = os.path.join(root_dir, 'archive2.zip') ++ zip_cmd = ['zip', '-q', '-r', 'archive2.zip', base_dir] ++ with support.change_cwd(root_dir): ++ spawn(zip_cmd) ++ ++ self.assertTrue(os.path.isfile(archive2)) ++ # let's compare both ZIP files ++ with zipfile.ZipFile(archive) as zf: ++ names = zf.namelist() ++ with zipfile.ZipFile(archive2) as zf: ++ names2 = zf.namelist() ++ self.assertEqual(sorted(names), sorted(names2)) + + def test_make_archive(self): + tmpdir = self.mkdtemp() +@@ -521,39 +517,36 @@ class TestShutil(unittest.TestCase): + else: + group = owner = 'root' + +- base_dir, root_dir, base_name = self._create_files() +- base_name = os.path.join(self.mkdtemp() , 'archive') ++ root_dir, base_dir = self._create_files() ++ base_name = os.path.join(self.mkdtemp(), 'archive') + res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, + group=group) +- self.assertTrue(os.path.exists(res)) ++ self.assertTrue(os.path.isfile(res)) + + res = make_archive(base_name, 'zip', root_dir, base_dir) +- self.assertTrue(os.path.exists(res)) ++ self.assertTrue(os.path.isfile(res)) + + res = make_archive(base_name, 'tar', root_dir, base_dir, + owner=owner, group=group) +- self.assertTrue(os.path.exists(res)) ++ self.assertTrue(os.path.isfile(res)) + + res = make_archive(base_name, 'tar', root_dir, base_dir, + owner='kjhkjhkjg', group='oihohoh') +- self.assertTrue(os.path.exists(res)) ++ self.assertTrue(os.path.isfile(res)) + + @unittest.skipUnless(zlib, "Requires zlib") + @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") + def test_tarfile_root_owner(self): +- tmpdir, tmpdir2, base_name = self._create_files() +- old_dir = os.getcwd() +- os.chdir(tmpdir) ++ root_dir, base_dir = self._create_files() ++ base_name = os.path.join(self.mkdtemp(), 'archive') + group = grp.getgrgid(0)[0] + owner = pwd.getpwuid(0)[0] +- try: +- archive_name = _make_tarball(base_name, 'dist', compress=None, +- owner=owner, group=group) +- finally: +- os.chdir(old_dir) ++ with support.change_cwd(root_dir): ++ archive_name = make_archive(base_name, 'gztar', root_dir, 'dist', ++ owner=owner, group=group) + + # check if the compressed tarball was created +- self.assertTrue(os.path.exists(archive_name)) ++ self.assertTrue(os.path.isfile(archive_name)) + + # now checks the rights + archive = tarfile.open(archive_name) +@@ -859,7 +852,7 @@ class TestCopyFile(unittest.TestCase): + + + def test_main(): +- test_support.run_unittest(TestShutil, TestMove, TestCopyFile) ++ support.run_unittest(TestShutil, TestMove, TestCopyFile) + + if __name__ == '__main__': + test_main() +diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py +index 42c1b4d..98a9275 100644 +--- a/Lib/test/test_support.py ++++ b/Lib/test/test_support.py +@@ -491,6 +491,33 @@ TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid()) + SAVEDCWD = os.getcwd() + + @contextlib.contextmanager ++def change_cwd(path, quiet=False): ++ """Return a context manager that changes the current working directory. ++ ++ Arguments: ++ ++ path: the directory to use as the temporary current working directory. ++ ++ quiet: if False (the default), the context manager raises an exception ++ on error. 
Otherwise, it issues only a warning and keeps the current ++ working directory the same. ++ ++ """ ++ saved_dir = os.getcwd() ++ try: ++ os.chdir(path) ++ except OSError: ++ if not quiet: ++ raise ++ warnings.warn('tests may fail, unable to change CWD to: ' + path, ++ RuntimeWarning, stacklevel=3) ++ try: ++ yield os.getcwd() ++ finally: ++ os.chdir(saved_dir) ++ ++ ++@contextlib.contextmanager + def temp_cwd(name='tempcwd', quiet=False): + """ + Context manager that creates a temporary directory and set it as CWD. diff --git a/SOURCES/00268-set-stream-name-to-None.patch b/SOURCES/00268-set-stream-name-to-None.patch new file mode 100644 index 0000000..0a79050 --- /dev/null +++ b/SOURCES/00268-set-stream-name-to-None.patch @@ -0,0 +1,20 @@ + +# HG changeset patch +# User Vinay Sajip +# Date 1402737594 -3600 +# Node ID bb8b0c7fefd0c5ed99b3f336178a4f9554a1d0ef +# Parent 31adcc4c43916f7448c9dd8048ad5be7e5bb6456 +Issue #21742: Set stream to None after closing. + +diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py +--- a/Lib/logging/handlers.py ++++ b/Lib/logging/handlers.py +@@ -423,6 +423,7 @@ class WatchedFileHandler(logging.FileHan + # we have an open file handle, clean it up + self.stream.flush() + self.stream.close() ++ self.stream = None # See Issue #21742: _open () might fail. + # open a new file handle and get new stat info from that fd + self.stream = self._open() + self._statstream() + diff --git a/SOURCES/00275-fix-fnctl-with-integer-on-big-endian.patch b/SOURCES/00275-fix-fnctl-with-integer-on-big-endian.patch new file mode 100644 index 0000000..c14b981 --- /dev/null +++ b/SOURCES/00275-fix-fnctl-with-integer-on-big-endian.patch @@ -0,0 +1,22 @@ +diff --git a/Modules/fcntlmodule.c b/Modules/fcntlmodule.c +index 997867a..2bd2f55 100644 +--- a/Modules/fcntlmodule.c ++++ b/Modules/fcntlmodule.c +@@ -34,7 +34,7 @@ fcntl_fcntl(PyObject *self, PyObject *args) + { + int fd; + int code; +- long arg; ++ int arg; + int ret; + char *str; + Py_ssize_t len; +@@ -61,7 +61,7 @@ fcntl_fcntl(PyObject *self, PyObject *args) + PyErr_Clear(); + arg = 0; + if (!PyArg_ParseTuple(args, +- "O&i|l;fcntl requires a file or file descriptor," ++ "O&i|I;fcntl requires a file or file descriptor," + " an integer and optionally a third integer or a string", + conv_descriptor, &fd, &code, &arg)) { + return NULL; diff --git a/SOURCES/00276-increase-imaplib-MAXLINE.patch b/SOURCES/00276-increase-imaplib-MAXLINE.patch new file mode 100644 index 0000000..34da36b --- /dev/null +++ b/SOURCES/00276-increase-imaplib-MAXLINE.patch @@ -0,0 +1,22 @@ +diff --git a/Lib/imaplib.py b/Lib/imaplib.py +index 4586fb3..d8243e5 100644 +--- a/Lib/imaplib.py ++++ b/Lib/imaplib.py +@@ -37,11 +37,12 @@ AllowedVersions = ('IMAP4REV1', 'IMAP4') # Most recent first + + # Maximal line length when calling readline(). This is to prevent + # reading arbitrary length lines. RFC 3501 and 2060 (IMAP 4rev1) +-# don't specify a line length. RFC 2683 however suggests limiting client +-# command lines to 1000 octets and server command lines to 8000 octets. +-# We have selected 10000 for some extra margin and since that is supposedly +-# also what UW and Panda IMAP does. +-_MAXLINE = 10000 ++# don't specify a line length. RFC 2683 suggests limiting client ++# command lines to 1000 octets and that servers should be prepared ++# to accept command lines up to 8000 octets, so we used to use 10K here. ++# In the modern world (eg: gmail) the response to, for example, a ++# search command can be quite large, so we now use 1M. 
++_MAXLINE = 1000000 + + + # Commands diff --git a/SOURCES/00281-add-context-parameter-to-xmlrpclib.ServerProxy.patch b/SOURCES/00281-add-context-parameter-to-xmlrpclib.ServerProxy.patch new file mode 100644 index 0000000..282437f --- /dev/null +++ b/SOURCES/00281-add-context-parameter-to-xmlrpclib.ServerProxy.patch @@ -0,0 +1,91 @@ + +# HG changeset patch +# User Benjamin Peterson +# Date 1417319735 18000 +# Node ID 62bd574e95d5ec4b37ca8f72ae6523ea7d6c11cd +# Parent 1ac5aec658f6972c3372f139ce69ee6799dc0b2e +add context parameter to xmlrpclib.ServerProxy (#22960) + +Patch from Alex Gaynor. + +diff --git a/Doc/library/xmlrpclib.rst b/Doc/library/xmlrpclib.rst +--- a/Doc/library/xmlrpclib.rst ++++ b/Doc/library/xmlrpclib.rst +@@ -39,7 +39,7 @@ between conformable Python objects and X + For https URIs, :mod:`xmlrpclib` now performs all the necessary certificate + and hostname checks by default + +-.. class:: ServerProxy(uri[, transport[, encoding[, verbose[, allow_none[, use_datetime]]]]]) ++.. class:: ServerProxy(uri[, transport[, encoding[, verbose[, allow_none[, use_datetime[, context]]]]]]) + + A :class:`ServerProxy` instance is an object that manages communication with a + remote XML-RPC server. The required first argument is a URI (Uniform Resource +@@ -57,11 +57,13 @@ between conformable Python objects and X + :class:`datetime.datetime` objects may be passed to calls. + + Both the HTTP and HTTPS transports support the URL syntax extension for HTTP +- Basic Authentication: ``http://user:pass@host:port/path``. The ``user:pass`` ++ Basic Authentication: ``http://user:pass@host:port/path``. The ``user:pass`` + portion will be base64-encoded as an HTTP 'Authorization' header, and sent to + the remote server as part of the connection process when invoking an XML-RPC + method. You only need to use this if the remote server requires a Basic +- Authentication user and password. ++ Authentication user and password. If an HTTPS url is provided, *context* may ++ be :class:`ssl.SSLContext` and configures the SSL settings of the underlying ++ HTTPS connection. + + The returned instance is a proxy object with methods that can be used to invoke + corresponding RPC calls on the remote server. If the remote server supports the +@@ -131,6 +133,9 @@ between conformable Python objects and X + *__dict__* attribute and don't have a base class that is marshalled in a + special way. + ++ .. versionchanged:: 2.7.9 ++ Added the *context* argument. ++ + + .. 
seealso:: + +diff --git a/Lib/xmlrpclib.py b/Lib/xmlrpclib.py +--- a/Lib/xmlrpclib.py ++++ b/Lib/xmlrpclib.py +@@ -1478,6 +1478,10 @@ class Transport: + class SafeTransport(Transport): + """Handles an HTTPS transaction to an XML-RPC server.""" + ++ def __init__(self, use_datetime=0, context=None): ++ Transport.__init__(self, use_datetime=use_datetime) ++ self.context = context ++ + # FIXME: mostly untested + + def make_connection(self, host): +@@ -1493,7 +1497,7 @@ class SafeTransport(Transport): + ) + else: + chost, self._extra_headers, x509 = self.get_host_info(host) +- self._connection = host, HTTPS(chost, None, **(x509 or {})) ++ self._connection = host, HTTPS(chost, None, context=self.context, **(x509 or {})) + return self._connection[1] + + ## +@@ -1536,7 +1540,7 @@ class ServerProxy: + """ + + def __init__(self, uri, transport=None, encoding=None, verbose=0, +- allow_none=0, use_datetime=0): ++ allow_none=0, use_datetime=0, context=None): + # establish a "logical" server connection + + if isinstance(uri, unicode): +@@ -1553,7 +1557,7 @@ class ServerProxy: + + if transport is None: + if type == "https": +- transport = SafeTransport(use_datetime=use_datetime) ++ transport = SafeTransport(use_datetime=use_datetime, context=context) + else: + transport = Transport(use_datetime=use_datetime) + self.__transport = transport + + diff --git a/SOURCES/00282-obmalloc-mmap-threshold.patch b/SOURCES/00282-obmalloc-mmap-threshold.patch new file mode 100644 index 0000000..fde9c3f --- /dev/null +++ b/SOURCES/00282-obmalloc-mmap-threshold.patch @@ -0,0 +1,157 @@ +Make it more likely for the system allocator to release free()d memory arenas on glibc-based systems. +Patch by Charles-François Natali. +https://bugs.python.org/issue20494 + +diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c +--- a/Objects/obmalloc.c ++++ b/Objects/obmalloc.c +@@ -2,6 +2,13 @@ + + #ifdef WITH_PYMALLOC + ++#ifdef HAVE_MMAP ++ #include ++ #ifdef MAP_ANONYMOUS ++ #define ARENAS_USE_MMAP ++ #endif ++#endif ++ + #ifdef WITH_VALGRIND + #include + +@@ -75,7 +82,8 @@ static int running_on_valgrind = -1; + * Allocation strategy abstract: + * + * For small requests, the allocator sub-allocates blocks of memory. +- * Requests greater than 256 bytes are routed to the system's allocator. ++ * Requests greater than SMALL_REQUEST_THRESHOLD bytes are routed to the ++ * system's allocator. + * + * Small requests are grouped in size classes spaced 8 bytes apart, due + * to the required valid alignment of the returned address. Requests of +@@ -107,10 +115,11 @@ static int running_on_valgrind = -1; + * 57-64 64 7 + * 65-72 72 8 + * ... ... ... +- * 241-248 248 30 +- * 249-256 256 31 ++ * 497-504 504 62 ++ * 505-512 512 63 + * +- * 0, 257 and up: routed to the underlying allocator. ++ * 0, SMALL_REQUEST_THRESHOLD + 1 and up: routed to the underlying ++ * allocator. + */ + + /*==========================================================================*/ +@@ -143,10 +152,13 @@ static int running_on_valgrind = -1; + * 1) ALIGNMENT <= SMALL_REQUEST_THRESHOLD <= 256 + * 2) SMALL_REQUEST_THRESHOLD is evenly divisible by ALIGNMENT + * ++ * Note: a size threshold of 512 guarantees that newly created dictionaries ++ * will be allocated from preallocated memory pools on 64-bit. ++ * + * Although not required, for better performance and space efficiency, + * it is recommended that SMALL_REQUEST_THRESHOLD is set to a power of 2. 
+ */ +-#define SMALL_REQUEST_THRESHOLD 256 ++#define SMALL_REQUEST_THRESHOLD 512 + #define NB_SMALL_SIZE_CLASSES (SMALL_REQUEST_THRESHOLD / ALIGNMENT) + + /* +@@ -174,15 +186,15 @@ static int running_on_valgrind = -1; + /* + * The allocator sub-allocates blocks of memory (called arenas) aligned + * on a page boundary. This is a reserved virtual address space for the +- * current process (obtained through a malloc call). In no way this means +- * that the memory arenas will be used entirely. A malloc() is usually +- * an address range reservation for bytes, unless all pages within this +- * space are referenced subsequently. So malloc'ing big blocks and not using +- * them does not mean "wasting memory". It's an addressable range wastage... ++ * current process (obtained through a malloc()/mmap() call). In no way this ++ * means that the memory arenas will be used entirely. A malloc() is ++ * usually an address range reservation for bytes, unless all pages within ++ * this space are referenced subsequently. So malloc'ing big blocks and not ++ * using them does not mean "wasting memory". It's an addressable range ++ * wastage... + * +- * Therefore, allocating arenas with malloc is not optimal, because there is +- * some address space wastage, but this is the most portable way to request +- * memory from the system across various platforms. ++ * Arenas are allocated with mmap() on systems supporting anonymous memory ++ * mappings to reduce heap fragmentation. + */ + #define ARENA_SIZE (256 << 10) /* 256KB */ + +@@ -440,6 +452,9 @@ static poolp usedpools[2 * ((NB_SMALL_SI + , PT(48), PT(49), PT(50), PT(51), PT(52), PT(53), PT(54), PT(55) + #if NB_SMALL_SIZE_CLASSES > 56 + , PT(56), PT(57), PT(58), PT(59), PT(60), PT(61), PT(62), PT(63) ++#if NB_SMALL_SIZE_CLASSES > 64 ++#error "NB_SMALL_SIZE_CLASSES should be less than 64" ++#endif /* NB_SMALL_SIZE_CLASSES > 64 */ + #endif /* NB_SMALL_SIZE_CLASSES > 56 */ + #endif /* NB_SMALL_SIZE_CLASSES > 48 */ + #endif /* NB_SMALL_SIZE_CLASSES > 40 */ +@@ -577,7 +592,12 @@ new_arena(void) + arenaobj = unused_arena_objects; + unused_arena_objects = arenaobj->nextarena; + assert(arenaobj->address == 0); ++#ifdef ARENAS_USE_MMAP ++ arenaobj->address = (uptr)mmap(NULL, ARENA_SIZE, PROT_READ|PROT_WRITE, ++ MAP_PRIVATE|MAP_ANONYMOUS, -1, 0); ++#else + arenaobj->address = (uptr)malloc(ARENA_SIZE); ++#endif + if (arenaobj->address == 0) { + /* The allocation failed: return NULL after putting the + * arenaobj back. +@@ -1054,7 +1074,11 @@ PyObject_Free(void *p) + unused_arena_objects = ao; + + /* Free the entire arena. 
*/ ++#ifdef ARENAS_USE_MMAP ++ munmap((void *)ao->address, ARENA_SIZE); ++#else + free((void *)ao->address); ++#endif + ao->address = 0; /* mark unassociated */ + --narenas_currently_allocated; + +diff --git a/configure b/configure +--- a/configure ++++ b/configure +@@ -10164,7 +10164,7 @@ for ac_func in alarm setitimer getitimer + clock confstr ctermid execv fchmod fchown fork fpathconf ftime ftruncate \ + gai_strerror getgroups getlogin getloadavg getpeername getpgid getpid \ + getpriority getresuid getresgid getpwent getspnam getspent getsid getwd \ +- initgroups kill killpg lchmod lchown lstat mkfifo mknod mktime \ ++ initgroups kill killpg lchmod lchown lstat mkfifo mknod mktime mmap \ + mremap nice pathconf pause plock poll pthread_init \ + putenv readlink realpath \ + select sem_open sem_timedwait sem_getvalue sem_unlink setegid seteuid \ +diff --git a/configure.ac b/configure.ac +--- a/configure.ac ++++ b/configure.ac +@@ -2905,7 +2905,7 @@ AC_CHECK_FUNCS(alarm setitimer getitimer + clock confstr ctermid execv fchmod fchown fork fpathconf ftime ftruncate \ + gai_strerror getgroups getlogin getloadavg getpeername getpgid getpid \ + getpriority getresuid getresgid getpwent getspnam getspent getsid getwd \ +- initgroups kill killpg lchmod lchown lstat mkfifo mknod mktime \ ++ initgroups kill killpg lchmod lchown lstat mkfifo mknod mktime mmap \ + mremap nice pathconf pause plock poll pthread_init \ + putenv readlink realpath \ + select sem_open sem_timedwait sem_getvalue sem_unlink setegid seteuid \ +diff --git a/pyconfig.h.in b/pyconfig.h.in +--- a/pyconfig.h.in ++++ b/pyconfig.h.in +@@ -475,6 +475,9 @@ + /* Define to 1 if you have the `mktime' function. */ + #undef HAVE_MKTIME + ++/* Define to 1 if you have the `mmap' function. */ ++#undef HAVE_MMAP ++ + /* Define to 1 if you have the `mremap' function. */ + #undef HAVE_MREMAP + diff --git a/SOURCES/00285-fix-non-deterministic-read-in-test_pty.patch b/SOURCES/00285-fix-non-deterministic-read-in-test_pty.patch new file mode 100644 index 0000000..8605809 --- /dev/null +++ b/SOURCES/00285-fix-non-deterministic-read-in-test_pty.patch @@ -0,0 +1,59 @@ +diff --git a/Lib/test/test_pty.py b/Lib/test/test_pty.py +index bec38c45456..f623aa09620 100644 +--- a/Lib/test/test_pty.py ++++ b/Lib/test/test_pty.py +@@ -11,6 +11,7 @@ + import select + import signal + import socket ++import io # readline + import unittest + + TEST_STRING_1 = "I wish to buy a fish license.\n" +@@ -24,6 +25,16 @@ def debug(msg): + pass + + ++# Note that os.read() is nondeterministic so we need to be very careful ++# to make the test suite deterministic. A normal call to os.read() may ++# give us less than expected. ++# ++# Beware, on my Linux system, if I put 'foo\n' into a terminal fd, I get ++# back 'foo\r\n' at the other end. The behavior depends on the termios ++# setting. The newline translation may be OS-specific. To make the ++# test suite deterministic and OS-independent, the functions _readline ++# and normalize_output can be used. ++ + def normalize_output(data): + # Some operating systems do conversions on newline. We could possibly + # fix that by doing the appropriate termios.tcsetattr()s. I couldn't +@@ -45,6 +56,12 @@ def normalize_output(data): + + return data + ++def _readline(fd): ++ """Read one line. May block forever if no newline is read.""" ++ reader = io.FileIO(fd, mode='rb', closefd=False) ++ return reader.readline() ++ ++ + + # Marginal testing of pty suite. Cannot do extensive 'do or fail' testing + # because pty code is not too portable. 
+@@ -97,14 +114,14 @@ def test_basic(self): + + debug("Writing to slave_fd") + os.write(slave_fd, TEST_STRING_1) +- s1 = os.read(master_fd, 1024) ++ s1 = _readline(master_fd) + self.assertEqual('I wish to buy a fish license.\n', + normalize_output(s1)) + + debug("Writing chunked output") + os.write(slave_fd, TEST_STRING_2[:5]) + os.write(slave_fd, TEST_STRING_2[5:]) +- s2 = os.read(master_fd, 1024) ++ s2 = _readline(master_fd) + self.assertEqual('For my pet fish, Eric.\n', normalize_output(s2)) + + os.close(slave_fd) diff --git a/SOURCES/00287-fix-thread-hanging-on-inaccessible-nfs-server.patch b/SOURCES/00287-fix-thread-hanging-on-inaccessible-nfs-server.patch new file mode 100644 index 0000000..1dbb62f --- /dev/null +++ b/SOURCES/00287-fix-thread-hanging-on-inaccessible-nfs-server.patch @@ -0,0 +1,135 @@ +diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c +index 4a71a57ec0d..2b40ada195a 100644 +--- a/Modules/_io/fileio.c ++++ b/Modules/_io/fileio.c +@@ -146,9 +146,15 @@ dircheck(fileio* self, PyObject *nameobj) + { + #if defined(HAVE_FSTAT) && defined(S_IFDIR) && defined(EISDIR) + struct stat buf; ++ int res; + if (self->fd < 0) + return 0; +- if (fstat(self->fd, &buf) == 0 && S_ISDIR(buf.st_mode)) { ++ ++ Py_BEGIN_ALLOW_THREADS ++ res = fstat(self->fd, &buf); ++ Py_END_ALLOW_THREADS ++ ++ if (res == 0 && S_ISDIR(buf.st_mode)) { + errno = EISDIR; + PyErr_SetFromErrnoWithFilenameObject(PyExc_IOError, nameobj); + return -1; +@@ -162,17 +168,34 @@ check_fd(int fd) + { + #if defined(HAVE_FSTAT) + struct stat buf; +- if (!_PyVerify_fd(fd) || (fstat(fd, &buf) < 0 && errno == EBADF)) { +- PyObject *exc; +- char *msg = strerror(EBADF); +- exc = PyObject_CallFunction(PyExc_OSError, "(is)", +- EBADF, msg); +- PyErr_SetObject(PyExc_OSError, exc); +- Py_XDECREF(exc); +- return -1; ++ int res; ++ PyObject *exc; ++ char *msg; ++ ++ if (!_PyVerify_fd(fd)) { ++ goto badfd; + } +-#endif ++ ++ Py_BEGIN_ALLOW_THREADS ++ res = fstat(fd, &buf); ++ Py_END_ALLOW_THREADS ++ ++ if (res < 0 && errno == EBADF) { ++ goto badfd; ++ } ++ + return 0; ++ ++badfd: ++ msg = strerror(EBADF); ++ exc = PyObject_CallFunction(PyExc_OSError, "(is)", ++ EBADF, msg); ++ PyErr_SetObject(PyExc_OSError, exc); ++ Py_XDECREF(exc); ++ return -1; ++#else ++ return 0; ++#endif + } + + +@@ -519,9 +542,19 @@ new_buffersize(fileio *self, size_t currentsize) + #ifdef HAVE_FSTAT + off_t pos, end; + struct stat st; +- if (fstat(self->fd, &st) == 0) { ++ int res; ++ ++ Py_BEGIN_ALLOW_THREADS ++ res = fstat(self->fd, &st); ++ Py_END_ALLOW_THREADS ++ ++ if (res == 0) { + end = st.st_size; ++ ++ Py_BEGIN_ALLOW_THREADS + pos = lseek(self->fd, 0L, SEEK_CUR); ++ Py_END_ALLOW_THREADS ++ + /* Files claiming a size smaller than SMALLCHUNK may + actually be streaming pseudo-files. In this case, we + apply the more aggressive algorithm below. 
+diff --git a/Objects/fileobject.c b/Objects/fileobject.c +index 2f63c374d1e..8d1c5812f0d 100644 +--- a/Objects/fileobject.c ++++ b/Objects/fileobject.c +@@ -121,10 +121,15 @@ dircheck(PyFileObject* f) + { + #if defined(HAVE_FSTAT) && defined(S_IFDIR) && defined(EISDIR) + struct stat buf; ++ int res; + if (f->f_fp == NULL) + return f; +- if (fstat(fileno(f->f_fp), &buf) == 0 && +- S_ISDIR(buf.st_mode)) { ++ ++ Py_BEGIN_ALLOW_THREADS ++ res = fstat(fileno(f->f_fp), &buf); ++ Py_END_ALLOW_THREADS ++ ++ if (res == 0 && S_ISDIR(buf.st_mode)) { + char *msg = strerror(EISDIR); + PyObject *exc = PyObject_CallFunction(PyExc_IOError, "(isO)", + EISDIR, msg, f->f_name); +@@ -1010,7 +1015,13 @@ new_buffersize(PyFileObject *f, size_t currentsize) + #ifdef HAVE_FSTAT + off_t pos, end; + struct stat st; +- if (fstat(fileno(f->f_fp), &st) == 0) { ++ int res; ++ ++ Py_BEGIN_ALLOW_THREADS ++ res = fstat(fileno(f->f_fp), &st); ++ Py_END_ALLOW_THREADS ++ ++ if (res == 0) { + end = st.st_size; + /* The following is not a bug: we really need to call lseek() + *and* ftell(). The reason is that some stdio libraries +@@ -1021,7 +1032,11 @@ new_buffersize(PyFileObject *f, size_t currentsize) + works. We can't use the lseek() value either, because we + need to take the amount of buffered data into account. + (Yet another reason why stdio stinks. :-) */ ++ ++ Py_BEGIN_ALLOW_THREADS + pos = lseek(fileno(f->f_fp), 0L, SEEK_CUR); ++ Py_END_ALLOW_THREADS ++ + if (pos >= 0) { + pos = ftell(f->f_fp); + } diff --git a/SOURCES/00295-fix-https-behind-proxy.patch b/SOURCES/00295-fix-https-behind-proxy.patch new file mode 100644 index 0000000..2d54592 --- /dev/null +++ b/SOURCES/00295-fix-https-behind-proxy.patch @@ -0,0 +1,250 @@ +diff --git a/Lib/httplib.py b/Lib/httplib.py +index 592ee57..b69145b 100644 +--- a/Lib/httplib.py ++++ b/Lib/httplib.py +@@ -735,25 +735,40 @@ class HTTPConnection: + self._tunnel_host = None + self._tunnel_port = None + self._tunnel_headers = {} +- +- self._set_hostport(host, port) + if strict is not None: + self.strict = strict + ++ (self.host, self.port) = self._get_hostport(host, port) ++ ++ # This is stored as an instance variable to allow unittests ++ # to replace with a suitable mock ++ self._create_connection = socket.create_connection ++ + def set_tunnel(self, host, port=None, headers=None): +- """ Sets up the host and the port for the HTTP CONNECT Tunnelling. ++ """ Set up host and port for HTTP CONNECT tunnelling. ++ ++ In a connection that uses HTTP Connect tunneling, the host passed to the ++ constructor is used as proxy server that relays all communication to the ++ endpoint passed to set_tunnel. This is done by sending a HTTP CONNECT ++ request to the proxy server when the connection is established. ++ ++ This method must be called before the HTTP connection has been ++ established. + + The headers argument should be a mapping of extra HTTP headers + to send with the CONNECT request. + """ +- self._tunnel_host = host +- self._tunnel_port = port ++ # Verify if this is required. ++ if self.sock: ++ raise RuntimeError("Can't setup tunnel for established connection.") ++ ++ self._tunnel_host, self._tunnel_port = self._get_hostport(host, port) + if headers: + self._tunnel_headers = headers + else: + self._tunnel_headers.clear() + +- def _set_hostport(self, host, port): ++ def _get_hostport(self, host, port): + if port is None: + i = host.rfind(':') + j = host.rfind(']') # ipv6 addresses have [...] 
+@@ -770,15 +785,14 @@ class HTTPConnection: + port = self.default_port + if host and host[0] == '[' and host[-1] == ']': + host = host[1:-1] +- self.host = host +- self.port = port ++ return (host, port) + + def set_debuglevel(self, level): + self.debuglevel = level + + def _tunnel(self): +- self._set_hostport(self._tunnel_host, self._tunnel_port) +- self.send("CONNECT %s:%d HTTP/1.0\r\n" % (self.host, self.port)) ++ self.send("CONNECT %s:%d HTTP/1.0\r\n" % (self._tunnel_host, ++ self._tunnel_port)) + for header, value in self._tunnel_headers.iteritems(): + self.send("%s: %s\r\n" % (header, value)) + self.send("\r\n") +@@ -803,8 +817,8 @@ class HTTPConnection: + + def connect(self): + """Connect to the host and port specified in __init__.""" +- self.sock = socket.create_connection((self.host,self.port), +- self.timeout, self.source_address) ++ self.sock = self._create_connection((self.host,self.port), ++ self.timeout, self.source_address) + + if self._tunnel_host: + self._tunnel() +@@ -942,17 +956,24 @@ class HTTPConnection: + netloc_enc = netloc.encode("idna") + self.putheader('Host', netloc_enc) + else: ++ if self._tunnel_host: ++ host = self._tunnel_host ++ port = self._tunnel_port ++ else: ++ host = self.host ++ port = self.port ++ + try: +- host_enc = self.host.encode("ascii") ++ host_enc = host.encode("ascii") + except UnicodeEncodeError: +- host_enc = self.host.encode("idna") ++ host_enc = host.encode("idna") + # Wrap the IPv6 Host Header with [] (RFC 2732) + if host_enc.find(':') >= 0: + host_enc = "[" + host_enc + "]" +- if self.port == self.default_port: ++ if port == self.default_port: + self.putheader('Host', host_enc) + else: +- self.putheader('Host', "%s:%s" % (host_enc, self.port)) ++ self.putheader('Host', "%s:%s" % (host_enc, port)) + + # note: we are assuming that clients will not attempt to set these + # headers since *this* library must deal with the +@@ -1141,7 +1162,7 @@ class HTTP: + "Accept arguments to set the host/port, since the superclass doesn't." + + if host is not None: +- self._conn._set_hostport(host, port) ++ (self._conn.host, self._conn.port) = self._conn._get_hostport(host, port) + self._conn.connect() + + def getfile(self): +diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py +index 29af589..9db30cc 100644 +--- a/Lib/test/test_httplib.py ++++ b/Lib/test/test_httplib.py +@@ -21,10 +21,12 @@ CERT_selfsigned_pythontestdotnet = os.path.join(here, 'selfsigned_pythontestdotn + HOST = test_support.HOST + + class FakeSocket: +- def __init__(self, text, fileclass=StringIO.StringIO): ++ def __init__(self, text, fileclass=StringIO.StringIO, host=None, port=None): + self.text = text + self.fileclass = fileclass + self.data = '' ++ self.host = host ++ self.port = port + + def sendall(self, data): + self.data += ''.join(data) +@@ -34,6 +36,9 @@ class FakeSocket: + raise httplib.UnimplementedFileMode() + return self.fileclass(self.text) + ++ def close(self): ++ pass ++ + class EPipeSocket(FakeSocket): + + def __init__(self, text, pipe_trigger): +@@ -487,7 +492,11 @@ class OfflineTest(TestCase): + self.assertEqual(httplib.responses[httplib.NOT_FOUND], "Not Found") + + +-class SourceAddressTest(TestCase): ++class TestServerMixin: ++ """A limited socket server mixin. ++ ++ This is used by test cases for testing http connection end points. 
++ """ + def setUp(self): + self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.port = test_support.bind_port(self.serv) +@@ -502,6 +511,7 @@ class SourceAddressTest(TestCase): + self.serv.close() + self.serv = None + ++class SourceAddressTest(TestServerMixin, TestCase): + def testHTTPConnectionSourceAddress(self): + self.conn = httplib.HTTPConnection(HOST, self.port, + source_address=('', self.source_port)) +@@ -518,6 +528,24 @@ class SourceAddressTest(TestCase): + # for an ssl_wrapped connect() to actually return from. + + ++class HTTPTest(TestServerMixin, TestCase): ++ def testHTTPConnection(self): ++ self.conn = httplib.HTTP(host=HOST, port=self.port, strict=None) ++ self.conn.connect() ++ self.assertEqual(self.conn._conn.host, HOST) ++ self.assertEqual(self.conn._conn.port, self.port) ++ ++ def testHTTPWithConnectHostPort(self): ++ testhost = 'unreachable.test.domain' ++ testport = '80' ++ self.conn = httplib.HTTP(host=testhost, port=testport) ++ self.conn.connect(host=HOST, port=self.port) ++ self.assertNotEqual(self.conn._conn.host, testhost) ++ self.assertNotEqual(self.conn._conn.port, testport) ++ self.assertEqual(self.conn._conn.host, HOST) ++ self.assertEqual(self.conn._conn.port, self.port) ++ ++ + class TimeoutTest(TestCase): + PORT = None + +@@ -716,13 +744,54 @@ class HTTPSTest(TestCase): + c = httplib.HTTPSConnection(hp, context=context) + self.assertEqual(h, c.host) + self.assertEqual(p, c.port) +- ++ ++class TunnelTests(TestCase): ++ def test_connect(self): ++ response_text = ( ++ 'HTTP/1.0 200 OK\r\n\r\n' # Reply to CONNECT ++ 'HTTP/1.1 200 OK\r\n' # Reply to HEAD ++ 'Content-Length: 42\r\n\r\n' ++ ) ++ ++ def create_connection(address, timeout=None, source_address=None): ++ return FakeSocket(response_text, host=address[0], port=address[1]) ++ ++ conn = httplib.HTTPConnection('proxy.com') ++ conn._create_connection = create_connection ++ ++ # Once connected, we should not be able to tunnel anymore ++ conn.connect() ++ self.assertRaises(RuntimeError, conn.set_tunnel, 'destination.com') ++ ++ # But if close the connection, we are good. 
++ conn.close() ++ conn.set_tunnel('destination.com') ++ conn.request('HEAD', '/', '') ++ ++ self.assertEqual(conn.sock.host, 'proxy.com') ++ self.assertEqual(conn.sock.port, 80) ++ self.assertIn('CONNECT destination.com', conn.sock.data) ++ # issue22095 ++ self.assertNotIn('Host: destination.com:None', conn.sock.data) ++ # issue22095 ++ ++ self.assertNotIn('Host: proxy.com', conn.sock.data) ++ ++ conn.close() ++ ++ conn.request('PUT', '/', '') ++ self.assertEqual(conn.sock.host, 'proxy.com') ++ self.assertEqual(conn.sock.port, 80) ++ self.assertTrue('CONNECT destination.com' in conn.sock.data) ++ self.assertTrue('Host: destination.com' in conn.sock.data) ++ + + + @test_support.reap_threads + def test_main(verbose=None): + test_support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest, +- HTTPSTest, SourceAddressTest) ++ HTTPTest, HTTPSTest, SourceAddressTest, ++ TunnelTests) + + if __name__ == '__main__': + test_main() diff --git a/SOURCES/00296-Readd-the-private-_set_hostport-api-to-httplib.patch b/SOURCES/00296-Readd-the-private-_set_hostport-api-to-httplib.patch new file mode 100644 index 0000000..56c3952 --- /dev/null +++ b/SOURCES/00296-Readd-the-private-_set_hostport-api-to-httplib.patch @@ -0,0 +1,26 @@ +From 8a91bb4ea0a7f50d024fe55014c2e86e36e67751 Mon Sep 17 00:00:00 2001 +From: Tomas Orsava +Date: Mon, 19 Feb 2018 14:42:13 +0100 +Subject: [PATCH] Readd the private `_set_hostport` api to httplib + +--- + Lib/httplib.py | 3 +++ + 1 file changed, 3 insertions(+) + +diff --git a/Lib/httplib.py b/Lib/httplib.py +index b69145b..da2f346 100644 +--- a/Lib/httplib.py ++++ b/Lib/httplib.py +@@ -787,6 +787,9 @@ class HTTPConnection: + host = host[1:-1] + return (host, port) + ++ def _set_hostport(self, host, port): ++ (self.host, self.port) = self._get_hostport(host, port) ++ + def set_debuglevel(self, level): + self.debuglevel = level + +-- +2.13.6 + diff --git a/SOURCES/00298-do-not-send-IP-in-SNI-TLS-extension.patch b/SOURCES/00298-do-not-send-IP-in-SNI-TLS-extension.patch new file mode 100644 index 0000000..99ed83c --- /dev/null +++ b/SOURCES/00298-do-not-send-IP-in-SNI-TLS-extension.patch @@ -0,0 +1,60 @@ +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +index d0a3830..51b192c 100644 +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -50,6 +50,11 @@ + #include + #endif + ++#ifndef MS_WINDOWS ++/* inet_pton */ ++#include ++#endif ++ + /* Include OpenSSL header files */ + #include "openssl/rsa.h" + #include "openssl/crypto.h" +@@ -493,8 +498,41 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, + SSL_set_mode(self->ssl, mode); + + #if HAVE_SNI +- if (server_hostname != NULL) +- SSL_set_tlsext_host_name(self->ssl, server_hostname); ++ if (server_hostname != NULL) { ++/* Don't send SNI for IP addresses. We cannot simply use inet_aton() and ++ * inet_pton() here. inet_aton() may be linked weakly and inet_pton() isn't ++ * available on all platforms. Use OpenSSL's IP address parser. It's ++ * available since 1.0.2 and LibreSSL since at least 2.3.0. 
*/ ++ int send_sni = 1; ++#if OPENSSL_VERSION_NUMBER >= 0x10200000L ++ ASN1_OCTET_STRING *ip = a2i_IPADDRESS(server_hostname); ++ if (ip == NULL) { ++ send_sni = 1; ++ ERR_clear_error(); ++ } else { ++ send_sni = 0; ++ ASN1_OCTET_STRING_free(ip); ++ } ++#elif defined(HAVE_INET_PTON) ++#ifdef ENABLE_IPV6 ++ char packed[Py_MAX(sizeof(struct in_addr), sizeof(struct in6_addr))]; ++#else ++ char packed[sizeof(struct in_addr)]; ++#endif /* ENABLE_IPV6 */ ++ if (inet_pton(AF_INET, server_hostname, packed)) { ++ send_sni = 0; ++#ifdef ENABLE_IPV6 ++ } else if(inet_pton(AF_INET6, server_hostname, packed)) { ++ send_sni = 0; ++#endif /* ENABLE_IPV6 */ ++ } else { ++ send_sni = 1; ++ } ++#endif /* HAVE_INET_PTON */ ++ if (send_sni) { ++ SSL_set_tlsext_host_name(self->ssl, server_hostname); ++ } ++ } + #endif + + /* If the socket is in non-blocking mode or timeout mode, set the BIO diff --git a/SOURCES/00299-fix-ssl-module-pymax.patch b/SOURCES/00299-fix-ssl-module-pymax.patch new file mode 100644 index 0000000..4f8c8da --- /dev/null +++ b/SOURCES/00299-fix-ssl-module-pymax.patch @@ -0,0 +1,24 @@ +From 439956a149f8a3eb44646498c63b2ef3337d5f3d Mon Sep 17 00:00:00 2001 +From: Christian Heimes +Date: Sun, 25 Feb 2018 13:08:05 +0100 +Subject: [PATCH] Fix ssl module, Python 2.7 doesn't have Py_MAX (#5878) + +Signed-off-by: Christian Heimes +--- + Modules/_ssl.c | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +index af66a581e15a..f9ed94dee1e1 100644 +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -610,7 +610,8 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, + } + #elif defined(HAVE_INET_PTON) + #ifdef ENABLE_IPV6 +- char packed[Py_MAX(sizeof(struct in_addr), sizeof(struct in6_addr))]; ++ #define PySSL_MAX(x, y) (((x) > (y)) ? (x) : (y)) ++ char packed[PySSL_MAX(sizeof(struct in_addr), sizeof(struct in6_addr))]; + #else + char packed[sizeof(struct in_addr)]; + #endif /* ENABLE_IPV6 */ diff --git a/SOURCES/00303-CVE-2018-1060-1.patch b/SOURCES/00303-CVE-2018-1060-1.patch new file mode 100644 index 0000000..4d455a5 --- /dev/null +++ b/SOURCES/00303-CVE-2018-1060-1.patch @@ -0,0 +1,86 @@ +diff --git a/Lib/difflib.py b/Lib/difflib.py +index 1c6fbdbedcb7..788a92df3f89 100644 +--- a/Lib/difflib.py ++++ b/Lib/difflib.py +@@ -1103,7 +1103,7 @@ def _qformat(self, aline, bline, atags, btags): + + import re + +-def IS_LINE_JUNK(line, pat=re.compile(r"\s*#?\s*$").match): ++def IS_LINE_JUNK(line, pat=re.compile(r"\s*(?:#\s*)?$").match): + r""" + Return 1 for ignorable line: iff `line` is blank or contains a single '#'. 
+ +diff --git a/Lib/poplib.py b/Lib/poplib.py +index b91e5f72d2ca..a238510b38fc 100644 +--- a/Lib/poplib.py ++++ b/Lib/poplib.py +@@ -274,7 +274,7 @@ def rpop(self, user): + return self._shortcmd('RPOP %s' % user) + + +- timestamp = re.compile(r'\+OK.*(<[^>]+>)') ++ timestamp = re.compile(br'\+OK.[^<]*(<.*>)') + + def apop(self, user, secret): + """Authorisation +diff --git a/Lib/test/test_difflib.py b/Lib/test/test_difflib.py +index 35f2c36ca70a..d8277b79b880 100644 +--- a/Lib/test/test_difflib.py ++++ b/Lib/test/test_difflib.py +@@ -269,13 +269,33 @@ def test_range_format_context(self): + self.assertEqual(fmt(3,6), '4,6') + self.assertEqual(fmt(0,0), '0') + ++class TestJunkAPIs(unittest.TestCase): ++ def test_is_line_junk_true(self): ++ for line in ['#', ' ', ' #', '# ', ' # ', '']: ++ self.assertTrue(difflib.IS_LINE_JUNK(line), repr(line)) ++ ++ def test_is_line_junk_false(self): ++ for line in ['##', ' ##', '## ', 'abc ', 'abc #', 'Mr. Moose is up!']: ++ self.assertFalse(difflib.IS_LINE_JUNK(line), repr(line)) ++ ++ def test_is_line_junk_REDOS(self): ++ evil_input = ('\t' * 1000000) + '##' ++ self.assertFalse(difflib.IS_LINE_JUNK(evil_input)) ++ ++ def test_is_character_junk_true(self): ++ for char in [' ', '\t']: ++ self.assertTrue(difflib.IS_CHARACTER_JUNK(char), repr(char)) ++ ++ def test_is_character_junk_false(self): ++ for char in ['a', '#', '\n', '\f', '\r', '\v']: ++ self.assertFalse(difflib.IS_CHARACTER_JUNK(char), repr(char)) + + def test_main(): + difflib.HtmlDiff._default_prefix = 0 + Doctests = doctest.DocTestSuite(difflib) + run_unittest( + TestWithAscii, TestAutojunk, TestSFpatches, TestSFbugs, +- TestOutputFormat, Doctests) ++ TestOutputFormat, TestJunkAPIs) + + if __name__ == '__main__': + test_main() +diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py +index 23d688724b95..d2143759ba66 100644 +--- a/Lib/test/test_poplib.py ++++ b/Lib/test/test_poplib.py +@@ -211,6 +211,16 @@ def test_noop(self): + def test_rpop(self): + self.assertOK(self.client.rpop('foo')) + ++ def test_apop_REDOS(self): ++ # Replace welcome with very long evil welcome. ++ # NB The upper bound on welcome length is currently 2048. ++ # At this length, evil input makes each apop call take ++ # on the order of milliseconds instead of microseconds. ++ evil_welcome = b'+OK' + (b'<' * 1000000) ++ with test_support.swap_attr(self.client, 'welcome', evil_welcome): ++ # The evil welcome is invalid, so apop should throw. 
++ self.assertRaises(poplib.error_proto, self.client.apop, 'a', 'kb') ++ + def test_top(self): + expected = ('+OK 116 bytes', + ['From: postmaster@python.org', 'Content-Type: text/plain', diff --git a/SOURCES/00305-CVE-2016-2183.patch b/SOURCES/00305-CVE-2016-2183.patch new file mode 100644 index 0000000..a846cb5 --- /dev/null +++ b/SOURCES/00305-CVE-2016-2183.patch @@ -0,0 +1,53 @@ +diff --git a/Lib/ssl.py b/Lib/ssl.py +index 038daa4..5311321 100644 +--- a/Lib/ssl.py ++++ b/Lib/ssl.py +@@ -143,38 +143,36 @@ if _ssl.HAS_TLS_UNIQUE: + else: + CHANNEL_BINDING_TYPES = [] + ++ + # Disable weak or insecure ciphers by default + # (OpenSSL's default setting is 'DEFAULT:!aNULL:!eNULL') + # Enable a better set of ciphers by default + # This list has been explicitly chosen to: + # * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE) + # * Prefer ECDHE over DHE for better performance +-# * Prefer any AES-GCM over any AES-CBC for better performance and security ++# * Prefer AEAD over CBC for better performance and security + # * Then Use HIGH cipher suites as a fallback +-# * Then Use 3DES as fallback which is secure but slow + # * Finally use RC4 as a fallback which is problematic but needed for + # compatibility some times. +-# * Disable NULL authentication, NULL encryption, and MD5 MACs for security +-# reasons ++# * Disable NULL authentication, NULL encryption, 3DES and MD5 MACs ++# for security reasons + _DEFAULT_CIPHERS = ( + 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' +- 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:' +- 'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5' ++ 'DH+HIGH:RSA+AESGCM:RSA+AES:RSA+HIGH:ECDH+RC4:DH+RC4:RSA+RC4:!aNULL:!eNULL:' ++ '!MD5:!3DES' + ) + + # Restricted and more secure ciphers for the server side + # This list has been explicitly chosen to: + # * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE) + # * Prefer ECDHE over DHE for better performance +-# * Prefer any AES-GCM over any AES-CBC for better performance and security ++# * Prefer AEAD over CBC for better performance and security + # * Then Use HIGH cipher suites as a fallback +-# * Then Use 3DES as fallback which is secure but slow +-# * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, and RC4 for +-# security reasons ++# * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, RC4, and ++# 3DES for security reasons + _RESTRICTED_SERVER_CIPHERS = ( + 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' +- 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:' +- '!eNULL:!MD5:!DSS:!RC4' ++ 'DH+HIGH:RSA+AESGCM:RSA+AES:RSA+HIGH:!aNULL:!eNULL:!MD5:!DSS:!RC4:!3DES' + ) + + diff --git a/SOURCES/00306-fix-oserror-17-upon-semaphores-creation.patch b/SOURCES/00306-fix-oserror-17-upon-semaphores-creation.patch new file mode 100644 index 0000000..b6070a4 --- /dev/null +++ b/SOURCES/00306-fix-oserror-17-upon-semaphores-creation.patch @@ -0,0 +1,44 @@ + +# HG changeset patch +# User Charles-François Natali +# Date 1455316761 0 +# Node ID d3662c088db8fb2c89f754031f18b1543419fed9 +# Parent 5715a6d9ff12053e81f7ad75268ac059b079b351 +Issue #24303: Fix random EEXIST upon multiprocessing semaphores creation with +Linux PID namespaces enabled. 
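Before the diff below: the pre-patch code derived semaphore names from getpid() plus a monotonically increasing counter, so two interpreters in different PID namespaces (which may see the same PID value) could race for the same name and fail with EEXIST. The hunk switches to a urandom-based suffix plus a bounded retry loop. A rough Python-level sketch of the same "random name, retry on EEXIST" pattern -- the helper name, the /tmp location and the use of a plain O_EXCL file are illustrative stand-ins; only the naming/retry logic mirrors the C hunk:

import errno
import os
import struct

def create_with_unique_name(prefix, directory="/tmp", attempts=100):
    # Derive the suffix from urandom bytes so the name does not depend only on
    # a PID that may be duplicated across PID namespaces.
    for _ in range(attempts):
        raw = os.urandom(struct.calcsize("L"))
        suffix = struct.unpack("L", raw)[0]
        path = os.path.join(directory, "%s-%d-%d" % (prefix, os.getpid(), suffix))
        try:
            # O_CREAT|O_EXCL fails with EEXIST if another process won the race.
            fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDWR, 0o600)
            return fd, path
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
    raise OSError(errno.EEXIST, "no free name after %d attempts" % attempts)

The C code below does the equivalent with SEM_CREATE(), giving up after 100 consecutive EEXIST collisions.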
+ +diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c +--- a/Modules/_multiprocessing/semaphore.c ++++ b/Modules/_multiprocessing/semaphore.c +@@ -429,7 +429,7 @@ semlock_new(PyTypeObject *type, PyObject + int kind, maxvalue, value; + PyObject *result; + static char *kwlist[] = {"kind", "value", "maxvalue", NULL}; +- static int counter = 0; ++ int try = 0; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "iii", kwlist, + &kind, &value, &maxvalue)) +@@ -440,10 +440,18 @@ semlock_new(PyTypeObject *type, PyObject + return NULL; + } + +- PyOS_snprintf(buffer, sizeof(buffer), "/mp%ld-%d", (long)getpid(), counter++); ++ /* Create a semaphore with a unique name. The bytes returned by ++ * _PyOS_URandom() are treated as unsigned long to ensure that the filename ++ * is valid (no special characters). */ ++ do { ++ unsigned long suffix; ++ _PyOS_URandom((char *)&suffix, sizeof(suffix)); ++ PyOS_snprintf(buffer, sizeof(buffer), "/mp%ld-%lu", (long)getpid(), ++ suffix); ++ SEM_CLEAR_ERROR(); ++ handle = SEM_CREATE(buffer, value, maxvalue); ++ } while ((handle == SEM_FAILED) && (errno == EEXIST) && (++try < 100)); + +- SEM_CLEAR_ERROR(); +- handle = SEM_CREATE(buffer, value, maxvalue); + /* On Windows we should fail if GetLastError()==ERROR_ALREADY_EXISTS */ + if (handle == SEM_FAILED || SEM_GET_LAST_ERROR() != 0) + goto failure; + diff --git a/SOURCES/00310-use-xml-sethashsalt-in-elementtree.patch b/SOURCES/00310-use-xml-sethashsalt-in-elementtree.patch new file mode 100644 index 0000000..d336361 --- /dev/null +++ b/SOURCES/00310-use-xml-sethashsalt-in-elementtree.patch @@ -0,0 +1,54 @@ +diff --git a/Include/pyexpat.h b/Include/pyexpat.h +index 5340ef5..3fc5fa5 100644 +--- a/Include/pyexpat.h ++++ b/Include/pyexpat.h +@@ -3,7 +3,7 @@ + + /* note: you must import expat.h before importing this module! */ + +-#define PyExpat_CAPI_MAGIC "pyexpat.expat_CAPI 1.0" ++#define PyExpat_CAPI_MAGIC "pyexpat.expat_CAPI 1.1" + #define PyExpat_CAPSULE_NAME "pyexpat.expat_CAPI" + + struct PyExpat_CAPI +@@ -43,6 +43,8 @@ struct PyExpat_CAPI + XML_Parser parser, XML_UnknownEncodingHandler handler, + void *encodingHandlerData); + void (*SetUserData)(XML_Parser parser, void *userData); ++ /* might be none for expat < 2.1.0 */ ++ int (*SetHashSalt)(XML_Parser parser, unsigned long hash_salt); + /* always add new stuff to the end! 
*/ + }; + +diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c +index 379aa01..ce62081 100644 +--- a/Modules/_elementtree.c ++++ b/Modules/_elementtree.c +@@ -2500,6 +2500,11 @@ xmlparser(PyObject* self_, PyObject* args, PyObject* kw) + PyErr_NoMemory(); + return NULL; + } ++ /* expat < 2.1.0 has no XML_SetHashSalt() */ ++ if (EXPAT(SetHashSalt) != NULL) { ++ EXPAT(SetHashSalt)(self->parser, ++ (unsigned long)_Py_HashSecret.prefix); ++ } + + /* setup target handlers */ + if (!target) { +diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c +index f269113..147b8a9 100644 +--- a/Modules/pyexpat.c ++++ b/Modules/pyexpat.c +@@ -2037,6 +2037,11 @@ MODULE_INITFUNC(void) + capi.SetProcessingInstructionHandler = XML_SetProcessingInstructionHandler; + capi.SetUnknownEncodingHandler = XML_SetUnknownEncodingHandler; + capi.SetUserData = XML_SetUserData; ++#if XML_COMBINED_VERSION >= 20100 ++ capi.SetHashSalt = XML_SetHashSalt; ++#else ++ capi.SetHashSalt = NULL; ++#endif + + /* export using capsule */ + capi_object = PyCapsule_New(&capi, PyExpat_CAPSULE_NAME, NULL); diff --git a/SOURCES/00314-parser-check-e_io.patch b/SOURCES/00314-parser-check-e_io.patch new file mode 100644 index 0000000..2119f58 --- /dev/null +++ b/SOURCES/00314-parser-check-e_io.patch @@ -0,0 +1,56 @@ +commit bcd39b7b9bd3a7f8a6a34410169794a6264a6fed +Author: Victor Stinner +Date: Wed Nov 7 00:45:13 2018 +0100 + + bpo-25083: Python can sometimes create incorrect .pyc files + + Python 2 never checked for I/O error when reading .py files and + thus could mistake an I/O error for EOF and create incorrect .pyc + files. This adds an check for this and aborts on an error. + + Patch by tzickel, commit f64c813de84011a84ca21d75a294861a9cc2dfdc. + + Resolves: rhbz#1629982 + +diff --git a/Include/errcode.h b/Include/errcode.h +index becec80..5c5a0f7 100644 +--- a/Include/errcode.h ++++ b/Include/errcode.h +@@ -29,6 +29,7 @@ extern "C" { + #define E_EOFS 23 /* EOF in triple-quoted string */ + #define E_EOLS 24 /* EOL in single-quoted string */ + #define E_LINECONT 25 /* Unexpected characters after a line continuation */ ++#define E_IO 26 /* I/O error */ + + #ifdef __cplusplus + } +diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c +index ee6313b..0217f2b 100644 +--- a/Parser/tokenizer.c ++++ b/Parser/tokenizer.c +@@ -1644,6 +1644,11 @@ int + PyTokenizer_Get(struct tok_state *tok, char **p_start, char **p_end) + { + int result = tok_get(tok, p_start, p_end); ++ if (tok->fp && ferror(tok->fp)) { ++ clearerr(tok->fp); ++ result = ERRORTOKEN; ++ tok->done = E_IO; ++ } + if (tok->decoding_erred) { + result = ERRORTOKEN; + tok->done = E_DECODE; +diff --git a/Python/pythonrun.c b/Python/pythonrun.c +index 0b73f3a..9f06236 100644 +--- a/Python/pythonrun.c ++++ b/Python/pythonrun.c +@@ -1643,6 +1643,9 @@ err_input(perrdetail *err) + Py_XDECREF(tb); + break; + } ++ case E_IO: ++ msg = "I/O error while reading"; ++ break; + case E_LINECONT: + msg = "unexpected character after line continuation character"; + break; diff --git a/SOURCES/00317-CVE-2019-5010-ssl-crl.patch b/SOURCES/00317-CVE-2019-5010-ssl-crl.patch new file mode 100644 index 0000000..bd53e2e --- /dev/null +++ b/SOURCES/00317-CVE-2019-5010-ssl-crl.patch @@ -0,0 +1,100 @@ +commit 88a31ffeccce13192a474f4981b9cf6cfdfe065e +Author: Victor Stinner +Date: Wed Mar 20 17:43:20 2019 +0100 + + bpo-35746: Fix segfault in ssl's cert parser (GH-11569) + + Fix a NULL pointer deref in ssl module. The cert parser did not handle CRL + distribution points with empty DP or URI correctly. 
A malicious or buggy + certificate can result into segfault. + + Signed-off-by: Christian Heimes + + https://bugs.python.org/issue35746 + (cherry picked from commit a37f52436f9aa4b9292878b72f3ff1480e2606c3) + + Co-authored-by: Christian Heimes + +diff --git a/Lib/test/talos-2019-0758.pem b/Lib/test/talos-2019-0758.pem +new file mode 100644 +index 0000000..13b95a7 +--- /dev/null ++++ b/Lib/test/talos-2019-0758.pem +@@ -0,0 +1,22 @@ ++-----BEGIN CERTIFICATE----- ++MIIDqDCCApKgAwIBAgIBAjALBgkqhkiG9w0BAQswHzELMAkGA1UEBhMCVUsxEDAO ++BgNVBAMTB2NvZHktY2EwHhcNMTgwNjE4MTgwMDU4WhcNMjgwNjE0MTgwMDU4WjA7 ++MQswCQYDVQQGEwJVSzEsMCoGA1UEAxMjY29kZW5vbWljb24tdm0tMi50ZXN0Lmxh ++bC5jaXNjby5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC63fGB ++J80A9Av1GB0bptslKRIUtJm8EeEu34HkDWbL6AJY0P8WfDtlXjlPaLqFa6sqH6ES ++V48prSm1ZUbDSVL8R6BYVYpOlK8/48xk4pGTgRzv69gf5SGtQLwHy8UPBKgjSZoD ++5a5k5wJXGswhKFFNqyyxqCvWmMnJWxXTt2XDCiWc4g4YAWi4O4+6SeeHVAV9rV7C ++1wxqjzKovVe2uZOHjKEzJbbIU6JBPb6TRfMdRdYOw98n1VXDcKVgdX2DuuqjCzHP ++WhU4Tw050M9NaK3eXp4Mh69VuiKoBGOLSOcS8reqHIU46Reg0hqeL8LIL6OhFHIF ++j7HR6V1X6F+BfRS/AgMBAAGjgdYwgdMwCQYDVR0TBAIwADAdBgNVHQ4EFgQUOktp ++HQjxDXXUg8prleY9jeLKeQ4wTwYDVR0jBEgwRoAUx6zgPygZ0ZErF9sPC4+5e2Io ++UU+hI6QhMB8xCzAJBgNVBAYTAlVLMRAwDgYDVQQDEwdjb2R5LWNhggkA1QEAuwb7 ++2s0wCQYDVR0SBAIwADAuBgNVHREEJzAlgiNjb2Rlbm9taWNvbi12bS0yLnRlc3Qu ++bGFsLmNpc2NvLmNvbTAOBgNVHQ8BAf8EBAMCBaAwCwYDVR0fBAQwAjAAMAsGCSqG ++SIb3DQEBCwOCAQEAvqantx2yBlM11RoFiCfi+AfSblXPdrIrHvccepV4pYc/yO6p ++t1f2dxHQb8rWH3i6cWag/EgIZx+HJQvo0rgPY1BFJsX1WnYf1/znZpkUBGbVmlJr ++t/dW1gSkNS6sPsM0Q+7HPgEv8CPDNK5eo7vU2seE0iWOkxSyVUuiCEY9ZVGaLVit ++p0C78nZ35Pdv4I+1cosmHl28+es1WI22rrnmdBpH8J1eY6WvUw2xuZHLeNVN0TzV ++Q3qq53AaCWuLOD1AjESWuUCxMZTK9DPS4JKXTK8RLyDeqOvJGjsSWp3kL0y3GaQ+ ++10T1rfkKJub2+m9A9duin1fn6tHc2wSvB7m3DA== ++-----END CERTIFICATE----- +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index f7a6746..31af578 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -68,6 +68,7 @@ WRONGCERT = data_file("XXXnonexisting.pem") + BADKEY = data_file("badkey.pem") + NOKIACERT = data_file("nokia.pem") + NULLBYTECERT = data_file("nullbytecert.pem") ++TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem") + + DHFILE = data_file("dh1024.pem") + BYTES_DHFILE = DHFILE.encode(sys.getfilesystemencoding()) +@@ -238,6 +239,27 @@ class BasicSocketTests(unittest.TestCase): + ('IP Address', '2001:DB8:0:0:0:0:0:1\n')) + ) + ++ def test_parse_cert_CVE_2019_5010(self): ++ p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP) ++ if support.verbose: ++ sys.stdout.write("\n" + pprint.pformat(p) + "\n") ++ self.assertEqual( ++ p, ++ { ++ 'issuer': ( ++ (('countryName', 'UK'),), (('commonName', 'cody-ca'),)), ++ 'notAfter': 'Jun 14 18:00:58 2028 GMT', ++ 'notBefore': 'Jun 18 18:00:58 2018 GMT', ++ 'serialNumber': '02', ++ 'subject': ((('countryName', 'UK'),), ++ (('commonName', ++ 'codenomicon-vm-2.test.lal.cisco.com'),)), ++ 'subjectAltName': ( ++ ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),), ++ 'version': 3 ++ } ++ ) ++ + def test_parse_all_sans(self): + p = ssl._ssl._test_decode_cert(ALLSANFILE) + self.assertEqual(p['subjectAltName'], +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +index 6220bea..baea6e1 100644 +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -1103,6 +1103,10 @@ _get_crl_dp(X509 *certificate) { + STACK_OF(GENERAL_NAME) *gns; + + dp = sk_DIST_POINT_value(dps, i); ++ if (dp->distpoint == NULL) { ++ /* Ignore empty DP value, CVE-2019-5010 */ ++ continue; ++ } + gns = dp->distpoint->name.fullname; + + for (j=0; j < sk_GENERAL_NAME_num(gns); 
j++) { diff --git a/SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch b/SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch new file mode 100644 index 0000000..6067497 --- /dev/null +++ b/SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch @@ -0,0 +1,156 @@ +diff --git a/Doc/library/urlparse.rst b/Doc/library/urlparse.rst +index efd112d..61022f7 100644 +--- a/Doc/library/urlparse.rst ++++ b/Doc/library/urlparse.rst +@@ -118,6 +118,12 @@ The :mod:`urlparse` module defines the following functions: + See section :ref:`urlparse-result-object` for more information on the result + object. + ++ Characters in the :attr:`netloc` attribute that decompose under NFKC ++ normalization (as used by the IDNA encoding) into any of ``/``, ``?``, ++ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is ++ decomposed before parsing, or is not a Unicode string, no error will be ++ raised. ++ + .. versionchanged:: 2.5 + Added attributes to return value. + +@@ -125,6 +131,11 @@ The :mod:`urlparse` module defines the following functions: + Added IPv6 URL parsing capabilities. + + ++ .. versionchanged:: 2.7.17 ++ Characters that affect netloc parsing under NFKC normalization will ++ now raise :exc:`ValueError`. ++ ++ + .. function:: parse_qs(qs[, keep_blank_values[, strict_parsing]]) + + Parse a query string given as a string argument (data of type +@@ -219,11 +230,21 @@ The :mod:`urlparse` module defines the following functions: + See section :ref:`urlparse-result-object` for more information on the result + object. + ++ Characters in the :attr:`netloc` attribute that decompose under NFKC ++ normalization (as used by the IDNA encoding) into any of ``/``, ``?``, ++ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is ++ decomposed before parsing, or is not a Unicode string, no error will be ++ raised. ++ + .. versionadded:: 2.2 + + .. versionchanged:: 2.5 + Added attributes to return value. + ++ .. versionchanged:: 2.7.17 ++ Characters that affect netloc parsing under NFKC normalization will ++ now raise :exc:`ValueError`. ++ + + .. function:: urlunsplit(parts) + +diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py +index 72ebfaa..2717163 100644 +--- a/Lib/test/test_urlparse.py ++++ b/Lib/test/test_urlparse.py +@@ -1,6 +1,8 @@ + #! /usr/bin/env python + + from test import test_support ++import sys ++import unicodedata + import unittest + import urlparse + +@@ -564,6 +566,45 @@ class UrlParseTestCase(unittest.TestCase): + self.assertEqual(urlparse.urlparse("http://www.python.org:80"), + ('http','www.python.org:80','','','','')) + ++ def test_urlsplit_normalization(self): ++ # Certain characters should never occur in the netloc, ++ # including under normalization. 
++ # Ensure that ALL of them are detected and cause an error ++ illegal_chars = u'/:#?@' ++ hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars} ++ denorm_chars = [ ++ c for c in map(unichr, range(128, sys.maxunicode)) ++ if (hex_chars & set(unicodedata.decomposition(c).split())) ++ and c not in illegal_chars ++ ] ++ # Sanity check that we found at least one such character ++ self.assertIn(u'\u2100', denorm_chars) ++ self.assertIn(u'\uFF03', denorm_chars) ++ ++ # bpo-36742: Verify port separators are ignored when they ++ # existed prior to decomposition ++ urlparse.urlsplit(u'http://\u30d5\u309a:80') ++ with self.assertRaises(ValueError): ++ urlparse.urlsplit(u'http://\u30d5\u309a\ufe1380') ++ ++ for scheme in [u"http", u"https", u"ftp"]: ++ for netloc in [u"netloc{}false.netloc", u"n{}user@netloc"]: ++ for c in denorm_chars: ++ url = u"{}://{}/path".format(scheme, netloc.format(c)) ++ if test_support.verbose: ++ print "Checking %r" % url ++ with self.assertRaises(ValueError): ++ urlparse.urlsplit(url) ++ ++ # check error message: invalid netloc must be formated with repr() ++ # to get an ASCII error message ++ with self.assertRaises(ValueError) as cm: ++ urlparse.urlsplit(u'http://example.com\uFF03@bing.com') ++ self.assertEqual(str(cm.exception), ++ "netloc u'example.com\\uff03@bing.com' contains invalid characters " ++ "under NFKC normalization") ++ self.assertIsInstance(cm.exception.args[0], str) ++ + def test_main(): + test_support.run_unittest(UrlParseTestCase) + +diff --git a/Lib/urlparse.py b/Lib/urlparse.py +index 4ce982e..9a1df74 100644 +--- a/Lib/urlparse.py ++++ b/Lib/urlparse.py +@@ -164,6 +164,25 @@ def _splitnetloc(url, start=0): + delim = min(delim, wdelim) # use earliest delim position + return url[start:delim], url[delim:] # return (domain, rest) + ++def _checknetloc(netloc): ++ if not netloc or not isinstance(netloc, unicode): ++ return ++ # looking for characters like \u2100 that expand to 'a/c' ++ # IDNA uses NFKC equivalence, so normalize for this check ++ import unicodedata ++ n = netloc.replace(u'@', u'') # ignore characters already included ++ n = n.replace(u':', u'') # but not the surrounding text ++ n = n.replace(u'#', u'') ++ n = n.replace(u'?', u'') ++ netloc2 = unicodedata.normalize('NFKC', n) ++ if n == netloc2: ++ return ++ for c in '/?#@:': ++ if c in netloc2: ++ raise ValueError("netloc %r contains invalid characters " ++ "under NFKC normalization" ++ % netloc) ++ + def urlsplit(url, scheme='', allow_fragments=True): + """Parse a URL into 5 components: + :///?# +@@ -192,6 +211,7 @@ def urlsplit(url, scheme='', allow_fragments=True): + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) ++ _checknetloc(netloc) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return v +@@ -215,6 +235,7 @@ def urlsplit(url, scheme='', allow_fragments=True): + url, fragment = url.split('#', 1) + if '?' 
in url: + url, query = url.split('?', 1) ++ _checknetloc(netloc) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return v diff --git a/SOURCES/00324-disallow-control-chars-in-http-urls.patch b/SOURCES/00324-disallow-control-chars-in-http-urls.patch new file mode 100644 index 0000000..e12aeb1 --- /dev/null +++ b/SOURCES/00324-disallow-control-chars-in-http-urls.patch @@ -0,0 +1,222 @@ +diff --git a/Lib/httplib.py b/Lib/httplib.py +index da2f346..fc8e895 100644 +--- a/Lib/httplib.py ++++ b/Lib/httplib.py +@@ -247,6 +247,15 @@ _MAXHEADERS = 100 + _is_legal_header_name = re.compile(r'\A[^:\s][^:\r\n]*\Z').match + _is_illegal_header_value = re.compile(r'\n(?![ \t])|\r(?![ \t\n])').search + ++# These characters are not allowed within HTTP URL paths. ++# See https://tools.ietf.org/html/rfc3986#section-3.3 and the ++# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition. ++# Prevents CVE-2019-9740. Includes control characters such as \r\n. ++# Restrict non-ASCII characters above \x7f (0x80-0xff). ++_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f-\xff]') ++# Arguably only these _should_ allowed: ++# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$") ++# We are more lenient for assumed real world compatibility purposes. + + class HTTPMessage(mimetools.Message): + +@@ -926,6 +935,12 @@ class HTTPConnection: + self._method = method + if not url: + url = '/' ++ # Prevent CVE-2019-9740. ++ match = _contains_disallowed_url_pchar_re.search(url) ++ if match: ++ raise InvalidURL("URL can't contain control characters. %r " ++ "(found at least %r)" ++ % (url, match.group())) + hdr = '%s %s %s' % (method, url, self._http_vsn_str) + + self._output(hdr) +diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py +index 3845012..d2da0f8 100644 +--- a/Lib/test/test_urllib.py ++++ b/Lib/test/test_urllib.py +@@ -198,6 +198,31 @@ class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin): + finally: + self.unfakehttp() + ++ def test_url_with_control_char_rejected(self): ++ for char_no in range(0, 0x21) + range(0x7f, 0x100): ++ char = chr(char_no) ++ schemeless_url = "//localhost:7777/test%s/" % char ++ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") ++ try: ++ # urllib quotes the URL so there is no injection. ++ resp = urllib.urlopen("http:" + schemeless_url) ++ self.assertNotIn(char, resp.geturl()) ++ finally: ++ self.unfakehttp() ++ ++ def test_url_with_newline_header_injection_rejected(self): ++ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") ++ host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123" ++ schemeless_url = "//" + host + ":8080/test/?test=a" ++ try: ++ # urllib quotes the URL so there is no injection. ++ resp = urllib.urlopen("http:" + schemeless_url) ++ self.assertNotIn(' ', resp.geturl()) ++ self.assertNotIn('\r', resp.geturl()) ++ self.assertNotIn('\n', resp.geturl()) ++ finally: ++ self.unfakehttp() ++ + def test_read_bogus(self): + # urlopen() should raise IOError for many error codes. 
+ self.fakehttp('''HTTP/1.1 401 Authentication Required +@@ -786,6 +811,35 @@ class Pathname_Tests(unittest.TestCase): + class Utility_Tests(unittest.TestCase): + """Testcase to test the various utility functions in the urllib.""" + ++ def test_splithost(self): ++ splithost = urllib.splithost ++ self.assertEqual(splithost('//www.example.org:80/foo/bar/baz.html'), ++ ('www.example.org:80', '/foo/bar/baz.html')) ++ self.assertEqual(splithost('//www.example.org:80'), ++ ('www.example.org:80', '')) ++ self.assertEqual(splithost('/foo/bar/baz.html'), ++ (None, '/foo/bar/baz.html')) ++ ++ # bpo-30500: # starts a fragment. ++ self.assertEqual(splithost('//127.0.0.1#@host.com'), ++ ('127.0.0.1', '/#@host.com')) ++ self.assertEqual(splithost('//127.0.0.1#@host.com:80'), ++ ('127.0.0.1', '/#@host.com:80')) ++ self.assertEqual(splithost('//127.0.0.1:80#@host.com'), ++ ('127.0.0.1:80', '/#@host.com')) ++ ++ # Empty host is returned as empty string. ++ self.assertEqual(splithost("///file"), ++ ('', '/file')) ++ ++ # Trailing semicolon, question mark and hash symbol are kept. ++ self.assertEqual(splithost("//example.net/file;"), ++ ('example.net', '/file;')) ++ self.assertEqual(splithost("//example.net/file?"), ++ ('example.net', '/file?')) ++ self.assertEqual(splithost("//example.net/file#"), ++ ('example.net', '/file#')) ++ + def test_splitpasswd(self): + """Some of the password examples are not sensible, but it is added to + confirming to RFC2617 and addressing issue4675. +diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py +index c317b8d..63fefd6 100644 +--- a/Lib/test/test_urllib2.py ++++ b/Lib/test/test_urllib2.py +@@ -7,12 +7,16 @@ import StringIO + + import urllib2 + from urllib2 import Request, OpenerDirector ++import httplib + + try: + import ssl + except ImportError: + ssl = None + ++from test.test_urllib import FakeHTTPMixin ++ ++ + # XXX + # Request + # CacheFTPHandler (hard to write) +@@ -1243,7 +1247,7 @@ class HandlerTests(unittest.TestCase): + self.assertEqual(len(http_handler.requests), 1) + self.assertFalse(http_handler.requests[0].has_header(auth_header)) + +-class MiscTests(unittest.TestCase): ++class MiscTests(unittest.TestCase, FakeHTTPMixin): + + def test_build_opener(self): + class MyHTTPHandler(urllib2.HTTPHandler): pass +@@ -1289,6 +1293,53 @@ class MiscTests(unittest.TestCase): + else: + self.assertTrue(False) + ++ @unittest.skipUnless(ssl, "ssl module required") ++ def test_url_with_control_char_rejected(self): ++ for char_no in range(0, 0x21) + range(0x7f, 0x100): ++ char = chr(char_no) ++ schemeless_url = "//localhost:7777/test%s/" % char ++ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") ++ try: ++ # We explicitly test urllib.request.urlopen() instead of the top ++ # level 'def urlopen()' function defined in this... (quite ugly) ++ # test suite. They use different url opening codepaths. Plain ++ # urlopen uses FancyURLOpener which goes via a codepath that ++ # calls urllib.parse.quote() on the URL which makes all of the ++ # above attempts at injection within the url _path_ safe. 
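For context on what these urllib/urllib2 tests guard against: without the putrequest() check added in httplib above, a URL containing CR/LF is spliced verbatim into the request line, so a caller-controlled string can smuggle extra headers. A hedged sketch of that failure mode with a hypothetical url value -- nothing here opens a socket, and the string shown is only roughly what an unpatched putrequest() would put on the wire:

url = "/test/?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123"
request_line = "GET %s HTTP/1.1\r\n" % url  # request line built without the new check
print(request_line)
# GET /test/?a=1 HTTP/1.1
# X-injected: header
# TEST: 123 HTTP/1.1
# -> the second line now parses as a genuine header on the server side.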
++ escaped_char_repr = repr(char).replace('\\', r'\\') ++ InvalidURL = httplib.InvalidURL ++ with self.assertRaisesRegexp( ++ InvalidURL, "contain control.*" + escaped_char_repr): ++ urllib2.urlopen("http:" + schemeless_url) ++ with self.assertRaisesRegexp( ++ InvalidURL, "contain control.*" + escaped_char_repr): ++ urllib2.urlopen("https:" + schemeless_url) ++ finally: ++ self.unfakehttp() ++ ++ @unittest.skipUnless(ssl, "ssl module required") ++ def test_url_with_newline_header_injection_rejected(self): ++ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") ++ host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123" ++ schemeless_url = "//" + host + ":8080/test/?test=a" ++ try: ++ # We explicitly test urllib2.urlopen() instead of the top ++ # level 'def urlopen()' function defined in this... (quite ugly) ++ # test suite. They use different url opening codepaths. Plain ++ # urlopen uses FancyURLOpener which goes via a codepath that ++ # calls urllib.parse.quote() on the URL which makes all of the ++ # above attempts at injection within the url _path_ safe. ++ InvalidURL = httplib.InvalidURL ++ with self.assertRaisesRegexp( ++ InvalidURL, r"contain control.*\\r.*(found at least . .)"): ++ urllib2.urlopen("http:" + schemeless_url) ++ with self.assertRaisesRegexp(InvalidURL, r"contain control.*\\n"): ++ urllib2.urlopen("https:" + schemeless_url) ++ finally: ++ self.unfakehttp() ++ ++ ++ + class RequestTests(unittest.TestCase): + + def setUp(self): +diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py +index 79e862a..347b494 100644 +--- a/Lib/test/test_xmlrpc.py ++++ b/Lib/test/test_xmlrpc.py +@@ -592,7 +592,13 @@ class SimpleServerTestCase(BaseServerTestCase): + def test_partial_post(self): + # Check that a partial POST doesn't make the server loop: issue #14001. 
+ conn = httplib.HTTPConnection(ADDR, PORT) +- conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye') ++ conn.send('POST /RPC2 HTTP/1.0\r\n' ++ 'Content-Length: 100\r\n\r\n' ++ 'bye HTTP/1.1\r\n' ++ 'Host: %s:%s\r\n' ++ 'Accept-Encoding: identity\r\n' ++ 'Content-Length: 0\r\n\r\n' ++ % (ADDR, PORT)) + conn.close() + + class MultiPathServerTestCase(BaseServerTestCase): +diff --git a/Lib/urllib.py b/Lib/urllib.py +index 9b31df1..2201e3e 100644 +--- a/Lib/urllib.py ++++ b/Lib/urllib.py +@@ -1079,8 +1079,7 @@ def splithost(url): + """splithost('//host[:port]/path') --> 'host[:port]', '/path'.""" + global _hostprog + if _hostprog is None: +- import re +- _hostprog = re.compile('^//([^/?]*)(.*)$') ++ _hostprog = re.compile('//([^/#?]*)(.*)', re.DOTALL) + + match = _hostprog.match(url) + if match: diff --git a/SOURCES/00325-CVE-2019-9948.patch b/SOURCES/00325-CVE-2019-9948.patch new file mode 100644 index 0000000..890bf71 --- /dev/null +++ b/SOURCES/00325-CVE-2019-9948.patch @@ -0,0 +1,37 @@ +diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py +index d2da0f8..7813b9f 100644 +--- a/Lib/test/test_urllib.py ++++ b/Lib/test/test_urllib.py +@@ -872,6 +872,17 @@ class URLopener_Tests(unittest.TestCase): + "spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"), + "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/") + ++ def test_local_file_open(self): ++ # bpo-35907, CVE-2019-9948: urllib must reject local_file:// scheme ++ class DummyURLopener(urllib.URLopener): ++ def open_local_file(self, url): ++ return url ++ for url in ('local_file://example', 'local-file://example'): ++ self.assertRaises(IOError, urllib.urlopen, url) ++ self.assertRaises(IOError, urllib.URLopener().open, url) ++ self.assertRaises(IOError, urllib.URLopener().retrieve, url) ++ self.assertRaises(IOError, DummyURLopener().open, url) ++ self.assertRaises(IOError, DummyURLopener().retrieve, url) + + # Just commented them out. + # Can't really tell why keep failing in windows and sparc. +diff --git a/Lib/urllib.py b/Lib/urllib.py +index 2201e3e..71e3637 100644 +--- a/Lib/urllib.py ++++ b/Lib/urllib.py +@@ -198,7 +198,9 @@ class URLopener: + name = 'open_' + urltype + self.type = urltype + name = name.replace('-', '_') +- if not hasattr(self, name): ++ ++ # bpo-35907: disallow the file reading with the type not allowed ++ if not hasattr(self, name) or name == 'open_local_file': + if proxy: + return self.open_unknown_proxy(proxy, fullurl, data) + else: diff --git a/SOURCES/00330-CVE-2018-20852.patch b/SOURCES/00330-CVE-2018-20852.patch new file mode 100644 index 0000000..97ff719 --- /dev/null +++ b/SOURCES/00330-CVE-2018-20852.patch @@ -0,0 +1,93 @@ +diff --git a/Lib/cookielib.py b/Lib/cookielib.py +index f9c8d2f..9144e1f 100644 +--- a/Lib/cookielib.py ++++ b/Lib/cookielib.py +@@ -1123,6 +1123,11 @@ class DefaultCookiePolicy(CookiePolicy): + req_host, erhn = eff_request_host(request) + domain = cookie.domain + ++ if domain and not domain.startswith("."): ++ dotdomain = "." 
+ domain ++ else: ++ dotdomain = domain ++ + # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't + if (cookie.version == 0 and + (self.strict_ns_domain & self.DomainStrictNonDomain) and +@@ -1135,7 +1140,7 @@ class DefaultCookiePolicy(CookiePolicy): + _debug(" effective request-host name %s does not domain-match " + "RFC 2965 cookie domain %s", erhn, domain) + return False +- if cookie.version == 0 and not ("."+erhn).endswith(domain): ++ if cookie.version == 0 and not ("."+erhn).endswith(dotdomain): + _debug(" request-host %s does not match Netscape cookie domain " + "%s", req_host, domain) + return False +@@ -1149,7 +1154,11 @@ class DefaultCookiePolicy(CookiePolicy): + req_host = "."+req_host + if not erhn.startswith("."): + erhn = "."+erhn +- if not (req_host.endswith(domain) or erhn.endswith(domain)): ++ if domain and not domain.startswith("."): ++ dotdomain = "." + domain ++ else: ++ dotdomain = domain ++ if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)): + #_debug(" request domain %s does not match cookie domain %s", + # req_host, domain) + return False +diff --git a/Lib/test/test_cookielib.py b/Lib/test/test_cookielib.py +index dd0ad32..b4f5ea0 100644 +--- a/Lib/test/test_cookielib.py ++++ b/Lib/test/test_cookielib.py +@@ -353,6 +353,7 @@ class CookieTests(TestCase): + ("http://foo.bar.com/", ".foo.bar.com", True), + ("http://foo.bar.com/", "foo.bar.com", True), + ("http://foo.bar.com/", ".bar.com", True), ++ ("http://foo.bar.com/", "bar.com", True), + ("http://foo.bar.com/", "com", True), + ("http://foo.com/", "rhubarb.foo.com", False), + ("http://foo.com/", ".foo.com", True), +@@ -363,6 +364,8 @@ class CookieTests(TestCase): + ("http://foo/", "foo", True), + ("http://foo/", "foo.local", True), + ("http://foo/", ".local", True), ++ ("http://barfoo.com", ".foo.com", False), ++ ("http://barfoo.com", "foo.com", False), + ]: + request = urllib2.Request(url) + r = pol.domain_return_ok(domain, request) +@@ -910,6 +913,33 @@ class CookieTests(TestCase): + c.add_cookie_header(req) + self.assertTrue(not req.has_header("Cookie")) + ++ c.clear() ++ ++ pol.set_blocked_domains([]) ++ req = Request("http://acme.com/") ++ res = FakeResponse(headers, "http://acme.com/") ++ cookies = c.make_cookies(res, req) ++ c.extract_cookies(res, req) ++ self.assertEqual(len(c), 1) ++ ++ req = Request("http://acme.com/") ++ c.add_cookie_header(req) ++ self.assertTrue(req.has_header("Cookie")) ++ ++ req = Request("http://badacme.com/") ++ c.add_cookie_header(req) ++ self.assertFalse(pol.return_ok(cookies[0], req)) ++ self.assertFalse(req.has_header("Cookie")) ++ ++ p = pol.set_blocked_domains(["acme.com"]) ++ req = Request("http://acme.com/") ++ c.add_cookie_header(req) ++ self.assertFalse(req.has_header("Cookie")) ++ ++ req = Request("http://badacme.com/") ++ c.add_cookie_header(req) ++ self.assertFalse(req.has_header("Cookie")) ++ + def test_secure(self): + from cookielib import CookieJar, DefaultCookiePolicy + diff --git a/SOURCES/00332-CVE-2019-16056.patch b/SOURCES/00332-CVE-2019-16056.patch new file mode 100644 index 0000000..aff257f --- /dev/null +++ b/SOURCES/00332-CVE-2019-16056.patch @@ -0,0 +1,54 @@ +diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py +index 690db2c..dc49d2e 100644 +--- a/Lib/email/_parseaddr.py ++++ b/Lib/email/_parseaddr.py +@@ -336,7 +336,12 @@ class AddrlistClass: + aslist.append('@') + self.pos += 1 + self.gotonext() +- return EMPTYSTRING.join(aslist) + self.getdomain() ++ domain = self.getdomain() ++ if not domain: ++ # Invalid 
domain, return an empty address instead of returning a ++ # local part to denote failed parsing. ++ return EMPTYSTRING ++ return EMPTYSTRING.join(aslist) + domain + + def getdomain(self): + """Get the complete domain name from an address.""" +@@ -351,6 +356,10 @@ class AddrlistClass: + elif self.field[self.pos] == '.': + self.pos += 1 + sdlist.append('.') ++ elif self.field[self.pos] == '@': ++ # bpo-34155: Don't parse domains with two `@` like ++ # `a@malicious.org@important.com`. ++ return EMPTYSTRING + elif self.field[self.pos] in self.atomends: + break + else: +diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py +index b32da9d..1739033 100644 +--- a/Lib/email/test/test_email.py ++++ b/Lib/email/test/test_email.py +@@ -2308,6 +2308,20 @@ class TestMiscellaneous(TestEmailBase): + self.assertEqual(Utils.parseaddr('<>'), ('', '')) + self.assertEqual(Utils.formataddr(Utils.parseaddr('<>')), '') + ++ def test_parseaddr_multiple_domains(self): ++ self.assertEqual( ++ Utils.parseaddr('a@b@c'), ++ ('', '') ++ ) ++ self.assertEqual( ++ Utils.parseaddr('a@b.c@c'), ++ ('', '') ++ ) ++ self.assertEqual( ++ Utils.parseaddr('a@172.17.0.1@c'), ++ ('', '') ++ ) ++ + def test_noquote_dump(self): + self.assertEqual( + Utils.formataddr(('A Silly Person', 'person@dom.ain')), diff --git a/SOURCES/05000-autotool-intermediates.patch b/SOURCES/05000-autotool-intermediates.patch new file mode 100644 index 0000000..cfedc25 --- /dev/null +++ b/SOURCES/05000-autotool-intermediates.patch @@ -0,0 +1,216 @@ +diff -up ./configure.autotool-intermediates ./configure +--- ./configure.autotool-intermediates 2013-04-09 11:24:01.024185796 +0200 ++++ ./configure 2013-04-09 11:24:01.780183954 +0200 +@@ -639,6 +639,8 @@ TRUE + MACHDEP_OBJS + DYNLOADFILE + DLINCLDIR ++DTRACEHDRS ++DTRACEOBJS + THREADOBJ + LDLAST + USE_THREAD_MODULE +@@ -659,6 +661,8 @@ OTHER_LIBTOOL_OPT + UNIVERSAL_ARCH_FLAGS + BASECFLAGS + OPT ++DEBUG_SUFFIX ++DEBUG_EXT + LN + MKDIR_P + INSTALL_DATA +@@ -795,8 +799,11 @@ with_pth + enable_ipv6 + with_doc_strings + with_tsc ++with_count_allocs ++with_call_profile + with_pymalloc + with_valgrind ++with_dtrace + with_wctype_functions + with_fpectl + with_libm +@@ -1472,8 +1479,11 @@ Optional Packages: + --with-pth use GNU pth threading libraries + --with(out)-doc-strings disable/enable documentation strings + --with(out)-tsc enable/disable timestamp counter profile ++ --with(out)count-allocs enable/disable per-type instance accounting ++ --with(out)-call-profile enable/disable statistics on function call invocation + --with(out)-pymalloc disable/enable specialized mallocs + --with-valgrind Enable Valgrind support ++ --with(out)-dtrace disable/enable dtrace support + --with-wctype-functions use wctype.h functions + --with-fpectl enable SIGFPE catching + --with-libm=STRING math library +@@ -5171,7 +5181,7 @@ esac + $as_echo_n "checking LIBRARY... " >&6; } + if test -z "$LIBRARY" + then +- LIBRARY='libpython$(VERSION).a' ++ LIBRARY='libpython$(VERSION)$(DEBUG_EXT).a' + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBRARY" >&5 + $as_echo "$LIBRARY" >&6; } +@@ -5343,8 +5353,8 @@ $as_echo "#define Py_ENABLE_SHARED 1" >> + INSTSONAME="$LDLIBRARY".$SOVERSION + ;; + Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*) +- LDLIBRARY='libpython$(VERSION).so' +- BLDLIBRARY='-L. -lpython$(VERSION)' ++ LDLIBRARY='libpython$(VERSION)$(DEBUG_EXT).so' ++ BLDLIBRARY='-L. 
-lpython$(VERSION)$(DEBUG_EXT)' + RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH} + case $ac_sys_system in + FreeBSD*) +@@ -5367,7 +5377,7 @@ $as_echo "#define Py_ENABLE_SHARED 1" >> + ;; + OSF*) + LDLIBRARY='libpython$(VERSION).so' +- BLDLIBRARY='-rpath $(LIBDIR) -L. -lpython$(VERSION)' ++ BLDLIBRARY='-L. -lpython$(VERSION)' + RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH} + ;; + atheos*) +@@ -5894,6 +5904,14 @@ $as_echo "no" >&6; } + fi + + ++if test "$Py_DEBUG" = 'true' ++then ++ DEBUG_EXT=_d ++ DEBUG_SUFFIX=-debug ++fi ++ ++ ++ + # XXX Shouldn't the code above that fiddles with BASECFLAGS and OPT be + # merged with this chunk of code? + +@@ -9958,6 +9976,50 @@ $as_echo "no" >&6; } + fi + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-count-allocs" >&5 ++$as_echo_n "checking for --with-count-allocs... " >&6; } ++ ++# Check whether --with-count-allocs was given. ++if test "${with_count_allocs+set}" = set; then : ++ withval=$with_count_allocs; ++if test "$withval" != no ++then ++ ++$as_echo "#define COUNT_ALLOCS 1" >>confdefs.h ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-call-profile" >&5 ++$as_echo_n "checking for --with-call-profile... " >&6; } ++ ++# Check whether --with-call-profile was given. ++if test "${with_call_profile+set}" = set; then : ++ withval=$with_call_profile; ++if test "$withval" != no ++then ++ ++$as_echo "#define CALL_PROFILE 1" >>confdefs.h ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ + # Check for Python-specific malloc support + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-pymalloc" >&5 + $as_echo_n "checking for --with-pymalloc... " >&6; } +@@ -10007,6 +10069,46 @@ fi + + fi + ++# Check for dtrace support ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-dtrace" >&5 ++$as_echo_n "checking for --with-dtrace... " >&6; } ++ ++# Check whether --with-dtrace was given. ++if test "${with_dtrace+set}" = set; then : ++ withval=$with_dtrace; ++fi ++ ++ ++if test ! -z "$with_dtrace" ++then ++ if dtrace -G -o /dev/null -s $srcdir/Include/pydtrace.d 2>/dev/null ++ then ++ ++$as_echo "#define WITH_DTRACE 1" >>confdefs.h ++ ++ with_dtrace="Sun" ++ DTRACEOBJS="Python/dtrace.o" ++ DTRADEHDRS="" ++ elif dtrace -h -o /dev/null -s $srcdir/Include/pydtrace.d ++ then ++ ++$as_echo "#define WITH_DTRACE 1" >>confdefs.h ++ ++ with_dtrace="Apple" ++ DTRACEOBJS="" ++ DTRADEHDRS="pydtrace.h" ++ else ++ with_dtrace="no" ++ fi ++else ++ with_dtrace="no" ++fi ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_dtrace" >&5 ++$as_echo "$with_dtrace" >&6; } ++ ++ ++ + # Check for --with-wctype-functions + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-wctype-functions" >&5 + $as_echo_n "checking for --with-wctype-functions... " >&6; } +diff -up ./pyconfig.h.in.autotool-intermediates ./pyconfig.h.in +--- ./pyconfig.h.in.autotool-intermediates 2013-04-09 11:24:01.020185806 +0200 ++++ ./pyconfig.h.in 2013-04-09 11:24:02.088183204 +0200 +@@ -18,6 +18,12 @@ + /* Define this if you have BeOS threads. 
*/ + #undef BEOS_THREADS + ++/* Define to keep records on function call invocation */ ++#undef CALL_PROFILE ++ ++/* Define to keep records of the number of instances of each type */ ++#undef COUNT_ALLOCS ++ + /* Define if you have the Mach cthreads package */ + #undef C_THREADS + +@@ -1119,12 +1125,6 @@ + /* Define to profile with the Pentium timestamp counter */ + #undef WITH_TSC + +-/* Define to keep records of the number of instances of each type */ +-#undef COUNT_ALLOCS +- +-/* Define to keep records on function call invocation */ +-#undef CALL_PROFILE +- + /* Define if you want pymalloc to be disabled when running under valgrind */ + #undef WITH_VALGRIND + diff --git a/SOURCES/cert-verification.cfg b/SOURCES/cert-verification.cfg new file mode 100755 index 0000000..3583e46 --- /dev/null +++ b/SOURCES/cert-verification.cfg @@ -0,0 +1,8 @@ +# Possible values are: +# 'enable' to ensure HTTPS certificate verification is enabled by default +# 'disable' to ensure HTTPS certificate verification is disabled by default +# 'platform_default' to delegate the decision to the redistributor providing this particular Python version + +# For more info refer to https://www.python.org/dev/peps/pep-0493/ +[https] +verify=platform_default diff --git a/SOURCES/libpython.stp b/SOURCES/libpython.stp new file mode 100644 index 0000000..56cf2fb --- /dev/null +++ b/SOURCES/libpython.stp @@ -0,0 +1,17 @@ +/* Systemtap tapset to make it easier to trace Python */ + +/* + Define python.function.entry/return: +*/ +probe python.function.entry = process("python").library("LIBRARY_PATH").mark("function__entry") +{ + filename = user_string($arg1); + funcname = user_string($arg2); + lineno = $arg3; +} +probe python.function.return = process("python").library("LIBRARY_PATH").mark("function__return") +{ + filename = user_string($arg1); + funcname = user_string($arg2); + lineno = $arg3; +} diff --git a/SOURCES/pyfuntop.stp b/SOURCES/pyfuntop.stp new file mode 100644 index 0000000..f235a23 --- /dev/null +++ b/SOURCES/pyfuntop.stp @@ -0,0 +1,21 @@ +#!/usr/bin/stap + +global fn_calls; + +probe python.function.entry +{ + fn_calls[pid(), filename, funcname, lineno] += 1; +} + +probe timer.ms(1000) { + printf("\033[2J\033[1;1H") /* clear screen */ + printf("%6s %80s %6s %30s %6s\n", + "PID", "FILENAME", "LINE", "FUNCTION", "CALLS") + foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) { + printf("%6d %80s %6d %30s %6d\n", + pid, filename, lineno, funcname, + fn_calls[pid, filename, funcname, lineno]); + } + + delete fn_calls; +} diff --git a/SOURCES/pynche b/SOURCES/pynche new file mode 100644 index 0000000..368d740 --- /dev/null +++ b/SOURCES/pynche @@ -0,0 +1,2 @@ +#!/bin/bash +exec `python -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific = True))"`/pynche/pynche diff --git a/SOURCES/python-2.5-cflags.patch b/SOURCES/python-2.5-cflags.patch new file mode 100644 index 0000000..32243bf --- /dev/null +++ b/SOURCES/python-2.5-cflags.patch @@ -0,0 +1,11 @@ +--- Python-2.5c1/Makefile.pre.in.cflags 2006-08-18 11:05:40.000000000 -0400 ++++ Python-2.5c1/Makefile.pre.in 2006-08-18 11:09:26.000000000 -0400 +@@ -334,7 +334,7 @@ + + # Build the interpreter + $(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY) +- $(LINKCC) $(LDFLAGS) $(LINKFORSHARED) -o $@ \ ++ $(LINKCC) $(CFLAGS) $(LDFLAGS) $(LINKFORSHARED) -o $@ \ + Modules/python.o \ + $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) + diff --git a/SOURCES/python-2.5.1-plural-fix.patch b/SOURCES/python-2.5.1-plural-fix.patch 
new file mode 100644 index 0000000..040adaf --- /dev/null +++ b/SOURCES/python-2.5.1-plural-fix.patch @@ -0,0 +1,12 @@ +diff -up Python-2.5.1/Lib/gettext.py.plural Python-2.5.1/Lib/gettext.py +--- Python-2.5.1/Lib/gettext.py.plural 2007-09-10 11:38:57.000000000 -0400 ++++ Python-2.5.1/Lib/gettext.py 2007-09-10 11:39:00.000000000 -0400 +@@ -299,6 +299,8 @@ class GNUTranslations(NullTranslations): + item = item.strip() + if not item: + continue ++ if item.startswith("#"): ++ continue + if ':' in item: + k, v = item.split(':', 1) + k = k.strip().lower() diff --git a/SOURCES/python-2.5.1-sqlite-encoding.patch b/SOURCES/python-2.5.1-sqlite-encoding.patch new file mode 100644 index 0000000..ff2a3f8 --- /dev/null +++ b/SOURCES/python-2.5.1-sqlite-encoding.patch @@ -0,0 +1,24 @@ +diff -up Python-2.5.1/Lib/sqlite3/dbapi2.py.encoding Python-2.5.1/Lib/sqlite3/dbapi2.py +--- Python-2.5.1/Lib/sqlite3/dbapi2.py.encoding 2007-09-14 10:41:50.000000000 -0400 ++++ Python-2.5.1/Lib/sqlite3/dbapi2.py 2007-09-14 10:42:00.000000000 -0400 +@@ -1,7 +1,6 @@ +-# -*- coding: iso-8859-1 -*- + # pysqlite2/dbapi2.py: the DB-API 2.0 interface + # +-# Copyright (C) 2004-2005 Gerhard H�ring ++# Copyright (C) 2004-2005 Gerhard Haering + # + # This file is part of pysqlite. + # +diff -up Python-2.5.1/Lib/sqlite3/__init__.py.encoding Python-2.5.1/Lib/sqlite3/__init__.py +--- Python-2.5.1/Lib/sqlite3/__init__.py.encoding 2007-09-14 10:41:47.000000000 -0400 ++++ Python-2.5.1/Lib/sqlite3/__init__.py 2007-09-14 10:42:06.000000000 -0400 +@@ -1,7 +1,6 @@ +-#-*- coding: ISO-8859-1 -*- + # pysqlite2/__init__.py: the pysqlite2 package. + # +-# Copyright (C) 2005 Gerhard H�ring ++# Copyright (C) 2005 Gerhard Haering + # + # This file is part of pysqlite. + # diff --git a/SOURCES/python-2.6-rpath.patch b/SOURCES/python-2.6-rpath.patch new file mode 100644 index 0000000..43e3ec4 --- /dev/null +++ b/SOURCES/python-2.6-rpath.patch @@ -0,0 +1,12 @@ +diff -up Python-2.6/configure.ac.rpath Python-2.6/configure.ac +--- Python-2.6/configure.ac.rpath 2008-11-24 02:51:06.000000000 -0500 ++++ Python-2.6/configure.ac 2008-11-24 02:51:21.000000000 -0500 +@@ -729,7 +729,7 @@ if test $enable_shared = "yes"; then + ;; + OSF*) + LDLIBRARY='libpython$(VERSION).so' +- BLDLIBRARY='-rpath $(LIBDIR) -L. -lpython$(VERSION)' ++ BLDLIBRARY='-L. 
-lpython$(VERSION)' + RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH} + ;; + atheos*) diff --git a/SOURCES/python-2.6.4-distutils-rpath.patch b/SOURCES/python-2.6.4-distutils-rpath.patch new file mode 100644 index 0000000..f156507 --- /dev/null +++ b/SOURCES/python-2.6.4-distutils-rpath.patch @@ -0,0 +1,20 @@ +diff -up Python-2.6.4/Lib/distutils/unixccompiler.py.distutils-rpath Python-2.6.4/Lib/distutils/unixccompiler.py +--- Python-2.6.4/Lib/distutils/unixccompiler.py.distutils-rpath 2009-09-09 04:34:06.000000000 -0400 ++++ Python-2.6.4/Lib/distutils/unixccompiler.py 2010-03-15 21:33:25.000000000 -0400 +@@ -142,6 +142,16 @@ class UnixCCompiler(CCompiler): + if sys.platform == "cygwin": + exe_extension = ".exe" + ++ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): ++ """Remove standard library path from rpath""" ++ libraries, library_dirs, runtime_library_dirs = \ ++ CCompiler._fix_lib_args(self, libraries, library_dirs, ++ runtime_library_dirs) ++ libdir = sysconfig.get_config_var('LIBDIR') ++ if runtime_library_dirs and (libdir in runtime_library_dirs): ++ runtime_library_dirs.remove(libdir) ++ return libraries, library_dirs, runtime_library_dirs ++ + def preprocess(self, source, + output_file=None, macros=None, include_dirs=None, + extra_preargs=None, extra_postargs=None): diff --git a/SOURCES/python-2.7-lib64-sysconfig.patch b/SOURCES/python-2.7-lib64-sysconfig.patch new file mode 100644 index 0000000..0cef361 --- /dev/null +++ b/SOURCES/python-2.7-lib64-sysconfig.patch @@ -0,0 +1,44 @@ +diff -up Python-2.7/Lib/sysconfig.py.lib64-sysconfig Python-2.7/Lib/sysconfig.py +--- Python-2.7/Lib/sysconfig.py.lib64-sysconfig 2010-07-08 14:18:41.386898476 -0400 ++++ Python-2.7/Lib/sysconfig.py 2010-07-08 14:22:02.837896461 -0400 +@@ -7,20 +7,20 @@ from os.path import pardir, realpath + + _INSTALL_SCHEMES = { + 'posix_prefix': { +- 'stdlib': '{base}/lib/python{py_version_short}', +- 'platstdlib': '{platbase}/lib/python{py_version_short}', ++ 'stdlib': '{base}/lib64/python{py_version_short}', ++ 'platstdlib': '{platbase}/lib64/python{py_version_short}', + 'purelib': '{base}/lib/python{py_version_short}/site-packages', +- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages', + 'include': '{base}/include/python{py_version_short}', + 'platinclude': '{platbase}/include/python{py_version_short}', + 'scripts': '{base}/bin', + 'data': '{base}', + }, + 'posix_home': { +- 'stdlib': '{base}/lib/python', +- 'platstdlib': '{base}/lib/python', ++ 'stdlib': '{base}/lib64/python', ++ 'platstdlib': '{base}/lib64/python', + 'purelib': '{base}/lib/python', +- 'platlib': '{base}/lib/python', ++ 'platlib': '{base}/lib64/python', + 'include': '{base}/include/python', + 'platinclude': '{base}/include/python', + 'scripts': '{base}/bin', +@@ -65,10 +65,10 @@ _INSTALL_SCHEMES = { + 'data' : '{userbase}', + }, + 'posix_user': { +- 'stdlib': '{userbase}/lib/python{py_version_short}', +- 'platstdlib': '{userbase}/lib/python{py_version_short}', ++ 'stdlib': '{userbase}/lib64/python{py_version_short}', ++ 'platstdlib': '{userbase}/lib64/python{py_version_short}', + 'purelib': '{userbase}/lib/python{py_version_short}/site-packages', +- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages', + 'include': '{userbase}/include/python{py_version_short}', + 'scripts': '{userbase}/bin', + 'data' : '{userbase}', diff --git 
a/SOURCES/python-2.7.1-config.patch b/SOURCES/python-2.7.1-config.patch new file mode 100644 index 0000000..6976d5c --- /dev/null +++ b/SOURCES/python-2.7.1-config.patch @@ -0,0 +1,283 @@ +--- Python-2.7.4/Modules/Setup.dist.rhconfig 2013-04-06 16:02:34.000000000 +0200 ++++ Python-2.7.4/Modules/Setup.dist 2013-04-08 10:05:16.369985654 +0200 +@@ -153,7 +153,7 @@ GLHACK=-Dclear=__GLclear + # modules are to be built as shared libraries (see above for more + # detail; also note that *static* reverses this effect): + +-#*shared* ++*shared* + + # GNU readline. Unlike previous Python incarnations, GNU readline is + # now incorporated in an optional module, configured in the Setup file +@@ -163,77 +163,77 @@ GLHACK=-Dclear=__GLclear + # it, depending on your system -- see the GNU readline instructions. + # It's okay for this to be a shared library, too. + +-#readline readline.c -lreadline -ltermcap ++readline readline.c -lreadline -ltermcap + + + # Modules that should always be present (non UNIX dependent): + +-#array arraymodule.c # array objects +-#cmath cmathmodule.c _math.c # -lm # complex math library functions +-#math mathmodule.c _math.c # -lm # math library functions, e.g. sin() +-#_struct _struct.c # binary structure packing/unpacking +-#time timemodule.c # -lm # time operations and variables +-#operator operator.c # operator.add() and similar goodies +-#_testcapi _testcapimodule.c # Python C API test module +-#_random _randommodule.c # Random number generator +-#_collections _collectionsmodule.c # Container types ++array arraymodule.c # array objects ++cmath cmathmodule.c _math.c # -lm # complex math library functions ++math mathmodule.c _math.c # -lm # math library functions, e.g. sin() ++_struct _struct.c # binary structure packing/unpacking ++time timemodule.c # -lm # time operations and variables ++operator operator.c # operator.add() and similar goodies ++_testcapi _testcapimodule.c # Python C API test module ++_random _randommodule.c # Random number generator ++_collections _collectionsmodule.c # Container types + #_heapq _heapqmodule.c # Heapq type +-#itertools itertoolsmodule.c # Functions creating iterators for efficient looping +-#strop stropmodule.c # String manipulations +-#_functools _functoolsmodule.c # Tools for working with functions and callable objects ++itertools itertoolsmodule.c # Functions creating iterators for efficient looping ++strop stropmodule.c # String manipulations ++_functools _functoolsmodule.c # Tools for working with functions and callable objects + #_elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator + #_pickle _pickle.c # pickle accelerator + #datetime datetimemodule.c # date/time type +-#_bisect _bisectmodule.c # Bisection algorithms ++_bisect _bisectmodule.c # Bisection algorithms + +-#unicodedata unicodedata.c # static Unicode character database ++unicodedata unicodedata.c # static Unicode character database + + # access to ISO C locale support +-#_locale _localemodule.c # -lintl ++_locale _localemodule.c # -lintl + + # Standard I/O baseline + #_io -I$(srcdir)/Modules/_io _io/bufferedio.c _io/bytesio.c _io/fileio.c _io/iobase.c _io/_iomodule.c _io/stringio.c _io/textio.c + + + # Modules with some UNIX dependencies -- on by default: + # (If you have a really backward UNIX, select and socket may not be + # supported...) 
+ +-#fcntl fcntlmodule.c # fcntl(2) and ioctl(2) +-#spwd spwdmodule.c # spwd(3) +-#grp grpmodule.c # grp(3) +-#select selectmodule.c # select(2); not on ancient System V ++fcntl fcntlmodule.c # fcntl(2) and ioctl(2) ++spwd spwdmodule.c # spwd(3) ++grp grpmodule.c # grp(3) ++select selectmodule.c # select(2); not on ancient System V + + # Memory-mapped files (also works on Win32). +-#mmap mmapmodule.c ++mmap mmapmodule.c + + # CSV file helper +-#_csv _csv.c ++_csv _csv.c + + # Socket module helper for socket(2) +-#_socket socketmodule.c timemodule.c ++_socket socketmodule.c timemodule.c + + # Socket module helper for SSL support; you must comment out the other + # socket line above, and possibly edit the SSL variable: + #SSL=/usr/local/ssl +-#_ssl _ssl.c \ +-# -DUSE_SSL -I$(SSL)/include -I$(SSL)/include/openssl \ +-# -L$(SSL)/lib -lssl -lcrypto ++_ssl _ssl.c \ ++ -DUSE_SSL -I$(SSL)/include -I$(SSL)/include/openssl \ ++ -L$(SSL)/lib -lssl -lcrypto + + # The crypt module is now disabled by default because it breaks builds + # on many systems (where -lcrypt is needed), e.g. Linux (I believe). + # + # First, look at Setup.config; configure may have set this for you. + +-#crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems ++crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems + + + # Some more UNIX dependent modules -- off by default, since these + # are not supported by all UNIX systems: + +-#nis nismodule.c -lnsl # Sun yellow pages -- not everywhere +-#termios termios.c # Steen Lumholt's termios module +-#resource resource.c # Jeremy Hylton's rlimit interface ++nis nismodule.c -lnsl # Sun yellow pages -- not everywhere ++termios termios.c # Steen Lumholt's termios module ++resource resource.c # Jeremy Hylton's rlimit interface + + + # Multimedia modules -- off by default. +@@ -238,8 +238,8 @@ GLHACK=-Dclear=__GLclear + # #993173 says audioop works on 64-bit platforms, though. + # These represent audio samples or images as strings: + +-#audioop audioop.c # Operations on audio samples +-#imageop imageop.c # Operations on images ++audioop audioop.c # Operations on audio samples ++imageop imageop.c # Operations on images + + + # Note that the _md5 and _sha modules are normally only built if the +@@ -249,14 +249,14 @@ GLHACK=-Dclear=__GLclear + # Message-Digest Algorithm, described in RFC 1321. The necessary files + # md5.c and md5.h are included here. + +-#_md5 md5module.c md5.c ++_md5 md5module.c md5.c + + + # The _sha module implements the SHA checksum algorithms. + # (NIST's Secure Hash Algorithms.) +-#_sha shamodule.c +-#_sha256 sha256module.c +-#_sha512 sha512module.c ++_sha shamodule.c ++_sha256 sha256module.c ++_sha512 sha512module.c + + + # SGI IRIX specific modules -- off by default. +@@ -303,12 +303,12 @@ GLHACK=-Dclear=__GLclear + # A Linux specific module -- off by default; this may also work on + # some *BSDs. + +-#linuxaudiodev linuxaudiodev.c ++linuxaudiodev linuxaudiodev.c + + + # George Neville-Neil's timing module: + +-#timing timingmodule.c ++timing timingmodule.c + + + # The _tkinter module. +@@ -323,7 +323,7 @@ GLHACK=-Dclear=__GLclear + # every system. 
+ + # *** Always uncomment this (leave the leading underscore in!): +-# _tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \ ++_tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \ + # *** Uncomment and edit to reflect where your Tcl/Tk libraries are: + # -L/usr/local/lib \ + # *** Uncomment and edit to reflect where your Tcl/Tk headers are: +@@ -333,7 +333,7 @@ GLHACK=-Dclear=__GLclear + # *** Or uncomment this for Solaris: + # -I/usr/openwin/include \ + # *** Uncomment and edit for Tix extension only: +-# -DWITH_TIX -ltix8.1.8.2 \ ++ -DWITH_TIX -ltix \ + # *** Uncomment and edit for BLT extension only: + # -DWITH_BLT -I/usr/local/blt/blt8.0-unoff/include -lBLT8.0 \ + # *** Uncomment and edit for PIL (TkImaging) extension only: +@@ -342,7 +342,7 @@ GLHACK=-Dclear=__GLclear + # *** Uncomment and edit for TOGL extension only: + # -DWITH_TOGL togl.c \ + # *** Uncomment and edit to reflect your Tcl/Tk versions: +-# -ltk8.2 -ltcl8.2 \ ++ -ltk -ltcl \ + # *** Uncomment and edit to reflect where your X11 libraries are: + # -L/usr/X11R6/lib \ + # *** Or uncomment this for Solaris: +@@ -352,7 +352,7 @@ GLHACK=-Dclear=__GLclear + # *** Uncomment for AIX: + # -lld \ + # *** Always uncomment this; X11 libraries to link with: +-# -lX11 ++ -lX11 + + # Lance Ellinghaus's syslog module + #syslog syslogmodule.c # syslog daemon interface +@@ -374,7 +374,7 @@ GLHACK=-Dclear=__GLclear + # it is a highly experimental and dangerous device for calling + # *arbitrary* C functions in *arbitrary* shared libraries: + +-#dl dlmodule.c ++dl dlmodule.c + + + # Modules that provide persistent dictionary-like semantics. You will +@@ -397,7 +397,7 @@ GLHACK=-Dclear=__GLclear + # + # First, look at Setup.config; configure may have set this for you. + +-#gdbm gdbmmodule.c -I/usr/local/include -L/usr/local/lib -lgdbm ++gdbm gdbmmodule.c -lgdbm + + + # Sleepycat Berkeley DB interface. +@@ -412,11 +412,9 @@ GLHACK=-Dclear=__GLclear + # + # Edit the variables DB and DBLIBVERto point to the db top directory + # and the subdirectory of PORT where you built it. +-#DB=/usr/local/BerkeleyDB.4.0 +-#DBLIBVER=4.0 +-#DBINC=$(DB)/include +-#DBLIB=$(DB)/lib +-#_bsddb _bsddb.c -I$(DBINC) -L$(DBLIB) -ldb-$(DBLIBVER) ++DBINC=/usr/include/libdb ++DBLIB=/usr/lib ++_bsddb _bsddb.c -I$(DBINC) -L$(DBLIB) -ldb + + # Historical Berkeley DB 1.85 + # +@@ -431,14 +430,14 @@ GLHACK=-Dclear=__GLclear + + + # Helper module for various ascii-encoders +-#binascii binascii.c ++binascii binascii.c + + # Fred Drake's interface to the Python parser +-#parser parsermodule.c ++parser parsermodule.c + + # cStringIO and cPickle +-#cStringIO cStringIO.c +-#cPickle cPickle.c ++cStringIO cStringIO.c ++cPickle cPickle.c + + + # Lee Busby's SIGFPE modules. +@@ -461,7 +460,7 @@ GLHACK=-Dclear=__GLclear + # Andrew Kuchling's zlib module. + # This require zlib 1.1.3 (or later). 
+ # See http://www.gzip.org/zlib/ +-#zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz ++zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz + + # Interface to the Expat XML parser + # +@@ -480,14 +479,14 @@ GLHACK=-Dclear=__GLclear + # Hye-Shik Chang's CJKCodecs + + # multibytecodec is required for all the other CJK codec modules +-#_multibytecodec cjkcodecs/multibytecodec.c ++_multibytecodec cjkcodecs/multibytecodec.c + +-#_codecs_cn cjkcodecs/_codecs_cn.c +-#_codecs_hk cjkcodecs/_codecs_hk.c +-#_codecs_iso2022 cjkcodecs/_codecs_iso2022.c +-#_codecs_jp cjkcodecs/_codecs_jp.c +-#_codecs_kr cjkcodecs/_codecs_kr.c +-#_codecs_tw cjkcodecs/_codecs_tw.c ++_codecs_cn cjkcodecs/_codecs_cn.c ++_codecs_hk cjkcodecs/_codecs_hk.c ++_codecs_iso2022 cjkcodecs/_codecs_iso2022.c ++_codecs_jp cjkcodecs/_codecs_jp.c ++_codecs_kr cjkcodecs/_codecs_kr.c ++_codecs_tw cjkcodecs/_codecs_tw.c + + # Example -- included for reference only: + # xx xxmodule.c diff --git a/SOURCES/python-2.7.1-fix_test_abc_with_COUNT_ALLOCS.patch b/SOURCES/python-2.7.1-fix_test_abc_with_COUNT_ALLOCS.patch new file mode 100644 index 0000000..bb38281 --- /dev/null +++ b/SOURCES/python-2.7.1-fix_test_abc_with_COUNT_ALLOCS.patch @@ -0,0 +1,27 @@ +diff -up Python-2.7.1/Lib/test/test_abc.py.cache_leak Python-2.7.1/Lib/test/test_abc.py +--- Python-2.7.1/Lib/test/test_abc.py.cache_leak 2010-12-28 18:06:35.551938356 -0500 ++++ Python-2.7.1/Lib/test/test_abc.py 2010-12-28 18:09:09.021059202 -0500 +@@ -3,6 +3,8 @@ + + """Unit tests for abc.py.""" + ++import sys ++ + import unittest, weakref + from test import test_support + +@@ -229,8 +231,12 @@ class TestABC(unittest.TestCase): + # Trigger cache. + C().f() + del C +- test_support.gc_collect() +- self.assertEqual(r(), None) ++ # This doesn't work in our debug build, presumably due to its use ++ # of COUNT_ALLOCS, which makes heap-allocated types immortal (once ++ # they've ever had an instance): ++ if not hasattr(sys, 'getcounts'): ++ test_support.gc_collect() ++ self.assertEqual(r(), None) + + def test_main(): + test_support.run_unittest(TestABC) diff --git a/SOURCES/python-2.7.2-add-extension-suffix-to-python-config.patch b/SOURCES/python-2.7.2-add-extension-suffix-to-python-config.patch new file mode 100644 index 0000000..d1ff052 --- /dev/null +++ b/SOURCES/python-2.7.2-add-extension-suffix-to-python-config.patch @@ -0,0 +1,18 @@ +diff -up Python-2.7.2/Misc/python-config.in.add-extension-suffix-to-python-config Python-2.7.2/Misc/python-config.in +--- Python-2.7.2/Misc/python-config.in.add-extension-suffix-to-python-config 2011-08-23 18:15:41.832497124 -0400 ++++ Python-2.7.2/Misc/python-config.in 2011-08-23 18:17:25.854490011 -0400 +@@ -6,7 +6,7 @@ import getopt + from distutils import sysconfig + + valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags', +- 'ldflags', 'help'] ++ 'ldflags', 'extension-suffix', 'help'] + + def exit_with_usage(code=1): + print >>sys.stderr, "Usage: %s [%s]" % (sys.argv[0], +@@ -54,3 +54,5 @@ for opt in opt_flags: + libs.extend(getvar('LINKFORSHARED').split()) + print ' '.join(libs) + ++ elif opt == '--extension-suffix': ++ print (sys.pydebug and "_d" or "") + sysconfig.get_config_var('SO') diff --git a/SOURCES/python-2.7.3-debug-build.patch b/SOURCES/python-2.7.3-debug-build.patch new file mode 100644 index 0000000..5b6cda7 --- /dev/null +++ b/SOURCES/python-2.7.3-debug-build.patch @@ -0,0 +1,292 @@ +diff -up Python-2.7.3/configure.ac.debug-build Python-2.7.3/configure.ac +--- Python-2.7.3/configure.ac.debug-build 
2012-04-18 19:46:22.066498521 -0400 ++++ Python-2.7.3/configure.ac 2012-04-18 19:46:22.078498372 -0400 +@@ -635,7 +635,7 @@ AC_SUBST(LIBRARY) + AC_MSG_CHECKING(LIBRARY) + if test -z "$LIBRARY" + then +- LIBRARY='libpython$(VERSION).a' ++ LIBRARY='libpython$(VERSION)$(DEBUG_EXT).a' + fi + AC_MSG_RESULT($LIBRARY) + +@@ -780,8 +780,8 @@ if test $enable_shared = "yes"; then + INSTSONAME="$LDLIBRARY".$SOVERSION + ;; + Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*) +- LDLIBRARY='libpython$(VERSION).so' +- BLDLIBRARY='-L. -lpython$(VERSION)' ++ LDLIBRARY='libpython$(VERSION)$(DEBUG_EXT).so' ++ BLDLIBRARY='-L. -lpython$(VERSION)$(DEBUG_EXT)' + RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH} + case $ac_sys_system in + FreeBSD*) +@@ -905,6 +905,14 @@ else AC_MSG_RESULT(no); Py_DEBUG='false' + fi], + [AC_MSG_RESULT(no)]) + ++if test "$Py_DEBUG" = 'true' ++then ++ DEBUG_EXT=_d ++ DEBUG_SUFFIX=-debug ++fi ++AC_SUBST(DEBUG_EXT) ++AC_SUBST(DEBUG_SUFFIX) ++ + # XXX Shouldn't the code above that fiddles with BASECFLAGS and OPT be + # merged with this chunk of code? + +diff -up Python-2.7.3/Lib/distutils/command/build_ext.py.debug-build Python-2.7.3/Lib/distutils/command/build_ext.py +--- Python-2.7.3/Lib/distutils/command/build_ext.py.debug-build 2012-04-09 19:07:29.000000000 -0400 ++++ Python-2.7.3/Lib/distutils/command/build_ext.py 2012-04-18 19:46:22.079498360 -0400 +@@ -676,7 +676,10 @@ class build_ext (Command): + so_ext = get_config_var('SO') + if os.name == 'nt' and self.debug: + return os.path.join(*ext_path) + '_d' + so_ext +- return os.path.join(*ext_path) + so_ext ++ ++ # Similarly, extensions in debug mode are named 'module_d.so', to ++ # avoid adding the _d to the SO config variable: ++ return os.path.join(*ext_path) + (sys.pydebug and "_d" or "") + so_ext + + def get_export_symbols (self, ext): + """Return the list of symbols that a shared extension has to +@@ -761,6 +764,8 @@ class build_ext (Command): + template = "python%d.%d" + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) ++ if sys.pydebug: ++ pythonlib += '_d' + return ext.libraries + [pythonlib] + else: + return ext.libraries +diff -up Python-2.7.3/Lib/distutils/sysconfig.py.debug-build Python-2.7.3/Lib/distutils/sysconfig.py +--- Python-2.7.3/Lib/distutils/sysconfig.py.debug-build 2012-04-18 19:46:21.988499499 -0400 ++++ Python-2.7.3/Lib/distutils/sysconfig.py 2012-04-18 19:46:22.080498348 -0400 +@@ -85,7 +85,8 @@ def get_python_inc(plat_specific=0, pref + # Include is located in the srcdir + inc_dir = os.path.join(srcdir, "Include") + return inc_dir +- return os.path.join(prefix, "include", "python" + get_python_version()) ++ return os.path.join(prefix, "include", ++ "python" + get_python_version() + (sys.pydebug and '-debug' or '')) + elif os.name == "nt": + return os.path.join(prefix, "include") + elif os.name == "os2": +@@ -250,7 +251,7 @@ def get_makefile_filename(): + if python_build: + return os.path.join(project_base, "Makefile") + lib_dir = get_python_lib(plat_specific=1, standard_lib=1) +- return os.path.join(lib_dir, "config", "Makefile") ++ return os.path.join(lib_dir, "config" + (sys.pydebug and "-debug" or ""), "Makefile") + + + def parse_config_h(fp, g=None): +diff -up Python-2.7.3/Lib/distutils/tests/test_install.py.debug-build Python-2.7.3/Lib/distutils/tests/test_install.py +--- Python-2.7.3/Lib/distutils/tests/test_install.py.debug-build 2012-04-18 19:46:21.997499385 -0400 ++++ Python-2.7.3/Lib/distutils/tests/test_install.py 2012-04-18 19:46:22.080498348 -0400 +@@ -20,8 
+20,9 @@ from distutils.tests import support + + + def _make_ext_name(modname): +- if os.name == 'nt' and sys.executable.endswith('_d.exe'): ++ if sys.pydebug: + modname += '_d' ++ + return modname + sysconfig.get_config_var('SO') + + +diff -up Python-2.7.3/Makefile.pre.in.debug-build Python-2.7.3/Makefile.pre.in +--- Python-2.7.3/Makefile.pre.in.debug-build 2012-04-18 19:46:22.073498437 -0400 ++++ Python-2.7.3/Makefile.pre.in 2012-04-18 19:48:46.336694896 -0400 +@@ -102,8 +102,8 @@ SCRIPTDIR= $(prefix)/lib64 + # Detailed destination directories + BINLIBDEST= $(LIBDIR)/python$(VERSION) + LIBDEST= $(SCRIPTDIR)/python$(VERSION) +-INCLUDEPY= $(INCLUDEDIR)/python$(VERSION) +-CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(VERSION) ++INCLUDEPY= $(INCLUDEDIR)/python$(VERSION)$(DEBUG_SUFFIX) ++CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(VERSION)$(DEBUG_SUFFIX) + LIBP= $(LIBDIR)/python$(VERSION) + + # Symbols used for using shared libraries +@@ -117,6 +117,12 @@ DESTSHARED= $(BINLIBDEST)/lib-dynload + EXE= @EXEEXT@ + BUILDEXE= @BUILDEXEEXT@ + ++# DEBUG_EXT is used by ELF files (names and SONAMEs); it will be "_d" for a debug build ++# DEBUG_SUFFIX is used by filesystem paths; it will be "-debug" for a debug build ++# Both will be empty in an optimized build ++DEBUG_EXT= @DEBUG_EXT@ ++DEBUG_SUFFIX= @DEBUG_SUFFIX@ ++ + # Short name and location for Mac OS X Python framework + UNIVERSALSDK=@UNIVERSALSDK@ + PYTHONFRAMEWORK= @PYTHONFRAMEWORK@ +@@ -180,8 +186,8 @@ LIBOBJDIR= Python/ + LIBOBJS= @LIBOBJS@ + UNICODE_OBJS= @UNICODE_OBJS@ + +-PYTHON= python$(EXE) +-BUILDPYTHON= python$(BUILDEXE) ++PYTHON= python$(DEBUG_SUFFIX)$(EXE) ++BUILDPYTHON= python$(DEBUG_SUFFIX)$(BUILDEXE) + + PYTHON_FOR_BUILD=@PYTHON_FOR_BUILD@ + _PYTHON_HOST_PLATFORM=@_PYTHON_HOST_PLATFORM@ +@@ -413,7 +419,7 @@ sharedmods: $(BUILDPYTHON) + $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + +-libpython$(VERSION).so: $(LIBRARY_OBJS) ++libpython$(VERSION)$(DEBUG_EXT).so: $(LIBRARY_OBJS) + if test $(INSTSONAME) != $(LDLIBRARY); then \ + $(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \ + $(LN) -f $(INSTSONAME) $@; \ +@@ -796,18 +802,18 @@ bininstall: altbininstall + then rm -f $(DESTDIR)$(BINDIR)/$(PYTHON); \ + else true; \ + fi +- (cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(EXE) $(PYTHON)) +- -rm -f $(DESTDIR)$(BINDIR)/python2$(EXE) +- (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(EXE) python2$(EXE)) +- -rm -f $(DESTDIR)$(BINDIR)/python2-config +- (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)-config python2-config) +- -rm -f $(DESTDIR)$(BINDIR)/python-config +- (cd $(DESTDIR)$(BINDIR); $(LN) -s python2-config python-config) ++ (cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(DEBUG_SUFFIX)$(EXE) $(PYTHON)) ++ -rm -f $(DESTDIR)$(BINDIR)/python2$(DEBUG_SUFFIX)$(EXE) ++ (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(DEBUG_SUFFIX)$(EXE) python2$(DEBUG_SUFFIX)$(EXE)) ++ -rm -f $(DESTDIR)$(BINDIR)/python2$(DEBUG_SUFFIX)-config ++ (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(DEBUG_SUFFIX)-config python2$(DEBUG_SUFFIX)-config) ++ -rm -f $(DESTDIR)$(BINDIR)/python$(DEBUG_SUFFIX)-config ++ (cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(DEBUG_SUFFIX)-config python$(DEBUG_SUFFIX)-config) + -test -d $(DESTDIR)$(LIBPC) || $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(LIBPC) +- -rm -f $(DESTDIR)$(LIBPC)/python2.pc +- (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION).pc python2.pc) +- -rm -f $(DESTDIR)$(LIBPC)/python.pc +- 
(cd $(DESTDIR)$(LIBPC); $(LN) -s python2.pc python.pc) ++ -rm -f $(DESTDIR)$(LIBPC)/python2$(DEBUG_SUFFIX).pc ++ (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION)$(DEBUG_SUFFIX).pc python2$(DEBUG_SUFFIX).pc) ++ -rm -f $(DESTDIR)$(LIBPC)/python$(DEBUG_SUFFIX).pc ++ (cd $(DESTDIR)$(LIBPC); $(LN) -s python2$(DEBUG_SUFFIX).pc python$(DEBUG_SUFFIX).pc) + + # Install the interpreter with $(VERSION) affixed + # This goes into $(exec_prefix) +@@ -820,7 +826,7 @@ altbininstall: $(BUILDPYTHON) + else true; \ + fi; \ + done +- $(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE) ++ $(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(VERSION)$(DEBUG_SUFFIX)$(EXE) + if test -f $(LDLIBRARY); then \ + if test -n "$(DLLLIBRARY)" ; then \ + $(INSTALL_SHARED) $(DLLLIBRARY) $(DESTDIR)$(BINDIR); \ +@@ -970,10 +976,11 @@ $(srcdir)/Lib/$(PLATDIR): + fi; \ + cd $(srcdir)/Lib/$(PLATDIR); $(RUNSHARED) ./regen + +-python-config: $(srcdir)/Misc/python-config.in ++python$(DEBUG_SUFFIX)-config: $(srcdir)/Misc/python-config.in + # Substitution happens here, as the completely-expanded BINDIR + # is not available in configure +- sed -e "s,@EXENAME@,$(BINDIR)/python$(VERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config ++ sed -e "s,@EXENAME@,$(BINDIR)/python$(VERSION)$(DEBUG_SUFFIX)$(EXE)," < $(srcdir)/Misc/python-config.in >python$(DEBUG_SUFFIX)-config ++ + + # Install the include files + INCLDIRSTOMAKE=$(INCLUDEDIR) $(CONFINCLUDEDIR) $(INCLUDEPY) $(CONFINCLUDEPY) +@@ -994,13 +1001,13 @@ inclinstall: + $(INSTALL_DATA) pyconfig.h $(DESTDIR)$(CONFINCLUDEPY)/pyconfig.h + + # Install the library and miscellaneous stuff needed for extending/embedding +-# This goes into $(exec_prefix) +-LIBPL= $(LIBP)/config ++# This goes into $(exec_prefix)$(DEBUG_SUFFIX) ++LIBPL= $(LIBP)/config$(DEBUG_SUFFIX) + + # pkgconfig directory + LIBPC= $(LIBDIR)/pkgconfig + +-libainstall: all python-config ++libainstall: all python$(DEBUG_SUFFIX)-config + @for i in $(LIBDIR) $(LIBP) $(LIBPL) $(LIBPC); \ + do \ + if test ! 
-d $(DESTDIR)$$i; then \ +@@ -1016,11 +1023,10 @@ libainstall: all python-config + $(INSTALL_DATA) Modules/Setup $(DESTDIR)$(LIBPL)/Setup + $(INSTALL_DATA) Modules/Setup.local $(DESTDIR)$(LIBPL)/Setup.local + $(INSTALL_DATA) Modules/Setup.config $(DESTDIR)$(LIBPL)/Setup.config +- $(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION).pc ++ $(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION)$(DEBUG_SUFFIX).pc + $(INSTALL_SCRIPT) $(srcdir)/Modules/makesetup $(DESTDIR)$(LIBPL)/makesetup + $(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh +- $(INSTALL_SCRIPT) python-config $(DESTDIR)$(BINDIR)/python$(VERSION)-config +- rm python-config ++ $(INSTALL_SCRIPT) python$(DEBUG_SUFFIX)-config $(DESTDIR)$(BINDIR)/python$(VERSION)$(DEBUG_SUFFIX)-config + @if [ -s Modules/python.exp -a \ + "`echo $(MACHDEP) | sed 's/^\(...\).*/\1/'`" = "aix" ]; then \ + echo; echo "Installing support files for building shared extension modules on AIX:"; \ +diff -up Python-2.7.3/Misc/python-config.in.debug-build Python-2.7.3/Misc/python-config.in +--- Python-2.7.3/Misc/python-config.in.debug-build 2012-04-09 19:07:33.000000000 -0400 ++++ Python-2.7.3/Misc/python-config.in 2012-04-18 19:46:22.082498324 -0400 +@@ -45,7 +45,7 @@ for opt in opt_flags: + + elif opt in ('--libs', '--ldflags'): + libs = getvar('LIBS').split() + getvar('SYSLIBS').split() +- libs.append('-lpython'+pyver) ++ libs.append('-lpython' + pyver + (sys.pydebug and "_d" or "")) + # add the prefix/lib/pythonX.Y/config dir, but only if there is no + # shared library in prefix/lib/. + if opt == '--ldflags': +diff -up Python-2.7.3/Modules/makesetup.debug-build Python-2.7.3/Modules/makesetup +--- Python-2.7.3/Modules/makesetup.debug-build 2012-04-09 19:07:34.000000000 -0400 ++++ Python-2.7.3/Modules/makesetup 2012-04-18 19:46:22.083498312 -0400 +@@ -233,7 +233,7 @@ sed -e 's/[ ]*#.*//' -e '/^[ ]*$/d' | + *$mod.o*) base=$mod;; + *) base=${mod}module;; + esac +- file="$srcdir/$base\$(SO)" ++ file="$srcdir/$base\$(DEBUG_EXT)\$(SO)" + case $doconfig in + no) SHAREDMODS="$SHAREDMODS $file";; + esac +diff -up Python-2.7.3/Python/dynload_shlib.c.debug-build Python-2.7.3/Python/dynload_shlib.c +--- Python-2.7.3/Python/dynload_shlib.c.debug-build 2012-04-09 19:07:35.000000000 -0400 ++++ Python-2.7.3/Python/dynload_shlib.c 2012-04-18 19:46:22.083498312 -0400 +@@ -46,11 +46,16 @@ const struct filedescr _PyImport_DynLoad + {"module.exe", "rb", C_EXTENSION}, + {"MODULE.EXE", "rb", C_EXTENSION}, + #else ++#ifdef Py_DEBUG ++ {"_d.so", "rb", C_EXTENSION}, ++ {"module_d.so", "rb", C_EXTENSION}, ++#else + {".so", "rb", C_EXTENSION}, + {"module.so", "rb", C_EXTENSION}, +-#endif +-#endif +-#endif ++#endif /* Py_DEBUG */ ++#endif /* __VMS */ ++#endif /* defined(PYOS_OS2) && defined(PYCC_GCC) */ ++#endif /* __CYGWIN__ */ + {0, 0} + }; + +diff -up Python-2.7.3/Python/sysmodule.c.debug-build Python-2.7.3/Python/sysmodule.c +--- Python-2.7.3/Python/sysmodule.c.debug-build 2012-04-09 19:07:35.000000000 -0400 ++++ Python-2.7.3/Python/sysmodule.c 2012-04-18 19:46:22.083498312 -0400 +@@ -1506,6 +1506,12 @@ _PySys_Init(void) + PyString_FromString("legacy")); + #endif + ++#ifdef Py_DEBUG ++ PyDict_SetItemString(sysdict, "pydebug", Py_True); ++#else ++ PyDict_SetItemString(sysdict, "pydebug", Py_False); ++#endif ++ + #undef SET_SYS_FROM_STRING + if (PyErr_Occurred()) + return NULL; diff --git a/SOURCES/python-2.7.3-lib64.patch b/SOURCES/python-2.7.3-lib64.patch new file mode 100644 index 0000000..71f32c5 --- /dev/null +++ 
b/SOURCES/python-2.7.3-lib64.patch @@ -0,0 +1,196 @@ +diff -up Python-2.7.3/Lib/distutils/command/install.py.lib64 Python-2.7.3/Lib/distutils/command/install.py +--- Python-2.7.3/Lib/distutils/command/install.py.lib64 2012-04-09 19:07:29.000000000 -0400 ++++ Python-2.7.3/Lib/distutils/command/install.py 2013-02-19 13:58:20.446015129 -0500 +@@ -42,14 +42,14 @@ else: + INSTALL_SCHEMES = { + 'unix_prefix': { + 'purelib': '$base/lib/python$py_version_short/site-packages', +- 'platlib': '$platbase/lib/python$py_version_short/site-packages', ++ 'platlib': '$platbase/lib64/python$py_version_short/site-packages', + 'headers': '$base/include/python$py_version_short/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', + }, + 'unix_home': { + 'purelib': '$base/lib/python', +- 'platlib': '$base/lib/python', ++ 'platlib': '$base/lib64/python', + 'headers': '$base/include/python/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', +diff -up Python-2.7.3/Lib/distutils/sysconfig.py.lib64 Python-2.7.3/Lib/distutils/sysconfig.py +--- Python-2.7.3/Lib/distutils/sysconfig.py.lib64 2012-04-09 19:07:29.000000000 -0400 ++++ Python-2.7.3/Lib/distutils/sysconfig.py 2013-02-19 13:58:20.446015129 -0500 +@@ -114,8 +114,12 @@ def get_python_lib(plat_specific=0, stan + prefix = plat_specific and EXEC_PREFIX or PREFIX + + if os.name == "posix": ++ if plat_specific or standard_lib: ++ lib = "lib64" ++ else: ++ lib = "lib" + libpython = os.path.join(prefix, +- "lib", "python" + get_python_version()) ++ lib, "python" + get_python_version()) + if standard_lib: + return libpython + else: +diff -up Python-2.7.3/Lib/site.py.lib64 Python-2.7.3/Lib/site.py +--- Python-2.7.3/Lib/site.py.lib64 2012-04-09 19:07:31.000000000 -0400 ++++ Python-2.7.3/Lib/site.py 2013-02-19 13:58:20.447015128 -0500 +@@ -300,12 +300,16 @@ def getsitepackages(): + if sys.platform in ('os2emx', 'riscos'): + sitepackages.append(os.path.join(prefix, "Lib", "site-packages")) + elif os.sep == '/': ++ sitepackages.append(os.path.join(prefix, "lib64", ++ "python" + sys.version[:3], ++ "site-packages")) + sitepackages.append(os.path.join(prefix, "lib", + "python" + sys.version[:3], + "site-packages")) + sitepackages.append(os.path.join(prefix, "lib", "site-python")) + else: + sitepackages.append(prefix) ++ sitepackages.append(os.path.join(prefix, "lib64", "site-packages")) + sitepackages.append(os.path.join(prefix, "lib", "site-packages")) + if sys.platform == "darwin": + # for framework builds *only* we add the standard Apple +diff -up Python-2.7.3/Lib/test/test_site.py.lib64 Python-2.7.3/Lib/test/test_site.py +--- Python-2.7.3/Lib/test/test_site.py.lib64 2012-04-09 19:07:32.000000000 -0400 ++++ Python-2.7.3/Lib/test/test_site.py 2013-02-19 13:58:20.447015128 -0500 +@@ -241,17 +241,20 @@ class HelperFunctionsTests(unittest.Test + self.assertEqual(dirs[2], wanted) + elif os.sep == '/': + # OS X non-framwework builds, Linux, FreeBSD, etc +- self.assertEqual(len(dirs), 2) +- wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3], ++ self.assertEqual(len(dirs), 3) ++ wanted = os.path.join('xoxo', 'lib64', 'python' + sys.version[:3], + 'site-packages') + self.assertEqual(dirs[0], wanted) +- wanted = os.path.join('xoxo', 'lib', 'site-python') ++ wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3], ++ 'site-packages') + self.assertEqual(dirs[1], wanted) ++ wanted = os.path.join('xoxo', 'lib', 'site-python') ++ self.assertEqual(dirs[2], wanted) + else: + # other platforms + self.assertEqual(len(dirs), 2) + self.assertEqual(dirs[0], 
'xoxo') +- wanted = os.path.join('xoxo', 'lib', 'site-packages') ++ wanted = os.path.join('xoxo', 'lib64', 'site-packages') + self.assertEqual(dirs[1], wanted) + + class PthFile(object): +diff -up Python-2.7.3/Makefile.pre.in.lib64 Python-2.7.3/Makefile.pre.in +--- Python-2.7.3/Makefile.pre.in.lib64 2013-02-19 13:58:20.435015131 -0500 ++++ Python-2.7.3/Makefile.pre.in 2013-02-19 13:58:20.447015128 -0500 +@@ -97,7 +97,7 @@ LIBDIR= @libdir@ + MANDIR= @mandir@ + INCLUDEDIR= @includedir@ + CONFINCLUDEDIR= $(exec_prefix)/include +-SCRIPTDIR= $(prefix)/lib ++SCRIPTDIR= $(prefix)/lib64 + + # Detailed destination directories + BINLIBDEST= $(LIBDIR)/python$(VERSION) +diff -up Python-2.7.3/Modules/getpath.c.lib64 Python-2.7.3/Modules/getpath.c +--- Python-2.7.3/Modules/getpath.c.lib64 2012-04-09 19:07:34.000000000 -0400 ++++ Python-2.7.3/Modules/getpath.c 2013-02-19 13:58:20.448015128 -0500 +@@ -117,8 +117,8 @@ + #endif + + #ifndef PYTHONPATH +-#define PYTHONPATH PREFIX "/lib/python" VERSION ":" \ +- EXEC_PREFIX "/lib/python" VERSION "/lib-dynload" ++#define PYTHONPATH PREFIX "/lib64/python" VERSION ":" \ ++ EXEC_PREFIX "/lib64/python" VERSION "/lib-dynload" + #endif + + #ifndef LANDMARK +@@ -129,7 +129,7 @@ static char prefix[MAXPATHLEN+1]; + static char exec_prefix[MAXPATHLEN+1]; + static char progpath[MAXPATHLEN+1]; + static char *module_search_path = NULL; +-static char lib_python[] = "lib/python" VERSION; ++static char lib_python[] = "lib64/python" VERSION; + + static void + reduce(char *dir) +@@ -528,7 +528,7 @@ calculate_path(void) + } + else + strncpy(zip_path, PREFIX, MAXPATHLEN); +- joinpath(zip_path, "lib/python00.zip"); ++ joinpath(zip_path, "lib64/python00.zip"); + bufsz = strlen(zip_path); /* Replace "00" with version */ + zip_path[bufsz - 6] = VERSION[0]; + zip_path[bufsz - 5] = VERSION[2]; +@@ -538,7 +538,7 @@ calculate_path(void) + fprintf(stderr, + "Could not find platform dependent libraries \n"); + strncpy(exec_prefix, EXEC_PREFIX, MAXPATHLEN); +- joinpath(exec_prefix, "lib/lib-dynload"); ++ joinpath(exec_prefix, "lib64/lib-dynload"); + } + /* If we found EXEC_PREFIX do *not* reduce it! (Yet.) */ + +diff -up Python-2.7.3/Modules/Setup.dist.lib64 Python-2.7.3/Modules/Setup.dist +--- Python-2.7.3/Modules/Setup.dist.lib64 2013-02-19 13:58:20.442015131 -0500 ++++ Python-2.7.3/Modules/Setup.dist 2013-02-19 14:02:25.255998391 -0500 +@@ -413,7 +413,7 @@ gdbm gdbmmodule.c -lgdbm + # Edit the variables DB and DBLIBVERto point to the db top directory + # and the subdirectory of PORT where you built it. + DBINC=/usr/include/libdb +-DBLIB=/usr/lib ++DBLIB=/usr/lib64 + _bsddb _bsddb.c -I$(DBINC) -L$(DBLIB) -ldb + + # Historical Berkeley DB 1.85 +@@ -459,7 +459,7 @@ cPickle cPickle.c + # Andrew Kuchling's zlib module. + # This require zlib 1.1.3 (or later). 
+ # See http://www.gzip.org/zlib/ +-zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz ++zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib64 -lz + + # Interface to the Expat XML parser + # +diff -up Python-2.7.3/setup.py.lib64 Python-2.7.3/setup.py +--- Python-2.7.3/setup.py.lib64 2012-04-09 19:07:36.000000000 -0400 ++++ Python-2.7.3/setup.py 2013-02-19 13:58:20.449015129 -0500 +@@ -369,7 +369,7 @@ class PyBuildExt(build_ext): + + def detect_modules(self): + # Ensure that /usr/local is always used +- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') ++ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64') + add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') + self.add_gcc_paths() + self.add_multiarch_paths() +@@ -677,11 +677,11 @@ class PyBuildExt(build_ext): + elif curses_library: + readline_libs.append(curses_library) + elif self.compiler.find_library_file(lib_dirs + +- ['/usr/lib/termcap'], ++ ['/usr/lib64/termcap'], + 'termcap'): + readline_libs.append('termcap') + exts.append( Extension('readline', ['readline.c'], +- library_dirs=['/usr/lib/termcap'], ++ library_dirs=['/usr/lib64/termcap'], + extra_link_args=readline_extra_link_args, + libraries=readline_libs) ) + else: +@@ -715,8 +715,8 @@ class PyBuildExt(build_ext): + if krb5_h: + ssl_incs += krb5_h + ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, +- ['/usr/local/ssl/lib', +- '/usr/contrib/ssl/lib/' ++ ['/usr/local/ssl/lib64', ++ '/usr/contrib/ssl/lib64/' + ] ) + + if (ssl_incs is not None and diff --git a/SOURCES/python-2.7rc1-binutils-no-dep.patch b/SOURCES/python-2.7rc1-binutils-no-dep.patch new file mode 100644 index 0000000..ee6157c --- /dev/null +++ b/SOURCES/python-2.7rc1-binutils-no-dep.patch @@ -0,0 +1,14 @@ +diff -up Python-2.7rc1/Lib/ctypes/util.py.binutils-no-dep Python-2.7rc1/Lib/ctypes/util.py +--- Python-2.7rc1/Lib/ctypes/util.py.binutils-no-dep 2010-03-15 09:42:23.000000000 -0400 ++++ Python-2.7rc1/Lib/ctypes/util.py 2010-06-06 05:03:02.155975210 -0400 +@@ -140,7 +140,9 @@ elif os.name == "posix": + dump = f.read() + rv = f.close() + if rv == 10: +- raise OSError, 'objdump command not found' ++ return os.path.basename(f) # This is good for GLibc, I think, ++ # and a dep on binutils is big (for ++ # live CDs). 
+ f = os.popen(cmd) + try: + data = f.read() diff --git a/SOURCES/python-2.7rc1-socketmodule-constants.patch b/SOURCES/python-2.7rc1-socketmodule-constants.patch new file mode 100644 index 0000000..c32e103 --- /dev/null +++ b/SOURCES/python-2.7rc1-socketmodule-constants.patch @@ -0,0 +1,64 @@ +--- Python-2.7rc1/Modules/socketmodule.c.socketmodule 2010-05-09 10:46:46.000000000 -0400 ++++ Python-2.7rc1/Modules/socketmodule.c 2010-06-07 23:04:19.374234780 -0400 +@@ -4783,6 +4783,61 @@ init_socket(void) + PyModule_AddIntConstant(m, "SO_SETFIB", SO_SETFIB); + #endif + ++#ifdef SO_SNDBUFFORCE ++ PyModule_AddIntConstant(m, "SO_SNDBUFFORCE", SO_SNDBUFFORCE); ++#endif ++#ifdef SO_RCVBUFFORCE ++ PyModule_AddIntConstant(m, "SO_RCVBUFFORCE", SO_RCVBUFFORCE); ++#endif ++#ifdef SO_NO_CHECK ++ PyModule_AddIntConstant(m, "SO_NO_CHECK", SO_NO_CHECK); ++#endif ++#ifdef SO_PRIORITY ++ PyModule_AddIntConstant(m, "SO_PRIORITY", SO_PRIORITY); ++#endif ++#ifdef SO_BSDCOMPAT ++ PyModule_AddIntConstant(m, "SO_BSDCOMPAT", SO_BSDCOMPAT); ++#endif ++#ifdef SO_PASSCRED ++ PyModule_AddIntConstant(m, "SO_PASSCRED", SO_PASSCRED); ++#endif ++#ifdef SO_PEERCRED ++ PyModule_AddIntConstant(m, "SO_PEERCRED", SO_PEERCRED); ++#endif ++#ifdef SO_SECURITY_AUTHENTICATION ++ PyModule_AddIntConstant(m, "SO_SECURITY_AUTHENTICATION", SO_SECURITY_AUTHENTICATION); ++#endif ++#ifdef SO_SECURITY_ENCRYPTION_TRANSPORT ++ PyModule_AddIntConstant(m, "SO_SECURITY_ENCRYPTION_TRANSPORT", SO_SECURITY_ENCRYPTION_TRANSPORT); ++#endif ++#ifdef SO_SECURITY_ENCRYPTION_NETWORK ++ PyModule_AddIntConstant(m, "SO_SECURITY_ENCRYPTION_NETWORK", SO_SECURITY_ENCRYPTION_NETWORK); ++#endif ++#ifdef SO_BINDTODEVICE ++ PyModule_AddIntConstant(m, "SO_BINDTODEVICE", SO_BINDTODEVICE); ++#endif ++#ifdef SO_ATTACH_FILTER ++ PyModule_AddIntConstant(m, "SO_ATTACH_FILTER", SO_ATTACH_FILTER); ++#endif ++#ifdef SO_DETACH_FILTER ++ PyModule_AddIntConstant(m, "SO_DETACH_FILTER", SO_DETACH_FILTER); ++#endif ++#ifdef SO_PEERNAME ++ PyModule_AddIntConstant(m, "SO_PEERNAME", SO_PEERNAME); ++#endif ++#ifdef SO_TIMESTAMP ++ PyModule_AddIntConstant(m, "SO_TIMESTAMP", SO_TIMESTAMP); ++#endif ++#ifdef SO_PEERSEC ++ PyModule_AddIntConstant(m, "SO_PEERSEC", SO_PEERSEC); ++#endif ++#ifdef SO_PASSSEC ++ PyModule_AddIntConstant(m, "SO_PASSSEC", SO_PASSSEC); ++#endif ++#ifdef SO_TIMESTAMPNS ++ PyModule_AddIntConstant(m, "SO_TIMESTAMPNS", SO_TIMESTAMPNS); ++#endif ++ + /* Maximum number of connections for "listen" */ + #ifdef SOMAXCONN + PyModule_AddIntConstant(m, "SOMAXCONN", SOMAXCONN); diff --git a/SOURCES/python-2.7rc1-socketmodule-constants2.patch b/SOURCES/python-2.7rc1-socketmodule-constants2.patch new file mode 100644 index 0000000..896ac88 --- /dev/null +++ b/SOURCES/python-2.7rc1-socketmodule-constants2.patch @@ -0,0 +1,19 @@ +diff -up Python-2.7rc1/Modules/socketmodule.c.socketmodule2 Python-2.7rc1/Modules/socketmodule.c +--- Python-2.7rc1/Modules/socketmodule.c.socketmodule2 2010-06-07 23:06:59.133498087 -0400 ++++ Python-2.7rc1/Modules/socketmodule.c 2010-06-07 23:11:51.249520087 -0400 +@@ -5253,6 +5253,15 @@ init_socket(void) + #ifdef TCP_QUICKACK + PyModule_AddIntConstant(m, "TCP_QUICKACK", TCP_QUICKACK); + #endif ++#ifdef TCP_CONGESTION ++ PyModule_AddIntConstant(m, "TCP_CONGESTION", TCP_CONGESTION); ++#endif ++#ifdef TCP_MD5SIG ++ PyModule_AddIntConstant(m, "TCP_MD5SIG", TCP_MD5SIG); ++#endif ++#ifdef TCP_MD5SIG_MAXKEYLEN ++ PyModule_AddIntConstant(m, "TCP_MD5SIG_MAXKEYLEN", TCP_MD5SIG_MAXKEYLEN); ++#endif + + + /* IPX options */ diff --git a/SOURCES/python.conf 
b/SOURCES/python.conf new file mode 100644 index 0000000..9002b3b --- /dev/null +++ b/SOURCES/python.conf @@ -0,0 +1 @@ +x /tmp/pymp-* diff --git a/SOURCES/pythondeps.sh b/SOURCES/pythondeps.sh new file mode 100755 index 0000000..10a060a --- /dev/null +++ b/SOURCES/pythondeps.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +[ $# -ge 1 ] || { + cat > /dev/null + exit 0 +} + +case $1 in +-P|--provides) + shift + # Match buildroot/payload paths of the form + # /PATH/OF/BUILDROOT/usr/bin/pythonMAJOR.MINOR + # generating a line of the form + # python(abi) = MAJOR.MINOR + # (Don't match against -config tools e.g. /usr/bin/python2.6-config) + grep "/usr/bin/python.\..$" \ + | sed -e "s|.*/usr/bin/python\(.\..\)|python(abi) = \1|" + ;; +-R|--requires) + shift + # Match buildroot paths of the form + # /PATH/OF/BUILDROOT/usr/lib/pythonMAJOR.MINOR/ and + # /PATH/OF/BUILDROOT/usr/lib64/pythonMAJOR.MINOR/ + # generating (uniqely) lines of the form: + # python(abi) = MAJOR.MINOR + grep "/usr/lib[^/]*/python.\../.*" \ + | sed -e "s|.*/usr/lib[^/]*/python\(.\..\)/.*|python(abi) = \1|g" \ + | sort | uniq + ;; +esac + +exit 0 diff --git a/SOURCES/systemtap-example.stp b/SOURCES/systemtap-example.stp new file mode 100644 index 0000000..164333a --- /dev/null +++ b/SOURCES/systemtap-example.stp @@ -0,0 +1,19 @@ +/* + Example usage of the Python systemtap tapset to show a nested view of all + Python function calls (and returns) across the whole system. + + Run this using + stap systemtap-example.stp + to instrument all Python processes on the system, or (for example) using + stap systemtap-example.stp -c COMMAND + to instrument a specific program (implemented in Python) +*/ +probe python.function.entry +{ + printf("%s => %s in %s:%d\n", thread_indent(1), funcname, filename, lineno); +} + +probe python.function.return +{ + printf("%s <= %s in %s:%d\n", thread_indent(-1), funcname, filename, lineno); +} diff --git a/SPECS/python.spec b/SPECS/python.spec new file mode 100644 index 0000000..f302aaf --- /dev/null +++ b/SPECS/python.spec @@ -0,0 +1,4332 @@ + +# ====================================================== +# Conditionals and other variables controlling the build +# ====================================================== + +%{!?__python_ver:%global __python_ver EMPTY} +#global __python_ver 27 +%global unicode ucs4 + +%if "%{__python_ver}" != "EMPTY" +%global main_python 0 +%global python python%{__python_ver} +%global tkinter tkinter%{__python_ver} +%else +%global main_python 1 +%global python python +%global tkinter tkinter +%endif + +%global pybasever 2.7 +%global pylibdir %{_libdir}/python%{pybasever} +%global tools_dir %{pylibdir}/Tools +%global demo_dir %{pylibdir}/Demo +%global doc_tools_dir %{pylibdir}/Doc/tools +%global dynload_dir %{pylibdir}/lib-dynload +%global site_packages %{pylibdir}/site-packages + +# Python's configure script defines SOVERSION, and this is used in the Makefile +# to determine INSTSONAME, the name of the libpython DSO: +# LDLIBRARY='libpython$(VERSION).so' +# INSTSONAME="$LDLIBRARY".$SOVERSION +# We mirror this here in order to make it easier to add the -gdb.py hooks. 
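
Purely as an illustrative aside (not part of the imported spec or patches): the mirrored name can be cross-checked against a built interpreter, since INSTSONAME ends up in the generated Makefile that distutils parses. The expected string assumes the %py_SOVERSION of 1.0 defined just below.

    # Sketch only -- a quick consistency check, not something the spec runs.
    from distutils import sysconfig
    print sysconfig.get_config_var('INSTSONAME')   # expected: libpython2.7.so.1.0 on the optimized build
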
+# (if these get out of sync, the payload of the libs subpackage will fail +# and halt the build) +%global py_SOVERSION 1.0 +%global py_INSTSONAME_optimized libpython%{pybasever}.so.%{py_SOVERSION} +%global py_INSTSONAME_debug libpython%{pybasever}_d.so.%{py_SOVERSION} + +%global with_debug_build 1 + +# Disabled for now: +%global with_huntrleaks 0 + +%global with_gdb_hooks 1 + +%global with_systemtap 1 + +# some arches don't have valgrind so we need to disable its support on them +%ifarch %{ix86} x86_64 ppc %{power64} s390x aarch64 +%global with_valgrind 1 +%else +%global with_valgrind 0 +%endif + +# Having more than 28 cores on a PPC machine will lead to race conditions +# during build so we have to set a limit. +# See: https://bugzilla.redhat.com/show_bug.cgi?id=1568974 +%ifarch ppc %{power64} && %_smp_ncpus_max > 28 +%global _smp_ncpus_max 28 +%endif + +%global with_gdbm 1 + +# Turn this to 0 to turn off the "check" phase: +%global run_selftest_suite 1 + +# Some of the files below /usr/lib/pythonMAJOR.MINOR/test (e.g. bad_coding.py) +# are deliberately invalid, leading to SyntaxError exceptions if they get +# byte-compiled. +# +# These errors are ignored by the normal python build, and aren't normally a +# problem in the buildroots since /usr/bin/python isn't present. +# +# However, for the case where we're rebuilding the python srpm on a machine +# that does have python installed we need to set this to avoid +# brp-python-bytecompile treating these as fatal errors: +# +%global _python_bytecompile_errors_terminate_build 0 + +# We need to get a newer configure generated out of configure.in for the following +# patches: +# patch 4 (CFLAGS) +# patch 52 (valgrind) +# patch 55 (systemtap) +# patch 145 (linux2) +# +# For patch 55 (systemtap), we need to get a new header for configure to use +# +# configure.in requires autoconf-2.65, but the version in Fedora is currently +# autoconf-2.66 +# +# For now, we'll generate a patch to the generated configure script and +# pyconfig.h.in on a machine that has a local copy of autoconf 2.65 +# +# Instructions on obtaining such a copy can be seen at +# http://bugs.python.org/issue7997 +# +# To make it easy to regenerate the patch, this specfile can be run in two +# ways: +# (i) regenerate_autotooling_patch 0 : the normal approach: prep the +# source tree using a pre-generated patch to the "configure" script, and do a +# full build +# (ii) regenerate_autotooling_patch 1 : intended to be run on a developer's +# workstation: prep the source tree without patching configure, then rerun a +# local copy of autoconf-2.65, regenerate the patch, then exit, without doing +# the rest of the build +%global regenerate_autotooling_patch 0 + + +# ================== +# Top-level metadata +# ================== +Summary: An interpreted, interactive, object-oriented programming language +Name: %{python} +# Remember to also rebase python-docs when changing this: +Version: 2.7.5 +Release: 88%{?dist} +License: Python +Group: Development/Languages +Requires: %{python}-libs%{?_isa} = %{version}-%{release} +Provides: python-abi = %{pybasever} +Provides: python(abi) = %{pybasever} + + +# ======================= +# Build-time requirements +# ======================= + +# (keep this list alphabetized) + +BuildRequires: autoconf +BuildRequires: bluez-libs-devel +BuildRequires: bzip2 +BuildRequires: bzip2-devel + +# expat 2.1.0 added the symbol XML_SetHashSalt without bumping SONAME. 
We use +# it (in pyexpat) in order to enable the fix in Python-2.7.3 for CVE-2012-0876: +BuildRequires: expat-devel >= 2.1.0 + +BuildRequires: findutils +BuildRequires: gcc-c++ +%if %{with_gdbm} +BuildRequires: gdbm-devel +%endif +BuildRequires: glibc-devel +BuildRequires: gmp-devel +BuildRequires: libdb-devel +BuildRequires: libffi-devel +BuildRequires: libGL-devel +BuildRequires: libX11-devel +BuildRequires: ncurses-devel +BuildRequires: openssl-devel +BuildRequires: pkgconfig +BuildRequires: readline-devel +BuildRequires: sqlite-devel + +%if 0%{?with_systemtap} +BuildRequires: systemtap-sdt-devel +# (this introduces a circular dependency, in that systemtap-sdt-devel's +# /usr/bin/dtrace is a python script) +%global tapsetdir /usr/share/systemtap/tapset +%endif # with_systemtap + +BuildRequires: tar +BuildRequires: tcl-devel +BuildRequires: tix-devel +BuildRequires: tk-devel + +%if 0%{?with_valgrind} +BuildRequires: valgrind-devel +%endif + +BuildRequires: zlib-devel + + + +# ======================= +# Source code and patches +# ======================= + +Source: http://www.python.org/ftp/python/%{version}/Python-%{version}.tar.xz + +# Work around bug 562906 until it's fixed in rpm-build by providing a fixed +# version of pythondeps.sh: +Source2: pythondeps.sh +%global __python_requires %{SOURCE2} + +# Systemtap tapset to make it easier to use the systemtap static probes +# (actually a template; LIBRARY_PATH will get fixed up during install) +# Written by dmalcolm; not yet sent upstream +Source3: libpython.stp + + +# Example systemtap script using the tapset +# Written by wcohen, mjw, dmalcolm; not yet sent upstream +Source4: systemtap-example.stp + +# Another example systemtap script that uses the tapset +# Written by dmalcolm; not yet sent upstream +Source5: pyfuntop.stp + +Source7: pynche + +# Configuration file to change ssl verification settings globally +# Downstream only see Patch224 +Source8: cert-verification.cfg + +# configuration for systemd's tmpfiles +Source9: python.conf + +# Modules/Setup.dist is ultimately used by the "makesetup" script to construct +# the Makefile and config.c +# +# Upstream leaves many things disabled by default, to try to make it easy as +# possible to build the code on as many platforms as possible. +# +# TODO: many modules can also now be built by setup.py after the python binary +# has been built; need to assess if we should instead build things there +# +# We patch it downstream as follows: +# - various modules are built by default by upstream as static libraries; +# we built them as shared libraries +# - build the "readline" module (appears to also be handled by setup.py now) +# - enable the build of the following modules: +# - array arraymodule.c # array objects +# - cmath cmathmodule.c # -lm # complex math library functions +# - math mathmodule.c # -lm # math library functions, e.g. 
sin() +# - _struct _struct.c # binary structure packing/unpacking +# - time timemodule.c # -lm # time operations and variables +# - operator operator.c # operator.add() and similar goodies +# - _weakref _weakref.c # basic weak reference support +# - _testcapi _testcapimodule.c # Python C API test module +# - _random _randommodule.c # Random number generator +# - _collections _collectionsmodule.c # Container types +# - itertools itertoolsmodule.c +# - strop stropmodule.c +# - _functools _functoolsmodule.c +# - _bisect _bisectmodule.c # Bisection algorithms +# - unicodedata unicodedata.c # static Unicode character database +# - _locale _localemodule.c +# - fcntl fcntlmodule.c # fcntl(2) and ioctl(2) +# - spwd spwdmodule.c # spwd(3) +# - grp grpmodule.c # grp(3) +# - select selectmodule.c # select(2); not on ancient System V +# - mmap mmapmodule.c # Memory-mapped files +# - _csv _csv.c # CSV file helper +# - _socket socketmodule.c # Socket module helper for socket(2) +# - _ssl _ssl.c +# - crypt cryptmodule.c -lcrypt # crypt(3) +# - nis nismodule.c -lnsl # Sun yellow pages -- not everywhere +# - termios termios.c # Steen Lumholt's termios module +# - resource resource.c # Jeremy Hylton's rlimit interface +# - audioop audioop.c # Operations on audio samples +# - imageop imageop.c # Operations on images +# - _md5 md5module.c md5.c +# - _sha shamodule.c +# - _sha256 sha256module.c +# - _sha512 sha512module.c +# - linuxaudiodev linuxaudiodev.c +# - timing timingmodule.c +# - _tkinter _tkinter.c tkappinit.c +# - dl dlmodule.c +# - gdbm gdbmmodule.c +# - _bsddb _bsddb.c +# - binascii binascii.c +# - parser parsermodule.c +# - cStringIO cStringIO.c +# - cPickle cPickle.c +# - zlib zlibmodule.c +# - _multibytecodec cjkcodecs/multibytecodec.c +# - _codecs_cn cjkcodecs/_codecs_cn.c +# - _codecs_hk cjkcodecs/_codecs_hk.c +# - _codecs_iso2022 cjkcodecs/_codecs_iso2022.c +# - _codecs_jp cjkcodecs/_codecs_jp.c +# - _codecs_kr cjkcodecs/_codecs_kr.c +# - _codecs_tw cjkcodecs/_codecs_tw.c +Patch0: python-2.7.1-config.patch + +# Removes the "-g" option from "pydoc", for some reason; I believe +# (dmalcolm 2010-01-29) that this was introduced in this change: +# - fix pydoc (#68082) +# in 2.2.1-12 as a response to the -g option needing TkInter installed +# (Red Hat Linux 8) +# Not upstream +Patch1: 00001-pydocnogui.patch + +# Add $(CFLAGS) to the linker arguments when linking the "python" binary +# since some architectures (sparc64) need this (rhbz:199373). +# Not yet filed upstream +Patch4: python-2.5-cflags.patch + +# Work around a bug in Python' gettext module relating to the "Plural-Forms" +# header (rhbz:252136) +# Related to upstream issues: +# http://bugs.python.org/issue1448060 and http://bugs.python.org/issue1475523 +# though the proposed upstream patches are, alas, different +Patch6: python-2.5.1-plural-fix.patch + +# This patch was listed in the changelog as: +# * Fri Sep 14 2007 Jeremy Katz - 2.5.1-11 +# - fix encoding of sqlite .py files to work around weird encoding problem +# in Turkish (#283331) +# A traceback attached to rhbz 244016 shows the problem most clearly: a +# traceback on attempting to import the sqlite module, with: +# "SyntaxError: encoding problem: with BOM (__init__.py, line 1)" +# This seems to come from Parser/tokenizer.c:check_coding_spec +# Our patch changes two source files within sqlite3, removing the +# "coding: ISO-8859-1" specs and character E4 = U+00E4 = +# LATIN SMALL LETTER A WITH DIAERESIS from in ghaering's surname. 
+# +# It may be that the conversion of "ISO-8859-1" to "iso-8859-1" is thwarted +# by the implementation of "tolower" in the Turkish locale; see: +# https://bugzilla.redhat.com/show_bug.cgi?id=191096#c9 +# +# TODO: Not yet sent upstream, and appears to me (dmalcolm 2010-01-29) that +# it may be papering over a symptom +Patch7: python-2.5.1-sqlite-encoding.patch + +# FIXME: Lib/ctypes/util.py posix implementation defines a function +# _get_soname(f). Upstreams's implementation of this uses objdump to read the +# SONAME from a library; we avoid this, apparently to minimize space +# requirements on the live CD: +# (rhbz:307221) +Patch10: python-2.7rc1-binutils-no-dep.patch + +# Upstream as of Python 2.7.3: +# Patch11: python-2.7rc1-codec-ascii-tolower.patch + +# Add various constants to the socketmodule (rhbz#436560): +# TODO: these patches were added in 2.5.1-22 and 2.5.1-24 but appear not to +# have been sent upstream yet: +Patch13: python-2.7rc1-socketmodule-constants.patch +Patch14: python-2.7rc1-socketmodule-constants2.patch + +# Remove an "-rpath $(LIBDIR)" argument from the linkage args in configure.in: +# FIXME: is this for OSF, not Linux? +Patch16: python-2.6-rpath.patch + +# Fixup distutils/unixccompiler.py to remove standard library path from rpath: +# Adapted from Patch0 in ivazquez' python3000 specfile, removing usage of +# super() as it's an old-style class +Patch17: python-2.6.4-distutils-rpath.patch + +# 00055 # +# Systemtap support: add statically-defined probe points +# Patch based on upstream bug: http://bugs.python.org/issue4111 +# fixed up by mjw and wcohen for 2.6.2, then fixed up by dmalcolm for 2.6.4 +# then rewritten by mjw (attachment 390110 of rhbz 545179), then reformatted +# for 2.7rc1 by dmalcolm: +Patch55: 00055-systemtap.patch + +# Upstream as of Python 2.7.4 +# Patch101: 00101-lib64-regex.patch + +# Only used when "%%{_lib}" == "lib64" +# Fixup various paths throughout the build and in distutils from "lib" to "lib64", +# and add the /usr/lib64/pythonMAJOR.MINOR/site-packages to sitedirs, in front of +# /usr/lib/pythonMAJOR.MINOR/site-packages +# Not upstream +Patch102: python-2.7.3-lib64.patch + +# Python 2.7 split out much of the path-handling from distutils/sysconfig.py to +# a new sysconfig.py (in r77704). +# We need to make equivalent changes to that new file to ensure that the stdlib +# and platform-specific code go to /usr/lib64 not /usr/lib, on 64-bit archs: +Patch103: python-2.7-lib64-sysconfig.patch + +# 00104 # +# Only used when "%%{_lib}" == "lib64" +# Another lib64 fix, for distutils/tests/test_install.py; not upstream: +Patch104: 00104-lib64-fix-for-test_install.patch + +# 00111 # +# Patch the Makefile.pre.in so that the generated Makefile doesn't try to build +# a libpythonMAJOR.MINOR.a (bug 550692): +# Downstream only: not appropriate for upstream +Patch111: 00111-no-static-lib.patch + +# 00112 # +# Patch to support building both optimized vs debug stacks DSO ABIs, sharing +# the same .py and .pyc files, using "_d.so" to signify a debug build of an +# extension module. +# +# Based on Debian's patch for the same, +# http://patch-tracker.debian.org/patch/series/view/python2.6/2.6.5-2/debug-build.dpatch +# +# (which was itself based on the upstream Windows build), but with some +# changes: +# +# * Debian's patch to dynload_shlib.c looks for module_d.so, then module.so, +# but this can potentially find a module built against the wrong DSO ABI. 
We +# instead search for just module_d.so in a debug build +# +# * We remove this change from configure.in's build of the Makefile: +# SO=$DEBUG_EXT.so +# so that sysconfig.py:customize_compiler stays with shared_lib_extension='.so' +# on debug builds, so that UnixCCompiler.find_library_file can find system +# libraries (otherwise "make sharedlibs" fails to find system libraries, +# erroneously looking e.g. for "libffi_d.so" rather than "libffi.so") +# +# * We change Lib/distutils/command/build_ext.py:build_ext.get_ext_filename +# to add the _d there, when building an extension. This way, "make sharedlibs" +# can build ctypes, by finding the sysmtem libffi.so (rather than failing to +# find "libffi_d.so"), and builds the module as _ctypes_d.so +# +# * Similarly, update build_ext:get_libraries handling of Py_ENABLE_SHARED by +# appending "_d" to the python library's name for the debug configuration +# +# * We modify Modules/makesetup to add the "_d" to the generated Makefile +# rules for the various Modules/*.so targets +# +# This may introduce issues when building an extension that links directly +# against another extension (e.g. users of NumPy?), but seems more robust when +# searching for external libraries +# +# * We don't change Lib/distutils/command/build.py: build.build_purelib to +# embed plat_specifier, leaving it as is, as pure python builds should be +# unaffected by these differences (we'll be sharing the .py and .pyc files) +# +# * We introduce DEBUG_SUFFIX as well as DEBUG_EXT: +# - DEBUG_EXT is used by ELF files (names and SONAMEs); it will be "_d" for +# a debug build +# - DEBUG_SUFFIX is used by filesystem paths; it will be "-debug" for a +# debug build +# +# Both will be empty in an optimized build. "_d" contains characters that +# are valid ELF metadata, but this leads to various ugly filesystem paths (such +# as the include path), and DEBUG_SUFFIX allows these paths to have more natural +# names. Changing this requires changes elsewhere in the distutils code. +# +# * We add DEBUG_SUFFIX to PYTHON in the Makefile, so that the two +# configurations build parallel-installable binaries with different names +# ("python-debug" vs "python"). +# +# * Similarly, we add DEBUG_SUFFIX within python-config and +# python$(VERSION)-config, so that the two configuration get different paths +# for these. +# +# See also patch 130 below +# +Patch112: python-2.7.3-debug-build.patch + + +# 00113 # +# Add configure-time support for the COUNT_ALLOCS and CALL_PROFILE options +# described at http://svn.python.org/projects/python/trunk/Misc/SpecialBuilds.txt +# so that if they are enabled, they will be in that build's pyconfig.h, so that +# extension modules will reliably use them +# Not yet sent upstream +Patch113: 00113-more-configuration-flags.patch + +# 00114 # +# Add flags for statvfs.f_flag to the constant list in posixmodule (i.e. "os") +# (rhbz:553020); partially upstream as http://bugs.python.org/issue7647 +# Not yet sent upstream +Patch114: 00114-statvfs-f_flag-constants.patch + +# Upstream as of Python 2.7.3: +# Patch115: make-pydoc-more-robust-001.patch + +# Upstream r79310 removed the "Modules" directory from sys.path when Python is +# running from the build directory on POSIX to fix a unit test (issue #8205). 
+# This seems to have broken the compileall.py done in "make install": it cannot +# find shared library extension modules at this point in the build (sys.path +# does not contain DESTDIR/usr/lib(64)/python-2.7/lib-dynload for some reason), +# leading to the build failing with: +# Traceback (most recent call last): +# File "/home/david/rpmbuild/BUILDROOT/python-2.7-0.1.rc2.fc14.x86_64/usr/lib64/python2.7/compileall.py", line 17, in +# import struct +# File "/home/david/rpmbuild/BUILDROOT/python-2.7-0.1.rc2.fc14.x86_64/usr/lib64/python2.7/struct.py", line 1, in +# from _struct import * +# ImportError: No module named _struct +# This patch adds the build Modules directory to build path. +Patch121: 00121-add-Modules-to-build-path.patch + +# 00125 # +# COUNT_ALLOCS is useful for debugging, but the upstream behaviour of always +# emitting debug info to stdout on exit is too verbose and makes it harder to +# use the debug build. Add a "PYTHONDUMPCOUNTS" environment variable which +# must be set to enable the output on exit +# Not yet sent upstream +Patch125: 00125-less-verbose-COUNT_ALLOCS.patch + +# Upstream as of Python 2.7.5 +# Patch126: fix-dbm_contains-on-64bit-bigendian.patch + +# Upstream as of Python 2.7.5 +# Patch127: fix-test_structmember-on-64bit-bigendian.patch + +# 2.7.1 (in r84230) added a test to test_abc which fails if python is +# configured with COUNT_ALLOCS, which is the case for our debug build +# (the COUNT_ALLOCS instrumentation keeps "C" alive). +# Not yet sent upstream +Patch128: python-2.7.1-fix_test_abc_with_COUNT_ALLOCS.patch + +# 00130 # +# Add "--extension-suffix" option to python-config and python-debug-config +# (rhbz#732808) +# +# This is adapted from 3.2's PEP-3149 support. +# +# Fedora's debug build has some non-standard features (see also patch 112 +# above), though largely shared with Debian/Ubuntu and Windows +# +# In particular, SO in the Makefile is currently always just ".so" for our +# python 2 optimized builds, but for python 2 debug it should be '_d.so', to +# distinguish the debug vs optimized ABI, following the pattern in the above +# patch. 
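
As an illustrative aside (not code shipped by any of these patches): the resulting naming rule can be expressed from the interpreter itself. sys.pydebug is the downstream-only flag added by the debug-build patch, hence the getattr guard for stock builds, and 'spam' is just a placeholder module name.

    # Sketch of the suffix logic the debug-build and python-config patches implement.
    import sys
    from distutils import sysconfig

    so = sysconfig.get_config_var('SO')               # stays '.so' even on the debug build
    suffix = ('_d' if getattr(sys, 'pydebug', False) else '') + so
    print 'spam' + suffix   # 'spam.so' under python, 'spam_d.so' under python-debug;
                            # the suffix is what "python-debug-config --extension-suffix" prints
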
+# +# Not yet sent upstream +Patch130: python-2.7.2-add-extension-suffix-to-python-config.patch + +# 00131 # +# The four tests in test_io built on top of check_interrupted_write_retry +# fail when built in Koji, for ppc and ppc64; for some reason, the SIGALRM +# handlers are never called, and the call to write runs to completion +# (rhbz#732998) +Patch131: 00131-disable-tests-in-test_io.patch + +# 00132 # +# Add non-standard hooks to unittest for use in the "check" phase below, when +# running selftests within the build: +# @unittest._skipInRpmBuild(reason) +# for tests that hang or fail intermittently within the build environment, and: +# @unittest._expectedFailureInRpmBuild +# for tests that always fail within the build environment +# +# The hooks only take effect if WITHIN_PYTHON_RPM_BUILD is set in the +# environment, which we set manually in the appropriate portion of the "check" +# phase below (and which potentially other python-* rpms could set, to reuse +# these unittest hooks in their own "check" phases) +Patch132: 00132-add-rpmbuild-hooks-to-unittest.patch + +# 00133 # +# "dl" is deprecated, and test_dl doesn't work on 64-bit builds: +Patch133: 00133-skip-test_dl.patch + +# 00134 # +# Fix a failure in test_sys.py when configured with COUNT_ALLOCS enabled +# Not yet sent upstream +Patch134: 00134-fix-COUNT_ALLOCS-failure-in-test_sys.patch + +# 00135 # +# Skip "test_callback_in_cycle_resurrection" in a debug build, where it fails: +# Not yet sent upstream +Patch135: 00135-skip-test-within-test_weakref-in-debug-build.patch + +# 00136 # +# Some tests try to seek on sys.stdin, but don't work as expected when run +# within Koji/mock; skip them within the rpm build: +Patch136: 00136-skip-tests-of-seeking-stdin-in-rpmbuild.patch + +# 00137 # +# Some tests within distutils fail when run in an rpmbuild: +Patch137: 00137-skip-distutils-tests-that-fail-in-rpmbuild.patch + +# 00138 # +# Fixup some tests within distutils to work with how debug builds are set up: +Patch138: 00138-fix-distutils-tests-in-debug-build.patch + +# 00139 # +# ARM-specific: skip known failure in test_float: +# http://bugs.python.org/issue8265 (rhbz#706253) +Patch139: 00139-skip-test_float-known-failure-on-arm.patch + +# 00140 # +# Sparc-specific: skip known failure in test_ctypes: +# http://bugs.python.org/issue8314 (rhbz#711584) +# which appears to be a libffi bug +Patch140: 00140-skip-test_ctypes-known-failure-on-sparc.patch + +# 00141 # +# Fix test_gc's test_newinstance case when configured with COUNT_ALLOCS: +# Not yet sent upstream +Patch141: 00141-fix-test_gc_with_COUNT_ALLOCS.patch + +# 00142 # +# Some pty tests fail when run in mock (rhbz#714627): +Patch142: 00142-skip-failing-pty-tests-in-rpmbuild.patch + +# 00143 # +# Fix the --with-tsc option on ppc64, and rework it on 32-bit ppc to avoid +# aliasing violations (rhbz#698726) +# Sent upstream as http://bugs.python.org/issue12872 +Patch143: 00143-tsc-on-ppc.patch + +# 00144 # +# (Optionally) disable the gdbm module: +Patch144: 00144-no-gdbm.patch + +# 00145 # +# Upstream as of Python 2.7.3: +# Patch145: 00145-force-sys-platform-to-be-linux2.patch + +# 00146 # +# Support OpenSSL FIPS mode (e.g. when OPENSSL_FORCE_FIPS_MODE=1 is set) +# - handle failures from OpenSSL (e.g. 
on attempts to use MD5 in a +# FIPS-enforcing environment) +# - add a new "usedforsecurity" keyword argument to the various digest +# algorithms in hashlib so that you can whitelist a callsite with +# "usedforsecurity=False" +# (sent upstream for python 3 as http://bugs.python.org/issue9216; this is a +# backport to python 2.7; see RHEL6 patch 119) +# - enforce usage of the _hashlib implementation: don't fall back to the _md5 +# and _sha* modules (leading to clearer error messages if fips selftests +# fail) +# - don't build the _md5 and _sha* modules; rely on the _hashlib implementation +# of hashlib (for example, md5.py will use _hashlib's implementation of MD5, +# if permitted by the FIPS setting) +# (rhbz#563986) +Patch146: 00146-hashlib-fips.patch + +# 00147 # +# Add a sys._debugmallocstats() function +# Based on patch 202 from RHEL 5's python.spec, with updates from rhbz#737198 +# Sent upstream as http://bugs.python.org/issue14785 +Patch147: 00147-add-debug-malloc-stats.patch + +# 00148 # +# Upstream as of Python 2.7.3: +# Patch148: 00148-gdbm-1.9-magic-values.patch + +# 00149 # +# python3.spec's +# Patch149: 00149-backport-issue11254-pycache-bytecompilation-fix.patch +# is not relevant for Python 2 + +# 00150 # +# python3.spec has: +# Patch150: 00150-disable-rAssertAlmostEqual-cmath-on-ppc.patch +# as a workaround for a glibc bug on PPC (bz #750811) + +# 00151 # +# Upstream as of Python 2.7.3: +# Patch151: 00151-fork-deadlock.patch + +# 00152 # +# python3.spec has: +# Patch152: 00152-fix-test-gdb-regex.patch + +# 00153 # +# Strip out lines of the form "warning: Unable to open ..." from gdb's stderr +# when running test_gdb.py; also cope with change to gdb in F17 onwards in +# which values are printed as "v@entry" rather than just "v": +# Not yet sent upstream +Patch153: 00153-fix-test_gdb-noise.patch + +# 00154 # +# python3.spec on f15 has: +# Patch154: 00154-skip-urllib-test-requiring-working-DNS.patch + +# 00155 # +# Avoid allocating thunks in ctypes unless absolutely necessary, to avoid +# generating SELinux denials on "import ctypes" and "import uuid" when +# embedding Python within httpd (rhbz#814391) +Patch155: 00155-avoid-ctypes-thunks.patch + +# 00156 # +# Recent builds of gdb will only auto-load scripts from certain safe +# locations. Turn off this protection when running test_gdb in the selftest +# suite to ensure that it can load our -gdb.py script (rhbz#817072): +# Not yet sent upstream +Patch156: 00156-gdb-autoload-safepath.patch + +# 00157 # +# Update uid/gid handling throughout the standard library: uid_t and gid_t are +# unsigned 32-bit values, but existing code often passed them through C long +# values, which are signed 32-bit values on 32-bit architectures, leading to +# negative int objects for uid/gid values >= 2^31 on 32-bit architectures. +# +# Introduce _PyObject_FromUid/Gid to convert uid_t/gid_t values to python +# objects, using int objects where the value will fit (long objects otherwise), +# and _PyArg_ParseUid/Gid to convert int/long to uid_t/gid_t, with -1 allowed +# as a special case (since this is given special meaning by the chown syscall) +# +# Update standard library to use this throughout for uid/gid values, so that +# very large uid/gid values are round-trippable, and -1 remains usable. 
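
For illustration only (the path is a placeholder, and chown of course still needs the usual privileges), the user-visible goal is roughly:

    # Sketch of the intended uid/gid behaviour, seen from Python.
    import os

    st = os.stat('/some/file')        # placeholder path
    print st.st_uid, st.st_gid        # non-negative even for ids >= 2**31 on 32-bit builds
    os.chown('/some/file', -1, -1)    # -1 still means "leave uid/gid unchanged"
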
+# (rhbz#697470) +Patch157: 00157-uid-gid-overflows.patch + +# Upstream as of Python 2.7.4 +# Patch158: 00158-fix-hashlib-leak.patch + +# 00160 # +# python3.spec's +# Patch160: 00160-disable-test_fs_holes-in-rpm-build.patch +# is not relevant for Python 2 + +# 00161 # +# python3.spec has: +# Patch161: 00161-fix-test_tools-directory.patch +# which will likely become relevant for Python 2 next time we rebase + +# 00162 # +# python3.spec has: +# Patch162: 00162-distutils-sysconfig-fix-CC-options.patch + +# 00163 # +# python3.spec has: +# Patch163: 00163-disable-parts-of-test_socket-in-rpm-build.patch + +# 00164 # +# python3.spec has: +# Patch164: 00164-disable-interrupted_write-tests-on-ppc.patch + +# 00165 # +# Backport to Python 2 from Python 3.3 of improvements to the "crypt" module +# adding precanned ways of salting a password (rhbz#835021) +# Based on r88500 patch to py3k from Python 3.3 +# plus 6482dd1c11ed, 0586c699d467, 62994662676a, 74a1110a3b50, plus edits +# to docstrings to note that this additional functionality is not standard +# within 2.7 +Patch165: 00165-crypt-module-salt-backport.patch + +# 00166 # +# Bulletproof the gdb debugging hooks against the case where co_filename for +# a frame can't be read from the inferior process (rhbz#912025) +# +# Not yet sent upstream +Patch166: 00166-fix-fake-repr-in-gdb-hooks.patch + +# 00167 # +# Don't run any of the stack navigation tests in test_gdb when Python is +# optimized, since there appear to be many different ways in which gdb can +# fail to read the PyFrameObject* for arbitrary places in the callstack, +# presumably due to compiler optimization (rhbz#912025) +# +# Not yet sent upstream +Patch167: 00167-disable-stack-navigation-tests-when-optimized-in-test_gdb.patch + +# 00168 # +# Update distutils.sysconfig so that if CFLAGS is defined in the environment, +# when building extension modules, it is appended to the full compilation +# flags from Python's Makefile, rather than instead reducing the compilation +# flags to the subset within OPT and adding it to those. +# +# In particular, this should ensure that "-fno-strict-aliasing" is used by +# "python setup.py build" even when CFLAGS is defined in the environment. +# +# (rhbz#849994) +Patch168: 00168-distutils-cflags.patch + +# 00169 # +# Use SHA-256 rather than implicitly using MD5 within the challenge handling +# in multiprocessing.connection +# +# Sent upstream as http://bugs.python.org/issue17258 +# (rhbz#879695) +Patch169: 00169-avoid-implicit-usage-of-md5-in-multiprocessing.patch + +# 00170 # +# In debug builds, try to print repr() when a C-level assert fails in the +# garbage collector (typically indicating a reference-counting error +# somewhere else e.g in an extension module) +# Backported to 2.7 from a patch I sent upstream for py3k +# http://bugs.python.org/issue9263 (rhbz#614680) +# hiding the proposed new macros/functions within gcmodule.c to avoid exposing +# them within the extension API. 
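
The GC assertions themselves are C-level, but the other debug-only hooks referenced in this block -- sys.pydebug from the debug-build patch, sys._debugmallocstats from patch 147, and sys.getcounts on COUNT_ALLOCS builds -- can be exercised from Python. The sketch below is illustrative only and guards every access, since none of these attributes exists on a stock upstream interpreter.

    # Sketch only: poke at the debug-build instrumentation when it is present.
    import sys

    if getattr(sys, 'pydebug', False):            # flag added by the debug-build patch
        if hasattr(sys, '_debugmallocstats'):     # added by patch 147
            sys._debugmallocstats()               # dump low-level allocator statistics
        if hasattr(sys, 'getcounts'):             # only in COUNT_ALLOCS-enabled builds
            print '%d types tracked by COUNT_ALLOCS' % len(sys.getcounts())
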
+# (rhbz#850013)
+Patch170: 00170-gc-assertions.patch
+
+# Upstream as of Python 2.7.4
+# Patch171: 00171-raise-correct-exception-when-dev-urandom-is-missing.patch
+
+# Upstream as of Python 2.7.4
+# Patch172: 00172-use-poll-for-multiprocessing-socket-connection.patch
+
+# 00173 #
+# Workaround for ENOPROTOOPT seen in Koji within
+# test.test_support.bind_port()
+# (rhbz#913732)
+Patch173: 00173-workaround-ENOPROTOOPT-in-bind_port.patch
+
+# 00174 #
+# Workaround for failure to set up prefix/exec_prefix when running
+# an embedded libpython that sets Py_SetProgramName() to a name not
+# on $PATH when run from the root directory due to
+# https://fedoraproject.org/wiki/Features/UsrMove
+# e.g. cmpi-bindings under systemd (rhbz#817554):
+Patch174: 00174-fix-for-usr-move.patch
+
+# 00175 #
+# Upstream as of Python 2.7.5
+# Patch175: 00175-fix-configure-Wformat.patch
+
+# 00176 #
+# python3.spec had:
+# Patch176: 00176-upstream-issue16754-so-extension.patch
+# doesn't affect python2
+
+# 00177 #
+# python3.spec has
+# Patch177: 00177-platform-unicode.patch
+# Does not affect python2
+
+# 00178 #
+# python3.spec has
+# Patch178: 00178-dont-duplicate-flags-in-sysconfig.patch
+# Does not affect python2 AFAICS (different sysconfig values initialization)
+
+# 00179 #
+# python3.spec has
+# Patch179: 00179-dont-raise-error-on-gdb-corrupted-frames-in-backtrace.patch
+# Doesn't seem to affect python2
+
+# 00180 #
+# Enable building on ppc64p7
+# Not appropriate for upstream, Fedora-specific naming
+Patch180: 00180-python-add-support-for-ppc64p7.patch
+
+# 00181 #
+# Allow arbitrary timeout for Condition.wait, as reported in
+# https://bugzilla.redhat.com/show_bug.cgi?id=917709
+# Upstream doesn't want this: http://bugs.python.org/issue17748
+# But we have no better solution downstream yet, and since there is
+# no API breakage, we apply this patch.
+# Doesn't apply to Python 3, where this is fixed otherwise and works.
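+# For illustration only (not used by the build), the call this concerns:
+#
+#   import threading
+#   cond = threading.Condition()
+#   with cond:
+#       cond.wait(timeout=86400.0)   # an arbitrarily long, but finite, timeout
+#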
+Patch181: 00181-allow-arbitrary-timeout-in-condition-wait.patch + +# 00182 # +# python3.spec had +# Patch182: 00182-fix-test_gdb-test_threads.patch + +# 00183 # +# python3.spec has +# Patch183: 00183-cve-2013-2099-fix-ssl-match_hostname-dos.patch + +# 00184 # +# Fix for https://bugzilla.redhat.com/show_bug.cgi?id=979696 +# Fixes build of ctypes against libffi with multilib wrapper +# Python recognizes ffi.h only if it contains "#define LIBFFI_H", +# but the wrapper doesn't contain that, which makes the build fail +# We patch this by also accepting "#define ffi_wrapper_h" +Patch184: 00184-ctypes-should-build-with-libffi-multilib-wrapper.patch + +# 00185 # +# Makes urllib2 honor "no_proxy" enviroment variable for "ftp:" URLs +# when ftp_proxy is set +# Resolves rhbz#971267 +Patch185: 00185-urllib2-honors-noproxy-for-ftp.patch + +# 00186 # +# Fix memory leak of variable utf8 in marshal.c +# (rhbz#990554) +Patch186: 00186-memory-leak-marshalc.patch + +# 00187 # +# Add an explicit RPATH to pyexpat.so pointing at the directory +# containing the system expat (which has the extra XML_SetHashSalt +# symbol), to avoid an ImportError with a link error if there's an +# LD_LIBRARY_PATH containing a "vanilla" build of expat (without the +# symbol) (originally rhbz#833271, for rhel 7 rhbz#996665): +Patch187: 00187-add-RPATH-to-pyexpat.patch + +# 00188 # +# Fix for CVE-2013-4238 -- +# SSL module fails to handle NULL bytes inside subjectAltNames general names +# http://bugs.python.org/issue18709 +# rhbz#998781 +Patch188: 00188-CVE-2013-4238-hostname-check-bypass-in-SSL-module.patch + +# 00189 # +# Fixes gdb py-bt command not to raise exception while processing +# statements from eval +# rhbz#1008154 (patch by Attila Fazekas) +Patch189: 00189-gdb-py-bt-dont-raise-exception-from-eval.patch + +# 190 # +# Don't fail various gdb tests on ppc64 if glibc debug +# symbols are installed +Patch190: 00190-gdb-fix-ppc64-failures.patch + +# 00191 # +# Add explicit RPATH to _elementtree.so +# rhbz#1019345 +Patch191: 00191-add-RPATH-to-elementtree.patch + +# 00192 # +# Fix missing documentation for some keywords +# rhbz#1032116 +Patch192: 00192-Fix-missing-documentation-for-some-keywords.patch + +# 00193 # +# Fix buffer overflow (upstream patch, http://bugs.python.org/issue20246) +# rhbz#1062376 +Patch193: 00193-buffer-overflow.patch + +# 00194 # +# Make GDB test pass even if GDB prints program counter for +# the first trace frame +Patch194: 00194-gdb-dont-fail-on-frame-with-address.patch + +# 0195 # +# Make multiprocessing ignore EINTR +# rhbz#1181624 +Patch195: 00195-make-multiproc-ignore-EINTR.patch + +# 00196 # +# Avoid double close of subprocess pipes when the child process +# fails starting +# rhbz#110345p +Patch196: 00196-avoid-double-close-of-pipes.patch + +# 00197 # +# Add missing import in bdist_rpm +# rhbz#1177613 +Patch197: 00197-add-missing-import-in-bdist_rpm.patch + +# 00198 # +# Fix importing readline producing erroneous output +Patch198: 00198-fix-readline-erroneous-output.patch + +# 00199 # +# Fix for CVE-2013-1753 +# rhbz#1046170 +Patch199: 00199-CVE-2013-1753.patch + +# 00200 # +# Fix for CVE-2014-4616 +# rhbz#1112285 +Patch200: 00200-CVE-2014-4616.patch + +# 00201 # +# Fix for CVE-2014-4650 +# rhbz#1113527 +Patch201: 00201-CVE-2014-4650.patch + +# 00202 # +# Fix for CVE-2014-7185 +# rhbz#1146026 +Patch202: 00202-CVE-2014-7185.patch + +# Fixes for CVE-2013-1752 +# rhbz#1046174 +Patch203: 00203-CVE-2013-1752-nntplib.patch +Patch204: 00204-CVE-2013-1752-ftplib.patch +Patch205: 
00205-CVE-2013-1752-httplib-headers.patch +Patch206: 00206-CVE-2013-1752-poplib.patch +Patch207: 00207-CVE-2013-1752-smtplib.patch +Patch208: 00208-CVE-2013-1752-imaplib.patch + +# ================== PEP466=========================== +# Massive backport of PEP466 and relevant other fixes +# ================rhbz#1111461======================== +# 00209 # +# backport hmac.compare_digest +# http://bugs.python.org/issue21306 +Patch209: 00209-pep466-backport-hmac.compare_digest.patch +# 00210 # +# backport hashlib.pbkdf2_hmac +# http://bugs.python.org/issue21304 +Patch210: 00210-pep466-backport-hashlib.pbkdf2_hmac.patch +# 00211 # +# UTF-7 decoder can produce inconsistent Unicode string +# http://bugs.python.org/issue19279 +Patch211: 00211-pep466-UTF-7-decoder-fix-illegal-unicode.patch +# 00212 # +# Add range check for %c in PyUnicode_FromFormat +# http://bugs.python.org/issue18184 +Patch212: 00212-pep466-pyunicode_fromformat-raise-overflow.patch +# 00213 # +# Fix %S, %R and %V formats of PyUnicode_FromFormat(). +# http://bugs.python.org/issue122023 +Patch213: 00213-pep466-pyunicode_fromformat-fix-formats.patch +# 00214 # +# Backport SSL module from Python3 +# http://bugs.python.org/issue21308 +Patch214: 00214-pep466-backport-py3-ssl-changes.patch +# 00215 # +# OpenSSL disabled various ciphers and protocols +# we have to reflect it in tests +Patch215: 00215-pep466-reflect-openssl-settings-ssltests.patch +# 00216 # +# fix load_verify_locations on unicode paths +# http://bugs.python.org/issue22244 +Patch216: 00216-pep466-fix-load-verify-locs-unicode.patch +# 00217 # +# backport hashlib changes +# http://bugs.python.org/issue21307 +Patch217: 00217-pep466-backport-hashlib-algorithm-consts.patch +# 00218 # +# update os.urandom +# http://bugs.python.org/issue21305 +Patch218: 00218-pep466-backport-urandom-pers-fd.patch +# 00219 # +# Lib/ssl.py still references _ssl.sslwrap +# http://bugs.python.org/issue22523 +Patch219: 00219-pep466-fix-referenced-sslwrap.patch +# 00220 # +# allow passing cert/ssl information to urllib2.urlopen and httplib.HTTPSConnection +Patch220: 00220-pep466-allow-passing-ssl-urrlib-httplib.patch +# 00221 # +# Patch214 remove sslwrap from _ssl.c this so we have to reimplement it +#Patch221: 00221-pep466-backport-sslwrap-c-ssl.patch +# Backporting sslwrap introduces regressions so patch 221 was dropped (rhbz#1331425) +# 00222 # +# test_ssl: fails on recent libressl version with BAD_DH_P_LENGTH +# https://bugs.python.org/issue23844 +Patch222: 00222-add-2014-bit-dh-key.patch +# 00223 # +# PEP 476: verify HTTPS certificates by default +# http://bugs.python.org/issue22417 +# Resolves:rhbz#1219110 +Patch223: 00223-pep476-verify-certs-by-default.patch +# 00224 # +# Add switch to toggle global verification on and off +# Resolves:rhbz#1219108 +# For more information see PEP493 +Patch224: 00224-pep476-add-toggle-for-cert-verify.patch + +# 00225 # +# Add list of choices to sort option of cProfile +# Resolves:rhbz#1237107 +Patch225: 00225-cprofile-sort-option.patch + +# 00227 # +# Make load_cert_chain function of SSLContext accept a +# keyfile argument set to None +# Upstream issue: http://bugs.python.org/issue22787 +# Resolves: rhbz#1250611 +Patch227: 00227-accept-none-keyfile-loadcertchain.patch + +# 00228 # +# Backport SSLSocket.version function +# Resolves: rhbz#1259421 +Patch228: 00228-backport-ssl-version.patch + +# 00229 # +# Adjusted tests to acknowledge lack of SSLv2 support +# Resolves: rhbz#1315310 +Patch229: 00229-Expect-a-failure-when-trying-to-connect-with-SSLv2-c.patch + +# 
00230 #
+# Force all child threads to terminate in TestForkInThread, so no zombie
+# processes get left behind with stalled threads which hang Python builds
+# https://bugs.python.org/issue26456
+# Resolves: rhbz#1313259
+Patch230: 00230-force-all-child-threads-to-terminate-in-TestForkInThread.patch
+
+# 00231 #
+# Fix hashlib algorithms breaking by initializing OpenSSL_add_all_digests
+# function in _hashlib library of _hashopenssl.c module
+# Resolves: rhbz#1295459
+Patch231: 00231-Initialize-OpenSSL_add_all_digests-in-_hashlib.patch
+
+# 00232 #
+# Removal of the '$Date$' placeholder on the python(1) man page footer,
+# which does not expand during build
+# http://bugs.python.org/issue17167
+# Resolves: rhbz#1268310
+Patch232: 00232-man-page-date-macro-removal.patch
+
+# 00233 #
+# Backport of Computed Goto dispatch
+# http://bugs.python.org/issue4753
+# https://lwn.net/Articles/646888/
+# Resolves: rhbz#1289277
+Patch233: 00233-Computed-Goto-dispatch.patch
+
+# 00234 #
+# Updated implementation of PEP493
+# The following features were backported:
+# https://www.python.org/dev/peps/pep-0493/#feature-configuration-api
+# https://www.python.org/dev/peps/pep-0493/#feature-environment-based-configuration
+# Combined usage explained:
+# https://www.python.org/dev/peps/pep-0493/#recommendation-for-combined-feature-backports
+# Resolves: rhbz#1315758
+# Patch was modified to enable the certificate verification globally as the platform default
+# See also patch224
+# Resolves: rhbz#1219110
+Patch234: 00234-PEP493-updated-implementation.patch
+
+# 00235 #
+# JSON decoder lone surrogates fix.
+# https://bugs.python.org/issue11489
+# Resolves: rhbz#1301017
+Patch235: 00235-JSON-decoder-lone-surrogates-fix.patch
+
+# 00236 #
+# Fix for iteration over files with very long lines
+# http://bugs.python.org/issue22526
+# Resolves: rhbz#1271760
+Patch236: 00236-use-Py_ssize_t-for-file-offset-and-length-computations-in-iteration.patch
+
+# 00237 #
+# CVE-2016-0772 python: smtplib StartTLS stripping attack
+# https://bugzilla.redhat.com/show_bug.cgi?id=1303647
+# FIXED UPSTREAM: https://hg.python.org/cpython/rev/b3ce713fb9be
+# Raise an error when STARTTLS fails
+# Resolves: rhbz#1346357
+Patch237: 00237-CVE-2016-0772-smtplib.patch
+
+# 00238 #
+# CVE-2016-5699 python: http protocol stream injection attack
+# https://bugzilla.redhat.com/show_bug.cgi?id=1303699
+# FIXED UPSTREAM: https://hg.python.org/cpython/rev/1c45047c5102
+# Disabled HTTP header injections in httplib
+# Resolves: rhbz#1346357
+Patch238: 00238-CVE-2016-5699-httplib.patch
+
+# 00240 #
+# Increase the timeouts of test_smtplib
+# as there are failures on various powerpc architectures.
+# FIXED UPSTREAM: https://github.com/python/cpython/commit/1122236c89770466c629aa0f0b0de2b2731b82ee
+# Resolves: rhbz#1497795
+Patch240: 00240-increase-smtplib-tests-timeouts.patch
+
+# 00241 #
+# CVE-2016-5636: http://seclists.org/oss-sec/2016/q2/560
+# https://hg.python.org/cpython/rev/985fc64c60d6/
+# https://hg.python.org/cpython/rev/2edbdb79cd6d
+# Fix possible integer overflow and heap corruption in zipimporter.get_data()
+# FIXED UPSTREAM: https://bugs.python.org/issue26171
+# Resolves: rhbz#1356364
+Patch241: 00241-CVE-2016-5636-buffer-overflow-in-zipimport-module-fix.patch
+
+# 00242 #
+# HTTPoxy attack (CVE-2016-1000110)
+# https://httpoxy.org/
+# FIXED UPSTREAM: http://bugs.python.org/issue27568
+# Based on a patch by Rémi Rampin
+# Resolves: rhbz#1359164
+Patch242: 00242-CVE-2016-1000110-httpoxy.patch
+
+# 00255 #
+# Fix Python's failure to decode X.509 certificates
+# with a GEN_RID general name in subject alternative names.
+# FIXED UPSTREAM: http://bugs.python.org/issue27691
+# Resolves: rhbz#1364444
+Patch255: 00255-Fix-ssl-module-parsing-of-GEN_RID-subject-alternative-name-fields-in-X.509-certs.patch
+
+# 00256 #
+# Fix Python's incorrect parsing of certain regular expressions
+# FIXED UPSTREAM: http://bugs.python.org/issue18647
+# Resolves: rhbz#1373363
+Patch256: 00256-fix-incorrect-parsing-of-regular-expressions.patch
+
+# 00257 #
+# Python's threading library doesn't use the monotonic clock when handling wait timeouts,
+# so when the system clock is set backwards, the wait doesn't return after the timeout,
+# causing deadlocks.
+# This patch works around the issue.
+# Resolves: rhbz#1368076
+# DOWNSTREAM ONLY PATCH
+Patch257: 00257-threading-wait-clamp-remaining-time.patch
+
+# 00263 #
+# Fix reference leaks of certfile_bytes and keyfile_bytes at _ssl.c
+# FIXED UPSTREAM: http://bugs.python.org/issue27267
+# https://github.com/python/cpython/commit/b3e073cbb3af2999e6e589f55ec2fc8a109fdc14
+# https://github.com/python/cpython/commit/3b91de5a76aad471476f5bc5943e44bf386c0e6d
+# Resolves: rhbz#1272562
+Patch263: 00263-fix-ssl-reference-leaks.patch
+
+# 00265 #
+# Protect the key list during fork() in order for the forked process to not inherit an inconsistent key list.
+# Reported upstream: http://bugs.python.org/issue29640
+# Resolves: rhbz#1268226
+Patch265: 00265-protect-key-list-during-fork.patch
+
+# 00266 #
+# Make shutil.make_archive() not ignore empty directories when creating a zip file.
+# Also refactor and extend the shutil test suite.
+# FIXED UPSTREAM: https://bugs.python.org/issue24982
+# https://github.com/python/cpython/commit/04861dc82f595e3e2f0ab4b1a62de2f812c8fa37
+# Resolves: rhbz#1439734
+Patch266: 00266-fix-shutil.make_archive-ignoring-empty-dirs.patch
+
+# 00268 #
+# Set stream to None in case an _open() fails.
+# FIXED UPSTREAM: https://bugs.python.org/issue21742
+# Resolves: rhbz#1432003
+Patch268: 00268-set-stream-name-to-None.patch
+
+# 00275 #
+# Fix fcntl() with integer argument on 64-bit big-endian platforms.
+# FIXED UPSTREAM: https://bugs.python.org/issue22821
+# Resolves: rhbz#1489858
+Patch275: 00275-fix-fnctl-with-integer-on-big-endian.patch
+
+# 00276 #
+# Increase imaplib's MAXLINE to accommodate modern mailbox sizes.
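+# (Illustration only, assuming the module-level imaplib._MAXLINE constant used
+#  by the upstream fix is what this patch raises:
+#      import imaplib
+#      print imaplib._MAXLINE
+#  prints the per-line limit in effect.)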
+# FIXED UPSTREAM: https://bugs.python.org/issue23647 +# Resolves: rhbz#1485808 +Patch276: 00276-increase-imaplib-MAXLINE.patch + +# 00281 # +# Add context parameter to xmlrpclib.ServerProxy +# FIXED UPSTREAM: https://bugs.python.org/issue22960 +# Resolves: rhbz#1490392 +Patch281: 00281-add-context-parameter-to-xmlrpclib.ServerProxy.patch + +# 00282 # +# Make it more likely for the system allocator to release free()d memory arenas +# - Use mmap for arena allocation +# - Increase SMALL_REQUEST_THRESHOLD so that small dictionaries use +# pre-allocated arena pools +# FIXED UPSTREAM: https://bugs.python.org/issue20494 +# Resolves: rhbz#1468410 +Patch282: 00282-obmalloc-mmap-threshold.patch + +# 00285 # +# Fix nondeterministic read in test_pty which fails randomly in brew. +# FIXED UPSTREAM: https://bugs.python.org/issue31158 +# Resolves: rhbz#1512160 +Patch285: 00285-fix-non-deterministic-read-in-test_pty.patch + +# 00287 # +# On the creation of io.FileIO() and builtin file() objects the GIL is now released +# when checking the file descriptor. io.FileIO.readall(), io.FileIO.read(), and +# file.read() also now release the GIL when getting the file size, which fixes hanging +# of all threads when trying to access an inaccessible NFS server. +# FIXED UPSTREAM: https://bugs.python.org/issue32186 +# Resolves: rhbz#1520068 +Patch287: 00287-fix-thread-hanging-on-inaccessible-nfs-server.patch + +# 00295 # +# Fix http.client.HTTPConnection tunneling and HTTPConnection.set_tunnel with default port, +# which was breaking https connections behind a proxy. +# FIXED UPSTREAM: https://bugs.python.org/issue7776 +# https://bugs.python.org/issue22095 +# https://bugs.python.org/issue23300 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1483438 +Patch295: 00295-fix-https-behind-proxy.patch + +# 00296 # +# Re-add the private `_set_hostport` api to httplib +# DOWNSTREAM ONLY: backwards compatibility backport +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1546351 +Patch296: 00296-Readd-the-private-_set_hostport-api-to-httplib.patch + +# 00298 # +# The SSL module no longer sends IP addresses in SNI TLS extension on +# platforms with OpenSSL 1.0.2+ or inet_pton. +# FIXED UPSTREAM: https://bugs.python.org/issue32185 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1555314 +Patch298: 00298-do-not-send-IP-in-SNI-TLS-extension.patch + +# 00299 # +# Fix ssl module, Python 2.7 doesn't have Py_MAX +# The previous patch 298 broke python2. This is a fixup. +# FIXED UPSTREAM: https://github.com/python/cpython/pull/5878 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1555314 +Patch299: 00299-fix-ssl-module-pymax.patch + +# 00303 # +# Fix CVE-2018-1060 and CVE-2018-1061 +# FIXED UPSTREAM: https://bugs.python.org/issue32981 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1563454 +# and https://bugzilla.redhat.com/show_bug.cgi?id=1549192 +Patch303: 00303-CVE-2018-1060-1.patch + +# 00305 # +# Remove 3DES from the cipher list to mitigate CVE-2016-2183 (sweet32). 
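+# (Illustration only, not part of the build: after the change a client can
+#  check that a connection did not negotiate a 3DES suite, e.g.
+#      import socket, ssl
+#      s = ssl.wrap_socket(socket.create_connection(("www.example.org", 443)))
+#      print s.cipher()[0]    # expected not to be a DES-CBC3-* suite
+#  the hostname above is only a placeholder.)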
+# FIXED UPSTREAM: https://bugs.python.org/issue27850
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1581901
+Patch305: 00305-CVE-2016-2183.patch
+
+# 00306 #
+# Fix OSError 17 due to _multiprocessing/semaphore.c
+# assuming a one-to-one Pid -> process mapping
+# FIXED UPSTREAM: https://bugs.python.org/issue24303
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1579432
+Patch306: 00306-fix-oserror-17-upon-semaphores-creation.patch
+
+# 00310 #
+# CVE-2018-14647
+# Use XML_SetHashSalt in _elementtree
+# FIXED UPSTREAM: https://bugs.python.org/issue34623
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1636838
+Patch310: 00310-use-xml-sethashsalt-in-elementtree.patch
+
+# 00314 #
+# Python can sometimes create incorrect .pyc files: check I/O error.
+# FIXED UPSTREAM: https://bugs.python.org/issue25083
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1629982
+Patch314: 00314-parser-check-e_io.patch
+
+# 00317 #
+# CVE-2019-5010 Crash on parsing a specially crafted X509 certificate
+# FIXED UPSTREAM: https://bugs.python.org/issue35746
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1666788
+Patch317: 00317-CVE-2019-5010-ssl-crl.patch
+
+# 00320 #
+# Security fix for CVE-2019-9636 and CVE-2019-10160: Information Disclosure due to urlsplit improper NFKC normalization
+# FIXED UPSTREAM: https://bugs.python.org/issue36216 and https://bugs.python.org/issue36742
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1689317
+# and https://bugzilla.redhat.com/show_bug.cgi?id=1718388
+Patch320: 00320-CVE-2019-9636-and-CVE-2019-10160.patch
+
+# 00324 #
+# Disallow control chars in http URLs
+# Security fix for CVE-2019-9740 and CVE-2019-9947
+# Fixed upstream: https://bugs.python.org/issue30458
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1704362
+# and https://bugzilla.redhat.com/show_bug.cgi?id=1703530
+# Also backport https://bugs.python.org/issue30500 as the urllib2
+# tests rely on that, and include the test_splithost case added in
+# https://github.com/python/cpython/commit/f0b630b826949e51f429418e6675fb6a8a131f3c
+Patch324: 00324-disallow-control-chars-in-http-urls.patch
+
+# 00325 #
+# Unnecessary local_file:// URL scheme allowed reading local files in urllib
+# Security fix for CVE-2019-9948
+# Fixed upstream: https://bugs.python.org/issue35907
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1704174
+Patch325: 00325-CVE-2019-9948.patch
+
+# 00330 #
+# Fix CVE-2018-20852: cookie domain check returning incorrect results
+# Fixed upstream: https://bugs.python.org/issue35121
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1741551
+Patch330: 00330-CVE-2018-20852.patch
+
+# 00332 #
+# Fix CVE-2019-16056: Don't parse domains containing @
+# Fixed upstream: https://bugs.python.org/issue34155
+# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1750773
+Patch332: 00332-CVE-2019-16056.patch
+
+# (New patches go here ^^^)
+#
+# When adding new patches to "python" and "python3" in Fedora 17 onwards,
+# please try to keep the patch numbers in-sync between the two specfiles:
+#
+# - use the same patch number across both specfiles for conceptually-equivalent
+# fixes, ideally with the same name
+#
+# - when a patch is relevant to both specfiles, use the same introductory
+# comment in both specfiles where possible (to improve "diff" output when
+# comparing them)
+#
+# - when a patch is only relevant for one of the two specfiles, leave a gap
+# in the patch numbering in the other specfile, adding a comment when
+# omitting a
patch, both in the manifest section here, and in the "prep" +# phase below +# +# Hopefully this will make it easier to ensure that all relevant fixes are +# applied to both versions. + +# This is the generated patch to "configure"; see the description of +# %%{regenerate_autotooling_patch} +# above: +Patch5000: 05000-autotool-intermediates.patch + +# ====================================================== +# Additional metadata, and subpackages +# ====================================================== + +%if %{main_python} +Obsoletes: Distutils +Provides: Distutils +Provides: python2 = %{version} +Obsoletes: python-elementtree <= 1.2.6 +Obsoletes: python-sqlite < 2.3.2 +Provides: python-sqlite = 2.3.2 +Obsoletes: python-ctypes < 1.0.1 +Provides: python-ctypes = 1.0.1 +Obsoletes: python-hashlib < 20081120 +Provides: python-hashlib = 20081120 +Obsoletes: python-uuid < 1.31 +Provides: python-uuid = 1.31 + +# python-sqlite2-2.3.5-5.fc18 was retired. Obsolete the old package here +# so it gets uninstalled on updates +%if 0%{?fedora} >= 17 +Obsoletes: python-sqlite2 <= 2.3.5-6 +%endif + +# python-argparse is part of python as of version 2.7 +# drop this Provides in F17 +# (having Obsoletes here caused problems with multilib; see rhbz#667984) +Provides: python-argparse = %{version}-%{release} +%endif + +BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) + +URL: http://www.python.org/ + +%description +Python is an interpreted, interactive, object-oriented programming +language often compared to Tcl, Perl, Scheme or Java. Python includes +modules, classes, exceptions, very high level dynamic data types and +dynamic typing. Python supports interfaces to many system calls and +libraries, as well as to various windowing systems (X11, Motif, Tk, +Mac and MFC). + +Programmers can write new built-in modules for Python in C or C++. +Python can be used as an extension language for applications that need +a programmable interface. + +Note that documentation for Python is provided in the python-docs +package. + +This package provides the "python" executable; most of the actual +implementation is within the "python-libs" package. + +%package libs +Summary: Runtime libraries for Python +Group: Applications/System + +# New behaviour of httplib (patch 295) doesn't play well with really old pip +# version (1.4.1) bundled in the old virtualenv package. This new version of +# virtualenv updated bundled pip to 9.0.1 which works fine. +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1483438 +Conflicts: python-virtualenv < 15.1.0-1 + +# Needed for ctypes, to load libraries, worked around for Live CDs size +# Requires: binutils + +# expat 2.1.0 added the symbol XML_SetHashSalt without bumping SONAME. 
We use +# this symbol (in pyexpat), so we must explicitly state this dependency to +# prevent "import pyexpat" from failing with a linker error if someone hasn't +# yet upgraded expat: +Requires: expat >= 2.1.0 + +Provides: python2-libs = %{version}-%{release} +Provides: python2-libs%{?_isa} = %{version}-%{release} + +%description libs +This package contains runtime libraries for use by Python: +- the libpython dynamic library, for use by applications that embed Python as +a scripting language, and by the main "python" executable +- the Python standard library + +%package devel +Summary: The libraries and header files needed for Python development +Group: Development/Libraries +Requires: %{python}%{?_isa} = %{version}-%{release} +Requires: pkgconfig + +# Macros were previously here, but were moved to their respective packages +Requires: python-rpm-macros > 3-30 +Requires: python2-rpm-macros > 3-30 + +# Needed here because of the migration of Makefile from -devel to the main +# package +Conflicts: %{python} < %{version}-%{release} +%if %{main_python} +Obsoletes: python2-devel < %{version}-%{release} +Provides: python2-devel = %{version}-%{release} +Provides: python2-devel%{?_isa} = %{version}-%{release} +%endif + +%description devel +The Python programming language's interpreter can be extended with +dynamically loaded extensions and can be embedded in other programs. +This package contains the header files and libraries needed to do +these types of tasks. + +Install python-devel if you want to develop Python extensions. The +python package will also need to be installed. You'll probably also +want to install the python-docs package, which contains Python +documentation. + +%package tools +Summary: A collection of development tools included with Python +Group: Development/Tools +Requires: %{name} = %{version}-%{release} +Requires: %{tkinter} = %{version}-%{release} +%if %{main_python} +Obsoletes: python2-tools < %{version}-%{release} +Provides: python2-tools = %{version} +%endif + +%description tools +This package includes several tools to help with the development of Python +programs, including IDLE (an IDE with editing and debugging facilities), a +color editor (pynche), and a python gettext program (pygettext.py). + +%package -n %{tkinter} +Summary: A graphical user interface for the Python scripting language +Group: Development/Languages +Requires: %{name} = %{version}-%{release} +%if %{main_python} +Obsoletes: tkinter2 < %{version}-%{release} +Provides: tkinter2 = %{version} +%endif + +%description -n %{tkinter} + +The Tkinter (Tk interface) program is an graphical user interface for +the Python scripting language. + +You should install the tkinter package if you'd like to use a graphical +user interface for Python programming. + +%package test +Summary: The test modules from the main python package +Group: Development/Languages +Requires: %{name} = %{version}-%{release} + +%description test + +The test modules from the main python package: %{name} +These have been removed to save space, as they are never or almost +never used in production. + +You might want to install the python-test package if you're developing python +code that uses more than just unittest and/or test_support.py. + +%if 0%{?with_debug_build} +%package debug +Summary: Debug version of the Python runtime +Group: Applications/System + +# The debug build is an all-in-one package version of the regular build, and +# shares the same .py/.pyc files and directories as the regular build. 
Hence +# we depend on all of the subpackages of the regular build: +Requires: %{name}%{?_isa} = %{version}-%{release} +Requires: %{name}-libs%{?_isa} = %{version}-%{release} +Requires: %{name}-devel%{?_isa} = %{version}-%{release} +Requires: %{name}-test%{?_isa} = %{version}-%{release} +Requires: tkinter%{?_isa} = %{version}-%{release} +Requires: %{name}-tools%{?_isa} = %{version}-%{release} + +%description debug +python-debug provides a version of the Python runtime with numerous debugging +features enabled, aimed at advanced Python users, such as developers of Python +extension modules. + +This version uses more memory and will be slower than the regular Python build, +but is useful for tracking down reference-counting issues, and other bugs. + +The bytecodes are unchanged, so that .pyc files are compatible between the two +version of Python, but the debugging features mean that C/C++ extension modules +are ABI-incompatible with those built for the standard runtime. + +It shares installation directories with the standard Python runtime, so that +.py and .pyc files can be shared. All compiled extension modules gain a "_d" +suffix ("foo_d.so" rather than "foo.so") so that each Python implementation can +load its own extensions. +%endif # with_debug_build + + +# ====================================================== +# The prep phase of the build: +# ====================================================== + +%prep +%setup -q -n Python-%{version} + +%if 0%{?with_systemtap} +# Provide an example of usage of the tapset: +cp -a %{SOURCE4} . +cp -a %{SOURCE5} . +%endif # with_systemtap + +# Ensure that we're using the system copy of various libraries, rather than +# copies shipped by upstream in the tarball: +# Remove embedded copy of expat: +rm -r Modules/expat || exit 1 + +# Remove embedded copy of libffi: +for SUBDIR in darwin libffi libffi_arm_wince libffi_msvc libffi_osx ; do + rm -r Modules/_ctypes/$SUBDIR || exit 1 ; +done + +# Remove embedded copy of zlib: +rm -r Modules/zlib || exit 1 + +# Don't build upstream Python's implementation of these crypto algorithms; +# instead rely on _hashlib and OpenSSL. 
+# +# For example, in our builds md5.py uses always uses hashlib.md5 (rather than +# falling back to _md5 when hashlib.md5 is not available); hashlib.md5 is +# implemented within _hashlib via OpenSSL (and thus respects FIPS mode) +for f in md5module.c md5.c shamodule.c sha256module.c sha512module.c; do + rm Modules/$f +done + +# +# Apply patches: +# +%patch0 -p1 -b .rhconfig +%patch1 -p1 -b .no_gui +%patch4 -p1 -b .cflags +%patch6 -p1 -b .plural +%patch7 -p1 + +# Try not disabling egg-infos, bz#414711 +#patch50 -p1 -b .egginfo + +# patch101: upstream as of Python 2.7.4 +%if "%{_lib}" == "lib64" +%patch102 -p1 -b .lib64 +%patch103 -p1 -b .lib64-sysconfig +%patch104 -p1 +%endif + +%patch10 -p1 -b .binutils-no-dep +# patch11: upstream as of Python 2.7.3 +%patch13 -p1 -b .socketmodule +%patch14 -p1 -b .socketmodule2 +%patch16 -p1 -b .rpath +%patch17 -p1 -b .distutils-rpath + +%if 0%{?with_systemtap} +%patch55 -p1 -b .systemtap +%endif + +%patch111 -p1 -b .no-static-lib + +%patch112 -p1 -b .debug-build + +%patch113 -p1 -b .more-configuration-flags + +%patch114 -p1 -b .statvfs-f-flag-constants + +# patch115: upstream as of Python 2.7.3 + +%patch121 -p1 +%patch125 -p1 -b .less-verbose-COUNT_ALLOCS +# 00126: upstream as of Python 2.7.5 +# 00127: upstream as of Python 2.7.5 +%patch128 -p1 + +%patch130 -p1 + +%ifarch ppc %{power64} +%patch131 -p1 +%endif + +%patch132 -p1 +%patch133 -p1 +%patch134 -p1 +%patch135 -p1 +%patch136 -p1 +%patch137 -p1 +%patch138 -p1 +%ifarch %{arm} +%patch139 -p1 +%endif +%ifarch %{sparc} +%patch140 -p1 +%endif +%patch141 -p1 +%patch142 -p1 +%patch143 -p1 -b .tsc-on-ppc +%if !%{with_gdbm} +%patch144 -p1 +%endif +# 00145: upstream as of Python 2.7.3 +%patch146 -p1 +%patch147 -p1 +# 00148: upstream as of Python 2.7.3 +# 00149: not for python 2 +# 00150: not for python 2 +# 00151: upstream as of Python 2.7.3 +# 00152: not for python 2 +%patch153 -p0 +# 00154: not for python 2 +%patch155 -p1 +%patch156 -p1 +%patch157 -p1 +# 00158: upstream as of Python 2.7.4 +# 00160: not for python 2 +# 00161: not for python 2 yet +# 00162: not for python 2 yet +# 00163: not for python 2 yet +# 00164: not for python 2 yet +%patch165 -p1 +mv Modules/cryptmodule.c Modules/_cryptmodule.c +%patch166 -p1 +%patch167 -p1 +%patch168 -p1 +%patch169 -p1 +%patch170 -p1 +# 00171: upstream as of Python 2.7.4 +# 00171: upstream as of Python 2.7.4 +%patch173 -p1 +%patch174 -p1 -b .fix-for-usr-move +# 00175: upstream as of Python 2.7.5 +# 00176: not for python 2 +# 00177: not for python 2 +# 00178: not for python 2 +# 00179: not for python 2 +%patch180 -p1 +%patch181 -p1 +# 00182: not for python2 +# 00183: not for python2 +%patch184 -p1 +%patch185 -p1 +%patch186 -p1 +%patch187 -p1 +%patch188 -p1 +%patch189 -p1 +%patch190 -p0 +%patch191 -p1 +%patch192 -p1 +%patch193 -p1 +%patch194 -p0 +%patch195 -p1 +%patch196 -p1 +%patch197 -p1 +%patch198 -p1 +%patch199 -p1 +%patch200 -p1 +%patch201 -p1 +%patch202 -p1 +%patch203 -p1 +%patch204 -p1 +%patch205 -p1 +%patch206 -p1 +%patch207 -p1 +%patch208 -p1 +%patch209 -p1 +%patch210 -p1 +%patch211 -p1 +%patch212 -p1 +%patch213 -p1 +%patch214 -p1 +%patch215 -p1 +%patch216 -p1 +%patch217 -p1 +%patch218 -p1 +%patch219 -p1 +%patch220 -p1 +# 00221: Dropped because of regressions +%patch222 -p1 +%patch223 -p1 +%patch224 -p1 +%patch225 -p1 +%patch227 -p1 +%patch228 -p1 +%patch229 -p1 +%patch230 -p1 +%patch231 -p1 +%patch232 -p1 +%patch233 -p1 +%patch234 -p1 +%patch235 -p1 +%patch236 -p1 +%patch237 -p1 +%patch238 -p1 +%patch240 -p1 +%patch241 -p1 +%patch242 -p1 +%patch255 -p1 
+%patch256 -p1 +%patch257 -p1 +%patch263 -p1 +%patch265 -p1 +%patch266 -p1 +%patch268 -p1 +%patch275 -p1 +%patch276 -p1 +%patch281 -p1 +%patch282 -p1 +%patch285 -p1 +%patch287 -p1 +%patch295 -p1 +%patch296 -p1 +%patch298 -p1 +%patch299 -p1 +%patch303 -p1 +%patch305 -p1 +%patch306 -p1 +%patch310 -p1 +%patch314 -p1 +%patch317 -p1 +%patch320 -p1 +%patch324 -p1 +%patch325 -p1 +%patch330 -p1 +%patch332 -p1 + + +# This shouldn't be necesarry, but is right now (2.2a3) +find -name "*~" |xargs rm -f + +%if ! 0%{regenerate_autotooling_patch} +# Normally we apply the patch to "configure" +# We don't apply the patch if we're working towards regenerating it +%patch5000 -p0 -b .autotool-intermediates +%endif + + +# ====================================================== +# Configuring and building the code: +# ====================================================== + +%build +topdir=$(pwd) +export CFLAGS="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv" +export CXXFLAGS="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv" +export CPPFLAGS="$(pkg-config --cflags-only-I libffi)" +export OPT="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv" +export LINKCC="gcc" +export LDFLAGS="$RPM_LD_FLAGS" +if pkg-config openssl ; then + export CFLAGS="$CFLAGS $(pkg-config --cflags openssl)" + export LDFLAGS="$LDFLAGS $(pkg-config --libs-only-L openssl)" +fi +# compile with -O3 for ppc64 as requested in +# https://bugzilla.redhat.com/show_bug.cgi?id=1051076 +%ifarch %{power64} +export CFLAGS="$CFLAGS -O3" +export CXXFLAGS="$CXXFLAGS -O3" +export OPT="$OPT -O3" +%endif +# Force CC +export CC=gcc + +%if 0%{regenerate_autotooling_patch} +# If enabled, this code regenerates the patch to "configure", using a +# local copy of autoconf-2.65, then exits the build +# +# The following assumes that the copy is installed to ~/autoconf-2.65/bin +# as per these instructions: +# http://bugs.python.org/issue7997 + +for f in pyconfig.h.in configure ; do + cp $f $f.autotool-intermediates ; +done + +# Rerun the autotools: +PATH=~/autoconf-2.65/bin:$PATH autoconf +autoheader + +# Regenerate the patch: +gendiff . .autotool-intermediates > %{PATCH5000} + + +# Exit the build +exit 1 +%endif + +# Define a function, for how to perform a "build" of python for a given +# configuration: +BuildPython() { + ConfName=$1 + BinaryName=$2 + SymlinkName=$3 + ExtraConfigArgs=$4 + PathFixWithThisBinary=$5 + + ConfDir=build/$ConfName + + echo STARTING: BUILD OF PYTHON FOR CONFIGURATION: $ConfName - %{_bindir}/$BinaryName + mkdir -p $ConfDir + + pushd $ConfDir + + # Use the freshly created "configure" script, but in the directory two above: + %global _configure $topdir/configure + +%configure \ + --enable-ipv6 \ + --enable-shared \ + --enable-unicode=%{unicode} \ + --with-dbmliborder=gdbm:ndbm:bdb \ + --with-system-expat \ + --with-system-ffi \ +%if 0%{?with_systemtap} + --with-dtrace \ + --with-tapset-install-dir=%{tapsetdir} \ +%endif +%if 0%{?with_valgrind} + --with-valgrind \ +%endif + $ExtraConfigArgs \ + %{nil} + +make EXTRA_CFLAGS="$CFLAGS" %{?_smp_mflags} + +# We need to fix shebang lines across the full source tree. +# +# We do this using the pathfix.py script, which requires one of the +# freshly-built Python binaries. 
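+# (For example: in the optimized configuration $BinaryName is "python", so a
+#  script whose first line is "#!/usr/bin/python" is rewritten in place by
+#  pathfix.py to start with "#!/usr/bin/env python".)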
+# +# We use the optimized python binary, and make the shebangs point at that same +# optimized python binary: +if $PathFixWithThisBinary +then + LD_LIBRARY_PATH="$topdir/$ConfDir" ./$BinaryName \ + $topdir/Tools/scripts/pathfix.py \ + -i "%{_bindir}/env $BinaryName" \ + $topdir +fi + +# Rebuild with new python +# We need a link to a versioned python in the build directory +ln -s $BinaryName $SymlinkName +LD_LIBRARY_PATH="$topdir/$ConfDir" PATH=$PATH:$topdir/$ConfDir make -s EXTRA_CFLAGS="$CFLAGS" %{?_smp_mflags} + + popd + echo FINISHED: BUILD OF PYTHON FOR CONFIGURATION: $ConfDir +} + +# Use "BuildPython" to support building with different configurations: + +%if 0%{?with_debug_build} +BuildPython debug \ + python-debug \ + python%{pybasever}-debug \ +%ifarch %{ix86} x86_64 ppc %{power64} + "--with-pydebug --with-tsc --with-count-allocs --with-call-profile" \ +%else + "--with-pydebug --with-count-allocs --with-call-profile" \ +%endif + false +%endif # with_debug_build + +BuildPython optimized \ + python \ + python%{pybasever} \ + "" \ + true + + +# ====================================================== +# Installing the built code: +# ====================================================== + +%install +topdir=$(pwd) +rm -rf %{buildroot} +mkdir -p %{buildroot}%{_prefix} %{buildroot}%{_mandir} + +# Clean up patched .py files that are saved as .lib64 +for f in distutils/command/install distutils/sysconfig; do + rm -f Lib/$f.py.lib64 +done + +InstallPython() { + + ConfName=$1 + BinaryName=$2 + PyInstSoName=$3 + + ConfDir=build/$ConfName + + echo STARTING: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName - %{_bindir}/$BinaryName + mkdir -p $ConfDir + + pushd $ConfDir + +make install DESTDIR=%{buildroot} + +# We install a collection of hooks for gdb that make it easier to debug +# executables linked against libpython (such as /usr/lib/python itself) +# +# These hooks are implemented in Python itself +# +# gdb-archer looks for them in the same path as the ELF file, with a -gdb.py suffix. 
+# We put them in the debuginfo package by installing them to e.g.: +# /usr/lib/debug/usr/lib/libpython2.6.so.1.0.debug-gdb.py +# (note that the debug path is /usr/lib/debug for both 32/64 bit) +# +# See https://fedoraproject.org/wiki/Features/EasierPythonDebugging for more +# information +# +# Initially I tried: +# /usr/lib/libpython2.6.so.1.0-gdb.py +# but doing so generated noise when ldconfig was rerun (rhbz:562980) +# + +%if 0%{?with_gdb_hooks} +DirHoldingGdbPy=%{_prefix}/lib/debug/%{_libdir} +PathOfGdbPy=$DirHoldingGdbPy/$PyInstSoName.debug-gdb.py + +mkdir -p %{buildroot}$DirHoldingGdbPy +cp $topdir/Tools/gdb/libpython.py %{buildroot}$PathOfGdbPy + +# Manually byte-compile the file, in case find-debuginfo.sh is run before +# brp-python-bytecompile, so that the .pyc/.pyo files are properly listed in +# the debuginfo manifest: +LD_LIBRARY_PATH="$topdir/$ConfDir" $topdir/$ConfDir/$BinaryName \ + -c "import compileall; import sys; compileall.compile_dir('%{buildroot}$DirHoldingGdbPy', ddir='$DirHoldingGdbPy')" + +LD_LIBRARY_PATH="$topdir/$ConfDir" $topdir/$ConfDir/$BinaryName -O \ + -c "import compileall; import sys; compileall.compile_dir('%{buildroot}$DirHoldingGdbPy', ddir='$DirHoldingGdbPy')" +%endif # with_gdb_hooks + + popd + + echo FINISHED: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName +} + +# Use "InstallPython" to support building with different configurations: + +# Install the "debug" build first, so that we can move some files aside +%if 0%{?with_debug_build} +InstallPython debug \ + python%{pybasever}-debug \ + %{py_INSTSONAME_debug} +%endif # with_debug_build + +# Now the optimized build: +InstallPython optimized \ + python%{pybasever} \ + %{py_INSTSONAME_optimized} + + +# Fix the interpreter path in binaries installed by distutils +# (which changes them by itself) +# Make sure we preserve the file permissions +for fixed in %{buildroot}%{_bindir}/pydoc; do + sed 's,#!.*/python$,#!%{_bindir}/env python%{pybasever},' $fixed > $fixed- \ + && cat $fixed- > $fixed && rm -f $fixed- +done + +# Junk, no point in putting in -test sub-pkg +rm -f %{buildroot}/%{pylibdir}/idlelib/testcode.py* + +# don't include tests that are run at build time in the package +# This is documented, and used: rhbz#387401 +if /bin/false; then + # Move this to -test subpackage. 
+mkdir save_bits_of_test +for i in test_support.py __init__.py; do + cp -a %{buildroot}/%{pylibdir}/test/$i save_bits_of_test +done +rm -rf %{buildroot}/%{pylibdir}/test +mkdir %{buildroot}/%{pylibdir}/test +cp -a save_bits_of_test/* %{buildroot}/%{pylibdir}/test +fi + +%if %{main_python} +%else +mv %{buildroot}%{_bindir}/python %{buildroot}%{_bindir}/%{python} +%if 0%{?with_debug_build} +mv %{buildroot}%{_bindir}/python-debug %{buildroot}%{_bindir}/%{python}-debug +%endif # with_debug_build +mv %{buildroot}/%{_mandir}/man1/python.1 %{buildroot}/%{_mandir}/man1/python%{pybasever}.1 +%endif + +# tools + +mkdir -p ${RPM_BUILD_ROOT}%{site_packages} + +#pynche +install -p -m755 %{SOURCE7} ${RPM_BUILD_ROOT}%{_bindir}/pynche +chmod 755 ${RPM_BUILD_ROOT}%{_bindir}/pynche +rm -f Tools/pynche/*.pyw +cp -rp Tools/pynche \ + ${RPM_BUILD_ROOT}%{site_packages}/ + +mv Tools/pynche/README Tools/pynche/README.pynche + +#gettext +install -m755 Tools/i18n/pygettext.py %{buildroot}%{_bindir}/ +install -m755 Tools/i18n/msgfmt.py %{buildroot}%{_bindir}/ + +# Useful development tools +install -m755 -d %{buildroot}%{tools_dir}/scripts +install Tools/README %{buildroot}%{tools_dir}/ +install Tools/scripts/*py %{buildroot}%{tools_dir}/scripts/ + +# Documentation tools +install -m755 -d %{buildroot}%{doc_tools_dir} +#install -m755 Doc/tools/mkhowto %{buildroot}%{doc_tools_dir} + +# Useful demo scripts +install -m755 -d %{buildroot}%{demo_dir} +cp -ar Demo/* %{buildroot}%{demo_dir} + +# Get rid of crap +find %{buildroot}/ -name "*~"|xargs rm -f +find %{buildroot}/ -name ".cvsignore"|xargs rm -f +find %{buildroot}/ -name "*.bat"|xargs rm -f +find . -name "*~"|xargs rm -f +find . -name ".cvsignore"|xargs rm -f +#zero length +rm -f %{buildroot}%{pylibdir}/LICENSE.txt + + +#make the binaries install side by side with the main python +%if !%{main_python} +pushd %{buildroot}%{_bindir} +mv idle idle%{__python_ver} +mv pynche pynche%{__python_ver} +mv pygettext.py pygettext%{__python_ver}.py +mv msgfmt.py msgfmt%{__python_ver}.py +mv smtpd.py smtpd%{__python_ver}.py +mv pydoc pydoc%{__python_ver} +popd +%endif + +# Fix for bug #136654 +rm -f %{buildroot}%{pylibdir}/email/test/data/audiotest.au %{buildroot}%{pylibdir}/test/audiotest.au + +# Fix bug #143667: python should own /usr/lib/python2.x on 64-bit machines +%if "%{_lib}" == "lib64" +install -d %{buildroot}/usr/lib/python%{pybasever}/site-packages +%endif + +# Make python-devel multilib-ready (bug #192747, #139911) +%global _pyconfig32_h pyconfig-32.h +%global _pyconfig64_h pyconfig-64.h + +%ifarch %{power64} s390x x86_64 ia64 alpha sparc64 aarch64 +%global _pyconfig_h %{_pyconfig64_h} +%else +%global _pyconfig_h %{_pyconfig32_h} +%endif + +%if 0%{?with_debug_build} +%global PyIncludeDirs python%{pybasever} python%{pybasever}-debug +%else +%global PyIncludeDirs python%{pybasever} +%endif + +for PyIncludeDir in %{PyIncludeDirs} ; do + mv %{buildroot}%{_includedir}/$PyIncludeDir/pyconfig.h \ + %{buildroot}%{_includedir}/$PyIncludeDir/%{_pyconfig_h} + cat > %{buildroot}%{_includedir}/$PyIncludeDir/pyconfig.h << EOF +#include + +#if __WORDSIZE == 32 +#include "%{_pyconfig32_h}" +#elif __WORDSIZE == 64 +#include "%{_pyconfig64_h}" +#else +#error "Unknown word size" +#endif +EOF +done +ln -s ../../libpython%{pybasever}.so %{buildroot}%{pylibdir}/config/libpython%{pybasever}.so + +# Fix for bug 201434: make sure distutils looks at the right pyconfig.h file +# Similar for sysconfig: sysconfig.get_config_h_filename tries to locate +# pyconfig.h so it can be parsed, and needs to 
do this at runtime in site.py +# when python starts up. +# +# Split this out so it goes directly to the pyconfig-32.h/pyconfig-64.h +# variants: +sed -i -e "s/'pyconfig.h'/'%{_pyconfig_h}'/" \ + %{buildroot}%{pylibdir}/distutils/sysconfig.py \ + %{buildroot}%{pylibdir}/sysconfig.py + +# Make python folder for config files under /etc +mkdir -p %{buildroot}/%{_sysconfdir}/python +install -m 644 %{SOURCE8} %{buildroot}/%{_sysconfdir}/python + +# Ensure that the curses module was linked against libncursesw.so, rather than +# libncurses.so (bug 539917) +ldd %{buildroot}/%{dynload_dir}/_curses*.so \ + | grep curses \ + | grep libncurses.so && (echo "_curses.so linked against libncurses.so" ; exit 1) + +# Ensure that the debug modules are linked against the debug libpython, and +# likewise for the optimized modules and libpython: +for Module in %{buildroot}/%{dynload_dir}/*.so ; do + case $Module in + *_d.so) + ldd $Module | grep %{py_INSTSONAME_optimized} && + (echo Debug module $Module linked against optimized %{py_INSTSONAME_optimized} ; exit 1) + + ;; + *) + ldd $Module | grep %{py_INSTSONAME_debug} && + (echo Optimized module $Module linked against debug %{py_INSTSONAME_optimized} ; exit 1) + ;; + esac +done + +# +# Systemtap hooks: +# +%if 0%{?with_systemtap} +# Install a tapset for this libpython into tapsetdir, fixing up the path to the +# library: +mkdir -p %{buildroot}%{tapsetdir} +%ifarch %{power64} s390x x86_64 ia64 alpha sparc64 aarch64 +%global libpython_stp_optimized libpython%{pybasever}-64.stp +%global libpython_stp_debug libpython%{pybasever}-debug-64.stp +%else +%global libpython_stp_optimized libpython%{pybasever}-32.stp +%global libpython_stp_debug libpython%{pybasever}-debug-32.stp +%endif + +sed \ + -e "s|LIBRARY_PATH|%{_libdir}/%{py_INSTSONAME_optimized}|" \ + %{SOURCE3} \ + > %{buildroot}%{tapsetdir}/%{libpython_stp_optimized} + +%if 0%{?with_debug_build} +sed \ + -e "s|LIBRARY_PATH|%{_libdir}/%{py_INSTSONAME_debug}|" \ + %{SOURCE3} \ + > %{buildroot}%{tapsetdir}/%{libpython_stp_debug} +%endif # with_debug_build +%endif # with_systemtap + +# Replace scripts shebangs in usr/bin of subpackage tools +#(rhbz#987038) +sed -i "s|^#\!.\?/usr/bin.*$|#\! 
%{__python}|" \ + %{buildroot}%{_bindir}/pygettext.py \ + %{buildroot}%{_bindir}/msgfmt.py \ + %{buildroot}%{_bindir}/smtpd.py \ + %{buildroot}%{demo_dir}/scripts/find-uname.py \ + %{buildroot}%{demo_dir}/pdist/rcvs \ + %{buildroot}%{demo_dir}/pdist/rcsbump \ + %{buildroot}%{demo_dir}/pdist/rrcs \ + %{buildroot}%{site_packages}/pynche/pynche + +# Make library-files user writable +# rhbz#1046276 +/usr/bin/chmod 755 %{buildroot}%{dynload_dir}/*.so +/usr/bin/chmod 755 %{buildroot}%{_libdir}/libpython%{pybasever}.so.1.0 +%if 0%{?with_debug_build} +/usr/bin/chmod 755 %{buildroot}%{_libdir}/libpython%{pybasever}_d.so.1.0 +%endif # with_debug_build + +mkdir %{buildroot}%{_tmpfilesdir} +cp %{SOURCE9} %{buildroot}%{_tmpfilesdir}/python.conf + +# Create the platform-python symlink pointing to usr/bin/python2.7 +mkdir -p %{buildroot}%{_libexecdir} +ln -s %{_bindir}/python%{pybasever} %{buildroot}%{_libexecdir}/platform-python + +# ====================================================== +# Running the upstream test suite +# ====================================================== + +%check +topdir=$(pwd) +CheckPython() { + ConfName=$1 + BinaryName=$2 + ConfDir=$(pwd)/build/$ConfName + + echo STARTING: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + + # Note that we're running the tests using the version of the code in the + # builddir, not in the buildroot. + + pushd $ConfDir + + EXTRATESTOPTS="--verbose" + # skipping test_gdb on ppc64le until rhbz1260558 gets resolved + %ifarch ppc64le + EXTRATESTOPTS="$EXTRATESTOPTS -x test_gdb " + %endif + + +%if 0%{?with_huntrleaks} + # Try to detect reference leaks on debug builds. By default this means + # running every test 10 times (6 to stabilize, then 4 to watch): + if [ "$ConfName" = "debug" ] ; then + EXTRATESTOPTS="$EXTRATESTOPTS --huntrleaks : " + fi +%endif + + # Run the upstream test suite, setting "WITHIN_PYTHON_RPM_BUILD" so that the + # our non-standard decorators take effect on the relevant tests: + # @unittest._skipInRpmBuild(reason) + # @unittest._expectedFailureInRpmBuild + WITHIN_PYTHON_RPM_BUILD= EXTRATESTOPTS="$EXTRATESTOPTS" make test + + popd + + echo FINISHED: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + +} + +%if 0%{run_selftest_suite} + +# Check each of the configurations: +%if 0%{?with_debug_build} +CheckPython \ + debug \ + python%{pybasever}-debug +%endif # with_debug_build +CheckPython \ + optimized \ + python%{pybasever} + +%endif # run_selftest_suite + + +# ====================================================== +# Cleaning up +# ====================================================== + +%clean +rm -fr %{buildroot} + + +# ====================================================== +# Scriptlets +# ====================================================== + +%post libs -p /sbin/ldconfig + +%postun libs -p /sbin/ldconfig + + + +%files +%defattr(-, root, root, -) +%doc LICENSE README +%{_bindir}/pydoc* +%{_bindir}/%{python} +%if %{main_python} +%{_bindir}/python2 +%endif +%{_bindir}/python%{pybasever} + +%{_libexecdir}/platform-python + +%{_mandir}/*/* + +%files libs +%defattr(-,root,root,-) +%doc LICENSE README +%dir %{pylibdir} +%dir %{dynload_dir} +%dir %{_sysconfdir}/python +%{_tmpfilesdir}/python.conf +%config(noreplace) %{_sysconfdir}/python/cert-verification.cfg +%{dynload_dir}/Python-%{version}-py%{pybasever}.egg-info +%{dynload_dir}/_bisectmodule.so +%{dynload_dir}/_bsddb.so +%{dynload_dir}/_codecs_cn.so +%{dynload_dir}/_codecs_hk.so +%{dynload_dir}/_codecs_iso2022.so +%{dynload_dir}/_codecs_jp.so +%{dynload_dir}/_codecs_kr.so 
+%{dynload_dir}/_codecs_tw.so +%{dynload_dir}/_collectionsmodule.so +%{dynload_dir}/_csv.so +%{dynload_dir}/_ctypes.so +%{dynload_dir}/_curses.so +%{dynload_dir}/_curses_panel.so +%{dynload_dir}/_elementtree.so +%{dynload_dir}/_functoolsmodule.so +%{dynload_dir}/_hashlib.so +%{dynload_dir}/_heapq.so +%{dynload_dir}/_hotshot.so +%{dynload_dir}/_io.so +%{dynload_dir}/_json.so +%{dynload_dir}/_localemodule.so +%{dynload_dir}/_lsprof.so +%{dynload_dir}/_multibytecodecmodule.so +%{dynload_dir}/_multiprocessing.so +%{dynload_dir}/_randommodule.so +%{dynload_dir}/_socketmodule.so +%{dynload_dir}/_sqlite3.so +%{dynload_dir}/_ssl.so +%{dynload_dir}/_struct.so +%{dynload_dir}/arraymodule.so +%{dynload_dir}/audioop.so +%{dynload_dir}/binascii.so +%{dynload_dir}/bz2.so +%{dynload_dir}/cPickle.so +%{dynload_dir}/cStringIO.so +%{dynload_dir}/cmathmodule.so +%{dynload_dir}/_cryptmodule.so +%{dynload_dir}/datetime.so +%{dynload_dir}/dbm.so +%{dynload_dir}/dlmodule.so +%{dynload_dir}/fcntlmodule.so +%{dynload_dir}/future_builtins.so +%if %{with_gdbm} +%{dynload_dir}/gdbmmodule.so +%endif +%{dynload_dir}/grpmodule.so +%{dynload_dir}/imageop.so +%{dynload_dir}/itertoolsmodule.so +%{dynload_dir}/linuxaudiodev.so +%{dynload_dir}/math.so +%{dynload_dir}/mmapmodule.so +%{dynload_dir}/nismodule.so +%{dynload_dir}/operator.so +%{dynload_dir}/ossaudiodev.so +%{dynload_dir}/parsermodule.so +%{dynload_dir}/pyexpat.so +%{dynload_dir}/readline.so +%{dynload_dir}/resource.so +%{dynload_dir}/selectmodule.so +%{dynload_dir}/spwdmodule.so +%{dynload_dir}/stropmodule.so +%{dynload_dir}/syslog.so +%{dynload_dir}/termios.so +%{dynload_dir}/timemodule.so +%{dynload_dir}/timingmodule.so +%{dynload_dir}/unicodedata.so +%{dynload_dir}/xxsubtype.so +%{dynload_dir}/zlibmodule.so + +%dir %{site_packages} +%{site_packages}/README +%{pylibdir}/*.py* +%{pylibdir}/*.doc +%{pylibdir}/wsgiref.egg-info +%dir %{pylibdir}/bsddb +%{pylibdir}/bsddb/*.py* +%{pylibdir}/compiler +%dir %{pylibdir}/ctypes +%{pylibdir}/ctypes/*.py* +%{pylibdir}/ctypes/macholib +%{pylibdir}/curses +%dir %{pylibdir}/distutils +%{pylibdir}/distutils/*.py* +%{pylibdir}/distutils/README +%{pylibdir}/distutils/command +%exclude %{pylibdir}/distutils/command/wininst-*.exe +%dir %{pylibdir}/email +%{pylibdir}/email/*.py* +%{pylibdir}/email/mime +%{pylibdir}/encodings +%{pylibdir}/hotshot +%{pylibdir}/idlelib +%{pylibdir}/importlib +%dir %{pylibdir}/json +%{pylibdir}/json/*.py* +%{pylibdir}/lib2to3 +%exclude %{pylibdir}/lib2to3/tests +%{pylibdir}/logging +%{pylibdir}/multiprocessing +%{pylibdir}/plat-linux2 +%{pylibdir}/pydoc_data +%dir %{pylibdir}/sqlite3 +%{pylibdir}/sqlite3/*.py* +%dir %{pylibdir}/test +%{pylibdir}/test/test_support.py* +%{pylibdir}/test/__init__.py* +%{pylibdir}/unittest +%{pylibdir}/wsgiref +%{pylibdir}/xml +%if "%{_lib}" == "lib64" +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever} +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever}/site-packages +%endif + +# "Makefile" and the config-32/64.h file are needed by +# distutils/sysconfig.py:_init_posix(), so we include them in the libs +# package, along with their parent directories (bug 531901): +%dir %{pylibdir}/config +%{pylibdir}/config/Makefile +%dir %{_includedir}/python%{pybasever} +%{_includedir}/python%{pybasever}/%{_pyconfig_h} + +%{_libdir}/%{py_INSTSONAME_optimized} +%if 0%{?with_systemtap} +%{tapsetdir}/%{libpython_stp_optimized} +%doc systemtap-example.stp pyfuntop.stp +%endif + +%files devel +%defattr(-,root,root,-) +%{_libdir}/pkgconfig/python-%{pybasever}.pc 
+%{_libdir}/pkgconfig/python.pc +%{_libdir}/pkgconfig/python2.pc +%{pylibdir}/config/* +%exclude %{pylibdir}/config/Makefile +%{pylibdir}/distutils/command/wininst-*.exe +%{_includedir}/python%{pybasever}/*.h +%exclude %{_includedir}/python%{pybasever}/%{_pyconfig_h} +%doc Misc/README.valgrind Misc/valgrind-python.supp Misc/gdbinit +%if %{main_python} +%{_bindir}/python-config +%{_bindir}/python2-config +%endif +%{_bindir}/python%{pybasever}-config +%{_libdir}/libpython%{pybasever}.so + +%files tools +%defattr(-,root,root,755) +%doc Tools/pynche/README.pynche +%{site_packages}/pynche +%{_bindir}/smtpd*.py* +%{_bindir}/2to3* +%{_bindir}/idle* +%{_bindir}/pynche* +%{_bindir}/pygettext*.py* +%{_bindir}/msgfmt*.py* +%{tools_dir} +%{demo_dir} +%{pylibdir}/Doc + +%files -n %{tkinter} +%defattr(-,root,root,755) +%{pylibdir}/lib-tk +%{dynload_dir}/_tkinter.so + +%files test +%defattr(-, root, root, -) +%{pylibdir}/bsddb/test +%{pylibdir}/ctypes/test +%{pylibdir}/distutils/tests +%{pylibdir}/email/test +%{pylibdir}/json/tests +%{pylibdir}/lib2to3/tests +%{pylibdir}/sqlite3/test +%{pylibdir}/test/* +# These two are shipped in the main subpackage: +%exclude %{pylibdir}/test/test_support.py* +%exclude %{pylibdir}/test/__init__.py* +%{dynload_dir}/_ctypes_test.so +%{dynload_dir}/_testcapimodule.so + + +# We don't bother splitting the debug build out into further subpackages: +# if you need it, you're probably a developer. + +# Hence the manifest is the combination of analogous files in the manifests of +# all of the other subpackages + +%if 0%{?with_debug_build} +%files debug +%defattr(-,root,root,-) + +# Analog of the core subpackage's files: +%{_bindir}/%{python}-debug +%if %{main_python} +%{_bindir}/python2-debug +%endif +%{_bindir}/python%{pybasever}-debug + +# Analog of the -libs subpackage's files, with debug builds of the built-in +# "extension" modules: +%{dynload_dir}/_bisectmodule_d.so +%{dynload_dir}/_bsddb_d.so +%{dynload_dir}/_codecs_cn_d.so +%{dynload_dir}/_codecs_hk_d.so +%{dynload_dir}/_codecs_iso2022_d.so +%{dynload_dir}/_codecs_jp_d.so +%{dynload_dir}/_codecs_kr_d.so +%{dynload_dir}/_codecs_tw_d.so +%{dynload_dir}/_collectionsmodule_d.so +%{dynload_dir}/_csv_d.so +%{dynload_dir}/_ctypes_d.so +%{dynload_dir}/_curses_d.so +%{dynload_dir}/_curses_panel_d.so +%{dynload_dir}/_elementtree_d.so +%{dynload_dir}/_functoolsmodule_d.so +%{dynload_dir}/_hashlib_d.so +%{dynload_dir}/_heapq_d.so +%{dynload_dir}/_hotshot_d.so +%{dynload_dir}/_io_d.so +%{dynload_dir}/_json_d.so +%{dynload_dir}/_localemodule_d.so +%{dynload_dir}/_lsprof_d.so +%{dynload_dir}/_multibytecodecmodule_d.so +%{dynload_dir}/_multiprocessing_d.so +%{dynload_dir}/_randommodule_d.so +%{dynload_dir}/_socketmodule_d.so +%{dynload_dir}/_sqlite3_d.so +%{dynload_dir}/_ssl_d.so +%{dynload_dir}/_struct_d.so +%{dynload_dir}/arraymodule_d.so +%{dynload_dir}/audioop_d.so +%{dynload_dir}/binascii_d.so +%{dynload_dir}/bz2_d.so +%{dynload_dir}/cPickle_d.so +%{dynload_dir}/cStringIO_d.so +%{dynload_dir}/cmathmodule_d.so +%{dynload_dir}/_cryptmodule_d.so +%{dynload_dir}/datetime_d.so +%{dynload_dir}/dbm_d.so +%{dynload_dir}/dlmodule_d.so +%{dynload_dir}/fcntlmodule_d.so +%{dynload_dir}/future_builtins_d.so +%if %{with_gdbm} +%{dynload_dir}/gdbmmodule_d.so +%endif +%{dynload_dir}/grpmodule_d.so +%{dynload_dir}/imageop_d.so +%{dynload_dir}/itertoolsmodule_d.so +%{dynload_dir}/linuxaudiodev_d.so +%{dynload_dir}/math_d.so +%{dynload_dir}/mmapmodule_d.so +%{dynload_dir}/nismodule_d.so +%{dynload_dir}/operator_d.so +%{dynload_dir}/ossaudiodev_d.so 
+%{dynload_dir}/parsermodule_d.so +%{dynload_dir}/pyexpat_d.so +%{dynload_dir}/readline_d.so +%{dynload_dir}/resource_d.so +%{dynload_dir}/selectmodule_d.so +%{dynload_dir}/spwdmodule_d.so +%{dynload_dir}/stropmodule_d.so +%{dynload_dir}/syslog_d.so +%{dynload_dir}/termios_d.so +%{dynload_dir}/timemodule_d.so +%{dynload_dir}/timingmodule_d.so +%{dynload_dir}/unicodedata_d.so +%{dynload_dir}/xxsubtype_d.so +%{dynload_dir}/zlibmodule_d.so + +# No need to split things out the "Makefile" and the config-32/64.h file as we +# do for the regular build above (bug 531901), since they're all in one package +# now; they're listed below, under "-devel": + +%{_libdir}/%{py_INSTSONAME_debug} +%if 0%{?with_systemtap} +%{tapsetdir}/%{libpython_stp_debug} +%endif + +# Analog of the -devel subpackage's files: +%dir %{pylibdir}/config-debug +%{_libdir}/pkgconfig/python-%{pybasever}-debug.pc +%{_libdir}/pkgconfig/python-debug.pc +%{_libdir}/pkgconfig/python2-debug.pc +%{pylibdir}/config-debug/* +%{_includedir}/python%{pybasever}-debug/*.h +%if %{main_python} +%{_bindir}/python-debug-config +%{_bindir}/python2-debug-config +%endif +%{_bindir}/python%{pybasever}-debug-config +%{_libdir}/libpython%{pybasever}_d.so + +# Analog of the -tools subpackage's files: +# None for now; we could build precanned versions that have the appropriate +# shebang if needed + +# Analog of the tkinter subpackage's files: +%{dynload_dir}/_tkinter_d.so + +# Analog of the -test subpackage's files: +%{dynload_dir}/_ctypes_test_d.so +%{dynload_dir}/_testcapimodule_d.so + +%endif # with_debug_build + +# We put the debug-gdb.py file inside /usr/lib/debug to avoid noise from +# ldconfig (rhbz:562980). +# +# The /usr/lib/rpm/redhat/macros defines the __debug_package macro to use +# debugfiles.list, and it appears that everything below /usr/lib/debug and +# (/usr/src/debug) gets added to this file (via LISTFILES) in +# /usr/lib/rpm/find-debuginfo.sh +# +# Hence by installing it below /usr/lib/debug we ensure it is added to the +# -debuginfo subpackage +# (if it doesn't, then the rpmbuild ought to fail since the debug-gdb.py +# payload file would be unpackaged) + + +# ====================================================== +# Finally, the changelog: +# ====================================================== + +%changelog +* Wed Sep 25 2019 Charalampos Stratakis - 2.7.5-88 +- Security fix for CVE-2019-16056 +Resolves: rhbz#1750773 + +* Tue Aug 27 2019 Charalampos Stratakis - 2.7.5-87 +- Fix CVE-2018-20852 +Resolves: rhbz#1741551 + +* Tue Jun 11 2019 Charalampos Stratakis - 2.7.5-86 +- Security fix for CVE-2019-10160 +Resolves: rhbz#1718388 + +* Tue May 28 2019 Charalampos Stratakis - 2.7.5-85 +- Security fix for CVE-2019-9948 +Resolves: rhbz#1704174 + +* Wed May 15 2019 Charalampos Stratakis - 2.7.5-84 +- Disallow control chars in http URLs +- Fixes CVE-2019-9740 and CVE-2019-9947 +Resolves: rhbz#1704362 and rhbz#1703530 + +* Thu May 09 2019 Charalampos Stratakis - 2.7.5-83 +- Remove unversioned obsoletes +Resolves: rhbz#1703600 + +* Fri May 03 2019 Charalampos Stratakis - 2.7.5-82 +- Updated fix for CVE-2019-9636 +Resolves: rhbz#1689317 + +* Tue Mar 26 2019 Charalampos Stratakis - 2.7.5-81 +- Security fix for CVE-2019-9636 +Resolves: rhbz#1689317 + +* Wed Mar 20 2019 Victor Stinner - 2.7.5-80 +- Security fix for CVE-2019-5010: crash on parsing a specially crafted X509 certificate + (resolves: rhbz#1666788) + +* Wed Mar 06 2019 Tomas Orsava - 2.7.5-79 +- Moved the macros.python/2 files into their own packages python/2-rpm-macros +Resolves: 
rhbz#1679221
+
+* Mon Feb 25 2019 Charalampos Stratakis - 2.7.5-78
+- Security fix for CVE-2018-14647
+Resolves: rhbz#1636838
+
+* Tue Nov 06 2018 Victor Stinner - 2.7.5-77
+- Python can sometimes create incorrect .pyc files: check I/O error
+ (rhbz#1629982).
+
+* Mon Sep 10 2018 Charalampos Stratakis - 2.7.5-76
+- Remove an unversioned obsoletes tag
+Resolves: rhbz#1627059
+
+* Mon Jul 16 2018 Charalampos Stratakis - 2.7.5-75
+- Provide the /usr/libexec/platform-python symlink to the main binary
+Resolves: rhbz#1599159
+
+* Tue Jun 12 2018 Charalampos Stratakis - 2.7.5-74
+- Fix OSError 17 due to _multiprocessing/semaphore.c assuming
+ a one-to-one Pid -> process mapping
+Resolves: rhbz#1579432
+
+* Wed May 30 2018 Charalampos Stratakis - 2.7.5-73
+- Remove 3DES cipher to mitigate CVE-2016-2183 (sweet32).
+Resolves: rhbz#1581901
+
+* Thu May 03 2018 Charalampos Stratakis - 2.7.5-72
+- Fix CVE-2018-1060 and CVE-2018-1061
+Resolves: rhbz#1563454 and rhbz#1549192
+- Provide python2-libs from the python-libs subpackage
+Resolves: rhbz#1557460
+
+* Wed Apr 18 2018 Charalampos Stratakis - 2.7.5-71
+- Limit the number of CPU cores when building the package on power architectures
+Resolves: rhbz#1568974
+
+* Tue Apr 17 2018 Charalampos Stratakis - 2.7.5-70
+- Do not send IP addresses in SNI TLS extension
+Resolves: rhbz#1555314
+
+* Tue Apr 17 2018 Charalampos Stratakis - 2.7.5-69
+- Fix nondeterministic read in test_pty
+Resolves: rhbz#1512160
+
+* Mon Feb 19 2018 Tomas Orsava - 2.7.5-68
+- Add Conflicts tag with old virtualenv versions due to new behaviour of
+ httplib (patch 295)
+Resolves: rhbz#1483438
+
+* Mon Feb 19 2018 Tomas Orsava - 2.7.5-67
+- Re-add the private `_set_hostport` api to httplib (Patch 296)
+Resolves: rhbz#1546351
+
+* Fri Feb 09 2018 Charalampos Stratakis - 2.7.5-66
+- Fix https connections behind a proxy.
+Resolves: rhbz#1483438
+
+* Fri Dec 08 2017 Charalampos Stratakis - 2.7.5-65
+- Fix hanging of all threads when trying to access an inaccessible NFS server.
+Resolves: rhbz#1520068
+
+* Tue Oct 17 2017 Charalampos Stratakis - 2.7.5-64
+- Fix an issue with the context parameter addition to xmlrpclib.ServerProxy
+Resolves: rhbz#1490392
+
+* Fri Oct 13 2017 Petr Viktorin - 2.7.5-63
+- Make it more likely for the system allocator to release free()d memory arenas
+Resolves: rhbz#1468410
+
+* Wed Oct 11 2017 Charalampos Stratakis - 2.7.5-62
+- Add context parameter to xmlrpclib.ServerProxy
+Resolves: rhbz#1490392
+
+* Tue Oct 03 2017 Charalampos Stratakis - 2.7.5-61
+- Increase imaplib's MAXLINE to accommodate modern mailbox sizes.
+Resolves: rhbz#1489858
+
+* Tue Oct 03 2017 Charalampos Stratakis - 2.7.5-60
+- Fix fcntl() with integer argument on 64-bit big-endian platforms.
+Resolves: rhbz#1489858
+
+* Tue Oct 03 2017 Charalampos Stratakis - 2.7.5-59
+- Increase timeouts in test_smtplib.
+Resolves: rhbz#1497795
+
+* Wed May 03 2017 Charalampos Stratakis - 2.7.5-58
+- Set stream to None in case an _open() fails.
+Resolves: rhbz#1432003
+
+* Tue Apr 11 2017 Charalampos Stratakis - 2.7.5-57
+- Fix implicit declaration warnings of functions added by patches 147 and 265
+Resolves: rhbz#1441237
+
+* Mon Apr 10 2017 Charalampos Stratakis - 2.7.5-56
+- Fix shutil.make_archive ignoring empty directories when creating zip files
+Resolves: rhbz#1439734
+
+* Thu Mar 23 2017 Tomas Orsava - 2.7.5-55
+- Update Python RPM macros with new ones from EPEL7 to simplify packaging
+Resolves: rhbz#1297522
+
+* Wed Mar 22 2017 Charalampos Stratakis - 2.7.5-54
+- Protect key list during fork()
+Resolves: rhbz#1268226
+
+* Mon Mar 13 2017 Charalampos Stratakis - 2.7.5-53
+- Fix _ssl.c reference leaks
+Resolves: rhbz#1272562
+
+* Mon Feb 27 2017 Charalampos Stratakis - 2.7.5-52
+- Work around Python's threading library issue with non-returning wait for signals with timeout
+Resolves: rhbz#1368076
+
+* Mon Jan 23 2017 Charalampos Stratakis - 2.7.5-51
+- Enable certificate verification by default
+Resolves: rhbz#1219110
+
+* Wed Jan 18 2017 Charalampos Stratakis - 2.7.5-50
+- Fix incorrect parsing of certain regular expressions
+Resolves: rhbz#1373363
+
+* Tue Jan 17 2017 Charalampos Stratakis - 2.7.5-49
+- Fix ssl module's parsing of GEN_RID subject alternative name fields in X.509 certs
+Resolves: rhbz#1364444
+
+* Mon Aug 01 2016 Charalampos Stratakis - 2.7.5-48
+- Fix for CVE-2016-1000110 HTTPoxy attack
+Resolves: rhbz#1359164
+
+* Mon Jul 11 2016 Charalampos Stratakis - 2.7.5-47
+- Fix for CVE-2016-5636: possible integer overflow and heap corruption in zipimporter.get_data()
+Resolves: rhbz#1356364
+
+* Mon Jul 11 2016 Charalampos Stratakis - 2.7.5-46
+- Drop patch 221 that backported sslwrap function since it was introducing regressions
+- Refactor patch 227
+Resolves: rhbz#1331425
+
+* Tue Jun 21 2016 Tomas Orsava - 2.7.5-45
+- Fix for CVE-2016-0772 python: smtplib StartTLS stripping attack (rhbz#1303647)
+ Raise an error when STARTTLS fails (upstream patch)
+- Fix for CVE-2016-5699 python: http protocol stream injection attack (rhbz#1303699)
+ Disabled HTTP header injections in httplib (upstream patch)
+Resolves: rhbz#1346357
+
+* Wed May 4 2016 Charalampos Stratakis - 2.7.5-44
+- Fix iteration over files with very long lines
+Resolves: rhbz#1271760
+
+* Tue May 3 2016 Charalampos Stratakis - 2.7.5-43
+- Move python.conf from /etc/tmpfiles.d/ to /usr/lib/tmpfiles.d/
+Resolves: rhbz#1288426
+
+* Mon Apr 4 2016 Charalampos Stratakis - 2.7.5-42
+- JSON decoder lone surrogates fix
+Resolves: rhbz#1301017
+
+* Mon Apr 4 2016 Charalampos Stratakis - 2.7.5-41
+- Updated PEP493 implementation
+Resolves: rhbz#1315758
+
+* Thu Mar 31 2016 Charalampos Stratakis - 2.7.5-40
+- Backport of Computed Goto dispatch
+Resolves: rhbz#1289277
+
+* Mon Mar 21 2016 Charalampos Stratakis - 2.7.5-39
+- Removal of the '$Date$' placeholder on the python(1) man page footer,
+which does not expand during build
+Resolves: rhbz#1268310
+
+* Thu Mar 17 2016 Charalampos Stratakis - 2.7.5-38
+- Fix hashlib algorithms breaking by initializing OpenSSL_add_all_digests
+function in _hashlib library
+Resolves: rhbz#1295459
+
+* Thu Mar 17 2016 Charalampos Stratakis - 2.7.5-37
+- Change HTTPS certificate verification to platform_default
+Resolves: rhbz#1278429
+
+* Wed Mar 16 2016 Charalampos Stratakis - 2.7.5-36
+- Force all child threads to terminate in TestForkInThread
+Resolves: rhbz#1313259
+
+* Thu Mar 10 2016 Charalampos Stratakis - 2.7.5-35
+- Adjusted tests to acknowledge lack of SSLv2 support
+Resolves: rhbz#1315310
+
+* Fri Oct 09 2015 Matej
Stuchlik - 2.7.5-34 +- Revert fix for rhbz#1117751 as it leads to regressions +Resolves: rhbz#1117751 + +* Tue Sep 15 2015 Matej Stuchlik - 2.7.5-33 +- Only restore SIG_PIPE when Popen called with restore_sigpipe +Resolves: rhbz#1117751 + +* Fri Sep 04 2015 Robert Kuska - 2.7.5-32 +- Backport SSLSocket.version function +- Temporary disable test_gdb on ppc64le rhbz#1260558 +Resolves: rhbz#1259421 + +* Thu Aug 06 2015 Robert Kuska - 2.7.5-31 +- Update load_cert_chain function to accept None keyfile +Resolves: rhbz#1250611 + +* Tue Jul 07 2015 Robert Kuska - 2.7.5-30 +- Change Patch224 according to latest update in PEP493 +Resolves:rhbz#1219108 + +* Tue Jul 07 2015 Matej Stuchlik - 2.7.5-29 +- Popen shouldn't ignore SIG_PIPE +Resolves: rhbz#1117751 + +* Tue Jul 07 2015 Matej Stuchlik - 2.7.5-28 +- Exclude python subprocess temp files from cleaning +Resolves: rhbz#1058482 + +* Wed Jul 01 2015 Robert Kuska - 2.7.5-27 +- Add list for cprofile sort option +Resolves:rhbz#1237107 + +* Mon Jun 29 2015 Robert Kuska - 2.7.5-26 +- Add switch to toggle cert verification on or off globally +Resolves:rhbz#1219108 + +* Mon Jun 29 2015 Robert Kuska - 2.7.5-25 +- PEP476 enable cert verifications by default +Resolves:rhbz#1219110 + +* Mon Jun 29 2015 Robert Kuska - 2.7.5-24 +- Massive backport of ssl module from python3 aka PEP466 +Resolves: rhbz#1111461 + +* Tue Jun 23 2015 Matej Stuchlik - 2.7.5-23 +- Fixed CVE-2013-1753, CVE-2013-1752, CVE-2014-4616, CVE-2014-4650, CVE-2014-7185 +Resolves: rhbz#1206574 + +* Mon Jun 22 2015 Matej Stuchlik - 2.7.5-22 +- Fix importing readline producing erroneous output +Resolves: rhbz#1189301 + +* Mon Jun 22 2015 Matej Stuchlik - 2.7.5-21 +- Add missing import in bdist_rpm +Resolves: rhbz#1177613 + +* Mon Jun 22 2015 Matej Stuchlik - 2.7.5-20 +- Avoid double close of subprocess pipes +Resolves: rhbz#1103452 + +* Thu Apr 09 2015 Robert Kuska - 2.7.5-19 +- make multiprocessing ignore EINTR +Resolves: rhbz#1181624 + +* Wed Sep 3 2014 Peter Robinson 2.7.5-18 +- valgrind is now supported on aarch64/ppc64le +Resolves: rhbz#1137039 + +* Thu Aug 07 2014 Slavek Kabrda - 2.7.5-17 +- Fix building on ppc64le (fix test_gdb, disable valgrind support). +Resolves: rhbz#1125657 + +* Mon Feb 10 2014 Tomas Radej - 2.7.5-16 +- Fix buffer overflow (upstream patch) +Resolves: rhbz#1062376 + +* Tue Jan 28 2014 Daniel Mach - 2.7.5-15 +- Mass rebuild 2014-01-24 + +* Tue Jan 14 2014 Matej Stuchlik - 2.7.5-14 +- Fix missing documentation for some keywords +Resolves: rhbz#1032116 + +* Mon Jan 13 2014 Matej Stuchlik - 2.7.5-13 +- Make library-files user writable +Resolves: rhbz#1046276 + +* Fri Jan 10 2014 Bohuslav Kabrda - 2.7.5-12 +- Use -O3 when building on ppc64. +Resolves: rhbz#1051076 + +* Fri Dec 27 2013 Daniel Mach - 2.7.5-11 +- Mass rebuild 2013-12-27 + +* Thu Nov 07 2013 Matej Stuchlik - 2.7.5-10 +- Added an explicit RPATH to _elementtree.so +Resolves: rhbz#1019345 + +* Thu Nov 07 2013 Matej Stuchlik - 2.7.5-9 +- Fixed instances of #!/usr/bin/env python +Resolves: rhbz#1019336 + +* Wed Oct 09 2013 Bohuslav Kabrda - 2.7.5-8 +- Fix gdb bindings on ppc64. +Resolves: rhbz#835053 + +* Tue Aug 20 2013 Matej Stuchlik - 2.7.5-7 +- Added fix for CVE-2013-4238 +Resolves: rhbz#998781 + +* Tue Aug 20 2013 Bohuslav Kabrda - 2.7.5-6 +- Add explicit RPATH to pyexpat pointing at system libexpat (rhbz#996665). + +* Mon Aug 05 2013 Bohuslav Kabrda - 2.7.5-5 +- Fix memory leak in marshal.c, fixes rhbz#990554. 
+
+* Wed Jul 24 2013 Robert Kuska - 2.7.5-4
+- Change shebangs of scripts in tools subpackage
+(rhbz#987038)
+
+* Wed Jul 17 2013 Matej Stuchlik - 2.7.5-3
+- Added patch that makes urllib2 honor no_proxy variable for ftp URLs
+ (rhbz#971267)
+
+* Wed Jul 17 2013 Matej Stuchlik - 2.7.5-2
+- Pulled patch fixing build with libffi containing multilib wrapper for ffi.h
+ from Fedora (rhbz#979696)
+
+* Thu May 16 2013 Bohuslav Kabrda - 2.7.5-1
+- Updated to Python 2.7.5.
+- Refreshed patches: 0 (config), 102 (lib64), 121 (add Modules to build path),
+153 (gdb test noise)
+- Dropped patches: 126, 127 (big endian issues, both fixed upstream),
+175 (configure -Wformat, fixed upstream)
+- Synced patch numbers with python3.spec.
+
+* Tue May 14 2013 David Malcolm - 2.7.4-5
+- fix multilib issue in python-tools due to /usr/bin/pynche (source 7;
+rhbz#831437)
+
+* Thu May 02 2013 Bohuslav Kabrda - 2.7.4-4
+- Add patch that enables building on ppc64p7.
+
+* Mon Apr 22 2013 Bohuslav Kabrda - 2.7.4-3
+- Allow arbitrary timeout in Condition.wait (rhbz#917709).
+
+* Thu Apr 11 2013 Kalev Lember - 2.7.4-2
+- Build with libdb 5.3 instead of libdb4
+- Refreshed patches: 0 (config), 102 (lib64)
+- Dropped patches: 54 (db4 version), 159 (db4 include path adjustment)
+
+* Mon Apr 08 2013 Bohuslav Kabrda - 2.7.4-1
+- Updated to Python 2.7.4.
+- Refreshed patches: 0 (config), 7 (sqlite encoding), 16 (rpath in config),
+55 (systemtap), 111 (no static lib), 112 (debug build), 113 (more
+configuration flags), 130 (add extension to python config), 134 (fix
+COUNT_ALLOCS in test_sys), 146 (hashlib FIPS), 147 (add debug malloc stats),
+153 (fix gdb test noise), 157 (uid, gid overflow - fixed upstream, just
+keeping few more downstream tests), 165 (crypt module salt backport),
+175 (fix configure Wformat), 5000 (regenerated autotooling patch)
+- Dropped patches: 101 (lib64 regex; merged upstream), 171 (exception on
+missing /dev/urandom; merged upstream), 172 (poll for multiprocessing socket
+connection; merged upstream)
+
+* Mon Mar 25 2013 David Malcolm - 2.7.3-35
+- fix gcc 4.8 incompatibility (rhbz#927358); regenerate autotool intermediates
+
+* Wed Mar 6 2013 David Malcolm - 2.7.3-34
+- restrict scope of workaround for cmpi-bindings issue to avoid breaking
+in-tree running of test_sys and test_subprocess (rhbz#817554)
+
+* Wed Mar 6 2013 David Malcolm - 2.7.3-33
+- add workaround for cmpi-bindings issue (rhbz#817554)
+
+* Mon Mar 4 2013 David Malcolm - 2.7.3-32
+- add workaround for ENOPROTOOPT seen running selftests in Koji
+(rhbz#913732)
+
+* Mon Mar 4 2013 David Malcolm - 2.7.3-31
+- remove config flag from /etc/rpm/macros.python2
+
+* Fri Feb 22 2013 David Malcolm - 2.7.3-30
+- remove __debug_package macro from comment
+
+* Fri Feb 22 2013 David Malcolm - 2.7.3-29
+- drop -b from application of patch 157 (uid/gid overflows)
+
+* Fri Feb 22 2013 David Malcolm - 2.7.3-28
+- fix bogus dates in changelog
+
+* Thu Feb 21 2013 David Malcolm - 2.7.3-27
+- port _multiprocessing.Connection.poll() to use the "poll" syscall, rather
+than "select", allowing large numbers of subprocesses (patch 172;
+rhbz#849992)
+
+* Thu Feb 21 2013 David Malcolm - 2.7.3-26
+- raise correct exception in os.urandom() when /dev/urandom is missing
+(patch 171; rhbz#907383)
+
+* Wed Feb 20 2013 David Malcolm - 2.7.3-25
+- in debug builds, try to print repr() when a C-level assert fails in the
+garbage collector (typically indicating a reference-counting error somewhere
+else e.g. in an extension module) (patch 170; rhbz#850013)
+
+* Wed Feb 20
2013 David Malcolm - 2.7.3-24 +- move lib2to3/tests from python-libs to python-test (rhbz#850056) + +* Wed Feb 20 2013 David Malcolm - 2.7.3-23 +- use SHA-256 rather than implicitly using MD5 within the challenge handling +in multiprocessing.connection (patch 169; rhbz#879695) + +* Wed Feb 20 2013 David Malcolm - 2.7.3-22 +- fix a problem with distutils.sysconfig when CFLAGS is defined in the +environment (patch 168; rhbz#849994) + +* Wed Feb 20 2013 David Malcolm - 2.7.3-21 +- don't run any stack navigation tests in test_gdb for optimized builds +(patch 167; rhbz#912025) + +* Wed Feb 20 2013 David Malcolm - 2.7.3-20 +- s/cryptmodule/_cryptmodule/ in package payload (rhbz#835021) + +* Tue Feb 19 2013 David Malcolm - 2.7.3-19 +- bulletproof the gdb debugging hooks against a failure seen in ARM builds +(patch 166; rhbz#912025) +- re-enable make check on ARM (rhbz#912025) + +* Tue Feb 19 2013 David Malcolm - 2.7.3-18 +- backport pre-canned ways of salting a password to the "crypt" module from 3.3 +(rhbz#835021) + +* Tue Feb 19 2013 David Malcolm - 2.7.3-17 +- remove "_default_patch_fuzz" directive to avoid patches being silently +misapplied (refresh patch 1, patch 101, patch 102, patch 111, patch 121, +patch 158; rename patch 1, patch 101, patch 121; apply patch 54 before the +lib64 patches to avoid fuzz problems caused by the conditional application +of the lib64 patches) + +* Mon Feb 18 2013 Peter Robinson 2.7.3-16 +- disable make check on ARM for the moment until 912025 is fixed + +* Mon Feb 11 2013 David Malcolm - 2.7.3-15 +- add aarch64 (rhbz#909783) + +* Thu Nov 29 2012 David Malcolm - 2.7.3-14 +- add BR on bluez-libs-devel (rhbz#879720) + +* Thu Aug 9 2012 David Malcolm - 2.7.3-13 +- remove f18 conditional from patch 159 + +* Fri Jul 27 2012 Fedora Release Engineering - 2.7.3-12 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_18_Mass_Rebuild + +* Tue Jul 17 2012 Bohuslav Kabrda - 2.7.3-11 +- fix memory leak in module _hashlib (patch 158, rhbz#836285) +- fix db4 include path for libdb4 package (f18 and above) (patch 159) + +* Tue Jun 26 2012 David Malcolm - 2.7.3-10 +- fix missing include in uid/gid handling patch (patch 157; rhbz#830405) + +* Fri Jun 22 2012 David Malcolm - 2.7.3-9 +- use rpm macro for power64 (rhbz#834653) + +* Tue May 15 2012 David Malcolm - 2.7.3-8 +- update uid/gid handling to avoid int overflows seen with uid/gid +values >= 2^31 on 32-bit architectures (patch 157; rhbz#697470) + +* Fri May 4 2012 David Malcolm - 2.7.3-7 +- renumber autotools patch from 300 to 5000 +- specfile cleanups + +* Mon Apr 30 2012 David Malcolm - 2.7.3-6 +- try again to fix test_gdb.py (patch 156; rhbz#817072) + +* Mon Apr 30 2012 David Malcolm - 2.7.3-5 +- fix test_gdb.py (patch 156; rhbz#817072) + +* Fri Apr 20 2012 David Malcolm - 2.7.3-4 +- avoid allocating thunks in ctypes unless absolutely necessary, to avoid +generating SELinux denials on "import ctypes" and "import uuid" when embedding +Python within httpd (patch 155; rhbz#814391) + +* Thu Apr 19 2012 David Malcolm - 2.7.3-3 +- add explicit version requirements on expat to avoid linkage problems with +XML_SetHashSalt + +* Wed Apr 18 2012 David Malcolm - 2.7.3-2 +- fix -config symlinks (patch 112; rhbz#813836) + +* Wed Apr 11 2012 David Malcolm - 2.7.3-1 +- 2.7.3: refresh patch 102 (lib64); drop upstream patches 11 (ascii-to-lower), +115 (pydoc robustness), 145 (linux2), 148 (gdbm magic values), 151 (deadlock +in fork); refresh patch 112 (debug build); revise patch 127 +(test_structmember); fix test_gdb (patch 153); refresh 
patch 137 (distutils
+tests); add python2.pc to python-devel; regenerate the autotool intermediates
+patch (patch 300)
+
+* Sat Feb 25 2012 Thomas Spura - 2.7.2-20
+- fix deadlock issue (#787712)
+
+* Fri Feb 17 2012 Toshio Kuratomi - 2.7.2-19
+- Obsolete python-sqlite2
+
+* Thu Nov 24 2011 Ville Skyttä - 2.7.2-18
+- Build with $RPM_LD_FLAGS (#756862).
+- Use xz-compressed source tarball.
+
+* Wed Oct 26 2011 Fedora Release Engineering - 2.7.2-17
+- Rebuilt for glibc bug#747377
+
+* Fri Sep 30 2011 David Malcolm - 2.7.2-16
+- re-enable gdbm (patch 148; rhbz#742242)
+
+* Fri Sep 16 2011 David Malcolm - 2.7.2-15
+- add a sys._debugmallocstats() function (patch 147)
+
+* Wed Sep 14 2011 David Malcolm - 2.7.2-14
+- support OpenSSL FIPS mode in _hashlib and hashlib; don't build the _md5 and
+_sha* modules, relying on _hashlib in hashlib, and thus within md5 etc
+(rhbz#563986; patch 146)
+
+* Wed Sep 14 2011 David Malcolm - 2.7.2-13
+- force sys.platform to be "linux2" (patch 145)
+
+* Tue Sep 13 2011 David Malcolm - 2.7.2-12
+- disable gdbm module to prepare for gdbm soname bump
+
+* Mon Sep 12 2011 David Malcolm - 2.7.2-11
+- rename and renumber patches for consistency with python3.spec (55, 111, 113,
+114, 125, 131, 129 to 143)
+
+* Sat Sep 10 2011 David Malcolm - 2.7.2-10
+- rewrite of "check", introducing downstream-only hooks for skipping specific
+cases in an rpmbuild (patch 132), and fixing/skipping failing tests in a more
+fine-grained manner than before (patches 104, 133-142)
+
+* Thu Sep 1 2011 David Malcolm - 2.7.2-9
+- run selftests with "--verbose"
+- disable parts of test_io on ppc (rhbz#732998)
+
+* Tue Aug 23 2011 David Malcolm - 2.7.2-8
+- add --extension-suffix option to python-config (patch 130; rhbz#732808)
+
+* Tue Aug 23 2011 David Malcolm - 2.7.2-7
+- re-enable and fix the --with-tsc option on ppc64, and rework it on 32-bit
+ppc to avoid aliasing violations (patch 129; rhbz#698726)
+
+* Tue Aug 23 2011 David Malcolm - 2.7.2-6
+- don't use --with-tsc on ppc64 debug builds (rhbz#698726)
+
+* Thu Aug 18 2011 David Malcolm - 2.7.2-5
+- add rpm macros file (rhbz#731800)
+
+* Fri Jul 8 2011 David Malcolm - 2.7.2-4
+- cleanup of BuildRequires; add comment headings to specfile sections
+
+* Wed Jun 22 2011 David Malcolm - 2.7.2-3
+- reorganize test exclusions (test_openpty and test_pty seem to be failing on
+every arch, not just the explicitly-listed ones)
+
+* Mon Jun 13 2011 Dan Horák - 2.7.2-2
+- add s390(x) excluded tests
+
+* Mon Jun 13 2011 David Malcolm - 2.7.2-1
+- 2.7.2; drop upstreamed patches: patch 122 (parallel make fix), patch 124
+(test_commands and SELinux), patch 130 (ppc preprocessor macro in debug
+build); patch 131 (decimal in Turkish locale); regenerate the autotool
+intermediates patch (patch 300)
+
+* Tue Jun 07 2011 Dennis Gilmore - 2.7.1-9
+- fix sparc building by excluding failing tests RHBZ#711584
+
+* Mon May 23 2011 Peter Robinson - 2.7.1-8
+- fix compile on ARM by excluding failing tests on arm - RHBZ #706253
+
+* Tue Apr 12 2011 David Malcolm - 2.7.1-7
+- fix "import decimal" in the Turkish locale (patch 131; rhbz#694928)
+
+* Wed Feb 09 2011 Fedora Release Engineering - 2.7.1-6
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_15_Mass_Rebuild
+
+* Fri Jan 21 2011 Toshio Kuratomi - 2.7.1-5
+- Switch from setting OPT to setting EXTRA_CFLAGS so we don't overwrite the
+ DNDEBUG flag
+
+* Fri Jan 7 2011 David Malcolm - 2.7.1-4
+- for now, drop "obsoletes" of python-argparse, since it interacts badly with
+multilib (rhbz#667984)
+
+* Fri Jan 7 2011
Thomas Spura - 2.7.1-3 +- obsolete/provide python-argparse (new in 2.7) + +* Thu Jan 6 2011 David Malcolm - 2.7.1-2 +- fix the ppc build of the debug configuration (patch 130; rhbz#661510) + +* Thu Dec 23 2010 David Malcolm - 2.7.1-1 +- 2.7.1, reworking patch 0 (config), patch 102 (lib64); drop upstream +patch 56 (cfgparse), patch 110 (ctypes/SELinux/noexecmem), patch 119 (expat +compat), patch 123 (2to3 on "from itertools import *") +- fix test_abc's test_cache_leak in the debug build (patch 128) +- drop _weakref.so from manifest (_weakref became a core module in r84230) + +* Wed Sep 29 2010 jkeating - 2.7-13 +- Rebuilt for gcc bug 634757 + +* Mon Sep 27 2010 David Malcolm - 2.7-12 +- fix test_structmember on 64bit-bigendian (patch 127) + +* Fri Sep 24 2010 David Malcolm - 2.7-11 +- fix dbm_contains on 64bit-bigendian (patch 126; rhbz#626756) + +* Thu Sep 16 2010 Toshio Kuratomi - 2.7-10 +- backport a patch to fix a change in behaviour in configparse. + +* Thu Sep 9 2010 David Malcolm - 2.7-9 +- move most of the payload of the core package to the libs subpackage, given +that the libs aren't meaningfully usable without the standard libraries + +* Wed Aug 18 2010 David Malcolm - 2.7-8 +- add %%check section +- update lib64 patch (patch 102) to fix expected output in test_site.py on +64-bit systems +- patch test_commands.py to work with SELinux (patch 124) +- patch the debug build's usage of COUNT_ALLOCS to be less verbose (patch 125) + +* Mon Jul 26 2010 David Malcolm - 2.7-7 +- fixup missing -lcrypt to "crypt" module in config patch (patch 0) + +* Mon Jul 26 2010 David Malcolm - 2.7-6 +- re-enable systemtap +- cherrypick upstream patch to 2to3 for "from itertools import *" +traceback (patch 123) + +* Thu Jul 22 2010 David Malcolm - 2.7-5 +- disable systemtap for now (dtrace is failing on startup due to the bug +mentioned in 2.7-4) +- provide relative path to python binary when running pathfix.py +- fix parallel make (patch 122) + +* Thu Jul 22 2010 David Malcolm - 2.7-4 +- fix reference to pyconfig.h in sysconfig that led to failure on startup if +python-devel was not installed + +* Thu Jul 8 2010 David Malcolm - 2.7-3 +- add patch to fixup the new sysconfig.py for our multilib support on +64-bit (patch 103) + +* Thu Jul 8 2010 David Malcolm - 2.7-2 +- add machinery for regenerating the "configure" script in the face of +mismatching autoconf versions (patch 300) + +* Tue Jul 6 2010 David Malcolm - 2.7-1 +- 2.7 final; drop alphatag +- drop patch 117 (upstream), patch 120 (upstreamed) +- fix the commented-out __python_ver from 26 to 27 + +* Tue Jun 22 2010 David Malcolm - 2.7-0.1.rc2 +- 2.7rc2 +- revert r79310 (patch 121) +- remove modulator: upstream removed it in r78338 +- rename mathmodule(_d).so to math(_d).so in manifests (appears to be changed +by r76861) +- _bytesio(_d).so and _filesio(_d).so were consolidated into _io(_d).so in +r73394 (upstream issue 6215) +- use the gdb hooks from the upstream tarball, rather than keeping our own +copy. 
The upstream version has some whitespace changes, a new write_repr for +unicode objects, and various bulletproofings for being run on older gdbs + +* Tue Jun 22 2010 David Malcolm - 2.7-0.1.rc1 +- 2.7rc1: + - rework patches to apply against 2.7 (which among other changes has had a +whitespace cleanup of the .c code): .rhconfig (patch0), .binutils-no-dep +(patch10), .ascii-tolower (patch11), .socketmodule (patch13), .socketmodule2 +(patch14), .systemtap (patch55), .lib64 (patch102), .selinux (patch110), +.no-static-lib (patch111), .debug-build (patch112), .statvfs-f-flag-constants +(patch114), ..CVE-2010-2089 (patch117) + - drop upstream patches: .expat (patch3), .brprpm (patch51), .valgrind +(patch52), .db48 (patch53), .CVE-2010-1634 (patch 116), .CVE-2008-5983 (patch +118) + +* Tue Jun 22 2010 David Malcolm - 2.6.5-17 +- Stop python bailing out with an assertion failure when UnicodeDecodeErrors +occur on very large buffers (patch 120, upstream issue 9058) + +* Mon Jun 21 2010 David Malcolm - 2.6.5-16 +- Fix an incompatibility between pyexpat and the system expat-2.0.1 that led to +a segfault running test_pyexpat.py (patch 119; upstream issue 9054) + +* Tue Jun 8 2010 David Malcolm - 2.6.5-15 +- add a flag to make it easy to turn off the debug build when troubleshooting +the rpm build + +* Sat Jun 5 2010 Dan Horák - 2.6.5-14 +- reading the timestamp counter is available only on some arches (see Python/ceval.c) +- disable --with-valgrind on s390(x) arches + +* Fri Jun 4 2010 David Malcolm - 2.6.5-13 +- ensure that the compiler is invoked with "-fwrapv" (rhbz#594819) +- CVE-2010-1634: fix various integer overflow checks in the audioop +module (patch 116) +- CVE-2010-2089: further checks within the audioop module (patch 117) +- CVE-2008-5983: the new PySys_SetArgvEx entry point from r81399 (patch 118) + +* Thu May 27 2010 David Malcolm - 2.6.5-12 +- make "pydoc -k" more robust in the face of broken modules (rhbz:461419, patch115) + +* Wed May 26 2010 David Malcolm - 2.6.5-11 +- add flags for statvfs.f_flag to the constant list in posixmodule (i.e. 
"os") +(patch 114) + +* Tue May 25 2010 David Malcolm - 2.6.5-10 +- add configure-time support for COUNT_ALLOCS and CALL_PROFILE debug options +(patch 113); enable them and the WITH_TSC option within the debug build + +* Tue May 18 2010 David Malcolm - 2.6.5-9 +- build and install two different configurations of Python: debug and standard, +packaging the debug build in a new "python-debug" subpackage (patch 112) + +* Tue May 4 2010 David Malcolm - 2.6.5-8 +- don't delete wsgiref.egg-info (rhbz:588426) + +* Mon Apr 26 2010 David Malcolm - 2.6.5-7 +- disable --with-valgrind on sparc arches + +* Mon Apr 12 2010 David Malcolm - 2.6.5-6 +- move the "bdist_wininst" command's template .exe files from the core package +to the devel subpackage, to save space (rhbz:525469) +- fix stray doublelisting of config directory wildcard in devel subpackage + +* Wed Mar 31 2010 David Malcolm - 2.6.5-5 +- update python-gdb.py from v4 to v5 (improving performance and stability, +adding commands) + +* Thu Mar 25 2010 David Malcolm - 2.6.5-4 +- update python-gdb.py from v3 to v4 (fixing infinite recursion on reference +cycles and tracebacks on bytes 0x80-0xff in strings, adding handlers for sets +and exceptions) + +* Wed Mar 24 2010 David Malcolm - 2.6.5-3 +- refresh gdb hooks to v3 (reworking how they are packaged) + +* Mon Mar 22 2010 David Malcolm - 2.6.5-2 +- remove unnecessary arch-conditionality for patch 101 + +* Fri Mar 19 2010 David Malcolm - 2.6.5-1 +- update to 2.6.5: http://www.python.org/download/releases/2.6.5/ +- replace our patch to compile against db4.8 with a patch from +upstream (patch 53, from r78974); update patch 54 since part of it is now in +that upstream patch +- update patch 110 so that it still applies in the face of upstream r78380 + +* Tue Mar 16 2010 David Malcolm - 2.6.4-23 +- fixup distutils/unixccompiler.py to remove standard library path from +rpath (patch 17) +- delete DOS batch files + +* Fri Mar 12 2010 David Malcolm - 2.6.4-22 +- add pyfuntop.stp; allow systemtap support to be disabled +- remove trailing period from tkinter summary +- don't own /usr/bin/python-config if you're not the main python + +* Thu Mar 11 2010 Marcela Mašláňová - 2.6.4-21 +- rebuild with new gdbm + +* Thu Feb 11 2010 David Malcolm - 2.6.4-20 +- avoid having the "test" subdirectory and the files within it that are in the +core subpackage also be owned by the test subpackage (rhbz:467588) + +* Wed Feb 10 2010 David Malcolm - 2.6.4-19 +- revise the systemtap patch (patch 55:python-2.6.4-dtrace.patch) to the +new version by mjw in attachment 390110 of rhbz:545179, as this should +eliminate the performance penalty for the case where the probes aren't in +use, and eliminate all architecture-specific code (rhbz:563541; except on +sparc) + +* Tue Feb 9 2010 David Malcolm - 2.6.4-18 +- add a systemtap tapset defining "python.function.entry" and +"python.function.return" to make it easy to use the static probepoint within +Python; add an example of using the tapset to the docs + +* Tue Feb 9 2010 David Malcolm - 2.6.4-17 +- add systemtap static probes (wcohen; patch 55; rh bug #545179) +- update some comments in specfile relating to gdb work +- manually byte-compile the gdb.py file with the freshly-built python to ensure +that .pyx and .pyo files make it into the debuginfo manifest if they are later +byte-compiled after find-debuginfo.sh is run + +* Mon Feb 8 2010 David Malcolm - 2.6.4-16 +- move the -gdb.py file from %%{_libdir}/INSTSONAME-gdb.py to +%%{_prefix}/lib/debug/%%{_libdir}/INSTSONAME.debug-gdb.py to 
avoid noise from +ldconfig (bug 562980), and which should also ensure it becomes part of the +debuginfo subpackage, rather than the libs subpackage +- introduce %%{py_SOVERSION} and %%{py_INSTSONAME} to reflect the upstream +configure script, and to avoid fragile scripts that try to figure this out +dynamically (e.g. for the -gdb.py change) + +* Mon Feb 8 2010 David Malcolm - 2.6.4-15 +- work around bug 562906 by supplying a fixed version of pythondeps.sh +- set %%{_python_bytecompile_errors_terminate_build} to 0 to prevent the broken +test files from killing the build on buildroots where python is installed + +* Fri Feb 5 2010 David Malcolm - 2.6.4-14 +- add gdb hooks for easier debugging + +* Fri Jan 29 2010 David Malcolm - 2.6.4-13 +- document all patches, and remove the commented-out ones + +* Tue Jan 26 2010 David Malcolm - 2.6.4-12 +- Address some of the issues identified in package review (bug 226342): + - update libs requirement on base package to use %%{name} for consistency's +sake + - convert from backticks to $() syntax throughout + - wrap value of LD_LIBRARY_PATH in quotes + - convert "/usr/bin/find" requirement to "findutils" + - remove trailing periods from summaries of -devel and -tools subpackages + - fix spelling mistake in description of -test subpackage + - convert usage of $$RPM_BUILD_ROOT to %%{buildroot} throughout, for +stylistic consistency + - supply dirmode arguments to defattr directives + +* Mon Jan 25 2010 David Malcolm - 2.6.4-11 +- update python-2.6.2-config.patch to remove downstream customization of build +of pyexpat and elementtree modules +- add patch adapted from upstream (patch 3) to add support for building against +system expat; add --with-system-expat to "configure" invocation +- remove embedded copy of expat from source tree during "prep" + +* Mon Jan 25 2010 David Malcolm - 2.6.4-10 +- introduce macros for 3 directories, replacing expanded references throughout: +%%{pylibdir}, %%{dynload_dir}, %%{site_packages} +- explicitly list all lib-dynload files, rather than dynamically gathering the +payload into a temporary text file, so that we can be sure what we are +shipping; remove now-redundant testing for presence of certain .so files +- remove embedded copy of zlib from source tree before building + +* Mon Jan 25 2010 David Malcolm - 2.6.4-9 +- change python-2.6.2-config.patch to remove our downstream change to curses +configuration in Modules/Setup.dist, so that the curses modules are built using +setup.py with the downstream default (linking against libncursesw.so, rather +than libncurses.so), rather than within the Makefile; add a test to %%install +to verify the dso files that the curses module is linked against the correct +DSO (bug 539917; changes _cursesmodule.so -> _curses.so) + +* Fri Jan 22 2010 David Malcolm - 2.6.4-8 +- rebuild (bug 556975) + +* Wed Jan 20 2010 David Malcolm - 2.6.4-7 +- move lib2to3 from -tools subpackage to main package (bug 556667) + +* Mon Jan 18 2010 David Malcolm - 2.6.4-6 +- patch Makefile.pre.in to avoid building static library (patch111, bug 556092) +- split up the "configure" invocation flags onto individual lines + +* Fri Jan 15 2010 David Malcolm - 2.6.4-5 +- replace usage of %%define with %%global +- use the %%{_isa} macro to ensure that the python-devel dependency on python +is for the correct multilib arch (#555943) +- delete bundled copy of libffi to make sure we use the system one +- replace references to /usr with %%{_prefix}; replace references to +/usr/include with %%{_includedir} + +* Wed Dec 16 
2009 David Malcolm - 2.6.4-4
+- automatically disable arena allocator when run under valgrind (upstream
+issue 2422; patch 52)
+- add patch from Josh Boyer containing diff against upstream PyBSDDB to make
+the bsddb module compile against db-4.8 (patch 53, #544275); bump the necessary
+version of db4-devel to 4.8
+- patch setup.py so that it searches for db-4.8, and enable debug output for
+said search; make Setup.dist use db-4.8 (patch 54)
+
+* Thu Nov 12 2009 David Malcolm - 2.6.4-3
+- fixup the build when __python_ver is set (Zach Sadecki; bug 533989); use
+pybasever in the files section
+
+* Thu Oct 29 2009 David Malcolm - 2.6.4-2
+- "Makefile" and the config-32/64.h file are needed by distutils/sysconfig.py
+_init_posix(), so we include them in the core package, along with their parent
+directories (bug 531901)
+
+* Mon Oct 26 2009 David Malcolm - 2.6.4-1
+- Update to 2.6.4
+
+* Fri Aug 21 2009 Tomas Mraz - 2.6.2-2
+- rebuilt with new openssl
+
+* Mon Jul 27 2009 James Antill - 2.6.2-1
+- Update to 2.6.2
+
+* Sun Jul 26 2009 Fedora Release Engineering - 2.6-11
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_12_Mass_Rebuild
+
+* Sat Jul 4 2009 Jonathan Steffan - 2.6-10
+- Move python-config to devel subpackage (#506153)
+- Update BuildRoot for new standard
+
+* Sun Jun 28 2009 Jonathan Steffan - 2.6-9
+- Update python-tools description (#448940)
+
+* Wed Apr 15 2009 Ignacio Vazquez-Abrams 2.6-8
+- Replace python-hashlib and python-uuid (#484715)
+
+* Tue Mar 17 2009 James Antill - 2.6-7
+- Use system libffi
+- Resolves: bug#490573
+- Fix SELinux execmem problems
+- Resolves: bug#488396
+
+* Thu Feb 26 2009 Fedora Release Engineering - 2.6-5
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_11_Mass_Rebuild
+
+* Fri Jan 16 2009 Tomas Mraz - 2.6-4
+- rebuild with new openssl
+
+* Tue Jan 6 2009 James Antill - 2.6-3
+- Fix distutils generated rpms.
+- Resolves: bug#236535
+
+* Wed Dec 10 2008 Ignacio Vazquez-Abrams - 2.6-2
+- Enable -lcrypt for cryptmodule
+
+* Fri Nov 28 2008 Ignacio Vazquez-Abrams - 2.6-1
+- Update to 2.6
+
+* Tue Sep 30 2008 James Antill - 2.5.2-1
+- Move to 2.5.2
+- Fix CVE-2008-2316 hashlib overflow.
+
+* Thu Jul 17 2008 Jeremy Katz - 2.5.1-30
+- Fix up the build for new rpm
+- And actually build against db4-4.7 (#455170)
+
+* Thu Jul 10 2008 Tom "spot" Callaway - 2.5.1-27
+- fix license tag
+- enable support for db4-4.7
+
+* Sun Jun 15 2008 James Antill - 2.5.1-26
+- Fix sporadic listdir problem
+- Resolves: bug#451494
+
+* Mon Apr 7 2008 James Antill - 2.5.1-25
+- Rebuild to re-gen autoconf file due to glibc change.
+- Resolves: bug#441003
+
+* Tue Mar 25 2008 James Antill - 2.5.1-24
+- Add more constants to socketmodule
+
+* Sat Mar 8 2008 James Antill - 2.5.1-22
+- Add constants to socketmodule
+- Resolves: bug#436560
+
+* Tue Feb 19 2008 Fedora Release Engineering - 2.5.1-22
+- Autorebuild for GCC 4.3
+
+* Sun Jan 13 2008 Tom "spot" Callaway - 2.5.1-21
+- rebuild for new tk in rawhide
+
+* Mon Jan 7 2008 James Antill - 2.5.1-20
+- Add valgrind support files, as doc, to python-devel
+- Relates: rhbz#418621
+- Add new API from 2.6, set_wakeup_fd ... use at own risk, presumably won't
+- change but I have no control to guarantee that.
+- Resolves: rhbz#427794
+- Add gdbinit support file, as doc, to python-devel
+
+* Fri Jan 4 2008 Tom "spot" Callaway - 2.5.1-19
+- rebuild for new tcl/tk in rawhide
+
+* Fri Dec 7 2007 James Antill - 2.5.1-18
+- Create a python-test sub-module, over 3MB of stuff no one wants.
+- Don't remove egginfo files, try this see what happens ... may revert.
+- Resolves: rhbz#414711
+
+* Mon Dec 3 2007 Jeremy Katz - 2.5.1-17
+- rebuild for new libssl
+
+* Fri Nov 30 2007 James Antill - 2.5.1-16
+- Fix pyconfig.h comment typo.
+- Add back test_support.py and the __init__.py file.
+- Resolves: rhbz#387401
+
+* Tue Oct 30 2007 James Antill - 2.5.1-15
+- Do codec lowercase in C Locale.
+- Resolves: 207134 191096
+- Fix stupid namespacing in pysqlite, minimal upgrade to 2.3.3 pysqlite
+- Resolves: 263221
+
+* Wed Oct 24 2007 James Antill - 2.5.1-14
+- Remove binutils dep. for live CD ... add work around for ctypes
+
+* Mon Oct 22 2007 James Antill - 2.5.1-13
+- Add tix buildprereq
+- Add tkinter patch
+- Resolves: #281751
+- Fix ctypes loading of libraries, add requires on binutils
+- Resolves: #307221
+- Possible fix for CVE-2007-4965 possible exploitable integer overflow
+- Resolves: #295971
+
+* Tue Oct 16 2007 Mike Bonnet - 2.5.1-12
+- fix marshalling of objects in xmlrpclib (python bug #1739842)
+
+* Fri Sep 14 2007 Jeremy Katz - 2.5.1-11
+- fix encoding of sqlite .py files to work around weird encoding problem
+ in Turkish (#283331)
+
+* Mon Sep 10 2007 Jeremy Katz - 2.5.1-10
+- work around problems with multi-line plural specification (#252136)
+
+* Tue Aug 28 2007 Jeremy Katz - 2.5.1-9
+- rebuild against new expat
+
+* Tue Aug 14 2007 Jeremy Katz - 2.5.1-8
+- build against db4.6
+
+* Tue Aug 14 2007 Dennis Gilmore - 2.5.1-7
+- add sparc64 to the list of archs for _pyconfig64_h
+
+* Fri Aug 10 2007 Jeremy Katz - 2.5.1-6
+- fix ctypes again on some arches (Hans de Goede, #251637)
+
+* Fri Jul 6 2007 Jeremy Katz - 2.5.1-5
+- link curses modules with ncursesw (#246385)
+
+* Wed Jun 27 2007 Jeremy Katz - 2.5.1-4
+- fix _elementtree.so build (#245703)
+- ensure that extension modules we expect are actually built rather than
+ having them silently fall out of the package
+
+* Tue Jun 26 2007 Jeremy Katz - 2.5.1-3
+- link with system expat (#245703)
+
+* Thu Jun 21 2007 Jeremy Katz - 2.5.1-2
+- rebuild to take advantage of hardlinking between identical pyc/pyo files
+
+* Thu May 31 2007 Jeremy Katz - 2.5.1-1
+- update to python 2.5.1
+
+* Mon Mar 19 2007 Jeremy Katz - 2.5.3-12
+- fix alpha build (#231961)
+
+* Tue Feb 13 2007 Jeremy Katz - 2.5.3-11
+- tcl/tk was reverted; rebuild again
+
+* Thu Feb 1 2007 Jeremy Katz - 2.5.3-10
+- rebuild for new tcl/tk
+
+* Tue Jan 16 2007 Miroslav Lichvar - 2.5.3-9
+- link with ncurses
+
+* Sat Jan 6 2007 Jeremy Katz - 2.5.3-8
+- fix extensions to use shared libpython (#219564)
+- all 64bit platforms need the regex fix (#122304)
+
+* Wed Jan 3 2007 Jeremy Katz - 2.5.3-7
+- fix ctypes to not require execstack (#220669)
+
+* Fri Dec 15 2006 Jeremy Katz - 2.5.3-6
+- don't link against compat-db (Robert Scheck)
+
+* Wed Dec 13 2006 Jarod Wilson - 2.5.3-5
+- fix invalid assert in debug mode (upstream changeset 52622)
+
+* Tue Dec 12 2006 Jeremy Katz - 2.5.3-4
+- obsolete/provide python-ctypes (#219256)
+
+* Mon Dec 11 2006 Jeremy Katz - 2.5.3-3
+- fix atexit traceback with failed syslog logger (#218214)
+- split libpython into python-libs subpackage for multilib apps
+ embedding python interpreters
+
+* Wed Dec 6 2006 Jeremy Katz - 2.5.3-2
+- disable installation of .egg-info files for now
+
+* Tue Dec 5 2006 Jeremy Katz
+- support db 4.5
+- obsolete python-elementtree; since it requires some code tweaks, don't
+ provide it
+- obsolete old python-sqlite; provide the version that's actually included
+
+* Mon Oct 30 2006 Jeremy Katz
+- fix _md5 and
_sha modules (Robert Sheck) +- no longer provide optik compat; it's been a couple of years now +- no longer provide the old shm module; if this is still needed, let's + build it separately +- no longer provide japanese codecs; should be a separate package + +* Mon Oct 23 2006 Jeremy Katz - 2.5-0 +- update to 2.5.0 final + +* Fri Aug 18 2006 Mihai Ibanescu - 2.4.99.c1 +- Updated to 2.5c1. Merged fixes from FC6 too: +- Fixed bug #199373 (on some platforms CFLAGS is needed when linking) +- Fixed bug #198971 (case conversion not locale safe in logging library) +- Verified bug #201434 (distutils.sysconfig is confused by the change to make + python-devel multilib friendly) is fixed upstream + +* Sun Jul 16 2006 Mihai Ibanescu - 2.4.99.b2 +- Updated to 2.5b2 (which for comparison reasons is re-labeled 2.4.99.b2) + +* Fri Jun 23 2006 Mihai Ibanescu - 2.4.99.b1 +- Updated to 2.5b1 (which for comparison reasons is re-labeled 2.4.99.b1) + +* Tue Jun 13 2006 Jeremy Katz - 2.4.3-11.FC6 +- and fix it for real + +* Tue Jun 13 2006 Jeremy Katz - 2.4.3-10.FC6 +- fix python-devel on ia64 + +* Tue Jun 13 2006 Mihai Ibanescu - 2.4.3-9 +- Fixed python-devel to be multilib friendly (bug #192747, #139911) + +* Tue Jun 13 2006 Mihai Ibanescu - 2.4.3-8 +- Only copying mkhowto from the Docs - we don't need perl dependencies from + python-tools. + +* Mon Jun 12 2006 Mihai Ibanescu - 2.4.3-7 +- Fixed bug #121198 (webbrowser.py should use the user's preferences first) + +* Mon Jun 12 2006 Mihai Ibanescu - 2.4.3-6 +- Fixed bug #192592 (too aggressive assertion fails) - SF#1257960 +- Fixed bug #167468 (Doc/tools not included) - added in the python-tools package + +* Thu Jun 8 2006 Mihai Ibanescu - 2.4.3-5 +- Fixed bug #193484 (added pydoc in the main package) + +* Mon Jun 5 2006 Mihai Ibanescu - 2.4.3-4 +- Added dist in the release + +* Mon May 15 2006 Mihai Ibanescu - 2.4.3-3 +- rebuilt to fix broken libX11 dependency + +* Wed Apr 12 2006 Jeremy Katz - 2.4.3-2 +- rebuild with new gcc to fix #188649 + +* Thu Apr 6 2006 Mihai Ibanescu - 2.4.3-1 +- Updated to 2.4.3 + +* Fri Feb 10 2006 Jesse Keating - 2.4.2-3.2.1 +- bump again for double-long bug on ppc(64) + +* Fri Feb 10 2006 Mihai Ibanescu - 2.4.3-3.2 +- rebuilt for newer tix + +* Tue Feb 07 2006 Jesse Keating - 2.4.2-3.1 +- rebuilt for new gcc4.1 snapshot and glibc changes + +* Fri Jan 20 2006 Mihai Ibanescu 2.4.2-3 +- fixed #136654 for another instance of audiotest.au + +* Fri Dec 09 2005 Jesse Keating +- rebuilt + +* Sat Nov 19 2005 Bill Nottingham 2.4.2-2 +- fix build for modular X, remove X11R6 path references + +* Tue Nov 15 2005 Mihai Ibanescu 2.4.2-1 +- Upgraded to 2.4.2 +- BuildRequires autoconf + +* Wed Nov 9 2005 Mihai Ibanescu 2.4.1-16 +- Rebuilding against newer openssl. +- XFree86-devel no longer exists + +* Mon Sep 26 2005 Peter Jones 2.4.1-14 +- Once more -- this time, to fix -EPERM when you run it in a directory + you can't read from. + +* Mon Sep 26 2005 Peter Jones 2.4.1-13 +- So, 5 or 6 people have said it works for them with this patch... + +* Sun Sep 25 2005 Peter Jones 2.4.1-12 +- Fixed bug #169159 (check for argc>0 and argv[0] == NULL, not just + argv[0][0]='\0') + Reworked the patch from -8 a bit more. + +* Fri Sep 23 2005 Mihai Ibanescu 2.4.1-10 +- Fixed bug #169159 (don't let python core dump if no arguments are passed in) + Reworked the patch from -8 a bit more. + +* Thu Sep 22 2005 Peter Jones 2.4.1-8 +- Fix bug #169046 more correctly. 
+ +* Thu Sep 22 2005 Mihai Ibanescu 2.4.1-7 +- Fixed bug #169046 (realpath is unsafe); thanks to + Peter Jones and Arjan van de Ven for + diagnosing and the patch. + +* Tue Sep 20 2005 Mihai Ibanescu 2.4.1-4 +- Fixed bug #168655 (fixes for building as python24) + +* Tue Jul 26 2005 Mihai Ibanescu 2.4.1-3 +- Fixed bug #163435 (pynche doesn't start)) + +* Wed Apr 20 2005 Mihai Ibanescu 2.4.1-2 +- Fixed bug #143667 (python should own /usr/lib/python* on 64-bit systems, for + noarch packages) +- Fixed bug #143419 (BuildRequires db4 is not versioned) + +* Wed Apr 6 2005 Mihai Ibanescu 2.4.1-1 +- updated to 2.4.1 + +* Mon Mar 14 2005 Mihai Ibanescu 2.4-6 +- building the docs from a different source rpm, to decouple bootstrapping + python from having tetex installed + +* Fri Mar 11 2005 Dan Williams 2.4-5 +- Rebuild to pick up new libssl.so.5 + +* Wed Feb 2 2005 Mihai Ibanescu 2.4-4 +- Fixed security issue in SimpleXMLRPCServer.py (#146647) + +* Wed Jan 12 2005 Tim Waugh 2.4-3 +- Rebuilt for new readline. + +* Mon Dec 6 2004 Jeff Johnson 2.4-2 +- db-4.3.21 returns DB_BUFFER_SMALL rather than ENOMEM (#141994). +- add Provide: python(abi) = 2.4 +- include msgfmt/pygettext *.pyc and *.pyo from brp-python-bytecompile. + +* Fri Dec 3 2004 Mihai Ibanescu 2.4-1 +- Python-2.4.tar.bz2 (final) + +* Fri Nov 19 2004 Mihai Ibanescu 2.4-0.c1.1 +- Python-2.4c1.tar.bz2 (release candidate 1) + +* Thu Nov 11 2004 Jeff Johnson 2.4-0.b2.4 +- rebuild against db-4.3.21. + +* Mon Nov 8 2004 Jeremy Katz - 2.4-0.b2.3 +- fix the lib64 patch so that 64bit arches still look in /usr/lib/python... + +* Mon Nov 8 2004 Jeremy Katz - 2.4-0.b2.2 +- cryptmodule still needs -lcrypt (again) + +* Thu Nov 4 2004 Mihai Ibanescu 2.4-0.b2.1 +- Updated to python 2.4b2 (and labeled it 2.4-0.b2.1 to avoid breaking rpm's + version comparison) + +* Thu Nov 4 2004 Mihai Ibanescu 2.3.4-13 +- Fixed bug #138112 (python overflows stack buffer) - SF bug 105470 + +* Tue Nov 2 2004 Mihai Ibanescu 2.3.4-12 +- Fixed bugs #131439 #136023 #137863 (.pyc/.pyo files had the buildroot added) + +* Tue Oct 26 2004 Mihai Ibanescu 2.3.4-11 +- Fixed bug #136654 (python has sketchy audio clip) + +* Tue Aug 31 2004 Mihai Ibanescu 2.3.4-10 +- Fixed bug #77418 (Demo dir not packaged) +- More tweaking on #19347 (Moved Tools/ under /usr/lib/python2.3/Tools) + +* Fri Aug 13 2004 Mihai Ibanescu 2.3.4-8 +- Fixed bug #129769: Makefile in new python conflicts with older version found + in old python-devel +- Reorganized the spec file to get rid of the aspython2 define; __python_ver + is more powerful. 
+ +* Tue Aug 3 2004 Mihai Ibanescu 2.3.4-7 +- Including html documentation for non-i386 arches +- Fixed #125362 (python-doc html files have japanese character encoding) +- Fixed #128923 (missing dependency between python and python-devel) + +* Fri Jul 30 2004 Mihai Ibanescu 2.3.4-6 +- Fixed #128030 (help() not printing anything) +- Fixed #125472 (distutils.sysconfig.get_python_lib() not returning the right + path on 64-bit systems) +- Fixed #127357 (building python as a shared library) +- Fixed #19347 (including the contents of Tools/scripts/ in python-tools) + +* Tue Jun 15 2004 Elliot Lee +- rebuilt + +* Tue Jun 8 2004 Mihai Ibanescu 2.3.4-3 +- Added an optik.py that provides the same interface from optparse for + backward compatibility; obsoleting python-optik + +* Mon Jun 7 2004 Mihai Ibanescu 2.3.4-2 +- Patched bdist_rpm to allow for builds of multiple binary rpms (bug #123598) + +* Fri Jun 4 2004 Mihai Ibanescu 2.3.4-1 +- Updated to 2.3.4-1 with Robert Scheck's help (bug #124764) +- Added BuildRequires: tix-devel (bug #124918) + +* Fri May 7 2004 Mihai Ibanescu 2.3.3-6 +- Correct fix for #122304 from upstream: + http://sourceforge.net/tracker/?func=detail&atid=105470&aid=931848&group_id=5470 + +* Thu May 6 2004 Mihai Ibanescu 2.3.3-4 +- Fix for bug #122304 : splitting the domain name fails on 64-bit arches +- Fix for bug #120879 : including Makefile into the main package + +- Requires XFree86-devel instead of -libs (see bug #118442) + +* Tue Mar 16 2004 Mihai Ibanescu 2.3.3-3 +- Requires XFree86-devel instead of -libs (see bug #118442) + +* Tue Mar 02 2004 Elliot Lee +- rebuilt + +* Fri Feb 13 2004 Elliot Lee +- rebuilt + +* Fri Dec 19 2003 Jeff Johnson 2.3.3-1 +- upgrade to 2.3.3. + +* Sat Dec 13 2003 Jeff Johnson 2.3.2-9 +- rebuild against db-4.2.52. + +* Fri Dec 12 2003 Jeremy Katz 2.3.2-8 +- more rebuilding for new tcl/tk + +* Wed Dec 3 2003 Jeff Johnson 2.3.2-7.1 +- rebuild against db-4.2.42. + +* Fri Nov 28 2003 Mihai Ibanescu 2.3.2-7 +- rebuilt against newer tcl/tk + +* Mon Nov 24 2003 Mihai Ibanescu 2.3.2-6 +- added a Provides: python-abi + +* Wed Nov 12 2003 Mihai Ibanescu 2.3.2-5 +- force CC (#109268) + +* Sun Nov 9 2003 Jeremy Katz 2.3.2-4 +- cryptmodule still needs -lcrypt + +* Wed Nov 5 2003 Mihai Ibanescu 2.3.2-2 +- Added patch for missing mkhowto + +* Thu Oct 16 2003 Mihai Ibanescu 2.3.2-1 +- Updated to 2.3.2 + +* Thu Sep 25 2003 Mihai Ibanescu 2.3.1-1 +- 2.3.1 final + +* Tue Sep 23 2003 Mihai Ibanescu 2.3.1-0.8.RC1 +- Building the python 2.3.1 release candidate +- Updated the lib64 patch + +* Wed Jul 30 2003 Mihai Ibanescu 2.3-0.2 +- Building python 2.3 +- Added more BuildRequires +- Updated the startup files for modulator and pynche; idle installs its own + now. 
+ +* Thu Jul 3 2003 Mihai Ibanescu 2.2.3-4 +- Rebuilt against newer db4 packages (bug #98539) + +* Mon Jun 9 2003 Elliot Lee 2.2.3-3 +- rebuilt + +* Sat Jun 7 2003 Mihai Ibanescu 2.2.3-2 +- Rebuilt + +* Fri Jun 6 2003 Mihai Ibanescu 2.2.3-1 +- Upgraded to 2.2.3 + +* Wed Apr 2 2003 Mihai Ibanescu 2.2.2-28 +- Rebuilt + +* Wed Apr 2 2003 Mihai Ibanescu 2.2.2-27 +- Modified the ftpuri patch conforming to http://ietf.org/rfc/rfc1738.txt + +* Mon Feb 24 2003 Elliot Lee +- rebuilt + +* Mon Feb 24 2003 Mihai Ibanescu 2.2.2-25 +- Fixed bug #84886: pydoc dies when run w/o arguments +- Fixed bug #84205: add python shm module back (used to be shipped with 1.5.2) +- Fixed bug #84966: path in byte-compiled code still wrong + +* Thu Feb 20 2003 Jeremy Katz 2.2.2-23 +- ftp uri's should be able to specify being rooted at the root instead of + where you login via ftp (#84692) + +* Mon Feb 10 2003 Mihai Ibanescu 2.2.2-22 +- Using newer Japanese codecs (1.4.9). Thanks to + Peter Bowen for pointing this out. + +* Thu Feb 6 2003 Mihai Ibanescu 2.2.2-21 +- Rebuild + +* Wed Feb 5 2003 Mihai Ibanescu 2.2.2-20 +- Release number bumped really high: turning on UCS4 (ABI compatibility + breakage) + +* Fri Jan 31 2003 Mihai Ibanescu 2.2.2-13 +- Attempt to look both in /usr/lib64 and /usr/lib/python2.2/site-packages/: + some work on python-2.2.2-lib64.patch + +* Thu Jan 30 2003 Mihai Ibanescu 2.2.2-12 +- Rebuild to incorporate the removal of .lib64 and - files. + +* Thu Jan 30 2003 Mihai Ibanescu 2.2.2-11.7.3 +- Fixed bug #82544: Errata removes most tools +- Fixed bug #82435: Python 2.2.2 errata breaks redhat-config-users +- Removed .lib64 and - files that get installed after we fix the multilib + .py files. + +* Wed Jan 22 2003 Tim Powers +- rebuilt + +* Wed Jan 15 2003 Jens Petersen 2.2.2-10 +- rebuild to update tkinter's tcltk deps +- convert changelog to utf-8 + +* Tue Jan 7 2003 Nalin Dahyabhai 2.2.2-9 +- rebuild + +* Fri Jan 3 2003 Nalin Dahyabhai +- pick up OpenSSL cflags and ldflags from pkgconfig if available + +* Thu Jan 2 2003 Jeremy Katz 2.2.2-8 +- urllib2 didn't support non-anonymous ftp. add support based on how + urllib did it (#80676, #78168) + +* Mon Dec 16 2002 Mihai Ibanescu 2.2.2-7 +- Fix bug #79647 (Rebuild of SRPM fails if python isn't installed) +- Added a bunch of missing BuildRequires found while fixing the + above-mentioned bug + +* Tue Dec 10 2002 Tim Powers 2.2.2-6 +- rebuild to fix broken tcltk deps for tkinter + +* Fri Nov 22 2002 Mihai Ibanescu +2.2.2-3.7.3 +- Recompiled for 7.3 (to fix the -lcrypt bug) +- Fix for the spurious error message at the end of the build (build-requires + gets confused by executable files starting with """"): make the tests + non-executable. + +* Wed Nov 20 2002 Mihai Ibanescu +2.2.2-5 +- Fixed configuration patch to add -lcrypt when compiling cryptmodule.c + +2.2.2-4 +- Spec file change from Matt Wilson to disable linking + with the C++ compiler. + +* Mon Nov 11 2002 Mihai Ibanescu +2.2.2-3.* +- Merged patch from Karsten Hopp from 2.2.1-17hammer to + use %%{_libdir} +- Added XFree86-libs as BuildRequires (because of tkinter) +- Fixed duplicate listing of plat-linux2 +- Fixed exclusion of lib-dynload/japanese +- Added lib64 patch for the japanese codecs +- Use setup magic instead of using tar directly on JapaneseCodecs + +* Tue Nov 5 2002 Mihai Ibanescu +2.2.2-2 +- Fix #76912 (python-tools contains idle, which uses tkinter, but there is no + requirement of tkinter from python-tools). 
+- Fix #74013 (rpm is missing the /usr/lib/python2.2/test directory)
+
+* Mon Nov 4 2002 Mihai Ibanescu
+- builds as python2 require a different libdb
+- changed the buildroot name of python to match python2 builds
+
+* Fri Nov 1 2002 Mihai Ibanescu
+- updated python to 2.2.2 and adjusted the patches accordingly
+
+* Mon Oct 21 2002 Mihai Ibanescu
+- Fix #53930 (Python-2.2.1-buildroot-bytecode.patch)
+- Added BuildPrereq dependency on gcc-c++
+
+* Fri Aug 30 2002 Trond Eivind Glomsrød 2.2.1-17
+- security fix for _execvpe
+
+* Tue Aug 13 2002 Trond Eivind Glomsrød 2.2.1-16
+- Fix #71011,#71134, #58157
+
+* Wed Aug 7 2002 Trond Eivind Glomsrød 2.2.1-15
+- Resurrect tkinter
+- Fix for distutils (#67671)
+- Fix #69962
+
+* Thu Jul 25 2002 Trond Eivind Glomsrød 2.2.1-14
+- Obsolete tkinter/tkinter2 (#69838)
+
+* Tue Jul 23 2002 Trond Eivind Glomsrød 2.2.1-13
+- Doc fixes (#53951) - not on alpha at the moment
+
+* Mon Jul 8 2002 Trond Eivind Glomsrød 2.2.1-12
+- fix pydoc (#68082)
+
+* Mon Jul 8 2002 Trond Eivind Glomsrød 2.2.1-11
+- Add db4-devel as a BuildPrereq
+
+* Fri Jun 21 2002 Tim Powers 2.2.1-10
+- automated rebuild
+
+* Mon Jun 17 2002 Trond Eivind Glomsrød 2.2.1-9
+- Add Japanese codecs (#66352)
+
+* Tue Jun 11 2002 Trond Eivind Glomsrød 2.2.1-8
+- No more tkinter...
+
+* Wed May 29 2002 Trond Eivind Glomsrød 2.2.1-7
+- Rebuild
+
+* Tue May 21 2002 Trond Eivind Glomsrød 2.2.1-6
+- Add the email subcomponent (#65301)
+
+* Fri May 10 2002 Trond Eivind Glomsrød 2.2.1-5
+- Rebuild
+
+* Thu May 02 2002 Than Ngo 2.2.1-4
+- rebuild in new environment
+
+* Tue Apr 23 2002 Trond Eivind Glomsrød
+- Use ucs2, not ucs4, to avoid breaking tkinter (#63965)
+
+* Mon Apr 22 2002 Trond Eivind Glomsrød 2.2.1-2
+- Make it use db4
+
+* Fri Apr 12 2002 Trond Eivind Glomsrød 2.2.1-1
+- 2.2.1 - a bugfix-only release
+
+* Fri Apr 12 2002 Trond Eivind Glomsrød 2.2-16
+- the same, but in builddirs - this will remove them from the
+ docs package, which doesn't look in the buildroot for files.
+
+* Fri Apr 12 2002 Trond Eivind Glomsrød 2.2-15
+- Get rid of temporary files and .cvsignores included
+ in the tarball and make install
+
+* Fri Apr 5 2002 Trond Eivind Glomsrød 2.2-14
+- Don't own lib-tk in main package, only in tkinter (#62753)
+
+* Mon Mar 25 2002 Trond Eivind Glomsrød 2.2-13
+- rebuild
+
+* Mon Mar 25 2002 Trond Eivind Glomsrød 2.2-12
+- rebuild
+
+* Fri Mar 1 2002 Trond Eivind Glomsrød 2.2-11
+- Add a not to the Distutils obsoletes test (doh!)
+
+* Fri Mar 1 2002 Trond Eivind Glomsrød 2.2-10
+- Rebuild
+
+* Mon Feb 25 2002 Trond Eivind Glomsrød 2.2-9
+- Only obsolete Distutils when built as python
+
+* Thu Feb 21 2002 Trond Eivind Glomsrød 2.2-8
+- Make files in /usr/bin install side by side with python 1.5 when
+- Drop explicit requirement of db4
+ built as python2
+
+* Thu Jan 31 2002 Elliot Lee 2.2-7
+- Use version and pybasever macros to make updating easy
+- Use _smp_mflags macro
+
+* Tue Jan 29 2002 Trond Eivind Glomsrød 2.2-6
+- Add db4-devel to BuildPrereq
+
+* Fri Jan 25 2002 Nalin Dahyabhai 2.2-5
+- disable ndbm support, which is db2 in disguise (really interesting things
+ can happen when you mix db2 and db4 in a single application)
+
+* Thu Jan 24 2002 Trond Eivind Glomsrød 2.2-4
+- Obsolete subpackages if necessary
+- provide versioned python2
+- build with db4
+
+* Wed Jan 16 2002 Trond Eivind Glomsrød 2.2-3
+- Alpha toolchain broken. Disable build on alpha.
+- New openssl
+
+* Wed Dec 26 2001 Trond Eivind Glomsrød 2.2-1
+- 2.2 final
+
+* Fri Dec 14 2001 Trond Eivind Glomsrød 2.2-0.11c1
+- 2.2 RC 1
+- Don't include the _tkinter module in the main package - it's
+ already in the tkinter package
+- Turn off the mpzmodule, something broke in the buildroot
+
+* Wed Nov 28 2001 Trond Eivind Glomsrød 2.2-0.10b2
+- Use -fPIC for OPT as well, in lack of a proper libpython.so
+
+* Mon Nov 26 2001 Matt Wilson 2.2-0.9b2
+- changed DESTDIR to point to / so that distutils will install dynload
+ modules properly in the installroot
+
+* Fri Nov 16 2001 Matt Wilson 2.2-0.8b2
+- 2.2b2
+
+* Fri Oct 26 2001 Matt Wilson 2.2-0.7b1
+- python2ify
+
+* Fri Oct 19 2001 Trond Eivind Glomsrød 2.2-0.5b1
+- 2.2b1
+
+* Sun Sep 30 2001 Trond Eivind Glomsrød 2.2-0.4a4
+- 2.2a4
+- Enable UCS4 support
+- Enable IPv6
+- Provide distutils
+- Include msgfmt.py and pygettext.py
+
+* Fri Sep 14 2001 Trond Eivind Glomsrød 2.2-0.3a3
+- Obsolete Distutils, which is now part of the main package
+- Obsolete python2
+
+* Thu Sep 13 2001 Trond Eivind Glomsrød 2.2-0.2a3
+- Add docs, tools and tkinter subpackages, to match the 1.5 layout
+
+* Wed Sep 12 2001 Trond Eivind Glomsrød 2.2-0.1a3
+- 2.2a3
+- don't build tix and blt extensions
+
+* Mon Aug 13 2001 Trond Eivind Glomsrød
+- Add tk and tix to build dependencies
+
+* Sat Jul 21 2001 Trond Eivind Glomsrød
+- 2.1.1 bugfix release - with a GPL compatible license
+
+* Fri Jul 20 2001 Trond Eivind Glomsrød
+- Add new build dependencies (#49753)
+
+* Tue Jun 26 2001 Nalin Dahyabhai
+- build with -fPIC
+
+* Fri Jun 1 2001 Trond Eivind Glomsrød
+- 2.1
+- reorganization of file includes
+
+* Wed Dec 20 2000 Trond Eivind Glomsrød
+- fix the "requires" clause, it lacked a space causing problems
+- use %%{_tmppath}
+- don't define name, version etc
+- add the available patches from the Python home page
+
+* Fri Dec 15 2000 Matt Wilson
+- added devel subpackage
+
+* Fri Dec 15 2000 Matt Wilson
+- modify all files to use "python2.0" as the interpreter
+- don't build the Expat bindings
+- build against db1
+
+* Mon Oct 16 2000 Jeremy Hylton
+- updated for 2.0 final
+
+* Mon Oct 9 2000 Jeremy Hylton
+- updated for 2.0c1
+- build audioop, imageop, and rgbimg extension modules
+- include xml.parsers subpackage
+- add test.xml.out to files list
+
+* Thu Oct 5 2000 Jeremy Hylton
+- added bin/python2.0 to files list (suggested by Martin v. Löwis)
+
+* Tue Sep 26 2000 Jeremy Hylton
+- updated for release 1 of 2.0b2
+- use .bz2 version of Python source
+
+* Tue Sep 12 2000 Jeremy Hylton
+- Version 2 of 2.0b1
+- Make the package relocatable. Thanks to Suchandra Thapa.
+- Exclude Tkinter from main RPM. If it is in a separate RPM, it is
+ easier to track Tk releases.