import python3-3.6.8-36.el8
This commit is contained in:
commit
4c1443b0f5
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
SOURCES/Python-3.6.8-noexe.tar.xz
|
1
.python3.metadata
Normal file
1
.python3.metadata
Normal file
@ -0,0 +1 @@
|
||||
a39802ac8f0c61645c6a50fbdd32e3ca92862ff5 SOURCES/Python-3.6.8-noexe.tar.xz
|
19
SOURCES/00001-rpath.patch
Normal file
19
SOURCES/00001-rpath.patch
Normal file
@ -0,0 +1,19 @@
|
||||
diff -up Python-3.1.1/Lib/distutils/unixccompiler.py.rpath Python-3.1.1/Lib/distutils/unixccompiler.py
|
||||
--- Python-3.1.1/Lib/distutils/unixccompiler.py.rpath 2009-09-04 17:29:34.000000000 -0400
|
||||
+++ Python-3.1.1/Lib/distutils/unixccompiler.py 2009-09-04 17:49:54.000000000 -0400
|
||||
@@ -141,6 +141,15 @@ class UnixCCompiler(CCompiler):
|
||||
if sys.platform == "cygwin":
|
||||
exe_extension = ".exe"
|
||||
|
||||
+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
|
||||
+ """Remove standard library path from rpath"""
|
||||
+ libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args(
|
||||
+ libraries, library_dirs, runtime_library_dirs)
|
||||
+ libdir = sysconfig.get_config_var('LIBDIR')
|
||||
+ if runtime_library_dirs and (libdir in runtime_library_dirs):
|
||||
+ runtime_library_dirs.remove(libdir)
|
||||
+ return libraries, library_dirs, runtime_library_dirs
|
||||
+
|
||||
def preprocess(self, source, output_file=None, macros=None,
|
||||
include_dirs=None, extra_preargs=None, extra_postargs=None):
|
||||
fixed_args = self._fix_compile_args(None, macros, include_dirs)
|
218
SOURCES/00102-lib64.patch
Normal file
218
SOURCES/00102-lib64.patch
Normal file
@ -0,0 +1,218 @@
|
||||
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
|
||||
index 9474e9c..c0ce4c6 100644
|
||||
--- a/Lib/distutils/command/install.py
|
||||
+++ b/Lib/distutils/command/install.py
|
||||
@@ -30,14 +30,14 @@ WINDOWS_SCHEME = {
|
||||
INSTALL_SCHEMES = {
|
||||
'unix_prefix': {
|
||||
'purelib': '$base/lib/python$py_version_short/site-packages',
|
||||
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
|
||||
+ 'platlib': '$platbase/lib64/python$py_version_short/site-packages',
|
||||
'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
|
||||
'scripts': '$base/bin',
|
||||
'data' : '$base',
|
||||
},
|
||||
'unix_home': {
|
||||
'purelib': '$base/lib/python',
|
||||
- 'platlib': '$base/lib/python',
|
||||
+ 'platlib': '$base/lib64/python',
|
||||
'headers': '$base/include/python/$dist_name',
|
||||
'scripts': '$base/bin',
|
||||
'data' : '$base',
|
||||
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
|
||||
index 026cca7..6d3e077 100644
|
||||
--- a/Lib/distutils/sysconfig.py
|
||||
+++ b/Lib/distutils/sysconfig.py
|
||||
@@ -132,8 +132,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
|
||||
prefix = plat_specific and EXEC_PREFIX or PREFIX
|
||||
|
||||
if os.name == "posix":
|
||||
+ if plat_specific or standard_lib:
|
||||
+ lib = "lib64"
|
||||
+ else:
|
||||
+ lib = "lib"
|
||||
libpython = os.path.join(prefix,
|
||||
- "lib", "python" + get_python_version())
|
||||
+ lib, "python" + get_python_version())
|
||||
if standard_lib:
|
||||
return libpython
|
||||
else:
|
||||
diff a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
|
||||
--- a/Lib/distutils/tests/test_install.py
|
||||
+++ b/Lib/distutils/tests/test_install.py
|
||||
@@ -57,8 +57,9 @@
|
||||
self.assertEqual(got, expected)
|
||||
|
||||
libdir = os.path.join(destination, "lib", "python")
|
||||
+ platlibdir = os.path.join(destination, "lib64", "python")
|
||||
check_path(cmd.install_lib, libdir)
|
||||
- check_path(cmd.install_platlib, libdir)
|
||||
+ check_path(cmd.install_platlib, platlibdir)
|
||||
check_path(cmd.install_purelib, libdir)
|
||||
check_path(cmd.install_headers,
|
||||
os.path.join(destination, "include", "python", "foopkg"))
|
||||
diff --git a/Lib/site.py b/Lib/site.py
|
||||
index a84e3bb..ba0d3ea 100644
|
||||
--- a/Lib/site.py
|
||||
+++ b/Lib/site.py
|
||||
@@ -303,11 +303,15 @@ def getsitepackages(prefixes=None):
|
||||
seen.add(prefix)
|
||||
|
||||
if os.sep == '/':
|
||||
+ sitepackages.append(os.path.join(prefix, "lib64",
|
||||
+ "python" + sys.version[:3],
|
||||
+ "site-packages"))
|
||||
sitepackages.append(os.path.join(prefix, "lib",
|
||||
"python%d.%d" % sys.version_info[:2],
|
||||
"site-packages"))
|
||||
else:
|
||||
sitepackages.append(prefix)
|
||||
+ sitepackages.append(os.path.join(prefix, "lib64", "site-packages"))
|
||||
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
|
||||
if sys.platform == "darwin":
|
||||
# for framework builds *only* we add the standard Apple
|
||||
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
|
||||
index b9bbfe5..2a5f29c 100644
|
||||
--- a/Lib/sysconfig.py
|
||||
+++ b/Lib/sysconfig.py
|
||||
@@ -20,10 +20,10 @@ __all__ = [
|
||||
|
||||
_INSTALL_SCHEMES = {
|
||||
'posix_prefix': {
|
||||
- 'stdlib': '{installed_base}/lib/python{py_version_short}',
|
||||
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
|
||||
+ 'stdlib': '{installed_base}/lib64/python{py_version_short}',
|
||||
+ 'platstdlib': '{platbase}/lib64/python{py_version_short}',
|
||||
'purelib': '{base}/lib/python{py_version_short}/site-packages',
|
||||
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
|
||||
+ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages',
|
||||
'include':
|
||||
'{installed_base}/include/python{py_version_short}{abiflags}',
|
||||
'platinclude':
|
||||
@@ -61,10 +61,10 @@ _INSTALL_SCHEMES = {
|
||||
'data': '{userbase}',
|
||||
},
|
||||
'posix_user': {
|
||||
- 'stdlib': '{userbase}/lib/python{py_version_short}',
|
||||
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
|
||||
+ 'stdlib': '{userbase}/lib64/python{py_version_short}',
|
||||
+ 'platstdlib': '{userbase}/lib64/python{py_version_short}',
|
||||
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
|
||||
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
|
||||
+ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages',
|
||||
'include': '{userbase}/include/python{py_version_short}',
|
||||
'scripts': '{userbase}/bin',
|
||||
'data': '{userbase}',
|
||||
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
|
||||
index f698927..bc977b5 100644
|
||||
--- a/Lib/test/test_site.py
|
||||
+++ b/Lib/test/test_site.py
|
||||
@@ -248,8 +248,8 @@ class HelperFunctionsTests(unittest.TestCase):
|
||||
self.assertEqual(dirs[1], wanted)
|
||||
elif os.sep == '/':
|
||||
# OS X non-framework builds, Linux, FreeBSD, etc
|
||||
- self.assertEqual(len(dirs), 1)
|
||||
- wanted = os.path.join('xoxo', 'lib',
|
||||
+ self.assertEqual(len(dirs), 2)
|
||||
+ wanted = os.path.join('xoxo', 'lib64',
|
||||
'python%d.%d' % sys.version_info[:2],
|
||||
'site-packages')
|
||||
self.assertEqual(dirs[0], wanted)
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index 8fa7934..a693917 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -126,7 +126,7 @@ LIBDIR= @libdir@
|
||||
MANDIR= @mandir@
|
||||
INCLUDEDIR= @includedir@
|
||||
CONFINCLUDEDIR= $(exec_prefix)/include
|
||||
-SCRIPTDIR= $(prefix)/lib
|
||||
+SCRIPTDIR= $(prefix)/lib64
|
||||
ABIFLAGS= @ABIFLAGS@
|
||||
|
||||
# Detailed destination directories
|
||||
diff --git a/Modules/getpath.c b/Modules/getpath.c
|
||||
index 65b47a3..eaa756c 100644
|
||||
--- a/Modules/getpath.c
|
||||
+++ b/Modules/getpath.c
|
||||
@@ -494,7 +494,7 @@ calculate_path(void)
|
||||
_pythonpath = Py_DecodeLocale(PYTHONPATH, NULL);
|
||||
_prefix = Py_DecodeLocale(PREFIX, NULL);
|
||||
_exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL);
|
||||
- lib_python = Py_DecodeLocale("lib/python" VERSION, NULL);
|
||||
+ lib_python = Py_DecodeLocale("lib64/python" VERSION, NULL);
|
||||
|
||||
if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) {
|
||||
Py_FatalError(
|
||||
@@ -683,7 +683,7 @@ calculate_path(void)
|
||||
}
|
||||
else
|
||||
wcsncpy(zip_path, _prefix, MAXPATHLEN);
|
||||
- joinpath(zip_path, L"lib/python00.zip");
|
||||
+ joinpath(zip_path, L"lib64/python00.zip");
|
||||
bufsz = wcslen(zip_path); /* Replace "00" with version */
|
||||
zip_path[bufsz - 6] = VERSION[0];
|
||||
zip_path[bufsz - 5] = VERSION[2];
|
||||
@@ -695,7 +695,7 @@ calculate_path(void)
|
||||
fprintf(stderr,
|
||||
"Could not find platform dependent libraries <exec_prefix>\n");
|
||||
wcsncpy(exec_prefix, _exec_prefix, MAXPATHLEN);
|
||||
- joinpath(exec_prefix, L"lib/lib-dynload");
|
||||
+ joinpath(exec_prefix, L"lib64/lib-dynload");
|
||||
}
|
||||
/* If we found EXEC_PREFIX do *not* reduce it! (Yet.) */
|
||||
|
||||
diff --git a/setup.py b/setup.py
|
||||
index 0f2dfc4..da37896 100644
|
||||
--- a/setup.py
|
||||
+++ b/setup.py
|
||||
@@ -492,7 +492,7 @@ class PyBuildExt(build_ext):
|
||||
# directories (i.e. '.' and 'Include') must be first. See issue
|
||||
# 10520.
|
||||
if not cross_compiling:
|
||||
- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
|
||||
+ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64')
|
||||
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
|
||||
# only change this for cross builds for 3.3, issues on Mageia
|
||||
if cross_compiling:
|
||||
@@ -780,11 +780,11 @@ class PyBuildExt(build_ext):
|
||||
elif curses_library:
|
||||
readline_libs.append(curses_library)
|
||||
elif self.compiler.find_library_file(lib_dirs +
|
||||
- ['/usr/lib/termcap'],
|
||||
+ ['/usr/lib64/termcap'],
|
||||
'termcap'):
|
||||
readline_libs.append('termcap')
|
||||
exts.append( Extension('readline', ['readline.c'],
|
||||
- library_dirs=['/usr/lib/termcap'],
|
||||
+ library_dirs=['/usr/lib64/termcap'],
|
||||
extra_link_args=readline_extra_link_args,
|
||||
libraries=readline_libs) )
|
||||
else:
|
||||
@@ -821,8 +821,8 @@ class PyBuildExt(build_ext):
|
||||
if krb5_h:
|
||||
ssl_incs += krb5_h
|
||||
ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
|
||||
- ['/usr/local/ssl/lib',
|
||||
- '/usr/contrib/ssl/lib/'
|
||||
+ ['/usr/local/ssl/lib64',
|
||||
+ '/usr/contrib/ssl/lib64/'
|
||||
] )
|
||||
|
||||
if (ssl_incs is not None and
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 01c66fe..1e6d515 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -4772,9 +4772,9 @@ AC_MSG_RESULT($LDVERSION)
|
||||
dnl define LIBPL after ABIFLAGS and LDVERSION is defined.
|
||||
AC_SUBST(PY_ENABLE_SHARED)
|
||||
if test x$PLATFORM_TRIPLET = x; then
|
||||
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}"
|
||||
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}"
|
||||
else
|
||||
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
|
||||
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
|
||||
fi
|
||||
AC_SUBST(LIBPL)
|
||||
|
53
SOURCES/00111-no-static-lib.patch
Normal file
53
SOURCES/00111-no-static-lib.patch
Normal file
@ -0,0 +1,53 @@
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index 9cd482f..b074b26 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -549,7 +549,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c
|
||||
$(PYTHON_FOR_REGEN) ./Tools/clinic/clinic.py --make
|
||||
|
||||
# Build the interpreter
|
||||
-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
|
||||
+$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY)
|
||||
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
|
||||
|
||||
platform: $(BUILDPYTHON) pybuilddir.txt
|
||||
@@ -597,12 +597,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
|
||||
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
|
||||
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
|
||||
|
||||
-
|
||||
-# Build static library
|
||||
-$(LIBRARY): $(LIBRARY_OBJS)
|
||||
- -rm -f $@
|
||||
- $(AR) $(ARFLAGS) $@ $(LIBRARY_OBJS)
|
||||
-
|
||||
libpython$(LDVERSION).so: $(LIBRARY_OBJS)
|
||||
if test $(INSTSONAME) != $(LDLIBRARY); then \
|
||||
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
|
||||
@@ -692,7 +686,7 @@ Modules/Setup: $(srcdir)/Modules/Setup.dist
|
||||
echo "-----------------------------------------------"; \
|
||||
fi
|
||||
|
||||
-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
|
||||
+Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY)
|
||||
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
|
||||
|
||||
############################################################################
|
||||
@@ -1428,17 +1422,6 @@ libainstall: @DEF_MAKE_RULE@ python-config
|
||||
else true; \
|
||||
fi; \
|
||||
done
|
||||
- @if test -d $(LIBRARY); then :; else \
|
||||
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
|
||||
- if test "$(SHLIB_SUFFIX)" = .dll; then \
|
||||
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
|
||||
- else \
|
||||
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
|
||||
- fi; \
|
||||
- else \
|
||||
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \
|
||||
- fi; \
|
||||
- fi
|
||||
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
|
||||
$(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o
|
||||
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
|
46
SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch
Normal file
46
SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch
Normal file
@ -0,0 +1,46 @@
|
||||
diff -up Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/case.py
|
||||
--- Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400
|
||||
+++ Python-3.2.2/Lib/unittest/case.py 2011-09-09 06:35:16.365568382 -0400
|
||||
@@ -3,6 +3,7 @@
|
||||
import sys
|
||||
import functools
|
||||
import difflib
|
||||
+import os
|
||||
import logging
|
||||
import pprint
|
||||
import re
|
||||
@@ -101,5 +102,21 @@ def expectedFailure(func):
|
||||
raise self.test_case.failureException(msg)
|
||||
|
||||
+# Non-standard/downstream-only hooks for handling issues with specific test
|
||||
+# cases:
|
||||
+
|
||||
+def _skipInRpmBuild(reason):
|
||||
+ """
|
||||
+ Non-standard/downstream-only decorator for marking a specific unit test
|
||||
+ to be skipped when run within the %check of an rpmbuild.
|
||||
+
|
||||
+ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within
|
||||
+ the environment, and has no effect otherwise.
|
||||
+ """
|
||||
+ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
|
||||
+ return skip(reason)
|
||||
+ else:
|
||||
+ return _id
|
||||
+
|
||||
class _AssertRaisesBaseContext(_BaseTestCaseContext):
|
||||
|
||||
def __init__(self, expected, test_case, expected_regex=None):
|
||||
diff -up Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/__init__.py
|
||||
--- Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400
|
||||
+++ Python-3.2.2/Lib/unittest/__init__.py 2011-09-09 06:35:16.366568382 -0400
|
||||
@@ -57,7 +57,8 @@ __unittest = True
|
||||
|
||||
from .result import TestResult
|
||||
from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf,
|
||||
- skipUnless, expectedFailure)
|
||||
+ skipUnless, expectedFailure,
|
||||
+ _skipInRpmBuild)
|
||||
from .suite import BaseTestSuite, TestSuite
|
||||
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
|
||||
findTestCases)
|
15
SOURCES/00155-avoid-ctypes-thunks.patch
Normal file
15
SOURCES/00155-avoid-ctypes-thunks.patch
Normal file
@ -0,0 +1,15 @@
|
||||
diff -up Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 Python-3.2.3/Lib/ctypes/__init__.py
|
||||
--- Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 2012-04-20 15:12:49.017867692 -0400
|
||||
+++ Python-3.2.3/Lib/ctypes/__init__.py 2012-04-20 15:15:09.501111408 -0400
|
||||
@@ -275,11 +275,6 @@ def _reset_cache():
|
||||
# _SimpleCData.c_char_p_from_param
|
||||
POINTER(c_char).from_param = c_char_p.from_param
|
||||
_pointer_type_cache[None] = c_void_p
|
||||
- # XXX for whatever reasons, creating the first instance of a callback
|
||||
- # function is needed for the unittests on Win64 to succeed. This MAY
|
||||
- # be a compiler bug, since the problem occurs only when _ctypes is
|
||||
- # compiled with the MS SDK compiler. Or an uninitialized variable?
|
||||
- CFUNCTYPE(c_int)(lambda: None)
|
||||
|
||||
def create_unicode_buffer(init, size=None):
|
||||
"""create_unicode_buffer(aString) -> character array
|
11
SOURCES/00160-disable-test_fs_holes-in-rpm-build.patch
Normal file
11
SOURCES/00160-disable-test_fs_holes-in-rpm-build.patch
Normal file
@ -0,0 +1,11 @@
|
||||
diff -up cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build cpython-59223da36dec/Lib/test/test_posix.py
|
||||
--- cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build 2012-08-07 17:15:59.000000000 -0400
|
||||
+++ cpython-59223da36dec/Lib/test/test_posix.py 2012-08-07 17:16:53.528330330 -0400
|
||||
@@ -973,6 +973,7 @@ class PosixTester(unittest.TestCase):
|
||||
posix.RTLD_GLOBAL
|
||||
posix.RTLD_LOCAL
|
||||
|
||||
+ @unittest._skipInRpmBuild('running kernel may not match kernel in chroot')
|
||||
@unittest.skipUnless(hasattr(os, 'SEEK_HOLE'),
|
||||
"test needs an OS that reports file holes")
|
||||
def test_fs_holes(self):
|
@ -0,0 +1,11 @@
|
||||
diff -up Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds Python-3.3.0b1/Lib/test/test_socket.py
|
||||
--- Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds 2012-07-24 15:02:30.823355067 -0400
|
||||
+++ Python-3.3.0b1/Lib/test/test_socket.py 2012-07-24 15:08:13.021354999 -0400
|
||||
@@ -2188,6 +2188,7 @@ class RecvmsgGenericStreamTests(RecvmsgG
|
||||
# Tests which require a stream socket and can use either recvmsg()
|
||||
# or recvmsg_into().
|
||||
|
||||
+ @unittest._skipInRpmBuild('fails intermittently when run within Koji')
|
||||
def testRecvmsgEOF(self):
|
||||
# Receive end-of-stream indicator (b"", peer socket closed).
|
||||
msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, 1024)
|
309
SOURCES/00170-gc-assertions.patch
Normal file
309
SOURCES/00170-gc-assertions.patch
Normal file
@ -0,0 +1,309 @@
|
||||
diff --git a/Include/object.h b/Include/object.h
|
||||
index 63e37b8..613b26c 100644
|
||||
--- a/Include/object.h
|
||||
+++ b/Include/object.h
|
||||
@@ -1071,6 +1071,49 @@ PyAPI_FUNC(void)
|
||||
_PyObject_DebugTypeStats(FILE *out);
|
||||
#endif /* ifndef Py_LIMITED_API */
|
||||
|
||||
+/*
|
||||
+ Define a pair of assertion macros.
|
||||
+
|
||||
+ These work like the regular C assert(), in that they will abort the
|
||||
+ process with a message on stderr if the given condition fails to hold,
|
||||
+ but compile away to nothing if NDEBUG is defined.
|
||||
+
|
||||
+ However, before aborting, Python will also try to call _PyObject_Dump() on
|
||||
+ the given object. This may be of use when investigating bugs in which a
|
||||
+ particular object is corrupt (e.g. buggy a tp_visit method in an extension
|
||||
+ module breaking the garbage collector), to help locate the broken objects.
|
||||
+
|
||||
+ The WITH_MSG variant allows you to supply an additional message that Python
|
||||
+ will attempt to print to stderr, after the object dump.
|
||||
+*/
|
||||
+#ifdef NDEBUG
|
||||
+/* No debugging: compile away the assertions: */
|
||||
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0)
|
||||
+#else
|
||||
+/* With debugging: generate checks: */
|
||||
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \
|
||||
+ ((expr) \
|
||||
+ ? (void)(0) \
|
||||
+ : _PyObject_AssertFailed((obj), \
|
||||
+ (msg), \
|
||||
+ (__STRING(expr)), \
|
||||
+ (__FILE__), \
|
||||
+ (__LINE__), \
|
||||
+ (__PRETTY_FUNCTION__)))
|
||||
+#endif
|
||||
+
|
||||
+#define PyObject_ASSERT(obj, expr) \
|
||||
+ PyObject_ASSERT_WITH_MSG(obj, expr, NULL)
|
||||
+
|
||||
+/*
|
||||
+ Declare and define the entrypoint even when NDEBUG is defined, to avoid
|
||||
+ causing compiler/linker errors when building extensions without NDEBUG
|
||||
+ against a Python built with NDEBUG defined
|
||||
+*/
|
||||
+PyAPI_FUNC(void) _PyObject_AssertFailed(PyObject *, const char *,
|
||||
+ const char *, const char *, int,
|
||||
+ const char *);
|
||||
+
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
|
||||
index 7e82b24..8ecc3d9 100644
|
||||
--- a/Lib/test/test_gc.py
|
||||
+++ b/Lib/test/test_gc.py
|
||||
@@ -2,9 +2,11 @@ import unittest
|
||||
from test.support import (verbose, refcount_test, run_unittest,
|
||||
strip_python_stderr, cpython_only, start_threads,
|
||||
temp_dir, requires_type_collecting, TESTFN, unlink)
|
||||
+from test.support import import_module
|
||||
from test.support.script_helper import assert_python_ok, make_script
|
||||
|
||||
import sys
|
||||
+import sysconfig
|
||||
import time
|
||||
import gc
|
||||
import weakref
|
||||
@@ -50,6 +52,8 @@ class GC_Detector(object):
|
||||
# gc collects it.
|
||||
self.wr = weakref.ref(C1055820(666), it_happened)
|
||||
|
||||
+BUILD_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS'])
|
||||
+
|
||||
@with_tp_del
|
||||
class Uncollectable(object):
|
||||
"""Create a reference cycle with multiple __del__ methods.
|
||||
@@ -877,6 +881,50 @@ class GCCallbackTests(unittest.TestCase):
|
||||
self.assertEqual(len(gc.garbage), 0)
|
||||
|
||||
|
||||
+ @unittest.skipIf(BUILD_WITH_NDEBUG,
|
||||
+ 'built with -NDEBUG')
|
||||
+ def test_refcount_errors(self):
|
||||
+ self.preclean()
|
||||
+ # Verify the "handling" of objects with broken refcounts
|
||||
+ import_module("ctypes") #skip if not supported
|
||||
+
|
||||
+ import subprocess
|
||||
+ code = '''if 1:
|
||||
+ a = []
|
||||
+ b = [a]
|
||||
+
|
||||
+ # Simulate the refcount of "a" being too low (compared to the
|
||||
+ # references held on it by live data), but keeping it above zero
|
||||
+ # (to avoid deallocating it):
|
||||
+ import ctypes
|
||||
+ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a))
|
||||
+
|
||||
+ # The garbage collector should now have a fatal error when it reaches
|
||||
+ # the broken object:
|
||||
+ import gc
|
||||
+ gc.collect()
|
||||
+ '''
|
||||
+ p = subprocess.Popen([sys.executable, "-c", code],
|
||||
+ stdout=subprocess.PIPE,
|
||||
+ stderr=subprocess.PIPE)
|
||||
+ stdout, stderr = p.communicate()
|
||||
+ p.stdout.close()
|
||||
+ p.stderr.close()
|
||||
+ # Verify that stderr has a useful error message:
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "\(\(gc\)->gc.gc_refs >> \(1\)\) != 0" failed.')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'refcount was too small')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'object : \[\]')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'type : list')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'refcount: 1')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'address : 0x[0-9a-f]+')
|
||||
+
|
||||
+
|
||||
class GCTogglingTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
gc.enable()
|
||||
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
|
||||
index 3bddc40..0cc24f7 100644
|
||||
--- a/Modules/gcmodule.c
|
||||
+++ b/Modules/gcmodule.c
|
||||
@@ -342,7 +342,8 @@ update_refs(PyGC_Head *containers)
|
||||
{
|
||||
PyGC_Head *gc = containers->gc.gc_next;
|
||||
for (; gc != containers; gc = gc->gc.gc_next) {
|
||||
- assert(_PyGCHead_REFS(gc) == GC_REACHABLE);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) == GC_REACHABLE);
|
||||
_PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc)));
|
||||
/* Python's cyclic gc should never see an incoming refcount
|
||||
* of 0: if something decref'ed to 0, it should have been
|
||||
@@ -362,7 +363,8 @@ update_refs(PyGC_Head *containers)
|
||||
* so serious that maybe this should be a release-build
|
||||
* check instead of an assert?
|
||||
*/
|
||||
- assert(_PyGCHead_REFS(gc) != 0);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) != 0);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -377,7 +379,9 @@ visit_decref(PyObject *op, void *data)
|
||||
* generation being collected, which can be recognized
|
||||
* because only they have positive gc_refs.
|
||||
*/
|
||||
- assert(_PyGCHead_REFS(gc) != 0); /* else refcount was too small */
|
||||
+ PyObject_ASSERT_WITH_MSG(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) != 0,
|
||||
+ "refcount was too small"); /* else refcount was too small */
|
||||
if (_PyGCHead_REFS(gc) > 0)
|
||||
_PyGCHead_DECREF(gc);
|
||||
}
|
||||
@@ -437,9 +441,10 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
|
||||
* If gc_refs == GC_UNTRACKED, it must be ignored.
|
||||
*/
|
||||
else {
|
||||
- assert(gc_refs > 0
|
||||
- || gc_refs == GC_REACHABLE
|
||||
- || gc_refs == GC_UNTRACKED);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ gc_refs > 0
|
||||
+ || gc_refs == GC_REACHABLE
|
||||
+ || gc_refs == GC_UNTRACKED);
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
@@ -481,7 +486,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
|
||||
*/
|
||||
PyObject *op = FROM_GC(gc);
|
||||
traverseproc traverse = Py_TYPE(op)->tp_traverse;
|
||||
- assert(_PyGCHead_REFS(gc) > 0);
|
||||
+ PyObject_ASSERT(op, _PyGCHead_REFS(gc) > 0);
|
||||
_PyGCHead_SET_REFS(gc, GC_REACHABLE);
|
||||
(void) traverse(op,
|
||||
(visitproc)visit_reachable,
|
||||
@@ -544,7 +549,7 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)
|
||||
for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) {
|
||||
PyObject *op = FROM_GC(gc);
|
||||
|
||||
- assert(IS_TENTATIVELY_UNREACHABLE(op));
|
||||
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
|
||||
next = gc->gc.gc_next;
|
||||
|
||||
if (has_legacy_finalizer(op)) {
|
||||
@@ -620,7 +625,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
PyWeakReference **wrlist;
|
||||
|
||||
op = FROM_GC(gc);
|
||||
- assert(IS_TENTATIVELY_UNREACHABLE(op));
|
||||
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
|
||||
next = gc->gc.gc_next;
|
||||
|
||||
if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op)))
|
||||
@@ -641,9 +646,9 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
* the callback pointer intact. Obscure: it also
|
||||
* changes *wrlist.
|
||||
*/
|
||||
- assert(wr->wr_object == op);
|
||||
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == op);
|
||||
_PyWeakref_ClearRef(wr);
|
||||
- assert(wr->wr_object == Py_None);
|
||||
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None);
|
||||
if (wr->wr_callback == NULL)
|
||||
continue; /* no callback */
|
||||
|
||||
@@ -677,7 +682,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
*/
|
||||
if (IS_TENTATIVELY_UNREACHABLE(wr))
|
||||
continue;
|
||||
- assert(IS_REACHABLE(wr));
|
||||
+ PyObject_ASSERT(op, IS_REACHABLE(wr));
|
||||
|
||||
/* Create a new reference so that wr can't go away
|
||||
* before we can process it again.
|
||||
@@ -686,7 +691,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
|
||||
/* Move wr to wrcb_to_call, for the next pass. */
|
||||
wrasgc = AS_GC(wr);
|
||||
- assert(wrasgc != next); /* wrasgc is reachable, but
|
||||
+ PyObject_ASSERT(op, wrasgc != next);
|
||||
+ /* wrasgc is reachable, but
|
||||
next isn't, so they can't
|
||||
be the same */
|
||||
gc_list_move(wrasgc, &wrcb_to_call);
|
||||
@@ -702,11 +708,11 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
|
||||
gc = wrcb_to_call.gc.gc_next;
|
||||
op = FROM_GC(gc);
|
||||
- assert(IS_REACHABLE(op));
|
||||
- assert(PyWeakref_Check(op));
|
||||
+ PyObject_ASSERT(op, IS_REACHABLE(op));
|
||||
+ PyObject_ASSERT(op, PyWeakref_Check(op));
|
||||
wr = (PyWeakReference *)op;
|
||||
callback = wr->wr_callback;
|
||||
- assert(callback != NULL);
|
||||
+ PyObject_ASSERT(op, callback != NULL);
|
||||
|
||||
/* copy-paste of weakrefobject.c's handle_callback() */
|
||||
temp = PyObject_CallFunctionObjArgs(callback, wr, NULL);
|
||||
@@ -820,12 +826,14 @@ check_garbage(PyGC_Head *collectable)
|
||||
for (gc = collectable->gc.gc_next; gc != collectable;
|
||||
gc = gc->gc.gc_next) {
|
||||
_PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc)));
|
||||
- assert(_PyGCHead_REFS(gc) != 0);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) != 0);
|
||||
}
|
||||
subtract_refs(collectable);
|
||||
for (gc = collectable->gc.gc_next; gc != collectable;
|
||||
gc = gc->gc.gc_next) {
|
||||
- assert(_PyGCHead_REFS(gc) >= 0);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) >= 0);
|
||||
if (_PyGCHead_REFS(gc) != 0)
|
||||
return -1;
|
||||
}
|
||||
diff --git a/Objects/object.c b/Objects/object.c
|
||||
index fdd41a6..bfe806c 100644
|
||||
--- a/Objects/object.c
|
||||
+++ b/Objects/object.c
|
||||
@@ -2031,6 +2031,35 @@ _PyTrash_thread_destroy_chain(void)
|
||||
}
|
||||
}
|
||||
|
||||
+PyAPI_FUNC(void)
|
||||
+_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr,
|
||||
+ const char *file, int line, const char *function)
|
||||
+{
|
||||
+ fprintf(stderr,
|
||||
+ "%s:%d: %s: Assertion \"%s\" failed.\n",
|
||||
+ file, line, function, expr);
|
||||
+ if (msg) {
|
||||
+ fprintf(stderr, "%s\n", msg);
|
||||
+ }
|
||||
+
|
||||
+ fflush(stderr);
|
||||
+
|
||||
+ if (obj) {
|
||||
+ /* This might succeed or fail, but we're about to abort, so at least
|
||||
+ try to provide any extra info we can: */
|
||||
+ _PyObject_Dump(obj);
|
||||
+ }
|
||||
+ else {
|
||||
+ fprintf(stderr, "NULL object\n");
|
||||
+ }
|
||||
+
|
||||
+ fflush(stdout);
|
||||
+ fflush(stderr);
|
||||
+
|
||||
+ /* Terminate the process: */
|
||||
+ abort();
|
||||
+}
|
||||
+
|
||||
#ifndef Py_TRACE_REFS
|
||||
/* For Py_LIMITED_API, we need an out-of-line version of _Py_Dealloc.
|
||||
Define this here, so we can undefine the macro. */
|
70
SOURCES/00189-use-rpm-wheels.patch
Normal file
70
SOURCES/00189-use-rpm-wheels.patch
Normal file
@ -0,0 +1,70 @@
|
||||
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
|
||||
index 09c572d..167d27b 100644
|
||||
--- a/Lib/ensurepip/__init__.py
|
||||
+++ b/Lib/ensurepip/__init__.py
|
||||
@@ -1,16 +1,27 @@
|
||||
+import distutils.version
|
||||
+import glob
|
||||
import os
|
||||
import os.path
|
||||
-import pkgutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
|
||||
__all__ = ["version", "bootstrap"]
|
||||
|
||||
+_WHEEL_DIR = "/usr/share/python{}-wheels/".format(sys.version_info[0])
|
||||
|
||||
-_SETUPTOOLS_VERSION = "40.6.2"
|
||||
+def _get_most_recent_wheel_version(pkg):
|
||||
+ prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg))
|
||||
+ suffix = "-py2.py3-none-any.whl"
|
||||
+ pattern = "{}*{}".format(prefix, suffix)
|
||||
+ versions = (p[len(prefix):-len(suffix)] for p in glob.glob(pattern))
|
||||
+ return str(max(versions, key=distutils.version.LooseVersion))
|
||||
+
|
||||
+
|
||||
+_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools")
|
||||
+
|
||||
+_PIP_VERSION = _get_most_recent_wheel_version("pip")
|
||||
|
||||
-_PIP_VERSION = "18.1"
|
||||
|
||||
_PROJECTS = [
|
||||
("setuptools", _SETUPTOOLS_VERSION),
|
||||
@@ -23,9 +34,15 @@ def _run_pip(args, additional_paths=None):
|
||||
if additional_paths is not None:
|
||||
sys.path = additional_paths + sys.path
|
||||
|
||||
- # Install the bundled software
|
||||
- import pip._internal
|
||||
- return pip._internal.main(args)
|
||||
+ try:
|
||||
+ # pip 10
|
||||
+ from pip._internal import main
|
||||
+ except ImportError:
|
||||
+ # pip 9
|
||||
+ from pip import main
|
||||
+ if args[0] in ["install", "list", "wheel"]:
|
||||
+ args.append('--pre')
|
||||
+ return main(args)
|
||||
|
||||
|
||||
def version():
|
||||
@@ -94,12 +111,9 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
|
||||
additional_paths = []
|
||||
for project, version in _PROJECTS:
|
||||
wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
|
||||
- whl = pkgutil.get_data(
|
||||
- "ensurepip",
|
||||
- "_bundled/{}".format(wheel_name),
|
||||
- )
|
||||
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
|
||||
- fp.write(whl)
|
||||
+ with open(os.path.join(_WHEEL_DIR, wheel_name), "rb") as sfp:
|
||||
+ with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
|
||||
+ fp.write(sfp.read())
|
||||
|
||||
additional_paths.append(os.path.join(tmpdir, wheel_name))
|
||||
|
46
SOURCES/00251-change-user-install-location.patch
Normal file
46
SOURCES/00251-change-user-install-location.patch
Normal file
@ -0,0 +1,46 @@
|
||||
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
|
||||
index 0258d3d..4ebf50a 100644
|
||||
--- a/Lib/distutils/command/install.py
|
||||
+++ b/Lib/distutils/command/install.py
|
||||
@@ -418,8 +418,19 @@ class install(Command):
|
||||
raise DistutilsOptionError(
|
||||
"must not supply exec-prefix without prefix")
|
||||
|
||||
- self.prefix = os.path.normpath(sys.prefix)
|
||||
- self.exec_prefix = os.path.normpath(sys.exec_prefix)
|
||||
+ # self.prefix is set to sys.prefix + /local/
|
||||
+ # if neither RPM build nor virtual environment is
|
||||
+ # detected to make pip and distutils install packages
|
||||
+ # into the separate location.
|
||||
+ if (not (hasattr(sys, 'real_prefix') or
|
||||
+ sys.prefix != sys.base_prefix) and
|
||||
+ 'RPM_BUILD_ROOT' not in os.environ):
|
||||
+ addition = "/local"
|
||||
+ else:
|
||||
+ addition = ""
|
||||
+
|
||||
+ self.prefix = os.path.normpath(sys.prefix) + addition
|
||||
+ self.exec_prefix = os.path.normpath(sys.exec_prefix) + addition
|
||||
|
||||
else:
|
||||
if self.exec_prefix is None:
|
||||
diff --git a/Lib/site.py b/Lib/site.py
|
||||
index 0fc9200..c95202e 100644
|
||||
--- a/Lib/site.py
|
||||
+++ b/Lib/site.py
|
||||
@@ -322,7 +322,14 @@ def getsitepackages(prefixes=None):
|
||||
return sitepackages
|
||||
|
||||
def addsitepackages(known_paths, prefixes=None):
|
||||
- """Add site-packages to sys.path"""
|
||||
+ """Add site-packages to sys.path
|
||||
+
|
||||
+ '/usr/local' is included in PREFIXES if RPM build is not detected
|
||||
+ to make packages installed into this location visible.
|
||||
+
|
||||
+ """
|
||||
+ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ:
|
||||
+ PREFIXES.insert(0, "/usr/local")
|
||||
for sitedir in getsitepackages(prefixes):
|
||||
if os.path.isdir(sitedir):
|
||||
addsitedir(sitedir, known_paths)
|
901
SOURCES/00262-pep538_coerce_legacy_c_locale.patch
Normal file
901
SOURCES/00262-pep538_coerce_legacy_c_locale.patch
Normal file
@ -0,0 +1,901 @@
|
||||
diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst
|
||||
index d14793a..65aa3ad 100644
|
||||
--- a/Doc/using/cmdline.rst
|
||||
+++ b/Doc/using/cmdline.rst
|
||||
@@ -728,6 +728,45 @@ conflict.
|
||||
|
||||
.. versionadded:: 3.6
|
||||
|
||||
+
|
||||
+.. envvar:: PYTHONCOERCECLOCALE
|
||||
+
|
||||
+ If set to the value ``0``, causes the main Python command line application
|
||||
+ to skip coercing the legacy ASCII-based C locale to a more capable UTF-8
|
||||
+ based alternative. Note that this setting is checked even when the
|
||||
+ :option:`-E` or :option:`-I` options are used, as it is handled prior to
|
||||
+ the processing of command line options.
|
||||
+
|
||||
+ If this variable is *not* set, or is set to a value other than ``0``, and
|
||||
+ the current locale reported for the ``LC_CTYPE`` category is the default
|
||||
+ ``C`` locale, then the Python CLI will attempt to configure one of the
|
||||
+ following locales for the given locale categories before loading the
|
||||
+ interpreter runtime:
|
||||
+
|
||||
+ * ``C.UTF-8`` (``LC_ALL``)
|
||||
+ * ``C.utf8`` (``LC_ALL``)
|
||||
+ * ``UTF-8`` (``LC_CTYPE``)
|
||||
+
|
||||
+ If setting one of these locale categories succeeds, then the matching
|
||||
+ environment variables will be set (both ``LC_ALL`` and ``LANG`` for the
|
||||
+ ``LC_ALL`` category, and ``LC_CTYPE`` for the ``LC_CTYPE`` category) in
|
||||
+ the current process environment before the Python runtime is initialized.
|
||||
+
|
||||
+ Configuring one of these locales (either explicitly or via the above
|
||||
+ implicit locale coercion) will automatically set the error handler for
|
||||
+ :data:`sys.stdin` and :data:`sys.stdout` to ``surrogateescape``. This
|
||||
+ behavior can be overridden using :envvar:`PYTHONIOENCODING` as usual.
|
||||
+
|
||||
+ For debugging purposes, setting ``PYTHONCOERCECLOCALE=warn`` will cause
|
||||
+ Python to emit warning messages on ``stderr`` if either the locale coercion
|
||||
+ activates, or else if a locale that *would* have triggered coercion is
|
||||
+ still active when the Python runtime is initialized.
|
||||
+
|
||||
+ Availability: \*nix
|
||||
+
|
||||
+ .. versionadded:: 3.7
|
||||
+ See :pep:`538` for more details.
|
||||
+
|
||||
Debug-mode variables
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
diff --git a/Lib/test/support/script_helper.py b/Lib/test/support/script_helper.py
|
||||
index 507dc48..c3cb720 100644
|
||||
--- a/Lib/test/support/script_helper.py
|
||||
+++ b/Lib/test/support/script_helper.py
|
||||
@@ -56,8 +56,35 @@ def interpreter_requires_environment():
|
||||
return __cached_interp_requires_environment
|
||||
|
||||
|
||||
-_PythonRunResult = collections.namedtuple("_PythonRunResult",
|
||||
- ("rc", "out", "err"))
|
||||
+class _PythonRunResult(collections.namedtuple("_PythonRunResult",
|
||||
+ ("rc", "out", "err"))):
|
||||
+ """Helper for reporting Python subprocess run results"""
|
||||
+ def fail(self, cmd_line):
|
||||
+ """Provide helpful details about failed subcommand runs"""
|
||||
+ # Limit to 80 lines to ASCII characters
|
||||
+ maxlen = 80 * 100
|
||||
+ out, err = self.out, self.err
|
||||
+ if len(out) > maxlen:
|
||||
+ out = b'(... truncated stdout ...)' + out[-maxlen:]
|
||||
+ if len(err) > maxlen:
|
||||
+ err = b'(... truncated stderr ...)' + err[-maxlen:]
|
||||
+ out = out.decode('ascii', 'replace').rstrip()
|
||||
+ err = err.decode('ascii', 'replace').rstrip()
|
||||
+ raise AssertionError("Process return code is %d\n"
|
||||
+ "command line: %r\n"
|
||||
+ "\n"
|
||||
+ "stdout:\n"
|
||||
+ "---\n"
|
||||
+ "%s\n"
|
||||
+ "---\n"
|
||||
+ "\n"
|
||||
+ "stderr:\n"
|
||||
+ "---\n"
|
||||
+ "%s\n"
|
||||
+ "---"
|
||||
+ % (self.rc, cmd_line,
|
||||
+ out,
|
||||
+ err))
|
||||
|
||||
|
||||
# Executing the interpreter in a subprocess
|
||||
@@ -115,30 +142,7 @@ def run_python_until_end(*args, **env_vars):
|
||||
def _assert_python(expected_success, *args, **env_vars):
|
||||
res, cmd_line = run_python_until_end(*args, **env_vars)
|
||||
if (res.rc and expected_success) or (not res.rc and not expected_success):
|
||||
- # Limit to 80 lines to ASCII characters
|
||||
- maxlen = 80 * 100
|
||||
- out, err = res.out, res.err
|
||||
- if len(out) > maxlen:
|
||||
- out = b'(... truncated stdout ...)' + out[-maxlen:]
|
||||
- if len(err) > maxlen:
|
||||
- err = b'(... truncated stderr ...)' + err[-maxlen:]
|
||||
- out = out.decode('ascii', 'replace').rstrip()
|
||||
- err = err.decode('ascii', 'replace').rstrip()
|
||||
- raise AssertionError("Process return code is %d\n"
|
||||
- "command line: %r\n"
|
||||
- "\n"
|
||||
- "stdout:\n"
|
||||
- "---\n"
|
||||
- "%s\n"
|
||||
- "---\n"
|
||||
- "\n"
|
||||
- "stderr:\n"
|
||||
- "---\n"
|
||||
- "%s\n"
|
||||
- "---"
|
||||
- % (res.rc, cmd_line,
|
||||
- out,
|
||||
- err))
|
||||
+ res.fail(cmd_line)
|
||||
return res
|
||||
|
||||
def assert_python_ok(*args, **env_vars):
|
||||
diff --git a/Lib/test/test_c_locale_coercion.py b/Lib/test/test_c_locale_coercion.py
|
||||
new file mode 100644
|
||||
index 0000000..635c98f
|
||||
--- /dev/null
|
||||
+++ b/Lib/test/test_c_locale_coercion.py
|
||||
@@ -0,0 +1,371 @@
|
||||
+# Tests the attempted automatic coercion of the C locale to a UTF-8 locale
|
||||
+
|
||||
+import unittest
|
||||
+import locale
|
||||
+import os
|
||||
+import sys
|
||||
+import sysconfig
|
||||
+import shutil
|
||||
+import subprocess
|
||||
+from collections import namedtuple
|
||||
+
|
||||
+import test.support
|
||||
+from test.support.script_helper import (
|
||||
+ run_python_until_end,
|
||||
+ interpreter_requires_environment,
|
||||
+)
|
||||
+
|
||||
+# Set our expectation for the default encoding used in the C locale
|
||||
+# for the filesystem encoding and the standard streams
|
||||
+
|
||||
+# AIX uses iso8859-1 in the C locale, other *nix platforms use ASCII
|
||||
+if sys.platform.startswith("aix"):
|
||||
+ C_LOCALE_STREAM_ENCODING = "iso8859-1"
|
||||
+else:
|
||||
+ C_LOCALE_STREAM_ENCODING = "ascii"
|
||||
+
|
||||
+# FS encoding is UTF-8 on macOS, other *nix platforms use the locale encoding
|
||||
+if sys.platform == "darwin":
|
||||
+ C_LOCALE_FS_ENCODING = "utf-8"
|
||||
+else:
|
||||
+ C_LOCALE_FS_ENCODING = C_LOCALE_STREAM_ENCODING
|
||||
+
|
||||
+# Note that the above is probably still wrong in some cases, such as:
|
||||
+# * Windows when PYTHONLEGACYWINDOWSFSENCODING is set
|
||||
+# * AIX and any other platforms that use latin-1 in the C locale
|
||||
+#
|
||||
+# Options for dealing with this:
|
||||
+# * Don't set PYTHON_COERCE_C_LOCALE on such platforms (e.g. Windows doesn't)
|
||||
+# * Fix the test expectations to match the actual platform behaviour
|
||||
+
|
||||
+# In order to get the warning messages to match up as expected, the candidate
|
||||
+# order here must much the target locale order in Python/pylifecycle.c
|
||||
+_C_UTF8_LOCALES = ("C.UTF-8", "C.utf8", "UTF-8")
|
||||
+
|
||||
+# There's no reliable cross-platform way of checking locale alias
|
||||
+# lists, so the only way of knowing which of these locales will work
|
||||
+# is to try them with locale.setlocale(). We do that in a subprocess
|
||||
+# to avoid altering the locale of the test runner.
|
||||
+#
|
||||
+# If the relevant locale module attributes exist, and we're not on a platform
|
||||
+# where we expect it to always succeed, we also check that
|
||||
+# `locale.nl_langinfo(locale.CODESET)` works, as if it fails, the interpreter
|
||||
+# will skip locale coercion for that particular target locale
|
||||
+_check_nl_langinfo_CODESET = bool(
|
||||
+ sys.platform not in ("darwin", "linux") and
|
||||
+ hasattr(locale, "nl_langinfo") and
|
||||
+ hasattr(locale, "CODESET")
|
||||
+)
|
||||
+
|
||||
+def _set_locale_in_subprocess(locale_name):
|
||||
+ cmd_fmt = "import locale; print(locale.setlocale(locale.LC_CTYPE, '{}'))"
|
||||
+ if _check_nl_langinfo_CODESET:
|
||||
+ # If there's no valid CODESET, we expect coercion to be skipped
|
||||
+ cmd_fmt += "; import sys; sys.exit(not locale.nl_langinfo(locale.CODESET))"
|
||||
+ cmd = cmd_fmt.format(locale_name)
|
||||
+ result, py_cmd = run_python_until_end("-c", cmd, __isolated=True)
|
||||
+ return result.rc == 0
|
||||
+
|
||||
+
|
||||
+
|
||||
+_fields = "fsencoding stdin_info stdout_info stderr_info lang lc_ctype lc_all"
|
||||
+_EncodingDetails = namedtuple("EncodingDetails", _fields)
|
||||
+
|
||||
+class EncodingDetails(_EncodingDetails):
|
||||
+ # XXX (ncoghlan): Using JSON for child state reporting may be less fragile
|
||||
+ CHILD_PROCESS_SCRIPT = ";".join([
|
||||
+ "import sys, os",
|
||||
+ "print(sys.getfilesystemencoding())",
|
||||
+ "print(sys.stdin.encoding + ':' + sys.stdin.errors)",
|
||||
+ "print(sys.stdout.encoding + ':' + sys.stdout.errors)",
|
||||
+ "print(sys.stderr.encoding + ':' + sys.stderr.errors)",
|
||||
+ "print(os.environ.get('LANG', 'not set'))",
|
||||
+ "print(os.environ.get('LC_CTYPE', 'not set'))",
|
||||
+ "print(os.environ.get('LC_ALL', 'not set'))",
|
||||
+ ])
|
||||
+
|
||||
+ @classmethod
|
||||
+ def get_expected_details(cls, coercion_expected, fs_encoding, stream_encoding, env_vars):
|
||||
+ """Returns expected child process details for a given encoding"""
|
||||
+ _stream = stream_encoding + ":{}"
|
||||
+ # stdin and stdout should use surrogateescape either because the
|
||||
+ # coercion triggered, or because the C locale was detected
|
||||
+ stream_info = 2*[_stream.format("surrogateescape")]
|
||||
+ # stderr should always use backslashreplace
|
||||
+ stream_info.append(_stream.format("backslashreplace"))
|
||||
+ expected_lang = env_vars.get("LANG", "not set").lower()
|
||||
+ if coercion_expected:
|
||||
+ expected_lc_ctype = CLI_COERCION_TARGET.lower()
|
||||
+ else:
|
||||
+ expected_lc_ctype = env_vars.get("LC_CTYPE", "not set").lower()
|
||||
+ expected_lc_all = env_vars.get("LC_ALL", "not set").lower()
|
||||
+ env_info = expected_lang, expected_lc_ctype, expected_lc_all
|
||||
+ return dict(cls(fs_encoding, *stream_info, *env_info)._asdict())
|
||||
+
|
||||
+ @staticmethod
|
||||
+ def _handle_output_variations(data):
|
||||
+ """Adjust the output to handle platform specific idiosyncrasies
|
||||
+
|
||||
+ * Some platforms report ASCII as ANSI_X3.4-1968
|
||||
+ * Some platforms report ASCII as US-ASCII
|
||||
+ * Some platforms report UTF-8 instead of utf-8
|
||||
+ """
|
||||
+ data = data.replace(b"ANSI_X3.4-1968", b"ascii")
|
||||
+ data = data.replace(b"US-ASCII", b"ascii")
|
||||
+ data = data.lower()
|
||||
+ return data
|
||||
+
|
||||
+ @classmethod
|
||||
+ def get_child_details(cls, env_vars):
|
||||
+ """Retrieves fsencoding and standard stream details from a child process
|
||||
+
|
||||
+ Returns (encoding_details, stderr_lines):
|
||||
+
|
||||
+ - encoding_details: EncodingDetails for eager decoding
|
||||
+ - stderr_lines: result of calling splitlines() on the stderr output
|
||||
+
|
||||
+ The child is run in isolated mode if the current interpreter supports
|
||||
+ that.
|
||||
+ """
|
||||
+ result, py_cmd = run_python_until_end(
|
||||
+ "-c", cls.CHILD_PROCESS_SCRIPT,
|
||||
+ __isolated=True,
|
||||
+ **env_vars
|
||||
+ )
|
||||
+ if not result.rc == 0:
|
||||
+ result.fail(py_cmd)
|
||||
+ # All subprocess outputs in this test case should be pure ASCII
|
||||
+ adjusted_output = cls._handle_output_variations(result.out)
|
||||
+ stdout_lines = adjusted_output.decode("ascii").splitlines()
|
||||
+ child_encoding_details = dict(cls(*stdout_lines)._asdict())
|
||||
+ stderr_lines = result.err.decode("ascii").rstrip().splitlines()
|
||||
+ return child_encoding_details, stderr_lines
|
||||
+
|
||||
+
|
||||
+# Details of the shared library warning emitted at runtime
|
||||
+LEGACY_LOCALE_WARNING = (
|
||||
+ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII "
|
||||
+ "encoding), which may cause Unicode compatibility problems. Using C.UTF-8, "
|
||||
+ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible "
|
||||
+ "locales is recommended."
|
||||
+)
|
||||
+
|
||||
+# Details of the CLI locale coercion warning emitted at runtime
|
||||
+CLI_COERCION_WARNING_FMT = (
|
||||
+ "Python detected LC_CTYPE=C: LC_CTYPE coerced to {} (set another locale "
|
||||
+ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior)."
|
||||
+)
|
||||
+
|
||||
+
|
||||
+AVAILABLE_TARGETS = None
|
||||
+CLI_COERCION_TARGET = None
|
||||
+CLI_COERCION_WARNING = None
|
||||
+
|
||||
+def setUpModule():
|
||||
+ global AVAILABLE_TARGETS
|
||||
+ global CLI_COERCION_TARGET
|
||||
+ global CLI_COERCION_WARNING
|
||||
+
|
||||
+ if AVAILABLE_TARGETS is not None:
|
||||
+ # initialization already done
|
||||
+ return
|
||||
+ AVAILABLE_TARGETS = []
|
||||
+
|
||||
+ # Find the target locales available in the current system
|
||||
+ for target_locale in _C_UTF8_LOCALES:
|
||||
+ if _set_locale_in_subprocess(target_locale):
|
||||
+ AVAILABLE_TARGETS.append(target_locale)
|
||||
+
|
||||
+ if AVAILABLE_TARGETS:
|
||||
+ # Coercion is expected to use the first available target locale
|
||||
+ CLI_COERCION_TARGET = AVAILABLE_TARGETS[0]
|
||||
+ CLI_COERCION_WARNING = CLI_COERCION_WARNING_FMT.format(CLI_COERCION_TARGET)
|
||||
+
|
||||
+
|
||||
+class _LocaleHandlingTestCase(unittest.TestCase):
|
||||
+ # Base class to check expected locale handling behaviour
|
||||
+
|
||||
+ def _check_child_encoding_details(self,
|
||||
+ env_vars,
|
||||
+ expected_fs_encoding,
|
||||
+ expected_stream_encoding,
|
||||
+ expected_warnings,
|
||||
+ coercion_expected):
|
||||
+ """Check the C locale handling for the given process environment
|
||||
+
|
||||
+ Parameters:
|
||||
+ expected_fs_encoding: expected sys.getfilesystemencoding() result
|
||||
+ expected_stream_encoding: expected encoding for standard streams
|
||||
+ expected_warning: stderr output to expect (if any)
|
||||
+ """
|
||||
+ result = EncodingDetails.get_child_details(env_vars)
|
||||
+ encoding_details, stderr_lines = result
|
||||
+ expected_details = EncodingDetails.get_expected_details(
|
||||
+ coercion_expected,
|
||||
+ expected_fs_encoding,
|
||||
+ expected_stream_encoding,
|
||||
+ env_vars
|
||||
+ )
|
||||
+ self.assertEqual(encoding_details, expected_details)
|
||||
+ if expected_warnings is None:
|
||||
+ expected_warnings = []
|
||||
+ self.assertEqual(stderr_lines, expected_warnings)
|
||||
+
|
||||
+
|
||||
+class LocaleConfigurationTests(_LocaleHandlingTestCase):
|
||||
+ # Test explicit external configuration via the process environment
|
||||
+
|
||||
+ def setUpClass():
|
||||
+ # This relies on setupModule() having been run, so it can't be
|
||||
+ # handled via the @unittest.skipUnless decorator
|
||||
+ if not AVAILABLE_TARGETS:
|
||||
+ raise unittest.SkipTest("No C-with-UTF-8 locale available")
|
||||
+
|
||||
+ def test_external_target_locale_configuration(self):
|
||||
+
|
||||
+ # Explicitly setting a target locale should give the same behaviour as
|
||||
+ # is seen when implicitly coercing to that target locale
|
||||
+ self.maxDiff = None
|
||||
+
|
||||
+ expected_fs_encoding = "utf-8"
|
||||
+ expected_stream_encoding = "utf-8"
|
||||
+
|
||||
+ base_var_dict = {
|
||||
+ "LANG": "",
|
||||
+ "LC_CTYPE": "",
|
||||
+ "LC_ALL": "",
|
||||
+ }
|
||||
+ for env_var in ("LANG", "LC_CTYPE"):
|
||||
+ for locale_to_set in AVAILABLE_TARGETS:
|
||||
+ # XXX (ncoghlan): LANG=UTF-8 doesn't appear to work as
|
||||
+ # expected, so skip that combination for now
|
||||
+ # See https://bugs.python.org/issue30672 for discussion
|
||||
+ if env_var == "LANG" and locale_to_set == "UTF-8":
|
||||
+ continue
|
||||
+
|
||||
+ with self.subTest(env_var=env_var,
|
||||
+ configured_locale=locale_to_set):
|
||||
+ var_dict = base_var_dict.copy()
|
||||
+ var_dict[env_var] = locale_to_set
|
||||
+ self._check_child_encoding_details(var_dict,
|
||||
+ expected_fs_encoding,
|
||||
+ expected_stream_encoding,
|
||||
+ expected_warnings=None,
|
||||
+ coercion_expected=False)
|
||||
+
|
||||
+
|
||||
+
|
||||
+@test.support.cpython_only
|
||||
+@unittest.skipUnless(sysconfig.get_config_var("PY_COERCE_C_LOCALE"),
|
||||
+ "C locale coercion disabled at build time")
|
||||
+class LocaleCoercionTests(_LocaleHandlingTestCase):
|
||||
+ # Test implicit reconfiguration of the environment during CLI startup
|
||||
+
|
||||
+ def _check_c_locale_coercion(self,
|
||||
+ fs_encoding, stream_encoding,
|
||||
+ coerce_c_locale,
|
||||
+ expected_warnings=None,
|
||||
+ coercion_expected=True,
|
||||
+ **extra_vars):
|
||||
+ """Check the C locale handling for various configurations
|
||||
+
|
||||
+ Parameters:
|
||||
+ fs_encoding: expected sys.getfilesystemencoding() result
|
||||
+ stream_encoding: expected encoding for standard streams
|
||||
+ coerce_c_locale: setting to use for PYTHONCOERCECLOCALE
|
||||
+ None: don't set the variable at all
|
||||
+ str: the value set in the child's environment
|
||||
+ expected_warnings: expected warning lines on stderr
|
||||
+ extra_vars: additional environment variables to set in subprocess
|
||||
+ """
|
||||
+ self.maxDiff = None
|
||||
+
|
||||
+ if not AVAILABLE_TARGETS:
|
||||
+ # Locale coercion is disabled when there aren't any target locales
|
||||
+ fs_encoding = C_LOCALE_FS_ENCODING
|
||||
+ stream_encoding = C_LOCALE_STREAM_ENCODING
|
||||
+ coercion_expected = False
|
||||
+ if expected_warnings:
|
||||
+ expected_warnings = [LEGACY_LOCALE_WARNING]
|
||||
+
|
||||
+ base_var_dict = {
|
||||
+ "LANG": "",
|
||||
+ "LC_CTYPE": "",
|
||||
+ "LC_ALL": "",
|
||||
+ }
|
||||
+ base_var_dict.update(extra_vars)
|
||||
+ for env_var in ("LANG", "LC_CTYPE"):
|
||||
+ for locale_to_set in ("", "C", "POSIX", "invalid.ascii"):
|
||||
+ # XXX (ncoghlan): *BSD platforms don't behave as expected in the
|
||||
+ # POSIX locale, so we skip that for now
|
||||
+ # See https://bugs.python.org/issue30672 for discussion
|
||||
+ if locale_to_set == "POSIX":
|
||||
+ continue
|
||||
+ with self.subTest(env_var=env_var,
|
||||
+ nominal_locale=locale_to_set,
|
||||
+ PYTHONCOERCECLOCALE=coerce_c_locale):
|
||||
+ var_dict = base_var_dict.copy()
|
||||
+ var_dict[env_var] = locale_to_set
|
||||
+ if coerce_c_locale is not None:
|
||||
+ var_dict["PYTHONCOERCECLOCALE"] = coerce_c_locale
|
||||
+ # Check behaviour on successful coercion
|
||||
+ self._check_child_encoding_details(var_dict,
|
||||
+ fs_encoding,
|
||||
+ stream_encoding,
|
||||
+ expected_warnings,
|
||||
+ coercion_expected)
|
||||
+
|
||||
+ def test_test_PYTHONCOERCECLOCALE_not_set(self):
|
||||
+ # This should coerce to the first available target locale by default
|
||||
+ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=None)
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_not_zero(self):
|
||||
+ # *Any* string other than "0" is considered "set" for our purposes
|
||||
+ # and hence should result in the locale coercion being enabled
|
||||
+ for setting in ("", "1", "true", "false"):
|
||||
+ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=setting)
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_set_to_warn(self):
|
||||
+ # PYTHONCOERCECLOCALE=warn enables runtime warnings for legacy locales
|
||||
+ self._check_c_locale_coercion("utf-8", "utf-8",
|
||||
+ coerce_c_locale="warn",
|
||||
+ expected_warnings=[CLI_COERCION_WARNING])
|
||||
+
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_set_to_zero(self):
|
||||
+ # The setting "0" should result in the locale coercion being disabled
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale="0",
|
||||
+ coercion_expected=False)
|
||||
+ # Setting LC_ALL=C shouldn't make any difference to the behaviour
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale="0",
|
||||
+ LC_ALL="C",
|
||||
+ coercion_expected=False)
|
||||
+
|
||||
+ def test_LC_ALL_set_to_C(self):
|
||||
+ # Setting LC_ALL should render the locale coercion ineffective
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale=None,
|
||||
+ LC_ALL="C",
|
||||
+ coercion_expected=False)
|
||||
+ # And result in a warning about a lack of locale compatibility
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale="warn",
|
||||
+ LC_ALL="C",
|
||||
+ expected_warnings=[LEGACY_LOCALE_WARNING],
|
||||
+ coercion_expected=False)
|
||||
+
|
||||
+def test_main():
|
||||
+ test.support.run_unittest(
|
||||
+ LocaleConfigurationTests,
|
||||
+ LocaleCoercionTests
|
||||
+ )
|
||||
+ test.support.reap_children()
|
||||
+
|
||||
+if __name__ == "__main__":
|
||||
+ test_main()
|
||||
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
|
||||
index 38156b4..5922ed9 100644
|
||||
--- a/Lib/test/test_cmd_line.py
|
||||
+++ b/Lib/test/test_cmd_line.py
|
||||
@@ -153,6 +153,7 @@ class CmdLineTest(unittest.TestCase):
|
||||
env = os.environ.copy()
|
||||
# Use C locale to get ascii for the locale encoding
|
||||
env['LC_ALL'] = 'C'
|
||||
+ env['PYTHONCOERCECLOCALE'] = '0'
|
||||
code = (
|
||||
b'import locale; '
|
||||
b'print(ascii("' + undecodable + b'"), '
|
||||
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
|
||||
index 7866a5c..b41239a 100644
|
||||
--- a/Lib/test/test_sys.py
|
||||
+++ b/Lib/test/test_sys.py
|
||||
@@ -680,6 +680,7 @@ class SysModuleTest(unittest.TestCase):
|
||||
# Force the POSIX locale
|
||||
env = os.environ.copy()
|
||||
env["LC_ALL"] = "C"
|
||||
+ env["PYTHONCOERCECLOCALE"] = "0"
|
||||
code = '\n'.join((
|
||||
'import sys',
|
||||
'def dump(name):',
|
||||
diff --git a/Modules/main.c b/Modules/main.c
|
||||
index 585d696..96d8be4 100644
|
||||
--- a/Modules/main.c
|
||||
+++ b/Modules/main.c
|
||||
@@ -107,7 +107,11 @@ static const char usage_6[] =
|
||||
" predictable seed.\n"
|
||||
"PYTHONMALLOC: set the Python memory allocators and/or install debug hooks\n"
|
||||
" on Python memory allocators. Use PYTHONMALLOC=debug to install debug\n"
|
||||
-" hooks.\n";
|
||||
+" hooks.\n"
|
||||
+
|
||||
+"PYTHONCOERCECLOCALE: if this variable is set to 0, it disables the locale\n"
|
||||
+" coercion behavior. Use PYTHONCOERCECLOCALE=warn to request display of\n"
|
||||
+" locale coercion and locale compatibility warnings on stderr.\n";
|
||||
|
||||
static int
|
||||
usage(int exitcode, const wchar_t* program)
|
||||
diff --git a/Programs/_testembed.c b/Programs/_testembed.c
|
||||
index 813cf30..2a64092 100644
|
||||
--- a/Programs/_testembed.c
|
||||
+++ b/Programs/_testembed.c
|
||||
@@ -1,4 +1,5 @@
|
||||
#include <Python.h>
|
||||
+#include "pyconfig.h"
|
||||
#include "pythread.h"
|
||||
#include <stdio.h>
|
||||
|
||||
diff --git a/Programs/python.c b/Programs/python.c
|
||||
index a7afbc7..03f8295 100644
|
||||
--- a/Programs/python.c
|
||||
+++ b/Programs/python.c
|
||||
@@ -15,6 +15,21 @@ wmain(int argc, wchar_t **argv)
|
||||
}
|
||||
#else
|
||||
|
||||
+/* Access private pylifecycle helper API to better handle the legacy C locale
|
||||
+ *
|
||||
+ * The legacy C locale assumes ASCII as the default text encoding, which
|
||||
+ * causes problems not only for the CPython runtime, but also other
|
||||
+ * components like GNU readline.
|
||||
+ *
|
||||
+ * Accordingly, when the CLI detects it, it attempts to coerce it to a
|
||||
+ * more capable UTF-8 based alternative.
|
||||
+ *
|
||||
+ * See the documentation of the PYTHONCOERCECLOCALE setting for more details.
|
||||
+ *
|
||||
+ */
|
||||
+extern int _Py_LegacyLocaleDetected(void);
|
||||
+extern void _Py_CoerceLegacyLocale(void);
|
||||
+
|
||||
int
|
||||
main(int argc, char **argv)
|
||||
{
|
||||
@@ -25,7 +40,11 @@ main(int argc, char **argv)
|
||||
char *oldloc;
|
||||
|
||||
/* Force malloc() allocator to bootstrap Python */
|
||||
+#ifdef Py_DEBUG
|
||||
+ (void)_PyMem_SetupAllocators("malloc_debug");
|
||||
+# else
|
||||
(void)_PyMem_SetupAllocators("malloc");
|
||||
+# endif
|
||||
|
||||
argv_copy = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1));
|
||||
argv_copy2 = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1));
|
||||
@@ -49,7 +68,21 @@ main(int argc, char **argv)
|
||||
return 1;
|
||||
}
|
||||
|
||||
+#ifdef __ANDROID__
|
||||
+ /* Passing "" to setlocale() on Android requests the C locale rather
|
||||
+ * than checking environment variables, so request C.UTF-8 explicitly
|
||||
+ */
|
||||
+ setlocale(LC_ALL, "C.UTF-8");
|
||||
+#else
|
||||
+ /* Reconfigure the locale to the default for this process */
|
||||
setlocale(LC_ALL, "");
|
||||
+#endif
|
||||
+
|
||||
+ if (_Py_LegacyLocaleDetected()) {
|
||||
+ _Py_CoerceLegacyLocale();
|
||||
+ }
|
||||
+
|
||||
+ /* Convert from char to wchar_t based on the locale settings */
|
||||
for (i = 0; i < argc; i++) {
|
||||
argv_copy[i] = Py_DecodeLocale(argv[i], NULL);
|
||||
if (!argv_copy[i]) {
|
||||
@@ -70,7 +103,11 @@ main(int argc, char **argv)
|
||||
|
||||
/* Force again malloc() allocator to release memory blocks allocated
|
||||
before Py_Main() */
|
||||
+#ifdef Py_DEBUG
|
||||
+ (void)_PyMem_SetupAllocators("malloc_debug");
|
||||
+# else
|
||||
(void)_PyMem_SetupAllocators("malloc");
|
||||
+# endif
|
||||
|
||||
for (i = 0; i < argc; i++) {
|
||||
PyMem_RawFree(argv_copy2[i]);
|
||||
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
|
||||
index ecfdfee..4fee178 100644
|
||||
--- a/Python/pylifecycle.c
|
||||
+++ b/Python/pylifecycle.c
|
||||
@@ -167,6 +167,7 @@ Py_SetStandardStreamEncoding(const char *encoding, const char *errors)
|
||||
return 0;
|
||||
}
|
||||
|
||||
+
|
||||
/* Global initializations. Can be undone by Py_FinalizeEx(). Don't
|
||||
call this twice without an intervening Py_FinalizeEx() call. When
|
||||
initializations fail, a fatal error is issued and the function does
|
||||
@@ -301,6 +302,183 @@ import_init(PyInterpreterState *interp, PyObject *sysmod)
|
||||
}
|
||||
|
||||
|
||||
+/* Helper functions to better handle the legacy C locale
|
||||
+ *
|
||||
+ * The legacy C locale assumes ASCII as the default text encoding, which
|
||||
+ * causes problems not only for the CPython runtime, but also other
|
||||
+ * components like GNU readline.
|
||||
+ *
|
||||
+ * Accordingly, when the CLI detects it, it attempts to coerce it to a
|
||||
+ * more capable UTF-8 based alternative as follows:
|
||||
+ *
|
||||
+ * if (_Py_LegacyLocaleDetected()) {
|
||||
+ * _Py_CoerceLegacyLocale();
|
||||
+ * }
|
||||
+ *
|
||||
+ * See the documentation of the PYTHONCOERCECLOCALE setting for more details.
|
||||
+ *
|
||||
+ * Locale coercion also impacts the default error handler for the standard
|
||||
+ * streams: while the usual default is "strict", the default for the legacy
|
||||
+ * C locale and for any of the coercion target locales is "surrogateescape".
|
||||
+ */
|
||||
+
|
||||
+int
|
||||
+_Py_LegacyLocaleDetected(void)
|
||||
+{
|
||||
+#ifndef MS_WINDOWS
|
||||
+ /* On non-Windows systems, the C locale is considered a legacy locale */
|
||||
+ /* XXX (ncoghlan): some platforms (notably Mac OS X) don't appear to treat
|
||||
+ * the POSIX locale as a simple alias for the C locale, so
|
||||
+ * we may also want to check for that explicitly.
|
||||
+ */
|
||||
+ const char *ctype_loc = setlocale(LC_CTYPE, NULL);
|
||||
+ return ctype_loc != NULL && strcmp(ctype_loc, "C") == 0;
|
||||
+#else
|
||||
+ /* Windows uses code pages instead of locales, so no locale is legacy */
|
||||
+ return 0;
|
||||
+#endif
|
||||
+}
|
||||
+
|
||||
+
|
||||
+static const char *_C_LOCALE_WARNING =
|
||||
+ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII "
|
||||
+ "encoding), which may cause Unicode compatibility problems. Using C.UTF-8, "
|
||||
+ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible "
|
||||
+ "locales is recommended.\n";
|
||||
+
|
||||
+static int
|
||||
+_legacy_locale_warnings_enabled(void)
|
||||
+{
|
||||
+ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE");
|
||||
+ return (coerce_c_locale != NULL &&
|
||||
+ strncmp(coerce_c_locale, "warn", 5) == 0);
|
||||
+}
|
||||
+
|
||||
+static void
|
||||
+_emit_stderr_warning_for_legacy_locale(void)
|
||||
+{
|
||||
+ if (_legacy_locale_warnings_enabled()) {
|
||||
+ if (_Py_LegacyLocaleDetected()) {
|
||||
+ fprintf(stderr, "%s", _C_LOCALE_WARNING);
|
||||
+ }
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+typedef struct _CandidateLocale {
|
||||
+ const char *locale_name; /* The locale to try as a coercion target */
|
||||
+} _LocaleCoercionTarget;
|
||||
+
|
||||
+static _LocaleCoercionTarget _TARGET_LOCALES[] = {
|
||||
+ {"C.UTF-8"},
|
||||
+ {"C.utf8"},
|
||||
+ {"UTF-8"},
|
||||
+ {NULL}
|
||||
+};
|
||||
+
|
||||
+static char *
|
||||
+get_default_standard_stream_error_handler(void)
|
||||
+{
|
||||
+ const char *ctype_loc = setlocale(LC_CTYPE, NULL);
|
||||
+ if (ctype_loc != NULL) {
|
||||
+ /* "surrogateescape" is the default in the legacy C locale */
|
||||
+ if (strcmp(ctype_loc, "C") == 0) {
|
||||
+ return "surrogateescape";
|
||||
+ }
|
||||
+
|
||||
+#ifdef PY_COERCE_C_LOCALE
|
||||
+ /* "surrogateescape" is the default in locale coercion target locales */
|
||||
+ const _LocaleCoercionTarget *target = NULL;
|
||||
+ for (target = _TARGET_LOCALES; target->locale_name; target++) {
|
||||
+ if (strcmp(ctype_loc, target->locale_name) == 0) {
|
||||
+ return "surrogateescape";
|
||||
+ }
|
||||
+ }
|
||||
+#endif
|
||||
+ }
|
||||
+
|
||||
+ /* Otherwise return NULL to request the typical default error handler */
|
||||
+ return NULL;
|
||||
+}
|
||||
+
|
||||
+#ifdef PY_COERCE_C_LOCALE
|
||||
+static const char *_C_LOCALE_COERCION_WARNING =
|
||||
+ "Python detected LC_CTYPE=C: LC_CTYPE coerced to %.20s (set another locale "
|
||||
+ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior).\n";
|
||||
+
|
||||
+static void
|
||||
+_coerce_default_locale_settings(const _LocaleCoercionTarget *target)
|
||||
+{
|
||||
+
|
||||
+ const char *newloc = target->locale_name;
|
||||
+
|
||||
+ /* Reset locale back to currently configured defaults */
|
||||
+ setlocale(LC_ALL, "");
|
||||
+
|
||||
+ /* Set the relevant locale environment variable */
|
||||
+ if (setenv("LC_CTYPE", newloc, 1)) {
|
||||
+ fprintf(stderr,
|
||||
+ "Error setting LC_CTYPE, skipping C locale coercion\n");
|
||||
+ return;
|
||||
+ }
|
||||
+ if (_legacy_locale_warnings_enabled()) {
|
||||
+ fprintf(stderr, _C_LOCALE_COERCION_WARNING, newloc);
|
||||
+ }
|
||||
+
|
||||
+ /* Reconfigure with the overridden environment variables */
|
||||
+ setlocale(LC_ALL, "");
|
||||
+}
|
||||
+#endif
|
||||
+
|
||||
+
|
||||
+void
|
||||
+_Py_CoerceLegacyLocale(void)
|
||||
+{
|
||||
+#ifdef PY_COERCE_C_LOCALE
|
||||
+ /* We ignore the Python -E and -I flags here, as the CLI needs to sort out
|
||||
+ * the locale settings *before* we try to do anything with the command
|
||||
+ * line arguments. For cross-platform debugging purposes, we also need
|
||||
+ * to give end users a way to force even scripts that are otherwise
|
||||
+ * isolated from their environment to use the legacy ASCII-centric C
|
||||
+ * locale.
|
||||
+ *
|
||||
+ * Ignoring -E and -I is safe from a security perspective, as we only use
|
||||
+ * the setting to turn *off* the implicit locale coercion, and anyone with
|
||||
+ * access to the process environment already has the ability to set
|
||||
+ * `LC_ALL=C` to override the C level locale settings anyway.
|
||||
+ */
|
||||
+ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE");
|
||||
+ if (coerce_c_locale == NULL || strncmp(coerce_c_locale, "0", 2) != 0) {
|
||||
+ /* PYTHONCOERCECLOCALE is not set, or is set to something other than "0" */
|
||||
+ const char *locale_override = getenv("LC_ALL");
|
||||
+ if (locale_override == NULL || *locale_override == '\0') {
|
||||
+ /* LC_ALL is also not set (or is set to an empty string) */
|
||||
+ const _LocaleCoercionTarget *target = NULL;
|
||||
+ for (target = _TARGET_LOCALES; target->locale_name; target++) {
|
||||
+ const char *new_locale = setlocale(LC_CTYPE,
|
||||
+ target->locale_name);
|
||||
+ if (new_locale != NULL) {
|
||||
+#if !defined(__APPLE__) && defined(HAVE_LANGINFO_H) && defined(CODESET)
|
||||
+ /* Also ensure that nl_langinfo works in this locale */
|
||||
+ char *codeset = nl_langinfo(CODESET);
|
||||
+ if (!codeset || *codeset == '\0') {
|
||||
+ /* CODESET is not set or empty, so skip coercion */
|
||||
+ new_locale = NULL;
|
||||
+ setlocale(LC_CTYPE, "");
|
||||
+ continue;
|
||||
+ }
|
||||
+#endif
|
||||
+ /* Successfully configured locale, so make it the default */
|
||||
+ _coerce_default_locale_settings(target);
|
||||
+ return;
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ /* No C locale warning here, as Py_Initialize will emit one later */
|
||||
+#endif
|
||||
+}
|
||||
+
|
||||
+
|
||||
void
|
||||
_Py_InitializeEx_Private(int install_sigs, int install_importlib)
|
||||
{
|
||||
@@ -315,11 +493,19 @@ _Py_InitializeEx_Private(int install_sigs, int install_importlib)
|
||||
initialized = 1;
|
||||
_Py_Finalizing = NULL;
|
||||
|
||||
-#ifdef HAVE_SETLOCALE
|
||||
+#ifdef __ANDROID__
|
||||
+ /* Passing "" to setlocale() on Android requests the C locale rather
|
||||
+ * than checking environment variables, so request C.UTF-8 explicitly
|
||||
+ */
|
||||
+ setlocale(LC_CTYPE, "C.UTF-8");
|
||||
+#else
|
||||
+#ifndef MS_WINDOWS
|
||||
/* Set up the LC_CTYPE locale, so we can obtain
|
||||
the locale's charset without having to switch
|
||||
locales. */
|
||||
setlocale(LC_CTYPE, "");
|
||||
+ _emit_stderr_warning_for_legacy_locale();
|
||||
+#endif
|
||||
#endif
|
||||
|
||||
if ((p = Py_GETENV("PYTHONDEBUG")) && *p != '\0')
|
||||
@@ -1247,12 +1433,8 @@ initstdio(void)
|
||||
}
|
||||
}
|
||||
if (!errors && !(pythonioencoding && *pythonioencoding)) {
|
||||
- /* When the LC_CTYPE locale is the POSIX locale ("C locale"),
|
||||
- stdin and stdout use the surrogateescape error handler by
|
||||
- default, instead of the strict error handler. */
|
||||
- char *loc = setlocale(LC_CTYPE, NULL);
|
||||
- if (loc != NULL && strcmp(loc, "C") == 0)
|
||||
- errors = "surrogateescape";
|
||||
+ /* Choose the default error handler based on the current locale */
|
||||
+ errors = get_default_standard_stream_error_handler();
|
||||
}
|
||||
}
|
||||
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 3f2459a..7444486 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -3360,6 +3360,40 @@ then
|
||||
fi
|
||||
AC_MSG_RESULT($with_pymalloc)
|
||||
|
||||
+# Check for --with-c-locale-coercion
|
||||
+AC_MSG_CHECKING(for --with-c-locale-coercion)
|
||||
+AC_ARG_WITH(c-locale-coercion,
|
||||
+ AS_HELP_STRING([--with(out)-c-locale-coercion],
|
||||
+ [disable/enable C locale coercion to a UTF-8 based locale]))
|
||||
+
|
||||
+if test -z "$with_c_locale_coercion"
|
||||
+then
|
||||
+ with_c_locale_coercion="yes"
|
||||
+fi
|
||||
+if test "$with_c_locale_coercion" != "no"
|
||||
+then
|
||||
+ AC_DEFINE(PY_COERCE_C_LOCALE, 1,
|
||||
+ [Define if you want to coerce the C locale to a UTF-8 based locale])
|
||||
+fi
|
||||
+AC_MSG_RESULT($with_c_locale_coercion)
|
||||
+
|
||||
+# Check for --with-c-locale-warning
|
||||
+AC_MSG_CHECKING(for --with-c-locale-warning)
|
||||
+AC_ARG_WITH(c-locale-warning,
|
||||
+ AS_HELP_STRING([--with(out)-c-locale-warning],
|
||||
+ [disable/enable locale compatibility warning in the C locale]))
|
||||
+
|
||||
+if test -z "$with_c_locale_warning"
|
||||
+then
|
||||
+ with_c_locale_warning="yes"
|
||||
+fi
|
||||
+if test "$with_c_locale_warning" != "no"
|
||||
+then
|
||||
+ AC_DEFINE(PY_WARN_ON_C_LOCALE, 1,
|
||||
+ [Define to emit a locale compatibility warning in the C locale])
|
||||
+fi
|
||||
+AC_MSG_RESULT($with_c_locale_warning)
|
||||
+
|
||||
# Check for Valgrind support
|
||||
AC_MSG_CHECKING([for --with-valgrind])
|
||||
AC_ARG_WITH([valgrind],
|
228
SOURCES/00294-define-TLS-cipher-suite-on-build-time.patch
Normal file
228
SOURCES/00294-define-TLS-cipher-suite-on-build-time.patch
Normal file
@ -0,0 +1,228 @@
|
||||
diff --git a/Lib/ssl.py b/Lib/ssl.py
|
||||
index 1f3a31a..b54a684 100644
|
||||
--- a/Lib/ssl.py
|
||||
+++ b/Lib/ssl.py
|
||||
@@ -116,6 +116,7 @@ except ImportError:
|
||||
|
||||
|
||||
from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN, HAS_TLSv1_3
|
||||
+from _ssl import _DEFAULT_CIPHERS
|
||||
from _ssl import _OPENSSL_API_VERSION
|
||||
|
||||
|
||||
@@ -174,48 +175,7 @@ else:
|
||||
CHANNEL_BINDING_TYPES = []
|
||||
|
||||
|
||||
-# Disable weak or insecure ciphers by default
|
||||
-# (OpenSSL's default setting is 'DEFAULT:!aNULL:!eNULL')
|
||||
-# Enable a better set of ciphers by default
|
||||
-# This list has been explicitly chosen to:
|
||||
-# * TLS 1.3 ChaCha20 and AES-GCM cipher suites
|
||||
-# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE)
|
||||
-# * Prefer ECDHE over DHE for better performance
|
||||
-# * Prefer AEAD over CBC for better performance and security
|
||||
-# * Prefer AES-GCM over ChaCha20 because most platforms have AES-NI
|
||||
-# (ChaCha20 needs OpenSSL 1.1.0 or patched 1.0.2)
|
||||
-# * Prefer any AES-GCM and ChaCha20 over any AES-CBC for better
|
||||
-# performance and security
|
||||
-# * Then Use HIGH cipher suites as a fallback
|
||||
-# * Disable NULL authentication, NULL encryption, 3DES and MD5 MACs
|
||||
-# for security reasons
|
||||
-_DEFAULT_CIPHERS = (
|
||||
- 'TLS13-AES-256-GCM-SHA384:TLS13-CHACHA20-POLY1305-SHA256:'
|
||||
- 'TLS13-AES-128-GCM-SHA256:'
|
||||
- 'ECDH+AESGCM:ECDH+CHACHA20:DH+AESGCM:DH+CHACHA20:ECDH+AES256:DH+AES256:'
|
||||
- 'ECDH+AES128:DH+AES:ECDH+HIGH:DH+HIGH:RSA+AESGCM:RSA+AES:RSA+HIGH:'
|
||||
- '!aNULL:!eNULL:!MD5:!3DES'
|
||||
- )
|
||||
-
|
||||
-# Restricted and more secure ciphers for the server side
|
||||
-# This list has been explicitly chosen to:
|
||||
-# * TLS 1.3 ChaCha20 and AES-GCM cipher suites
|
||||
-# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE)
|
||||
-# * Prefer ECDHE over DHE for better performance
|
||||
-# * Prefer AEAD over CBC for better performance and security
|
||||
-# * Prefer AES-GCM over ChaCha20 because most platforms have AES-NI
|
||||
-# * Prefer any AES-GCM and ChaCha20 over any AES-CBC for better
|
||||
-# performance and security
|
||||
-# * Then Use HIGH cipher suites as a fallback
|
||||
-# * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, RC4, and
|
||||
-# 3DES for security reasons
|
||||
-_RESTRICTED_SERVER_CIPHERS = (
|
||||
- 'TLS13-AES-256-GCM-SHA384:TLS13-CHACHA20-POLY1305-SHA256:'
|
||||
- 'TLS13-AES-128-GCM-SHA256:'
|
||||
- 'ECDH+AESGCM:ECDH+CHACHA20:DH+AESGCM:DH+CHACHA20:ECDH+AES256:DH+AES256:'
|
||||
- 'ECDH+AES128:DH+AES:ECDH+HIGH:DH+HIGH:RSA+AESGCM:RSA+AES:RSA+HIGH:'
|
||||
- '!aNULL:!eNULL:!MD5:!DSS:!RC4:!3DES'
|
||||
-)
|
||||
+_RESTRICTED_SERVER_CIPHERS = _DEFAULT_CIPHERS
|
||||
|
||||
|
||||
class CertificateError(ValueError):
|
||||
@@ -389,8 +349,6 @@ class SSLContext(_SSLContext):
|
||||
|
||||
def __new__(cls, protocol=PROTOCOL_TLS, *args, **kwargs):
|
||||
self = _SSLContext.__new__(cls, protocol)
|
||||
- if protocol != _SSLv2_IF_EXISTS:
|
||||
- self.set_ciphers(_DEFAULT_CIPHERS)
|
||||
return self
|
||||
|
||||
def __init__(self, protocol=PROTOCOL_TLS):
|
||||
@@ -505,8 +463,6 @@ def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None,
|
||||
# verify certs and host name in client mode
|
||||
context.verify_mode = CERT_REQUIRED
|
||||
context.check_hostname = True
|
||||
- elif purpose == Purpose.CLIENT_AUTH:
|
||||
- context.set_ciphers(_RESTRICTED_SERVER_CIPHERS)
|
||||
|
||||
if cafile or capath or cadata:
|
||||
context.load_verify_locations(cafile, capath, cadata)
|
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
|
||||
index 9785a59..34a7ec2 100644
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -18,6 +18,7 @@ import asyncore
|
||||
import weakref
|
||||
import platform
|
||||
import functools
|
||||
+import sysconfig
|
||||
try:
|
||||
import ctypes
|
||||
except ImportError:
|
||||
@@ -36,7 +37,7 @@ PROTOCOLS = sorted(ssl._PROTOCOL_NAMES)
|
||||
HOST = support.HOST
|
||||
IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL')
|
||||
IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0)
|
||||
-
|
||||
+PY_SSL_DEFAULT_CIPHERS = sysconfig.get_config_var('PY_SSL_DEFAULT_CIPHERS')
|
||||
|
||||
def data_file(*name):
|
||||
return os.path.join(os.path.dirname(__file__), *name)
|
||||
@@ -889,6 +890,19 @@ class ContextTests(unittest.TestCase):
|
||||
with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"):
|
||||
ctx.set_ciphers("^$:,;?*'dorothyx")
|
||||
|
||||
+ @unittest.skipUnless(PY_SSL_DEFAULT_CIPHERS == 1,
|
||||
+ "Test applies only to Python default ciphers")
|
||||
+ def test_python_ciphers(self):
|
||||
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
+ ciphers = ctx.get_ciphers()
|
||||
+ for suite in ciphers:
|
||||
+ name = suite['name']
|
||||
+ self.assertNotIn("PSK", name)
|
||||
+ self.assertNotIn("SRP", name)
|
||||
+ self.assertNotIn("MD5", name)
|
||||
+ self.assertNotIn("RC4", name)
|
||||
+ self.assertNotIn("3DES", name)
|
||||
+
|
||||
@unittest.skipIf(ssl.OPENSSL_VERSION_INFO < (1, 0, 2, 0, 0), 'OpenSSL too old')
|
||||
def test_get_ciphers(self):
|
||||
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
|
||||
index 5e007da..130f006 100644
|
||||
--- a/Modules/_ssl.c
|
||||
+++ b/Modules/_ssl.c
|
||||
@@ -237,6 +237,31 @@ SSL_SESSION_get_ticket_lifetime_hint(const SSL_SESSION *s)
|
||||
|
||||
#endif /* OpenSSL < 1.1.0 or LibreSSL < 2.7.0 */
|
||||
|
||||
+/* Default cipher suites */
|
||||
+#ifndef PY_SSL_DEFAULT_CIPHERS
|
||||
+#define PY_SSL_DEFAULT_CIPHERS 1
|
||||
+#endif
|
||||
+
|
||||
+#if PY_SSL_DEFAULT_CIPHERS == 0
|
||||
+ #ifndef PY_SSL_DEFAULT_CIPHER_STRING
|
||||
+ #error "Py_SSL_DEFAULT_CIPHERS 0 needs Py_SSL_DEFAULT_CIPHER_STRING"
|
||||
+ #endif
|
||||
+#elif PY_SSL_DEFAULT_CIPHERS == 1
|
||||
+/* Python custom selection of sensible ciper suites
|
||||
+ * DEFAULT: OpenSSL's default cipher list. Since 1.0.2 the list is in sensible order.
|
||||
+ * !aNULL:!eNULL: really no NULL ciphers
|
||||
+ * !MD5:!3DES:!DES:!RC4:!IDEA:!SEED: no weak or broken algorithms on old OpenSSL versions.
|
||||
+ * !aDSS: no authentication with discrete logarithm DSA algorithm
|
||||
+ * !SRP:!PSK: no secure remote password or pre-shared key authentication
|
||||
+ */
|
||||
+ #define PY_SSL_DEFAULT_CIPHER_STRING "DEFAULT:!aNULL:!eNULL:!MD5:!3DES:!DES:!RC4:!IDEA:!SEED:!aDSS:!SRP:!PSK"
|
||||
+#elif PY_SSL_DEFAULT_CIPHERS == 2
|
||||
+/* Ignored in SSLContext constructor, only used to as _ssl.DEFAULT_CIPHER_STRING */
|
||||
+ #define PY_SSL_DEFAULT_CIPHER_STRING SSL_DEFAULT_CIPHER_LIST
|
||||
+#else
|
||||
+ #error "Unsupported PY_SSL_DEFAULT_CIPHERS"
|
||||
+#endif
|
||||
+
|
||||
|
||||
enum py_ssl_error {
|
||||
/* these mirror ssl.h */
|
||||
@@ -2803,7 +2828,12 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version)
|
||||
/* A bare minimum cipher list without completely broken cipher suites.
|
||||
* It's far from perfect but gives users a better head start. */
|
||||
if (proto_version != PY_SSL_VERSION_SSL2) {
|
||||
- result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL:!MD5");
|
||||
+#if PY_SSL_DEFAULT_CIPHERS == 2
|
||||
+ /* stick to OpenSSL's default settings */
|
||||
+ result = 1;
|
||||
+#else
|
||||
+ result = SSL_CTX_set_cipher_list(ctx, PY_SSL_DEFAULT_CIPHER_STRING);
|
||||
+#endif
|
||||
} else {
|
||||
/* SSLv2 needs MD5 */
|
||||
result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL");
|
||||
@@ -5343,6 +5373,9 @@ PyInit__ssl(void)
|
||||
(PyObject *)&PySSLSession_Type) != 0)
|
||||
return NULL;
|
||||
|
||||
+ PyModule_AddStringConstant(m, "_DEFAULT_CIPHERS",
|
||||
+ PY_SSL_DEFAULT_CIPHER_STRING);
|
||||
+
|
||||
PyModule_AddIntConstant(m, "SSL_ERROR_ZERO_RETURN",
|
||||
PY_SSL_ERROR_ZERO_RETURN);
|
||||
PyModule_AddIntConstant(m, "SSL_ERROR_WANT_READ",
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 3703701..2eff514 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -5598,6 +5598,42 @@ if test "$have_getrandom" = yes; then
|
||||
[Define to 1 if the getrandom() function is available])
|
||||
fi
|
||||
|
||||
+# ssl module default cipher suite string
|
||||
+AH_TEMPLATE(PY_SSL_DEFAULT_CIPHERS,
|
||||
+ [Default cipher suites list for ssl module.
|
||||
+ 1: Python's preferred selection, 2: leave OpenSSL defaults untouched, 0: custom string])
|
||||
+AH_TEMPLATE(PY_SSL_DEFAULT_CIPHER_STRING,
|
||||
+ [Cipher suite string for PY_SSL_DEFAULT_CIPHERS=0]
|
||||
+)
|
||||
+AC_MSG_CHECKING(for --with-ssl-default-suites)
|
||||
+AC_ARG_WITH(ssl-default-suites,
|
||||
+ AS_HELP_STRING([--with-ssl-default-suites=@<:@python|openssl|STRING@:>@],
|
||||
+ [Override default cipher suites string,
|
||||
+ python: use Python's preferred selection (default),
|
||||
+ openssl: leave OpenSSL's defaults untouched,
|
||||
+ STRING: use a custom string,
|
||||
+ PROTOCOL_SSLv2 ignores the setting]),
|
||||
+[
|
||||
+AC_MSG_RESULT($withval)
|
||||
+case "$withval" in
|
||||
+ python)
|
||||
+ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1)
|
||||
+ ;;
|
||||
+ openssl)
|
||||
+ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 2)
|
||||
+ ;;
|
||||
+ *)
|
||||
+ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 0)
|
||||
+ AC_DEFINE_UNQUOTED(PY_SSL_DEFAULT_CIPHER_STRING, "$withval")
|
||||
+ ;;
|
||||
+esac
|
||||
+],
|
||||
+[
|
||||
+AC_MSG_RESULT(python)
|
||||
+AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1)
|
||||
+])
|
||||
+
|
||||
+
|
||||
# generate output files
|
||||
AC_CONFIG_FILES(Makefile.pre Modules/Setup.config Misc/python.pc Misc/python-config.sh)
|
||||
AC_CONFIG_FILES([Modules/ld_so_aix], [chmod +x Modules/ld_so_aix])
|
14
SOURCES/00316-mark-bdist_wininst-unsupported.patch
Normal file
14
SOURCES/00316-mark-bdist_wininst-unsupported.patch
Normal file
@ -0,0 +1,14 @@
|
||||
diff --git a/Lib/distutils/command/bdist_wininst.py b/Lib/distutils/command/bdist_wininst.py
|
||||
index fde5675..15434c3 100644
|
||||
--- a/Lib/distutils/command/bdist_wininst.py
|
||||
+++ b/Lib/distutils/command/bdist_wininst.py
|
||||
@@ -55,6 +55,9 @@ class bdist_wininst(Command):
|
||||
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
|
||||
'skip-build']
|
||||
|
||||
+ # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows
|
||||
+ _unsupported = (sys.platform != "win32")
|
||||
+
|
||||
def initialize_options(self):
|
||||
self.bdist_dir = None
|
||||
self.plat_name = None
|
111
SOURCES/00317-CVE-2019-5010.patch
Normal file
111
SOURCES/00317-CVE-2019-5010.patch
Normal file
@ -0,0 +1,111 @@
|
||||
From c660debb97f4f422255a82fef2d77804552c043a Mon Sep 17 00:00:00 2001
|
||||
From: Christian Heimes <christian@python.org>
|
||||
Date: Tue, 15 Jan 2019 18:16:30 +0100
|
||||
Subject: [PATCH] bpo-35746: Fix segfault in ssl's cert parser
|
||||
|
||||
CVE-2019-5010, Fix a NULL pointer deref in ssl module. The cert parser did
|
||||
not handle CRL distribution points with empty DP or URI correctly. A
|
||||
malicious or buggy certificate can result into segfault.
|
||||
|
||||
Signed-off-by: Christian Heimes <christian@python.org>
|
||||
---
|
||||
Lib/test/talos-2019-0758.pem | 22 +++++++++++++++++++
|
||||
Lib/test/test_ssl.py | 22 +++++++++++++++++++
|
||||
.../2019-01-15-18-16-05.bpo-35746.nMSd0j.rst | 3 +++
|
||||
Modules/_ssl.c | 4 ++++
|
||||
4 files changed, 51 insertions(+)
|
||||
create mode 100644 Lib/test/talos-2019-0758.pem
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst
|
||||
|
||||
diff --git a/Lib/test/talos-2019-0758.pem b/Lib/test/talos-2019-0758.pem
|
||||
new file mode 100644
|
||||
index 000000000000..13b95a77fd8a
|
||||
--- /dev/null
|
||||
+++ b/Lib/test/talos-2019-0758.pem
|
||||
@@ -0,0 +1,22 @@
|
||||
+-----BEGIN CERTIFICATE-----
|
||||
+MIIDqDCCApKgAwIBAgIBAjALBgkqhkiG9w0BAQswHzELMAkGA1UEBhMCVUsxEDAO
|
||||
+BgNVBAMTB2NvZHktY2EwHhcNMTgwNjE4MTgwMDU4WhcNMjgwNjE0MTgwMDU4WjA7
|
||||
+MQswCQYDVQQGEwJVSzEsMCoGA1UEAxMjY29kZW5vbWljb24tdm0tMi50ZXN0Lmxh
|
||||
+bC5jaXNjby5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC63fGB
|
||||
+J80A9Av1GB0bptslKRIUtJm8EeEu34HkDWbL6AJY0P8WfDtlXjlPaLqFa6sqH6ES
|
||||
+V48prSm1ZUbDSVL8R6BYVYpOlK8/48xk4pGTgRzv69gf5SGtQLwHy8UPBKgjSZoD
|
||||
+5a5k5wJXGswhKFFNqyyxqCvWmMnJWxXTt2XDCiWc4g4YAWi4O4+6SeeHVAV9rV7C
|
||||
+1wxqjzKovVe2uZOHjKEzJbbIU6JBPb6TRfMdRdYOw98n1VXDcKVgdX2DuuqjCzHP
|
||||
+WhU4Tw050M9NaK3eXp4Mh69VuiKoBGOLSOcS8reqHIU46Reg0hqeL8LIL6OhFHIF
|
||||
+j7HR6V1X6F+BfRS/AgMBAAGjgdYwgdMwCQYDVR0TBAIwADAdBgNVHQ4EFgQUOktp
|
||||
+HQjxDXXUg8prleY9jeLKeQ4wTwYDVR0jBEgwRoAUx6zgPygZ0ZErF9sPC4+5e2Io
|
||||
+UU+hI6QhMB8xCzAJBgNVBAYTAlVLMRAwDgYDVQQDEwdjb2R5LWNhggkA1QEAuwb7
|
||||
+2s0wCQYDVR0SBAIwADAuBgNVHREEJzAlgiNjb2Rlbm9taWNvbi12bS0yLnRlc3Qu
|
||||
+bGFsLmNpc2NvLmNvbTAOBgNVHQ8BAf8EBAMCBaAwCwYDVR0fBAQwAjAAMAsGCSqG
|
||||
+SIb3DQEBCwOCAQEAvqantx2yBlM11RoFiCfi+AfSblXPdrIrHvccepV4pYc/yO6p
|
||||
+t1f2dxHQb8rWH3i6cWag/EgIZx+HJQvo0rgPY1BFJsX1WnYf1/znZpkUBGbVmlJr
|
||||
+t/dW1gSkNS6sPsM0Q+7HPgEv8CPDNK5eo7vU2seE0iWOkxSyVUuiCEY9ZVGaLVit
|
||||
+p0C78nZ35Pdv4I+1cosmHl28+es1WI22rrnmdBpH8J1eY6WvUw2xuZHLeNVN0TzV
|
||||
+Q3qq53AaCWuLOD1AjESWuUCxMZTK9DPS4JKXTK8RLyDeqOvJGjsSWp3kL0y3GaQ+
|
||||
+10T1rfkKJub2+m9A9duin1fn6tHc2wSvB7m3DA==
|
||||
+-----END CERTIFICATE-----
|
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
|
||||
index 7f6b93148f45..1fc657f4d867 100644
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -115,6 +115,7 @@ def data_file(*name):
|
||||
BADKEY = data_file("badkey.pem")
|
||||
NOKIACERT = data_file("nokia.pem")
|
||||
NULLBYTECERT = data_file("nullbytecert.pem")
|
||||
+TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem")
|
||||
|
||||
DHFILE = data_file("ffdh3072.pem")
|
||||
BYTES_DHFILE = os.fsencode(DHFILE)
|
||||
@@ -348,6 +349,27 @@ def test_parse_cert(self):
|
||||
self.assertEqual(p['crlDistributionPoints'],
|
||||
('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',))
|
||||
|
||||
+ def test_parse_cert_CVE_2019_5010(self):
|
||||
+ p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP)
|
||||
+ if support.verbose:
|
||||
+ sys.stdout.write("\n" + pprint.pformat(p) + "\n")
|
||||
+ self.assertEqual(
|
||||
+ p,
|
||||
+ {
|
||||
+ 'issuer': (
|
||||
+ (('countryName', 'UK'),), (('commonName', 'cody-ca'),)),
|
||||
+ 'notAfter': 'Jun 14 18:00:58 2028 GMT',
|
||||
+ 'notBefore': 'Jun 18 18:00:58 2018 GMT',
|
||||
+ 'serialNumber': '02',
|
||||
+ 'subject': ((('countryName', 'UK'),),
|
||||
+ (('commonName',
|
||||
+ 'codenomicon-vm-2.test.lal.cisco.com'),)),
|
||||
+ 'subjectAltName': (
|
||||
+ ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),),
|
||||
+ 'version': 3
|
||||
+ }
|
||||
+ )
|
||||
+
|
||||
def test_parse_cert_CVE_2013_4238(self):
|
||||
p = ssl._ssl._test_decode_cert(NULLBYTECERT)
|
||||
if support.verbose:
|
||||
diff --git a/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst b/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst
|
||||
new file mode 100644
|
||||
index 000000000000..dffe347eec84
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst
|
||||
@@ -0,0 +1,3 @@
|
||||
+[CVE-2019-5010] Fix a NULL pointer deref in ssl module. The cert parser did
|
||||
+not handle CRL distribution points with empty DP or URI correctly. A
|
||||
+malicious or buggy certificate can result into segfault.
|
||||
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
|
||||
index 4e3352d9e661..0e720e268d93 100644
|
||||
--- a/Modules/_ssl.c
|
||||
+++ b/Modules/_ssl.c
|
||||
@@ -1515,6 +1515,10 @@ _get_crl_dp(X509 *certificate) {
|
||||
STACK_OF(GENERAL_NAME) *gns;
|
||||
|
||||
dp = sk_DIST_POINT_value(dps, i);
|
||||
+ if (dp->distpoint == NULL) {
|
||||
+ /* Ignore empty DP value, CVE-2019-5010 */
|
||||
+ continue;
|
||||
+ }
|
||||
gns = dp->distpoint->name.fullname;
|
||||
|
||||
for (j=0; j < sk_GENERAL_NAME_num(gns); j++) {
|
949
SOURCES/00318-fixes-for-tls-13.patch
Normal file
949
SOURCES/00318-fixes-for-tls-13.patch
Normal file
@ -0,0 +1,949 @@
|
||||
From 412ccf4c6f8c417006c0a93392a8274a425074c0 Mon Sep 17 00:00:00 2001
|
||||
From: Victor Stinner <vstinner@redhat.com>
|
||||
Date: Wed, 29 May 2019 04:04:54 +0200
|
||||
Subject: [PATCH 1/5] bpo-32947: test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1
|
||||
(GH-11612)
|
||||
|
||||
Backport partially commit 529525fb5a8fd9b96ab4021311a598c77588b918:
|
||||
complete the previous partial backport (commit
|
||||
2a4ee8aa01d61b6a9c8e9c65c211e61bdb471826.
|
||||
|
||||
Co-Authored-By: Christian Heimes <christian@python.org>
|
||||
---
|
||||
Lib/test/test_ssl.py | 15 +++++++++++++++
|
||||
.../2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst | 1 +
|
||||
2 files changed, 16 insertions(+)
|
||||
create mode 100644 Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst
|
||||
|
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
|
||||
index cb0acda..639109f 100644
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -2043,6 +2043,16 @@ if _have_threads:
|
||||
sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n"
|
||||
% (msg, ctype, msg.lower(), ctype))
|
||||
self.write(msg.lower())
|
||||
+ except ConnectionResetError:
|
||||
+ # XXX: OpenSSL 1.1.1 sometimes raises ConnectionResetError
|
||||
+ # when connection is not shut down gracefully.
|
||||
+ if self.server.chatty and support.verbose:
|
||||
+ sys.stdout.write(
|
||||
+ " Connection reset by peer: {}\n".format(
|
||||
+ self.addr)
|
||||
+ )
|
||||
+ self.close()
|
||||
+ self.running = False
|
||||
except OSError:
|
||||
if self.server.chatty:
|
||||
handle_error("Test server failure:\n")
|
||||
@@ -2122,6 +2132,11 @@ if _have_threads:
|
||||
pass
|
||||
except KeyboardInterrupt:
|
||||
self.stop()
|
||||
+ except BaseException as e:
|
||||
+ if support.verbose and self.chatty:
|
||||
+ sys.stdout.write(
|
||||
+ ' connection handling failed: ' + repr(e) + '\n')
|
||||
+
|
||||
self.sock.close()
|
||||
|
||||
def stop(self):
|
||||
diff --git a/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst b/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst
|
||||
new file mode 100644
|
||||
index 0000000..f508504
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst
|
||||
@@ -0,0 +1 @@
|
||||
+test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1.
|
||||
--
|
||||
2.21.0
|
||||
|
||||
|
||||
From 6b728ec778067849dd1f0d9b73cf1ac47dafa270 Mon Sep 17 00:00:00 2001
|
||||
From: "Miss Islington (bot)"
|
||||
<31488909+miss-islington@users.noreply.github.com>
|
||||
Date: Wed, 25 Sep 2019 09:12:59 -0700
|
||||
Subject: [PATCH 2/5] bpo-38271: encrypt private key test files with AES256
|
||||
(GH-16385)
|
||||
|
||||
The private keys for test_ssl were encrypted with 3DES in traditional
|
||||
PKCSGH-5 format. 3DES and the digest algorithm of PKCSGH-5 are blocked by
|
||||
some strict crypto policies. Use PKCSGH-8 format with AES256 encryption
|
||||
instead.
|
||||
|
||||
Signed-off-by: Christian Heimes <christian@python.org>
|
||||
|
||||
https://bugs.python.org/issue38271
|
||||
|
||||
Automerge-Triggered-By: @tiran
|
||||
(cherry picked from commit bfd0c963d88f3df69489ee250655e2b8f3d235bd)
|
||||
|
||||
Co-authored-by: Christian Heimes <christian@python.org>
|
||||
---
|
||||
Lib/test/keycert.passwd.pem | 85 ++++++++++---------
|
||||
Lib/test/make_ssl_certs.py | 4 +-
|
||||
Lib/test/ssl_key.passwd.pem | 84 +++++++++---------
|
||||
.../2019-09-25-13-11-29.bpo-38271.iHXNIg.rst | 4 +
|
||||
4 files changed, 91 insertions(+), 86 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst
|
||||
|
||||
diff --git a/Lib/test/keycert.passwd.pem b/Lib/test/keycert.passwd.pem
|
||||
index cbb3c3b..c330c36 100644
|
||||
--- a/Lib/test/keycert.passwd.pem
|
||||
+++ b/Lib/test/keycert.passwd.pem
|
||||
@@ -1,45 +1,45 @@
|
||||
------BEGIN RSA PRIVATE KEY-----
|
||||
-Proc-Type: 4,ENCRYPTED
|
||||
-DEK-Info: DES-EDE3-CBC,D134E931C96D9DEC
|
||||
-
|
||||
-nuGFEej7vIjkYWSMz5OJeVTNntDRQi6ZM4DBm3g8T7i/0odr3WFqGMMKZcIhLYQf
|
||||
-rgRq7RSKtrJ1y5taVucMV+EuCjyfzDo0TsYt+ZrXv/D08eZhjRmkhoHnGVF0TqQm
|
||||
-nQEXM/ERT4J2RM78dnG+homMkI76qOqxgGbRqQqJo6AiVRcAZ45y8s96bru2TAB8
|
||||
-+pWjO/v0Je7AFVdwSU52N8OOY6uoSAygW+0UY1WVxbVGJF2XfRsNpPX+YQHYl6e+
|
||||
-3xM5XBVCgr6kmdAyub5qUJ38X3TpdVGoR0i+CVS9GTr2pSRib1zURAeeHnlqiUZM
|
||||
-4m0Gn9s72nJevU1wxED8pwOhR8fnHEmMKGD2HPhKoOCbzDhwwBZO27TNa1uWeM3f
|
||||
-M5oixKDi2PqMn3y2cDx1NjJtP661688EcJ5a2Ih9BgO9xpnhSyzBWEKcAn0tJB0H
|
||||
-/56M0FW6cdOOIzMveGGL7sHW5E+iOdI1n5e7C6KJUzew78Y9qJnhS53EdI6qTz9R
|
||||
-wsIsj1i070Fk6RbPo6zpLlF6w7Zj8GlZaZA7OZZv9wo5VEV/0ST8gmiiBOBc4C6Y
|
||||
-u9hyLIIu4dFEBKyQHRvBnQSLNpKx6or1OGFDVBay2In9Yh2BHh1+vOj/OIz/wq48
|
||||
-EHOIV27fRJxLu4jeK5LIGDhuPnMJ8AJYQ0bQOUP6fd7p+TxWkAQZPB/Dx/cs3hxr
|
||||
-nFEdzx+eO+IAsObx/b1EGZyEJyETBslu4GwYX7/KK3HsJhDJ1bdZ//28jOCaoir6
|
||||
-ZOMT72GRwmVoQTJ0XpccfjHfKJDRLT7C1xvzo4Eibth0hpTZkA75IUYUp6qK/PuJ
|
||||
-kH/qdiC7QIkRKtsrawW4vEDna3YtxIYhQqz9+KwO6u/0gzooZtv1RU4U3ifMDB5u
|
||||
-5P5GAzACRqlY8QYBkM869lvWqzQPHvybC4ak9Yx6/heMO9ddjdIW9BaK8BLxvN/6
|
||||
-UCD936Y4fWltt09jHZIoxWFykouBwmd7bXooNYXmDRNmjTdVhKJuOEOQw8hDzx7e
|
||||
-pWFJ9Z/V4Qm1tvXbCD7QFqMCDoY3qFvVG8DBqXpmxe1yPfz21FWrT7IuqDXAD3ns
|
||||
-vxfN/2a+Cy04U9FBNVCvWqWIs5AgNpdCMJC2FlXKTy+H3/7rIjNyFyvbX0vxIXtK
|
||||
-liOVNXiyVM++KZXqktqMUDlsJENmIHV9B046luqbgW018fHkyEYlL3iRZGbYegwr
|
||||
-XO9VVIKVPw1BEvJ8VNdGFGuZGepd8qX2ezfYADrNR+4t85HDm8inbjTobSjWuljs
|
||||
-ftUNkOeCHqAvWCFQTLCfdykvV08EJfVY79y7yFPtfRV2gxYokXFifjo3su9sVQr1
|
||||
-UiIS5ZAsIC1hBXWeXoBN7QVTkFi7Yto6E1q2k10LiT3obpUUUQ/oclhrJOCJVjrS
|
||||
-oRcj2QBy8OT4T9slJr5maTWdgd7Lt6+I6cGQXPaDvjGOJl0eBYM14vhx4rRQWytJ
|
||||
-k07hhHFO4+9CGCuHS8AAy2gR6acYFWt2ZiiNZ0z/iPIHNK4YEyy9aLf6uZH/KQjE
|
||||
-jmHToo7XD6QvCAEC5qTHby3o3LfHIhyZi/4L+AhS4FKUHF6M0peeyYt4z3HaK2d2
|
||||
-N6mHLPdjwNjra7GOmcns4gzcrdfoF+R293KpPal4PjknvR3dZL4kKP/ougTAM5zv
|
||||
-qDIvRbkHzjP8ChTpoLcJsNVXykNcNkjcSi0GHtIpYjh6QX6P2uvR/S4+Bbb9p9rn
|
||||
-hIy/ovu9tWN2hiPxGPe6torF6BulAxsTYlDercC204AyzsrdA0pr6HBgJH9C6ML1
|
||||
-TchwodbFJqn9rSv91i1liusAGoOvE81AGBdrXY7LxfSNhYY1IK6yR/POJPTd53sA
|
||||
-uX2/j6Rtoksd/2BHPM6AUnI/2B9slhuzWX2aCtWLeuwvXDS6rYuTigaQmLkzTRfM
|
||||
-dlMI3s9KLXxgi5YVumUZleJWXwBNP7KiKajd+VTSD+7WAhyhM5FIG5wVOaxmy4G2
|
||||
-TyqZ/Ax9d2VEjTQHWvQlLPQ4Mp0EIz0aEl94K/S8CK8bJRH6+PRkar+dJi1xqlL+
|
||||
-BYb42At9mEJ8odLlFikvNi1+t7jqXk5jRi5C0xFKx3nTtzoH2zNUeuA3R6vSocVK
|
||||
-45jnze9IkKmxMlJ4loR5sgszdpDCD3kXqjtCcbMTmcrGyzJek3HSOTpiEORoTFOe
|
||||
-Rhg6jH5lm+QcC263oipojS0qEQcnsWJP2CylNYMYHR9O/9NQxT3o2lsRHqZTMELV
|
||||
-uQa/SFH+paQNbZOj8MRwPSqqiIxJFuLswKte1R+W7LKn1yBSM7Pp39lNbzGvJD2E
|
||||
-YRfnCwFpJ54voVAuQ4jXJvigCW2qeCjXlxeD6K2j4eGJEEOmIjIW1wjubyBY6OI3
|
||||
------END RSA PRIVATE KEY-----
|
||||
+-----BEGIN ENCRYPTED PRIVATE KEY-----
|
||||
+MIIHbTBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQIhD+rJdxqb6ECAggA
|
||||
+MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBDTdyjCP3riOSUfxix4aXEvBIIH
|
||||
+ECGkbsFabrcFMZcplw5jHMaOlG7rYjUzwDJ80JM8uzbv2Jb8SvNlns2+xmnEvH/M
|
||||
+mNvRmnXmplbVjH3XBMK8o2Psnr2V/a0j7/pgqpRxHykG+koOY4gzdt3MAg8JPbS2
|
||||
+hymSl+Y5EpciO3xLfz4aFL1ZNqspQbO/TD13Ij7DUIy7xIRBMp4taoZCrP0cEBAZ
|
||||
++wgu9m23I4dh3E8RUBzWyFFNic2MVVHrui6JbHc4dIHfyKLtXJDhUcS0vIC9PvcV
|
||||
+jhorh3UZC4lM+/jjXV5AhzQ0VrJ2tXAUX2dA144XHzkSH2QmwfnajPsci7BL2CGC
|
||||
+rjyTy4NfB/lDwU+55dqJZQSKXMxAapJMrtgw7LD5CKQcN6zmfhXGssJ7HQUXKkaX
|
||||
+I1YOFzuUD7oo56BVCnVswv0jX9RxrE5QYNreMlOP9cS+kIYH65N+PAhlURuQC14K
|
||||
+PgDkHn5knSa2UQA5tc5f7zdHOZhGRUfcjLP+KAWA3nh+/2OKw/X3zuPx75YT/FKe
|
||||
+tACPw5hjEpl62m9Xa0eWepZXwqkIOkzHMmCyNCsbC0mmRoEjmvfnslfsmnh4Dg/c
|
||||
+4YsTYMOLLIeCa+WIc38aA5W2lNO9lW0LwLhX1rP+GRVPv+TVHXlfoyaI+jp0iXrJ
|
||||
+t3xxT0gaiIR/VznyS7Py68QV/zB7VdqbsNzS7LdquHK1k8+7OYiWjY3gqyU40Iu2
|
||||
+d1eSnIoDvQJwyYp7XYXbOlXNLY+s1Qb7yxcW3vXm0Bg3gKT8r1XHWJ9rj+CxAn5r
|
||||
+ysfkPs1JsesxzzQjwTiDNvHnBnZnwxuxfBr26ektEHmuAXSl8V6dzLN/aaPjpTj4
|
||||
+CkE7KyqX3U9bLkp+ztl4xWKEmW44nskzm0+iqrtrxMyTfvvID4QrABjZL4zmWIqc
|
||||
+e3ZfA3AYk9VDIegk/YKGC5VZ8YS7ZXQ0ASK652XqJ7QlMKTxxV7zda6Fp4uW6/qN
|
||||
+ezt5wgbGGhZQXj2wDQmWNQYyG/juIgYTpCUA54U5XBIjuR6pg+Ytm0UrvNjsUoAC
|
||||
+wGelyqaLDq8U8jdIFYVTJy9aJjQOYXjsUJ0dZN2aGHSlju0ZGIZc49cTIVQ9BTC5
|
||||
+Yc0Vlwzpl+LuA25DzKZNSb/ci0lO/cQGJ2uXQQgaNgdsHlu8nukENGJhnIzx4fzK
|
||||
+wEh3yHxhTRCzPPwDfXmx0IHXrPqJhSpAgaXBVIm8OjvmMxO+W75W4uLfNY/B7e2H
|
||||
+3cjklGuvkofOf7sEOrGUYf4cb6Obg8FpvHgpKo5Twwmoh/qvEKckBFqNhZXDDl88
|
||||
+GbGlSEgyaAV1Ig8s1NJKBolWFa0juyPAwJ8vT1T4iwW7kQ7KXKt2UNn96K/HxkLu
|
||||
+pikvukz8oRHMlfVHa0R48UB1fFHwZLzPmwkpu6ancIxk3uO3yfhf6iDk3bmnyMlz
|
||||
+g3k/b6MrLYaOVByRxay85jH3Vvgqfgn6wa6BJ7xQ81eZ8B45gFuTH0J5JtLL7SH8
|
||||
+darRPLCYfA+Ums9/H6pU5EXfd3yfjMIbvhCXHkJrrljkZ+th3p8dyto6wmYqIY6I
|
||||
+qR9sU+o6DhRaiP8tCICuhHxQpXylUM6WeJkJwduTJ8KWIvzsj4mReIKOl/oC2jSd
|
||||
+gIdKhb9Q3zj9ce4N5m6v66tyvjxGZ+xf3BvUPDD+LwZeXgf7OBsNVbXzQbzto594
|
||||
+nbCzPocFi3gERE50ru4K70eQCy08TPG5NpOz+DDdO5vpAuMLYEuI7O3L+3GjW40Q
|
||||
+G5bu7H5/i7o/RWR67qhG/7p9kPw3nkUtYgnvnWaPMIuTfb4c2d069kjlfgWjIbbI
|
||||
+tpSKmm5DHlqTE4/ECAbIEDtSaw9dXHCdL3nh5+n428xDdGbjN4lT86tfu17EYKzl
|
||||
+ydH1RJ1LX3o3TEj9UkmDPt7LnftvwybMFEcP7hM2xD4lC++wKQs7Alg6dTkBnJV4
|
||||
+5xU78WRntJkJTU7kFkpPKA0QfyCuSF1fAMoukDBkqUdOj6jE0BlJQlHk5iwgnJlt
|
||||
+uEdkTjHZEjIUxWC6llPcAzaPNlmnD45AgfEW+Jn21IvutmJiQAz5lm9Z9PXaR0C8
|
||||
+hXB6owRY67C0YKQwXhoNf6xQun2xGBGYy5rPEEezX1S1tUH5GR/KW1Lh+FzFqHXI
|
||||
+ZEb5avfDqHKehGAjPON+Br7akuQ125M9LLjKuSyPaQzeeCAy356Xd7XzVwbPddbm
|
||||
+9S9WSPqzaPgh10chIHoNoC8HMd33dB5j9/Q6jrbU/oPlptu/GlorWblvJdcTuBGI
|
||||
+IVn45RFnkG8hCz0GJSNzW7+70YdESQbfJW79vssWMaiSjFE0pMyFXrFR5lBywBTx
|
||||
+PiGEUWtvrKG94X1TMlGUzDzDJOQNZ9dT94bonNe9pVmP5BP4/DzwwiWh6qrzWk6p
|
||||
+j8OE4cfCSh2WvHnhJbH7/N0v+JKjtxeIeJ16jx/K2oK5
|
||||
+-----END ENCRYPTED PRIVATE KEY-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEWTCCAsGgAwIBAgIJAJinz4jHSjLtMA0GCSqGSIb3DQEBCwUAMF8xCzAJBgNV
|
||||
BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u
|
||||
@@ -66,3 +66,4 @@ jMqTFlmO7kpf/jpCSmamp3/JSEE1BJKHwQ6Ql4nzRA2N1mnvWH7Zxcv043gkHeAu
|
||||
9Wc2uXpw9xF8itV4Uvcdr3dwqByvIqn7iI/gB+4l41e0u8OmH2MKOx4Nxlly5TNW
|
||||
HcVKQHyOeyvnINuBAQ==
|
||||
-----END CERTIFICATE-----
|
||||
+
|
||||
diff --git a/Lib/test/make_ssl_certs.py b/Lib/test/make_ssl_certs.py
|
||||
index 3622765..41b5f46 100644
|
||||
--- a/Lib/test/make_ssl_certs.py
|
||||
+++ b/Lib/test/make_ssl_certs.py
|
||||
@@ -206,8 +206,8 @@ if __name__ == '__main__':
|
||||
with open('ssl_key.pem', 'w') as f:
|
||||
f.write(key)
|
||||
print("password protecting ssl_key.pem in ssl_key.passwd.pem")
|
||||
- check_call(['openssl','rsa','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-des3','-passout','pass:somepass'])
|
||||
- check_call(['openssl','rsa','-in','ssl_key.pem','-out','keycert.passwd.pem','-des3','-passout','pass:somepass'])
|
||||
+ check_call(['openssl','pkey','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-aes256','-passout','pass:somepass'])
|
||||
+ check_call(['openssl','pkey','-in','ssl_key.pem','-out','keycert.passwd.pem','-aes256','-passout','pass:somepass'])
|
||||
|
||||
with open('keycert.pem', 'w') as f:
|
||||
f.write(key)
|
||||
diff --git a/Lib/test/ssl_key.passwd.pem b/Lib/test/ssl_key.passwd.pem
|
||||
index e4f1370..46de61a 100644
|
||||
--- a/Lib/test/ssl_key.passwd.pem
|
||||
+++ b/Lib/test/ssl_key.passwd.pem
|
||||
@@ -1,42 +1,42 @@
|
||||
------BEGIN RSA PRIVATE KEY-----
|
||||
-Proc-Type: 4,ENCRYPTED
|
||||
-DEK-Info: DES-EDE3-CBC,8064BE1494B24B13
|
||||
-
|
||||
-KJrffOMbo8M0I3PzcYxRZGMpKD1yB3Ii4+bT5XoanxjIJ+4fdx6LfZ0Rsx+riyzs
|
||||
-tymsQu/iYY9j+4rCvN9+eetsL1X6iZpiimKsLexcid9M3fb0vxED5Sgw0dvunCUA
|
||||
-xhqjLIKR92MKbODHf6KrDKCpsiPbjq4gZ7P+uCGXAMHL3MXIJSC0hW9rK7Ce6oyO
|
||||
-CjpIcgB8x+GUWZZZhAFdlzIHMZrteNP2P5HK6QcaT71P034Dz1hhqoj4Q0t+Fta2
|
||||
-4tfsM/bnTR/l6hwlhPa1e3Uj322tDTDWBScgWANn5+sEWldLmozMaWhZsn22pfk2
|
||||
-KjRMGXG024JVheV882nbdOBvG7oq+lxkZ/ZP+vvqJqnvYtf7WtM8UivzYpe5Hz5b
|
||||
-kVvWzPjBLUSZ9whM9rDLqSSqMPyPvDTuEmLkuq+xm7pYJmsLqIMP2klZLqRxLX6K
|
||||
-uqwplb8UG440qauxgnQ905PId1l2fJEnRtV+7vXprA0L0QotgXLVHBhLmTFM+3PH
|
||||
-9H3onf31dionUAPrn3nfVE36HhvVgRyvDBnBzJSIMighgq21Qx/d1dk0DRYi1hUI
|
||||
-nCHl0YJPXheVcXR7JiSF2XQCAaFuS1Mr7NCXfWZOZQC/0dkvmHnl9DUAhuqq9BNZ
|
||||
-1cKhZXcKHadg2/r0Zup/oDzmHPUEfTAXT0xbqoWlhkdwbF2veWQ96A/ncx3ISTb4
|
||||
-PkXBlX9rdia8nmtyQDQRn4NuvchbaGkj4WKFC8pF8Hn7naHqwjpHaDUimBc0CoQW
|
||||
-edNJqruKWwtSVLuwKHCC2gZFX9AXSKJXJz/QRSUlhFGOhuF/J6yKaXj6n5lxWNiQ
|
||||
-54J+OP/hz2aS95CD2+Zf1SKpxdWiLZSIQqESpmmUrXROixNJZ/Z7gI74Dd9dSJOH
|
||||
-W+3AU03vrrFZVrJVZhjcINHoH1Skh6JKscH18L6x4U868nSr4SrRLX8BhHllOQyD
|
||||
-bmU+PZAjF8ZBIaCtTGulDXD29F73MeAZeTSsgQjFu0iKLj1wPiphbx8i/SUtR4YP
|
||||
-X6PVA04g66r1NBw+3RQASVorZ3g1MSFvITHXcbKkBDeJH2z1+c6t/VVyTONnQhM5
|
||||
-lLgRSk6HCbetvT9PKxWrWutA12pdBYEHdZhMHVf2+xclky7l09w8hg2/qqcdGRGe
|
||||
-oAOZ72t0l5ObNyaruDKUS6f4AjOyWq/Xj5xuFtf1n3tQHyslSyCTPcAbQhDfTHUx
|
||||
-vixb/V9qvYPt7OCn8py7v1M69NH42QVFAvwveDIFjZdqfIKBoJK2V4qPoevJI6uj
|
||||
-Q5ByMt8OXOjSXNpHXpYQWUiWeCwOEBXJX8rzCHdMtg37jJ0zCmeErR1NTdg+EujM
|
||||
-TWYgd06jlT67tURST0aB2kg4ijKgUJefD313LW1zC6gVsTbjSZxYyRbPfSP6flQB
|
||||
-yCi1C19E2OsgleqbkBVC5GlYUzaJT7SGjCRmGx1eqtbrALu+LVH24Wceexlpjydl
|
||||
-+s2nf/DZlKun/tlPh6YioifPCJjByZMQOCEfIox6BkemZETz8uYA4TTWimG13Z03
|
||||
-gyDGC2jdpEW414J2qcQDvrdUgJ+HlhrAAHaWpMQDbXYxBGoZ+3+ORvQV4kAsCwL8
|
||||
-k3EIrVpePdik+1xgOWsyLj6QxFXlTMvL6Wc5pnArFPORsgHEolJvxSPTf9aAHNPn
|
||||
-V2WBvxiLBtYpGrujAUM40Syx/aN2RPtcXYPAusHUBw+S8/p+/8Kg8GZmnIXG3F89
|
||||
-45Eepl2quZYIrou7a1fwIpIIZ0hFiBQ1mlHVMFtxwVHS1bQb3SU2GeO+JcGjdVXc
|
||||
-04qeGuQ5M164eQ5C0T7ZQ1ULiUlFWKD30m+cjqmZzt3d7Q0mKpMKuESIuZJo/wpD
|
||||
-Nas432aLKUhcNx/pOYLkKJRpGZKOupQoD5iUj/j44o8JoFkDK33v2S57XB5QGz28
|
||||
-9Zuhx49b3W8mbM6EBanlQKLWJGCxXqc/jhYhFWn+b0MhidynFgA0oeWvf6ZDyt6H
|
||||
-Yi5Etxsar09xp0Do3NxtQXLuSUu0ji2pQzSIKuoqQWKqldm6VrpwojiqJhy4WQBQ
|
||||
-aVVyFeWBC7G3Zj76dO+yp2sfJ0itJUQ8AIB9Cg0f34rEZu+r9luPmqBoUeL95Tk7
|
||||
-YvCOU3Jl8Iqysv8aNpVXT8sa8rrSbruWCByEePZ37RIdHLMVBwVY0eVaFQjrjU7E
|
||||
-mXmM9eaoYLfXOllsQ+M2+qPFUITr/GU3Qig13DhK/+yC1R6V2a0l0WRhMltIPYKW
|
||||
-Ztvvr4hK5LcYCeS113BLiMbDIMMZZYGDZGMdC8DnnVbT2loF0Rfmp80Af31KmMQ4
|
||||
-6XvMatW9UDjBoY5a/YMpdm7SRwm+MgV2KNPpc2kST87/yi9oprGAb8qiarHiHTM0
|
||||
------END RSA PRIVATE KEY-----
|
||||
+-----BEGIN ENCRYPTED PRIVATE KEY-----
|
||||
+MIIHbTBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQI072N7W+PDDMCAggA
|
||||
+MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBA/AuaRNi4vE4KGqI4In+70BIIH
|
||||
+ENGS5Vex5NID873frmd1UZEHZ+O/Bd0wDb+NUpIqesHkRYf7kKi6Gnr+nKQ/oVVn
|
||||
+Lm3JjE7c8ECP0OkOOXmiXuWL1SkzBBWqCI4stSGUPvBiHsGwNnvJAaGjUffgMlcC
|
||||
+aJOA2+dnejLkzblq4CB2LQdm06N3Xoe9tyqtQaUHxfzJAf5Ydd8uj7vpKN2MMhY7
|
||||
+icIPJwSyh0N7S6XWVtHEokr9Kp4y2hS5a+BgCWV1/1z0aF7agnSVndmT1VR+nWmc
|
||||
+lM14k+lethmHMB+fsNSjnqeJ7XOPlOTHqhiZ9bBSTgF/xr5Bck/NiKRzHjdovBox
|
||||
+TKg+xchaBhpRh7wBPBIlNJeHmIjv+8obOKjKU98Ig/7R9+IryZaNcKAH0PuOT+Sw
|
||||
+QHXiCGQbOiYHB9UyhDTWiB7YVjd8KHefOFxfHzOQb/iBhbv1x3bTl3DgepvRN6VO
|
||||
+dIsPLoIZe42sdf9GeMsk8mGJyZUQ6AzsfhWk3grb/XscizPSvrNsJ2VL1R7YTyT3
|
||||
+3WA4ZXR1EqvXnWL7N/raemQjy62iOG6t7fcF5IdP9CMbWP+Plpsz4cQW7FtesCTq
|
||||
+a5ZXraochQz361ODFNIeBEGU+0qqXUtZDlmos/EySkZykSeU/L0bImS62VGE3afo
|
||||
+YXBmznTTT9kkFkqv7H0MerfJsrE/wF8puP3GM01DW2JRgXRpSWlvbPV/2LnMtRuD
|
||||
+II7iH4rWDtTjCN6BWKAgDOnPkc9sZ4XulqT32lcUeV6LTdMBfq8kMEc8eDij1vUT
|
||||
+maVCRpuwaq8EIT3lVgNLufHiG96ojlyYtj3orzw22IjkgC/9ee8UDik9CqbMVmFf
|
||||
+fVHhsw8LNSg8Q4bmwm5Eg2w2it2gtI68+mwr75oCxuJ/8OMjW21Prj8XDh5reie2
|
||||
+c0lDKQOFZ9UnLU1bXR/6qUM+JFKR4DMq+fOCuoQSVoyVUEOsJpvBOYnYZN9cxsZm
|
||||
+vh9dKafMEcKZ8flsbr+gOmOw7+Py2ifSlf25E/Frb1W4gtbTb0LQVHb6+drutrZj
|
||||
+8HEu4CnHYFCD4ZnOJb26XlZCb8GFBddW86yJYyUqMMV6Q1aJfAOAglsTo1LjIMOZ
|
||||
+byo0BTAmwUevU/iuOXQ4qRBXXcoidDcTCrxfUSPG9wdt9l+m5SdQpWqfQ+fx5O7m
|
||||
+SLlrHyZCiPSFMtC9DxqjIklHjf5W3wslGLgaD30YXa4VDYkRihf3CNsxGQ+tVvef
|
||||
+l0ZjoAitF7Gaua06IESmKnpHe23dkr1cjYq+u2IV+xGH8LeExdwsQ9kpuTeXPnQs
|
||||
+JOA99SsFx1ct32RrwjxnDDsiNkaViTKo9GDkV3jQTfoFgAVqfSgg9wGXpqUqhNG7
|
||||
+TiSIHCowllLny2zn4XrXCy2niD3VDt0skb3l/PaegHE2z7S5YY85nQtYwpLiwB9M
|
||||
+SQ08DYKxPBZYKtS2iZ/fsA1gjSRQDPg/SIxMhUC3M3qH8iWny1Lzl25F2Uq7VVEX
|
||||
+LdTUtaby49jRTT3CQGr5n6z7bMbUegiY7h8WmOekuThGDH+4xZp6+rDP4GFk4FeK
|
||||
+JcF70vMQYIjQZhadic6olv+9VtUP42ltGG/yP9a3eWRkzfAf2eCh6B1rYdgEWwE8
|
||||
+rlcZzwM+y6eUmeNF2FVWB8iWtTMQHy+dYNPM+Jtus1KQKxiiq/yCRs7nWvzWRFWA
|
||||
+HRyqV0J6/lqgm4FvfktFt1T0W+mDoLJOR2/zIwMy2lgL5zeHuR3SaMJnCikJbqKS
|
||||
+HB3UvrhAWUcZqdH29+FhVWeM7ybyF1Wccmf+IIC/ePLa6gjtqPV8lG/5kbpcpnB6
|
||||
+UQY8WWaKMxyr3jJ9bAX5QKshchp04cDecOLZrpFGNNQngR8RxSEkiIgAqNxWunIu
|
||||
+KrdBDrupv/XAgEOclmgToY3iywLJSV5gHAyHWDUhRH4cFCLiGPl4XIcnXOuTze3H
|
||||
+3j+EYSiS3v3DhHjp33YU2pXlJDjiYsKzAXejEh66++Y8qaQdCAad3ruWRCzW3kgk
|
||||
+Md0A1VGzntTnQsewvExQEMZH2LtYIsPv3KCYGeSAuLabX4tbGk79PswjnjLLEOr0
|
||||
+Ghf6RF6qf5/iFyJoG4vrbKT8kx6ywh0InILCdjUunuDskIBxX6tEcr9XwajoIvb2
|
||||
+kcmGdjam5kKLS7QOWQTl8/r/cuFes0dj34cX5Qpq+Gd7tRq/D+b0207926Cxvftv
|
||||
+qQ1cVn8HiLxKkZzd3tpf2xnoV1zkTL0oHrNg+qzxoxXUTUcwtIf1d/HRbYEAhi/d
|
||||
+bBBoFeftEHWNq+sJgS9bH+XNzo/yK4u04B5miOq8v4CSkJdzu+ZdF22d4cjiGmtQ
|
||||
+8BTmcn0Unzm+u5H0+QSZe54QBHJGNXXOIKMTkgnOdW27g4DbI1y7fCqJiSMbRW6L
|
||||
+oHmMfbdB3GWqGbsUkhY8i6h9op0MU6WOX7ea2Rxyt4t6
|
||||
+-----END ENCRYPTED PRIVATE KEY-----
|
||||
diff --git a/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst b/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst
|
||||
new file mode 100644
|
||||
index 0000000..8f43d32
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst
|
||||
@@ -0,0 +1,4 @@
|
||||
+The private keys for test_ssl were encrypted with 3DES in traditional
|
||||
+PKCS#5 format. 3DES and the digest algorithm of PKCS#5 are blocked by
|
||||
+some strict crypto policies. Use PKCS#8 format with AES256 encryption
|
||||
+instead.
|
||||
--
|
||||
2.21.0
|
||||
|
||||
|
||||
From d8584f9bb3fb841a1b21ed25abc2237ea8bbc206 Mon Sep 17 00:00:00 2001
|
||||
From: Charalampos Stratakis <cstratak@redhat.com>
|
||||
Date: Tue, 26 Nov 2019 23:57:21 +0100
|
||||
Subject: [PATCH 3/5] Use PROTOCOL_TLS_CLIENT/SERVER
|
||||
|
||||
Replaces PROTOCOL_TLSv* and PROTOCOL_SSLv23 with PROTOCOL_TLS_CLIENT and
|
||||
PROTOCOL_TLS_SERVER.
|
||||
|
||||
Partially backports a170fa162dc03f0a014373349e548954fff2e567
|
||||
---
|
||||
Lib/ssl.py | 7 +-
|
||||
Lib/test/test_logging.py | 2 +-
|
||||
Lib/test/test_ssl.py | 169 +++++++++++++++++++--------------------
|
||||
3 files changed, 87 insertions(+), 91 deletions(-)
|
||||
|
||||
diff --git a/Lib/ssl.py b/Lib/ssl.py
|
||||
index 0114387..c5c5529 100644
|
||||
--- a/Lib/ssl.py
|
||||
+++ b/Lib/ssl.py
|
||||
@@ -473,7 +473,7 @@ def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None,
|
||||
context.load_default_certs(purpose)
|
||||
return context
|
||||
|
||||
-def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=None,
|
||||
+def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=CERT_NONE,
|
||||
check_hostname=False, purpose=Purpose.SERVER_AUTH,
|
||||
certfile=None, keyfile=None,
|
||||
cafile=None, capath=None, cadata=None):
|
||||
@@ -492,9 +492,12 @@ def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=None,
|
||||
# by default.
|
||||
context = SSLContext(protocol)
|
||||
|
||||
+ if not check_hostname:
|
||||
+ context.check_hostname = False
|
||||
if cert_reqs is not None:
|
||||
context.verify_mode = cert_reqs
|
||||
- context.check_hostname = check_hostname
|
||||
+ if check_hostname:
|
||||
+ context.check_hostname = True
|
||||
|
||||
if keyfile and not certfile:
|
||||
raise ValueError("certfile must be specified")
|
||||
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
|
||||
index 763a5d1..d5c63b4 100644
|
||||
--- a/Lib/test/test_logging.py
|
||||
+++ b/Lib/test/test_logging.py
|
||||
@@ -1830,7 +1830,7 @@ class HTTPHandlerTest(BaseTest):
|
||||
else:
|
||||
here = os.path.dirname(__file__)
|
||||
localhost_cert = os.path.join(here, "keycert.pem")
|
||||
- sslctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
|
||||
+ sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
sslctx.load_cert_chain(localhost_cert)
|
||||
|
||||
context = ssl.create_default_context(cafile=localhost_cert)
|
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
|
||||
index 639109f..a7bf2f7 100644
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -155,6 +155,8 @@ def test_wrap_socket(sock, ssl_version=ssl.PROTOCOL_TLS, *,
|
||||
**kwargs):
|
||||
context = ssl.SSLContext(ssl_version)
|
||||
if cert_reqs is not None:
|
||||
+ if cert_reqs == ssl.CERT_NONE:
|
||||
+ context.check_hostname = False
|
||||
context.verify_mode = cert_reqs
|
||||
if ca_certs is not None:
|
||||
context.load_verify_locations(ca_certs)
|
||||
@@ -1377,7 +1379,7 @@ class ContextTests(unittest.TestCase):
|
||||
self._assert_context_options(ctx)
|
||||
|
||||
def test_check_hostname(self):
|
||||
- ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
self.assertFalse(ctx.check_hostname)
|
||||
|
||||
# Requires CERT_REQUIRED or CERT_OPTIONAL
|
||||
@@ -2386,17 +2388,13 @@ if _have_threads:
|
||||
server_params_test(context, context,
|
||||
chatty=True, connectionchatty=True)
|
||||
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
- client_context.load_verify_locations(SIGNING_CA)
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
- # server_context.load_verify_locations(SIGNING_CA)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE2)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
|
||||
with self.subTest(client=ssl.PROTOCOL_TLS_CLIENT, server=ssl.PROTOCOL_TLS_SERVER):
|
||||
server_params_test(client_context=client_context,
|
||||
server_context=server_context,
|
||||
chatty=True, connectionchatty=True,
|
||||
- sni_name='fakehostname')
|
||||
+ sni_name='localhost')
|
||||
|
||||
client_context.check_hostname = False
|
||||
with self.subTest(client=ssl.PROTOCOL_TLS_SERVER, server=ssl.PROTOCOL_TLS_CLIENT):
|
||||
@@ -2404,7 +2402,7 @@ if _have_threads:
|
||||
server_params_test(client_context=server_context,
|
||||
server_context=client_context,
|
||||
chatty=True, connectionchatty=True,
|
||||
- sni_name='fakehostname')
|
||||
+ sni_name='localhost')
|
||||
self.assertIn('called a function you should not call',
|
||||
str(e.exception))
|
||||
|
||||
@@ -2469,39 +2467,38 @@ if _have_threads:
|
||||
if support.verbose:
|
||||
sys.stdout.write("\n")
|
||||
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.verify_mode = ssl.CERT_REQUIRED
|
||||
- context.load_verify_locations(SIGNING_CA)
|
||||
tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0)
|
||||
- self.assertEqual(context.verify_flags, ssl.VERIFY_DEFAULT | tf)
|
||||
+ self.assertEqual(client_context.verify_flags, ssl.VERIFY_DEFAULT | tf)
|
||||
|
||||
# VERIFY_DEFAULT should pass
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket()) as s:
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
+ server_hostname=hostname) as s:
|
||||
s.connect((HOST, server.port))
|
||||
cert = s.getpeercert()
|
||||
self.assertTrue(cert, "Can't get peer certificate.")
|
||||
|
||||
# VERIFY_CRL_CHECK_LEAF without a loaded CRL file fails
|
||||
- context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF
|
||||
+ client_context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF
|
||||
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket()) as s:
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
+ server_hostname=hostname) as s:
|
||||
with self.assertRaisesRegex(ssl.SSLError,
|
||||
"certificate verify failed"):
|
||||
s.connect((HOST, server.port))
|
||||
|
||||
# now load a CRL file. The CRL file is signed by the CA.
|
||||
- context.load_verify_locations(CRLFILE)
|
||||
+ client_context.load_verify_locations(CRLFILE)
|
||||
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket()) as s:
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
+ server_hostname=hostname) as s:
|
||||
s.connect((HOST, server.port))
|
||||
cert = s.getpeercert()
|
||||
self.assertTrue(cert, "Can't get peer certificate.")
|
||||
@@ -2510,19 +2507,13 @@ if _have_threads:
|
||||
if support.verbose:
|
||||
sys.stdout.write("\n")
|
||||
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
-
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.verify_mode = ssl.CERT_REQUIRED
|
||||
- context.check_hostname = True
|
||||
- context.load_verify_locations(SIGNING_CA)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
|
||||
# correct hostname should verify
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket(),
|
||||
- server_hostname="localhost") as s:
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
+ server_hostname=hostname) as s:
|
||||
s.connect((HOST, server.port))
|
||||
cert = s.getpeercert()
|
||||
self.assertTrue(cert, "Can't get peer certificate.")
|
||||
@@ -2530,7 +2521,7 @@ if _have_threads:
|
||||
# incorrect hostname should raise an exception
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket(),
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
server_hostname="invalid") as s:
|
||||
with self.assertRaisesRegex(ssl.CertificateError,
|
||||
"hostname 'invalid' doesn't match 'localhost'"):
|
||||
@@ -2542,7 +2533,7 @@ if _have_threads:
|
||||
with socket.socket() as s:
|
||||
with self.assertRaisesRegex(ValueError,
|
||||
"check_hostname requires server_hostname"):
|
||||
- context.wrap_socket(s)
|
||||
+ client_context.wrap_socket(s)
|
||||
|
||||
def test_wrong_cert(self):
|
||||
"""Connecting when the server rejects the client's certificate
|
||||
@@ -2767,7 +2758,6 @@ if _have_threads:
|
||||
msgs = (b"msg 1", b"MSG 2", b"STARTTLS", b"MSG 3", b"msg 4", b"ENDTLS", b"msg 5", b"msg 6")
|
||||
|
||||
server = ThreadedEchoServer(CERTFILE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
starttls_server=True,
|
||||
chatty=True,
|
||||
connectionchatty=True)
|
||||
@@ -2795,7 +2785,7 @@ if _have_threads:
|
||||
sys.stdout.write(
|
||||
" client: read %r from server, starting TLS...\n"
|
||||
% msg)
|
||||
- conn = test_wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ conn = test_wrap_socket(s)
|
||||
wrapped = True
|
||||
elif indata == b"ENDTLS" and msg.startswith(b"ok"):
|
||||
# ENDTLS ok, switch back to clear text
|
||||
@@ -2882,7 +2872,7 @@ if _have_threads:
|
||||
|
||||
server = ThreadedEchoServer(CERTFILE,
|
||||
certreqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_SERVER,
|
||||
cacerts=CERTFILE,
|
||||
chatty=True,
|
||||
connectionchatty=False)
|
||||
@@ -2892,7 +2882,7 @@ if _have_threads:
|
||||
certfile=CERTFILE,
|
||||
ca_certs=CERTFILE,
|
||||
cert_reqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_CLIENT)
|
||||
s.connect((HOST, server.port))
|
||||
# helper methods for standardising recv* method signatures
|
||||
def _recv_into():
|
||||
@@ -3034,7 +3024,7 @@ if _have_threads:
|
||||
def test_nonblocking_send(self):
|
||||
server = ThreadedEchoServer(CERTFILE,
|
||||
certreqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_SERVER,
|
||||
cacerts=CERTFILE,
|
||||
chatty=True,
|
||||
connectionchatty=False)
|
||||
@@ -3044,7 +3034,7 @@ if _have_threads:
|
||||
certfile=CERTFILE,
|
||||
ca_certs=CERTFILE,
|
||||
cert_reqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_CLIENT)
|
||||
s.connect((HOST, server.port))
|
||||
s.setblocking(False)
|
||||
|
||||
@@ -3190,9 +3180,11 @@ if _have_threads:
|
||||
Basic tests for SSLSocket.version().
|
||||
More tests are done in the test_protocol_*() methods.
|
||||
"""
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
+ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
+ context.check_hostname = False
|
||||
+ context.verify_mode = ssl.CERT_NONE
|
||||
with ThreadedEchoServer(CERTFILE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_SERVER,
|
||||
chatty=False) as server:
|
||||
with context.wrap_socket(socket.socket()) as s:
|
||||
self.assertIs(s.version(), None)
|
||||
@@ -3247,7 +3239,7 @@ if _have_threads:
|
||||
|
||||
server = ThreadedEchoServer(CERTFILE,
|
||||
certreqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_SERVER,
|
||||
cacerts=CERTFILE,
|
||||
chatty=True,
|
||||
connectionchatty=False)
|
||||
@@ -3257,7 +3249,7 @@ if _have_threads:
|
||||
certfile=CERTFILE,
|
||||
ca_certs=CERTFILE,
|
||||
cert_reqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_CLIENT)
|
||||
s.connect((HOST, server.port))
|
||||
# get the data
|
||||
cb_data = s.get_channel_binding("tls-unique")
|
||||
@@ -3282,7 +3274,7 @@ if _have_threads:
|
||||
certfile=CERTFILE,
|
||||
ca_certs=CERTFILE,
|
||||
cert_reqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_CLIENT)
|
||||
s.connect((HOST, server.port))
|
||||
new_cb_data = s.get_channel_binding("tls-unique")
|
||||
if support.verbose:
|
||||
@@ -3299,32 +3291,35 @@ if _have_threads:
|
||||
s.close()
|
||||
|
||||
def test_compression(self):
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
if support.verbose:
|
||||
sys.stdout.write(" got compression: {!r}\n".format(stats['compression']))
|
||||
self.assertIn(stats['compression'], { None, 'ZLIB', 'RLE' })
|
||||
|
||||
+
|
||||
@unittest.skipUnless(hasattr(ssl, 'OP_NO_COMPRESSION'),
|
||||
"ssl.OP_NO_COMPRESSION needed for this test")
|
||||
def test_compression_disabled(self):
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- context.options |= ssl.OP_NO_COMPRESSION
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ client_context.options |= ssl.OP_NO_COMPRESSION
|
||||
+ server_context.options |= ssl.OP_NO_COMPRESSION
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
self.assertIs(stats['compression'], None)
|
||||
|
||||
def test_dh_params(self):
|
||||
# Check we can get a connection with ephemeral Diffie-Hellman
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- context.load_dh_params(DHFILE)
|
||||
- context.set_ciphers("kEDH")
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ server_context.load_dh_params(DHFILE)
|
||||
+ server_context.set_ciphers("kEDH")
|
||||
+ server_context.options |= ssl.OP_NO_TLSv1_3
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
cipher = stats["cipher"][0]
|
||||
parts = cipher.split("-")
|
||||
if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts:
|
||||
@@ -3332,22 +3327,20 @@ if _have_threads:
|
||||
|
||||
def test_selected_alpn_protocol(self):
|
||||
# selected_alpn_protocol() is None unless ALPN is used.
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
self.assertIs(stats['client_alpn_protocol'], None)
|
||||
|
||||
@unittest.skipUnless(ssl.HAS_ALPN, "ALPN support required")
|
||||
def test_selected_alpn_protocol_if_server_uses_alpn(self):
|
||||
# selected_alpn_protocol() is None unless ALPN is used by the client.
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- client_context.load_verify_locations(CERTFILE)
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(CERTFILE)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
server_context.set_alpn_protocols(['foo', 'bar'])
|
||||
stats = server_params_test(client_context, server_context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
self.assertIs(stats['client_alpn_protocol'], None)
|
||||
|
||||
@unittest.skipUnless(ssl.HAS_ALPN, "ALPN support needed for this test")
|
||||
@@ -3394,10 +3387,10 @@ if _have_threads:
|
||||
|
||||
def test_selected_npn_protocol(self):
|
||||
# selected_npn_protocol() is None unless NPN is used
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
self.assertIs(stats['client_npn_protocol'], None)
|
||||
|
||||
@unittest.skipUnless(ssl.HAS_NPN, "NPN support needed for this test")
|
||||
@@ -3430,12 +3423,11 @@ if _have_threads:
|
||||
self.assertEqual(server_result, expected, msg % (server_result, "server"))
|
||||
|
||||
def sni_contexts(self):
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
+ server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
- other_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
+ other_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
other_context.load_cert_chain(SIGNED_CERTFILE2)
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- client_context.verify_mode = ssl.CERT_REQUIRED
|
||||
+ client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
client_context.load_verify_locations(SIGNING_CA)
|
||||
return server_context, other_context, client_context
|
||||
|
||||
@@ -3448,6 +3440,8 @@ if _have_threads:
|
||||
calls = []
|
||||
server_context, other_context, client_context = self.sni_contexts()
|
||||
|
||||
+ client_context.check_hostname = False
|
||||
+
|
||||
def servername_cb(ssl_sock, server_name, initial_context):
|
||||
calls.append((server_name, initial_context))
|
||||
if server_name is not None:
|
||||
@@ -3533,11 +3527,7 @@ if _have_threads:
|
||||
self.assertIn("TypeError", stderr.getvalue())
|
||||
|
||||
def test_shared_ciphers(self):
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- client_context.verify_mode = ssl.CERT_REQUIRED
|
||||
- client_context.load_verify_locations(SIGNING_CA)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
if ssl.OPENSSL_VERSION_INFO >= (1, 0, 2):
|
||||
client_context.set_ciphers("AES128:AES256")
|
||||
server_context.set_ciphers("AES256")
|
||||
@@ -3555,7 +3545,8 @@ if _have_threads:
|
||||
# TLS 1.3 ciphers are always enabled
|
||||
expected_algs.extend(["TLS_CHACHA20", "TLS_AES"])
|
||||
|
||||
- stats = server_params_test(client_context, server_context)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ sni_name=hostname)
|
||||
ciphers = stats['server_shared_ciphers'][0]
|
||||
self.assertGreater(len(ciphers), 0)
|
||||
for name, tls_version, bits in ciphers:
|
||||
@@ -3595,14 +3586,13 @@ if _have_threads:
|
||||
self.assertEqual(s.recv(1024), TEST_DATA)
|
||||
|
||||
def test_session(self):
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- client_context.verify_mode = ssl.CERT_REQUIRED
|
||||
- client_context.load_verify_locations(SIGNING_CA)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ # TODO: sessions aren't compatible with TLSv1.3 yet
|
||||
+ client_context.options |= ssl.OP_NO_TLSv1_3
|
||||
|
||||
# first connection without session
|
||||
- stats = server_params_test(client_context, server_context)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ sni_name=hostname)
|
||||
session = stats['session']
|
||||
self.assertTrue(session.id)
|
||||
self.assertGreater(session.time, 0)
|
||||
@@ -3616,7 +3606,8 @@ if _have_threads:
|
||||
self.assertEqual(sess_stat['hits'], 0)
|
||||
|
||||
# reuse session
|
||||
- stats = server_params_test(client_context, server_context, session=session)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ session=session, sni_name=hostname)
|
||||
sess_stat = server_context.session_stats()
|
||||
self.assertEqual(sess_stat['accept'], 2)
|
||||
self.assertEqual(sess_stat['hits'], 1)
|
||||
@@ -3629,7 +3620,8 @@ if _have_threads:
|
||||
self.assertGreaterEqual(session2.timeout, session.timeout)
|
||||
|
||||
# another one without session
|
||||
- stats = server_params_test(client_context, server_context)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ sni_name=hostname)
|
||||
self.assertFalse(stats['session_reused'])
|
||||
session3 = stats['session']
|
||||
self.assertNotEqual(session3.id, session.id)
|
||||
@@ -3639,7 +3631,8 @@ if _have_threads:
|
||||
self.assertEqual(sess_stat['hits'], 1)
|
||||
|
||||
# reuse session again
|
||||
- stats = server_params_test(client_context, server_context, session=session)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ session=session, sni_name=hostname)
|
||||
self.assertTrue(stats['session_reused'])
|
||||
session4 = stats['session']
|
||||
self.assertEqual(session4.id, session.id)
|
||||
--
|
||||
2.21.0
|
||||
|
||||
|
||||
From 743c3e09b485092b51a982ab9859ffc79cbb7791 Mon Sep 17 00:00:00 2001
|
||||
From: Charalampos Stratakis <cstratak@redhat.com>
|
||||
Date: Wed, 27 Nov 2019 00:01:17 +0100
|
||||
Subject: [PATCH 4/5] Adjust some tests for TLS 1.3 compatibility
|
||||
|
||||
Partially backports some changes from 529525fb5a8fd9b96ab4021311a598c77588b918
|
||||
and 2614ed4c6e4b32eafb683f2378ed20e87d42976d
|
||||
---
|
||||
Lib/test/test_ssl.py | 17 ++++++++++++++---
|
||||
1 file changed, 14 insertions(+), 3 deletions(-)
|
||||
|
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
|
||||
index a7bf2f7..43c2dbc 100644
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -3189,7 +3189,12 @@ if _have_threads:
|
||||
with context.wrap_socket(socket.socket()) as s:
|
||||
self.assertIs(s.version(), None)
|
||||
s.connect((HOST, server.port))
|
||||
- self.assertEqual(s.version(), 'TLSv1')
|
||||
+ if IS_OPENSSL_1_1:
|
||||
+ self.assertEqual(s.version(), 'TLSv1.3')
|
||||
+ elif ssl.OPENSSL_VERSION_INFO >= (1, 0, 2):
|
||||
+ self.assertEqual(s.version(), 'TLSv1.2')
|
||||
+ else: # 0.9.8 to 1.0.1
|
||||
+ self.assertIn(s.version(), ('TLSv1', 'TLSv1.2'))
|
||||
self.assertIs(s.version(), None)
|
||||
|
||||
@unittest.skipUnless(ssl.HAS_TLSv1_3,
|
||||
@@ -3259,7 +3264,10 @@ if _have_threads:
|
||||
|
||||
# check if it is sane
|
||||
self.assertIsNotNone(cb_data)
|
||||
- self.assertEqual(len(cb_data), 12) # True for TLSv1
|
||||
+ if s.version() == 'TLSv1.3':
|
||||
+ self.assertEqual(len(cb_data), 48)
|
||||
+ else:
|
||||
+ self.assertEqual(len(cb_data), 12) # True for TLSv1
|
||||
|
||||
# and compare with the peers version
|
||||
s.write(b"CB tls-unique\n")
|
||||
@@ -3283,7 +3291,10 @@ if _have_threads:
|
||||
# is it really unique
|
||||
self.assertNotEqual(cb_data, new_cb_data)
|
||||
self.assertIsNotNone(cb_data)
|
||||
- self.assertEqual(len(cb_data), 12) # True for TLSv1
|
||||
+ if s.version() == 'TLSv1.3':
|
||||
+ self.assertEqual(len(cb_data), 48)
|
||||
+ else:
|
||||
+ self.assertEqual(len(cb_data), 12) # True for TLSv1
|
||||
s.write(b"CB tls-unique\n")
|
||||
peer_data_repr = s.read().strip()
|
||||
self.assertEqual(peer_data_repr,
|
||||
--
|
||||
2.21.0
|
||||
|
||||
|
||||
From cd250c8a782f36c7a6f5ffabc922cb75744fa9c0 Mon Sep 17 00:00:00 2001
|
||||
From: Charalampos Stratakis <cstratak@redhat.com>
|
||||
Date: Tue, 26 Nov 2019 23:18:10 +0100
|
||||
Subject: [PATCH 5/5] Skip the ssl tests that rely on TLSv1 and TLSv1.1
|
||||
availability
|
||||
|
||||
---
|
||||
Lib/test/test_ssl.py | 32 +++++++++++++++++++++++---------
|
||||
1 file changed, 23 insertions(+), 9 deletions(-)
|
||||
|
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
|
||||
index 43c2dbc..b35db25 100644
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -39,6 +39,13 @@ IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL')
|
||||
IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0)
|
||||
PY_SSL_DEFAULT_CIPHERS = sysconfig.get_config_var('PY_SSL_DEFAULT_CIPHERS')
|
||||
|
||||
+# On RHEL8 openssl disables TLSv1 and TLSv1.1 on runtime.
|
||||
+# Since we don't have a good way to detect runtime changes
|
||||
+# on the allowed protocols, we hardcode the default config
|
||||
+# with those flags.
|
||||
+TLSv1_enabled = False
|
||||
+TLSv1_1_enabled = False
|
||||
+
|
||||
def data_file(*name):
|
||||
return os.path.join(os.path.dirname(__file__), *name)
|
||||
|
||||
@@ -2380,7 +2387,8 @@ if _have_threads:
|
||||
if support.verbose:
|
||||
sys.stdout.write("\n")
|
||||
for protocol in PROTOCOLS:
|
||||
- if protocol in {ssl.PROTOCOL_TLS_CLIENT, ssl.PROTOCOL_TLS_SERVER}:
|
||||
+ if protocol in {ssl.PROTOCOL_TLS_CLIENT, ssl.PROTOCOL_TLS_SERVER,
|
||||
+ ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1}:
|
||||
continue
|
||||
with self.subTest(protocol=ssl._PROTOCOL_NAMES[protocol]):
|
||||
context = ssl.SSLContext(protocol)
|
||||
@@ -2650,17 +2658,20 @@ if _have_threads:
|
||||
if hasattr(ssl, 'PROTOCOL_SSLv3'):
|
||||
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False)
|
||||
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True)
|
||||
- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1')
|
||||
+ if TLSv1_enabled:
|
||||
+ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1')
|
||||
|
||||
if hasattr(ssl, 'PROTOCOL_SSLv3'):
|
||||
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_OPTIONAL)
|
||||
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_OPTIONAL)
|
||||
- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
|
||||
+ if TLSv1_enabled:
|
||||
+ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
|
||||
|
||||
if hasattr(ssl, 'PROTOCOL_SSLv3'):
|
||||
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_REQUIRED)
|
||||
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED)
|
||||
- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
|
||||
+ if TLSv1_enabled:
|
||||
+ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
|
||||
|
||||
# Server with specific SSL options
|
||||
if hasattr(ssl, 'PROTOCOL_SSLv3'):
|
||||
@@ -2698,9 +2709,10 @@ if _have_threads:
|
||||
"""Connecting to a TLSv1 server with various client options"""
|
||||
if support.verbose:
|
||||
sys.stdout.write("\n")
|
||||
- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1')
|
||||
- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
|
||||
- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
|
||||
+ if TLSv1_enabled:
|
||||
+ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1')
|
||||
+ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
|
||||
+ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
|
||||
if hasattr(ssl, 'PROTOCOL_SSLv2'):
|
||||
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
|
||||
if hasattr(ssl, 'PROTOCOL_SSLv3'):
|
||||
@@ -2716,7 +2728,8 @@ if _have_threads:
|
||||
Testing against older TLS versions."""
|
||||
if support.verbose:
|
||||
sys.stdout.write("\n")
|
||||
- try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
|
||||
+ if TLSv1_1_enabled:
|
||||
+ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
|
||||
if hasattr(ssl, 'PROTOCOL_SSLv2'):
|
||||
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv2, False)
|
||||
if hasattr(ssl, 'PROTOCOL_SSLv3'):
|
||||
@@ -2724,7 +2737,8 @@ if _have_threads:
|
||||
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv23, False,
|
||||
client_options=ssl.OP_NO_TLSv1_1)
|
||||
|
||||
- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
|
||||
+ if TLSv1_1_enabled:
|
||||
+ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
|
||||
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1, False)
|
||||
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1, False)
|
||||
|
||||
--
|
||||
2.21.0
|
||||
|
41
SOURCES/00319-test_tarfile_ppc64.patch
Normal file
41
SOURCES/00319-test_tarfile_ppc64.patch
Normal file
@ -0,0 +1,41 @@
|
||||
commit 86ed41792d394f804d2c9e695ac8b257220fbdee
|
||||
Author: Victor Stinner <vstinner@redhat.com>
|
||||
Date: Tue Mar 12 17:17:13 2019 +0100
|
||||
|
||||
Fix test_tarfile on ppc64
|
||||
|
||||
Fix sparse file tests of test_tarfile on ppc64le with the tmpfs
|
||||
filesystem.
|
||||
|
||||
* https://bugzilla.redhat.com/show_bug.cgi?id=1639490
|
||||
* https://bugs.python.org/issue35772
|
||||
* https://github.com/python/cpython/commit/d1dd6be613381b996b9071443ef081de8e5f3aff
|
||||
|
||||
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
|
||||
index 4cd7d53..bd8b05f 100644
|
||||
--- a/Lib/test/test_tarfile.py
|
||||
+++ b/Lib/test/test_tarfile.py
|
||||
@@ -973,16 +973,21 @@ class GNUReadTest(LongnameTest, ReadTest, unittest.TestCase):
|
||||
def _fs_supports_holes():
|
||||
# Return True if the platform knows the st_blocks stat attribute and
|
||||
# uses st_blocks units of 512 bytes, and if the filesystem is able to
|
||||
- # store holes in files.
|
||||
+ # store holes of 4 KiB in files.
|
||||
+ #
|
||||
+ # The function returns False if page size is larger than 4 KiB.
|
||||
+ # For example, ppc64 uses pages of 64 KiB.
|
||||
if sys.platform.startswith("linux"):
|
||||
# Linux evidentially has 512 byte st_blocks units.
|
||||
name = os.path.join(TEMPDIR, "sparse-test")
|
||||
with open(name, "wb") as fobj:
|
||||
+ # Seek to "punch a hole" of 4 KiB
|
||||
fobj.seek(4096)
|
||||
+ fobj.write(b'x' * 4096)
|
||||
fobj.truncate()
|
||||
s = os.stat(name)
|
||||
support.unlink(name)
|
||||
- return s.st_blocks == 0
|
||||
+ return (s.st_blocks * 512 < s.st_size)
|
||||
else:
|
||||
return False
|
||||
|
137
SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch
Normal file
137
SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch
Normal file
@ -0,0 +1,137 @@
|
||||
diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst
|
||||
index d991254..647af61 100644
|
||||
--- a/Doc/library/urllib.parse.rst
|
||||
+++ b/Doc/library/urllib.parse.rst
|
||||
@@ -121,6 +121,11 @@ or on combining URL components into a URL string.
|
||||
Unmatched square brackets in the :attr:`netloc` attribute will raise a
|
||||
:exc:`ValueError`.
|
||||
|
||||
+ Characters in the :attr:`netloc` attribute that decompose under NFKC
|
||||
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``,
|
||||
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is
|
||||
+ decomposed before parsing, no error will be raised.
|
||||
+
|
||||
.. versionchanged:: 3.2
|
||||
Added IPv6 URL parsing capabilities.
|
||||
|
||||
@@ -133,6 +138,10 @@ or on combining URL components into a URL string.
|
||||
Out-of-range port numbers now raise :exc:`ValueError`, instead of
|
||||
returning :const:`None`.
|
||||
|
||||
+ .. versionchanged:: 3.6.9
|
||||
+ Characters that affect netloc parsing under NFKC normalization will
|
||||
+ now raise :exc:`ValueError`.
|
||||
+
|
||||
|
||||
.. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None)
|
||||
|
||||
@@ -256,10 +265,19 @@ or on combining URL components into a URL string.
|
||||
Unmatched square brackets in the :attr:`netloc` attribute will raise a
|
||||
:exc:`ValueError`.
|
||||
|
||||
+ Characters in the :attr:`netloc` attribute that decompose under NFKC
|
||||
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``,
|
||||
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is
|
||||
+ decomposed before parsing, no error will be raised.
|
||||
+
|
||||
.. versionchanged:: 3.6
|
||||
Out-of-range port numbers now raise :exc:`ValueError`, instead of
|
||||
returning :const:`None`.
|
||||
|
||||
+ .. versionchanged:: 3.6.9
|
||||
+ Characters that affect netloc parsing under NFKC normalization will
|
||||
+ now raise :exc:`ValueError`.
|
||||
+
|
||||
|
||||
.. function:: urlunsplit(parts)
|
||||
|
||||
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
|
||||
index be50b47..68f633c 100644
|
||||
--- a/Lib/test/test_urlparse.py
|
||||
+++ b/Lib/test/test_urlparse.py
|
||||
@@ -1,3 +1,5 @@
|
||||
+import sys
|
||||
+import unicodedata
|
||||
import unittest
|
||||
import urllib.parse
|
||||
|
||||
@@ -984,6 +986,34 @@ class UrlParseTestCase(unittest.TestCase):
|
||||
expected.append(name)
|
||||
self.assertCountEqual(urllib.parse.__all__, expected)
|
||||
|
||||
+ def test_urlsplit_normalization(self):
|
||||
+ # Certain characters should never occur in the netloc,
|
||||
+ # including under normalization.
|
||||
+ # Ensure that ALL of them are detected and cause an error
|
||||
+ illegal_chars = '/:#?@'
|
||||
+ hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars}
|
||||
+ denorm_chars = [
|
||||
+ c for c in map(chr, range(128, sys.maxunicode))
|
||||
+ if (hex_chars & set(unicodedata.decomposition(c).split()))
|
||||
+ and c not in illegal_chars
|
||||
+ ]
|
||||
+ # Sanity check that we found at least one such character
|
||||
+ self.assertIn('\u2100', denorm_chars)
|
||||
+ self.assertIn('\uFF03', denorm_chars)
|
||||
+
|
||||
+ # bpo-36742: Verify port separators are ignored when they
|
||||
+ # existed prior to decomposition
|
||||
+ urllib.parse.urlsplit('http://\u30d5\u309a:80')
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ urllib.parse.urlsplit('http://\u30d5\u309a\ufe1380')
|
||||
+
|
||||
+ for scheme in ["http", "https", "ftp"]:
|
||||
+ for netloc in ["netloc{}false.netloc", "n{}user@netloc"]:
|
||||
+ for c in denorm_chars:
|
||||
+ url = "{}://{}/path".format(scheme, netloc.format(c))
|
||||
+ with self.subTest(url=url, char='{:04X}'.format(ord(c))):
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ urllib.parse.urlsplit(url)
|
||||
|
||||
class Utility_Tests(unittest.TestCase):
|
||||
"""Testcase to test the various utility functions in the urllib."""
|
||||
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
|
||||
index 85e68c8..fa8827a 100644
|
||||
--- a/Lib/urllib/parse.py
|
||||
+++ b/Lib/urllib/parse.py
|
||||
@@ -391,6 +391,24 @@ def _splitnetloc(url, start=0):
|
||||
delim = min(delim, wdelim) # use earliest delim position
|
||||
return url[start:delim], url[delim:] # return (domain, rest)
|
||||
|
||||
+def _checknetloc(netloc):
|
||||
+ if not netloc or not any(ord(c) > 127 for c in netloc):
|
||||
+ return
|
||||
+ # looking for characters like \u2100 that expand to 'a/c'
|
||||
+ # IDNA uses NFKC equivalence, so normalize for this check
|
||||
+ import unicodedata
|
||||
+ n = netloc.replace('@', '') # ignore characters already included
|
||||
+ n = n.replace(':', '') # but not the surrounding text
|
||||
+ n = n.replace('#', '')
|
||||
+ n = n.replace('?', '')
|
||||
+ netloc2 = unicodedata.normalize('NFKC', n)
|
||||
+ if n == netloc2:
|
||||
+ return
|
||||
+ for c in '/?#@:':
|
||||
+ if c in netloc2:
|
||||
+ raise ValueError("netloc '" + netloc + "' contains invalid " +
|
||||
+ "characters under NFKC normalization")
|
||||
+
|
||||
def urlsplit(url, scheme='', allow_fragments=True):
|
||||
"""Parse a URL into 5 components:
|
||||
<scheme>://<netloc>/<path>?<query>#<fragment>
|
||||
@@ -420,6 +438,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
|
||||
url, fragment = url.split('#', 1)
|
||||
if '?' in url:
|
||||
url, query = url.split('?', 1)
|
||||
+ _checknetloc(netloc)
|
||||
v = SplitResult(scheme, netloc, url, query, fragment)
|
||||
_parse_cache[key] = v
|
||||
return _coerce_result(v)
|
||||
@@ -443,6 +462,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
|
||||
url, fragment = url.split('#', 1)
|
||||
if '?' in url:
|
||||
url, query = url.split('?', 1)
|
||||
+ _checknetloc(netloc)
|
||||
v = SplitResult(scheme, netloc, url, query, fragment)
|
||||
_parse_cache[key] = v
|
||||
return _coerce_result(v)
|
150
SOURCES/00324-disallow-control-chars-in-http-urls.patch
Normal file
150
SOURCES/00324-disallow-control-chars-in-http-urls.patch
Normal file
@ -0,0 +1,150 @@
|
||||
From 7e200e0763f5b71c199aaf98bd5588f291585619 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
|
||||
Date: Tue, 7 May 2019 17:28:47 +0200
|
||||
Subject: [PATCH] bpo-30458: Disallow control chars in http URLs. (GH-12755)
|
||||
(GH-13154)
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
Disallow control chars in http URLs in urllib.urlopen. This addresses a potential security problem for applications that do not sanity check their URLs where http request headers could be injected.
|
||||
|
||||
Disable https related urllib tests on a build without ssl (GH-13032)
|
||||
These tests require an SSL enabled build. Skip these tests when python is built without SSL to fix test failures.
|
||||
|
||||
Use http.client.InvalidURL instead of ValueError as the new error case's exception. (GH-13044)
|
||||
|
||||
Backport Co-Authored-By: Miro Hrončok <miro@hroncok.cz>
|
||||
---
|
||||
Lib/http/client.py | 15 ++++++
|
||||
Lib/test/test_urllib.py | 53 +++++++++++++++++++
|
||||
Lib/test/test_xmlrpc.py | 7 ++-
|
||||
.../2019-04-10-08-53-30.bpo-30458.51E-DA.rst | 1 +
|
||||
4 files changed, 75 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst
|
||||
|
||||
diff --git a/Lib/http/client.py b/Lib/http/client.py
|
||||
index 1de151c38e..2afd452fe3 100644
|
||||
--- a/Lib/http/client.py
|
||||
+++ b/Lib/http/client.py
|
||||
@@ -140,6 +140,16 @@ _MAXHEADERS = 100
|
||||
_is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch
|
||||
_is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search
|
||||
|
||||
+# These characters are not allowed within HTTP URL paths.
|
||||
+# See https://tools.ietf.org/html/rfc3986#section-3.3 and the
|
||||
+# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
|
||||
+# Prevents CVE-2019-9740. Includes control characters such as \r\n.
|
||||
+# We don't restrict chars above \x7f as putrequest() limits us to ASCII.
|
||||
+_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
|
||||
+# Arguably only these _should_ allowed:
|
||||
+# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
|
||||
+# We are more lenient for assumed real world compatibility purposes.
|
||||
+
|
||||
# We always set the Content-Length header for these methods because some
|
||||
# servers will otherwise respond with a 411
|
||||
_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
|
||||
@@ -1101,6 +1111,11 @@ class HTTPConnection:
|
||||
self._method = method
|
||||
if not url:
|
||||
url = '/'
|
||||
+ # Prevent CVE-2019-9740.
|
||||
+ match = _contains_disallowed_url_pchar_re.search(url)
|
||||
+ if match:
|
||||
+ raise InvalidURL(f"URL can't contain control characters. {url!r} "
|
||||
+ f"(found at least {match.group()!r})")
|
||||
request = '%s %s %s' % (method, url, self._http_vsn_str)
|
||||
|
||||
# Non-ASCII characters should have been eliminated earlier
|
||||
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
|
||||
index 2ac73b58d8..7214492eca 100644
|
||||
--- a/Lib/test/test_urllib.py
|
||||
+++ b/Lib/test/test_urllib.py
|
||||
@@ -329,6 +329,59 @@ class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin, FakeFTPMixin):
|
||||
finally:
|
||||
self.unfakehttp()
|
||||
|
||||
+ @unittest.skipUnless(ssl, "ssl module required")
|
||||
+ def test_url_with_control_char_rejected(self):
|
||||
+ for char_no in list(range(0, 0x21)) + [0x7f]:
|
||||
+ char = chr(char_no)
|
||||
+ schemeless_url = f"//localhost:7777/test{char}/"
|
||||
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
|
||||
+ try:
|
||||
+ # We explicitly test urllib.request.urlopen() instead of the top
|
||||
+ # level 'def urlopen()' function defined in this... (quite ugly)
|
||||
+ # test suite. They use different url opening codepaths. Plain
|
||||
+ # urlopen uses FancyURLOpener which goes via a codepath that
|
||||
+ # calls urllib.parse.quote() on the URL which makes all of the
|
||||
+ # above attempts at injection within the url _path_ safe.
|
||||
+ escaped_char_repr = repr(char).replace('\\', r'\\')
|
||||
+ InvalidURL = http.client.InvalidURL
|
||||
+ with self.assertRaisesRegex(
|
||||
+ InvalidURL, f"contain control.*{escaped_char_repr}"):
|
||||
+ urllib.request.urlopen(f"http:{schemeless_url}")
|
||||
+ with self.assertRaisesRegex(
|
||||
+ InvalidURL, f"contain control.*{escaped_char_repr}"):
|
||||
+ urllib.request.urlopen(f"https:{schemeless_url}")
|
||||
+ # This code path quotes the URL so there is no injection.
|
||||
+ resp = urlopen(f"http:{schemeless_url}")
|
||||
+ self.assertNotIn(char, resp.geturl())
|
||||
+ finally:
|
||||
+ self.unfakehttp()
|
||||
+
|
||||
+ @unittest.skipUnless(ssl, "ssl module required")
|
||||
+ def test_url_with_newline_header_injection_rejected(self):
|
||||
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
|
||||
+ host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123"
|
||||
+ schemeless_url = "//" + host + ":8080/test/?test=a"
|
||||
+ try:
|
||||
+ # We explicitly test urllib.request.urlopen() instead of the top
|
||||
+ # level 'def urlopen()' function defined in this... (quite ugly)
|
||||
+ # test suite. They use different url opening codepaths. Plain
|
||||
+ # urlopen uses FancyURLOpener which goes via a codepath that
|
||||
+ # calls urllib.parse.quote() on the URL which makes all of the
|
||||
+ # above attempts at injection within the url _path_ safe.
|
||||
+ InvalidURL = http.client.InvalidURL
|
||||
+ with self.assertRaisesRegex(
|
||||
+ InvalidURL, r"contain control.*\\r.*(found at least . .)"):
|
||||
+ urllib.request.urlopen(f"http:{schemeless_url}")
|
||||
+ with self.assertRaisesRegex(InvalidURL, r"contain control.*\\n"):
|
||||
+ urllib.request.urlopen(f"https:{schemeless_url}")
|
||||
+ # This code path quotes the URL so there is no injection.
|
||||
+ resp = urlopen(f"http:{schemeless_url}")
|
||||
+ self.assertNotIn(' ', resp.geturl())
|
||||
+ self.assertNotIn('\r', resp.geturl())
|
||||
+ self.assertNotIn('\n', resp.geturl())
|
||||
+ finally:
|
||||
+ self.unfakehttp()
|
||||
+
|
||||
def test_read_0_9(self):
|
||||
# "0.9" response accepted (but not "simple responses" without
|
||||
# a status line)
|
||||
diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py
|
||||
index 32263f7f0b..0e002ec4ef 100644
|
||||
--- a/Lib/test/test_xmlrpc.py
|
||||
+++ b/Lib/test/test_xmlrpc.py
|
||||
@@ -945,7 +945,12 @@ class SimpleServerTestCase(BaseServerTestCase):
|
||||
def test_partial_post(self):
|
||||
# Check that a partial POST doesn't make the server loop: issue #14001.
|
||||
conn = http.client.HTTPConnection(ADDR, PORT)
|
||||
- conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye')
|
||||
+ conn.send('POST /RPC2 HTTP/1.0\r\n'
|
||||
+ 'Content-Length: 100\r\n\r\n'
|
||||
+ 'bye HTTP/1.1\r\n'
|
||||
+ f'Host: {ADDR}:{PORT}\r\n'
|
||||
+ 'Accept-Encoding: identity\r\n'
|
||||
+ 'Content-Length: 0\r\n\r\n'.encode('ascii'))
|
||||
conn.close()
|
||||
|
||||
def test_context_manager(self):
|
||||
diff --git a/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst
|
||||
new file mode 100644
|
||||
index 0000000000..ed8027fb4d
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Address CVE-2019-9740 by disallowing URL paths with embedded whitespace or control characters through into the underlying http client request. Such potentially malicious header injection URLs now cause an http.client.InvalidURL exception to be raised.
|
||||
--
|
||||
2.21.0
|
||||
|
49
SOURCES/00325-CVE-2019-9948.patch
Normal file
49
SOURCES/00325-CVE-2019-9948.patch
Normal file
@ -0,0 +1,49 @@
|
||||
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
|
||||
index 649a5b8..0061a52 100644
|
||||
--- a/Lib/test/test_urllib.py
|
||||
+++ b/Lib/test/test_urllib.py
|
||||
@@ -16,6 +16,7 @@ except ImportError:
|
||||
ssl = None
|
||||
import sys
|
||||
import tempfile
|
||||
+import warnings
|
||||
from nturl2path import url2pathname, pathname2url
|
||||
|
||||
from base64 import b64encode
|
||||
@@ -1463,6 +1464,23 @@ class URLopener_Tests(unittest.TestCase):
|
||||
"spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"),
|
||||
"//c:|windows%/:=&?~#+!$,;'@()*[]|/path/")
|
||||
|
||||
+ def test_local_file_open(self):
|
||||
+ # bpo-35907, CVE-2019-9948: urllib must reject local_file:// scheme
|
||||
+ class DummyURLopener(urllib.request.URLopener):
|
||||
+ def open_local_file(self, url):
|
||||
+ return url
|
||||
+
|
||||
+ with warnings.catch_warnings(record=True):
|
||||
+ warnings.simplefilter("ignore", DeprecationWarning)
|
||||
+
|
||||
+ for url in ('local_file://example', 'local-file://example'):
|
||||
+ self.assertRaises(OSError, urllib.request.urlopen, url)
|
||||
+ self.assertRaises(OSError, urllib.request.URLopener().open, url)
|
||||
+ self.assertRaises(OSError, urllib.request.URLopener().retrieve, url)
|
||||
+ self.assertRaises(OSError, DummyURLopener().open, url)
|
||||
+ self.assertRaises(OSError, DummyURLopener().retrieve, url)
|
||||
+
|
||||
+
|
||||
# Just commented them out.
|
||||
# Can't really tell why keep failing in windows and sparc.
|
||||
# Everywhere else they work ok, but on those machines, sometimes
|
||||
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
|
||||
index d28f2f8..c9945d9 100644
|
||||
--- a/Lib/urllib/request.py
|
||||
+++ b/Lib/urllib/request.py
|
||||
@@ -1747,7 +1747,7 @@ class URLopener:
|
||||
name = 'open_' + urltype
|
||||
self.type = urltype
|
||||
name = name.replace('-', '_')
|
||||
- if not hasattr(self, name):
|
||||
+ if not hasattr(self, name) or name == 'open_local_file':
|
||||
if proxy:
|
||||
return self.open_unknown_proxy(proxy, fullurl, data)
|
||||
else:
|
117
SOURCES/00326-do-not-set-PHA-verify-flag-on-client-side.patch
Normal file
117
SOURCES/00326-do-not-set-PHA-verify-flag-on-client-side.patch
Normal file
@ -0,0 +1,117 @@
|
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
|
||||
index 883201f..cf4d84d 100644
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -3891,6 +3891,37 @@ class TestPostHandshakeAuth(unittest.TestCase):
|
||||
s.write(b'PHA')
|
||||
self.assertIn(b'WRONG_SSL_VERSION', s.recv(1024))
|
||||
|
||||
+ def test_bpo37428_pha_cert_none(self):
|
||||
+ # verify that post_handshake_auth does not implicitly enable cert
|
||||
+ # validation.
|
||||
+ hostname = 'localhost'
|
||||
+ client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
+ client_context.post_handshake_auth = True
|
||||
+ client_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
+ # no cert validation and CA on client side
|
||||
+ client_context.check_hostname = False
|
||||
+ client_context.verify_mode = ssl.CERT_NONE
|
||||
+
|
||||
+ server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
+ server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
+ server_context.load_verify_locations(SIGNING_CA)
|
||||
+ server_context.post_handshake_auth = True
|
||||
+ server_context.verify_mode = ssl.CERT_REQUIRED
|
||||
+
|
||||
+ server = ThreadedEchoServer(context=server_context, chatty=False)
|
||||
+ with server:
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
+ server_hostname=hostname) as s:
|
||||
+ s.connect((HOST, server.port))
|
||||
+ s.write(b'HASCERT')
|
||||
+ self.assertEqual(s.recv(1024), b'FALSE\n')
|
||||
+ s.write(b'PHA')
|
||||
+ self.assertEqual(s.recv(1024), b'OK\n')
|
||||
+ s.write(b'HASCERT')
|
||||
+ self.assertEqual(s.recv(1024), b'TRUE\n')
|
||||
+ # server cert has not been validated
|
||||
+ self.assertEqual(s.getpeercert(), {})
|
||||
+
|
||||
|
||||
def test_main(verbose=False):
|
||||
if support.verbose:
|
||||
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
|
||||
index ec366f0..9bf1cde 100644
|
||||
--- a/Modules/_ssl.c
|
||||
+++ b/Modules/_ssl.c
|
||||
@@ -732,6 +732,26 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock,
|
||||
#endif
|
||||
SSL_set_mode(self->ssl, mode);
|
||||
|
||||
+#ifdef TLS1_3_VERSION
|
||||
+ if (sslctx->post_handshake_auth == 1) {
|
||||
+ if (socket_type == PY_SSL_SERVER) {
|
||||
+ /* bpo-37428: OpenSSL does not ignore SSL_VERIFY_POST_HANDSHAKE.
|
||||
+ * Set SSL_VERIFY_POST_HANDSHAKE flag only for server sockets and
|
||||
+ * only in combination with SSL_VERIFY_PEER flag. */
|
||||
+ int mode = SSL_get_verify_mode(self->ssl);
|
||||
+ if (mode & SSL_VERIFY_PEER) {
|
||||
+ int (*verify_cb)(int, X509_STORE_CTX *) = NULL;
|
||||
+ verify_cb = SSL_get_verify_callback(self->ssl);
|
||||
+ mode |= SSL_VERIFY_POST_HANDSHAKE;
|
||||
+ SSL_set_verify(self->ssl, mode, verify_cb);
|
||||
+ }
|
||||
+ } else {
|
||||
+ /* client socket */
|
||||
+ SSL_set_post_handshake_auth(self->ssl, 1);
|
||||
+ }
|
||||
+ }
|
||||
+#endif
|
||||
+
|
||||
#if HAVE_SNI
|
||||
if (server_hostname != NULL) {
|
||||
/* Don't send SNI for IP addresses. We cannot simply use inet_aton() and
|
||||
@@ -2765,10 +2785,10 @@ _set_verify_mode(PySSLContext *self, enum py_ssl_cert_requirements n)
|
||||
"invalid value for verify_mode");
|
||||
return -1;
|
||||
}
|
||||
-#ifdef TLS1_3_VERSION
|
||||
- if (self->post_handshake_auth)
|
||||
- mode |= SSL_VERIFY_POST_HANDSHAKE;
|
||||
-#endif
|
||||
+
|
||||
+ /* bpo-37428: newPySSLSocket() sets SSL_VERIFY_POST_HANDSHAKE flag for
|
||||
+ * server sockets and SSL_set_post_handshake_auth() for client. */
|
||||
+
|
||||
/* keep current verify cb */
|
||||
verify_cb = SSL_CTX_get_verify_callback(self->ctx);
|
||||
SSL_CTX_set_verify(self->ctx, mode, verify_cb);
|
||||
@@ -3346,8 +3366,6 @@ get_post_handshake_auth(PySSLContext *self, void *c) {
|
||||
#if TLS1_3_VERSION
|
||||
static int
|
||||
set_post_handshake_auth(PySSLContext *self, PyObject *arg, void *c) {
|
||||
- int (*verify_cb)(int, X509_STORE_CTX *) = NULL;
|
||||
- int mode = SSL_CTX_get_verify_mode(self->ctx);
|
||||
int pha = PyObject_IsTrue(arg);
|
||||
|
||||
if (pha == -1) {
|
||||
@@ -3355,17 +3373,8 @@ set_post_handshake_auth(PySSLContext *self, PyObject *arg, void *c) {
|
||||
}
|
||||
self->post_handshake_auth = pha;
|
||||
|
||||
- /* client-side socket setting, ignored by server-side */
|
||||
- SSL_CTX_set_post_handshake_auth(self->ctx, pha);
|
||||
-
|
||||
- /* server-side socket setting, ignored by client-side */
|
||||
- verify_cb = SSL_CTX_get_verify_callback(self->ctx);
|
||||
- if (pha) {
|
||||
- mode |= SSL_VERIFY_POST_HANDSHAKE;
|
||||
- } else {
|
||||
- mode ^= SSL_VERIFY_POST_HANDSHAKE;
|
||||
- }
|
||||
- SSL_CTX_set_verify(self->ctx, mode, verify_cb);
|
||||
+ /* bpo-37428: newPySSLSocket() sets SSL_VERIFY_POST_HANDSHAKE flag for
|
||||
+ * server sockets and SSL_set_post_handshake_auth() for client. */
|
||||
|
||||
return 0;
|
||||
}
|
70
SOURCES/00327-enable-tls-1.3-PHA-in-http.client.patch
Normal file
70
SOURCES/00327-enable-tls-1.3-PHA-in-http.client.patch
Normal file
@ -0,0 +1,70 @@
|
||||
diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst
|
||||
index 2f59ece..d756916 100644
|
||||
--- a/Doc/library/http.client.rst
|
||||
+++ b/Doc/library/http.client.rst
|
||||
@@ -88,6 +88,11 @@ The module provides the following classes:
|
||||
:func:`ssl._create_unverified_context` can be passed to the *context*
|
||||
parameter.
|
||||
|
||||
+ .. versionchanged:: 3.7.4
|
||||
+ This class now enables TLS 1.3
|
||||
+ :attr:`ssl.SSLContext.post_handshake_auth` for the default *context* or
|
||||
+ when *cert_file* is passed with a custom *context*.
|
||||
+
|
||||
.. deprecated:: 3.6
|
||||
|
||||
*key_file* and *cert_file* are deprecated in favor of *context*.
|
||||
diff --git a/Lib/http/client.py b/Lib/http/client.py
|
||||
index 1a6bd8a..f0d2642 100644
|
||||
--- a/Lib/http/client.py
|
||||
+++ b/Lib/http/client.py
|
||||
@@ -1390,6 +1390,9 @@ else:
|
||||
self.cert_file = cert_file
|
||||
if context is None:
|
||||
context = ssl._create_default_https_context()
|
||||
+ # enable PHA for TLS 1.3 connections if available
|
||||
+ if context.post_handshake_auth is not None:
|
||||
+ context.post_handshake_auth = True
|
||||
will_verify = context.verify_mode != ssl.CERT_NONE
|
||||
if check_hostname is None:
|
||||
check_hostname = context.check_hostname
|
||||
@@ -1398,6 +1401,10 @@ else:
|
||||
"either CERT_OPTIONAL or CERT_REQUIRED")
|
||||
if key_file or cert_file:
|
||||
context.load_cert_chain(cert_file, key_file)
|
||||
+ # cert and key file means the user wants to authenticate.
|
||||
+ # enable TLS 1.3 PHA implicitly even for custom contexts.
|
||||
+ if context.post_handshake_auth is not None:
|
||||
+ context.post_handshake_auth = True
|
||||
self._context = context
|
||||
self._check_hostname = check_hostname
|
||||
|
||||
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
|
||||
index 714d521..5795b7a 100644
|
||||
--- a/Lib/test/test_httplib.py
|
||||
+++ b/Lib/test/test_httplib.py
|
||||
@@ -1709,6 +1709,24 @@ class HTTPSTest(TestCase):
|
||||
self.assertEqual(h, c.host)
|
||||
self.assertEqual(p, c.port)
|
||||
|
||||
+ def test_tls13_pha(self):
|
||||
+ import ssl
|
||||
+ if not ssl.HAS_TLSv1_3:
|
||||
+ self.skipTest('TLS 1.3 support required')
|
||||
+ # just check status of PHA flag
|
||||
+ h = client.HTTPSConnection('localhost', 443)
|
||||
+ self.assertTrue(h._context.post_handshake_auth)
|
||||
+
|
||||
+ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
+ self.assertFalse(context.post_handshake_auth)
|
||||
+ h = client.HTTPSConnection('localhost', 443, context=context)
|
||||
+ self.assertIs(h._context, context)
|
||||
+ self.assertFalse(h._context.post_handshake_auth)
|
||||
+
|
||||
+ h = client.HTTPSConnection('localhost', 443, context=context,
|
||||
+ cert_file=CERT_localhost)
|
||||
+ self.assertTrue(h._context.post_handshake_auth)
|
||||
+
|
||||
|
||||
class RequestBodyTest(TestCase):
|
||||
"""Test cases where a request includes a message body."""
|
5597
SOURCES/00329-fips.patch
Normal file
5597
SOURCES/00329-fips.patch
Normal file
File diff suppressed because it is too large
Load Diff
93
SOURCES/00330-CVE-2018-20852.patch
Normal file
93
SOURCES/00330-CVE-2018-20852.patch
Normal file
@ -0,0 +1,93 @@
|
||||
diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py
|
||||
index adf956d..97599d4 100644
|
||||
--- a/Lib/http/cookiejar.py
|
||||
+++ b/Lib/http/cookiejar.py
|
||||
@@ -1148,6 +1148,11 @@ class DefaultCookiePolicy(CookiePolicy):
|
||||
req_host, erhn = eff_request_host(request)
|
||||
domain = cookie.domain
|
||||
|
||||
+ if domain and not domain.startswith("."):
|
||||
+ dotdomain = "." + domain
|
||||
+ else:
|
||||
+ dotdomain = domain
|
||||
+
|
||||
# strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't
|
||||
if (cookie.version == 0 and
|
||||
(self.strict_ns_domain & self.DomainStrictNonDomain) and
|
||||
@@ -1160,7 +1165,7 @@ class DefaultCookiePolicy(CookiePolicy):
|
||||
_debug(" effective request-host name %s does not domain-match "
|
||||
"RFC 2965 cookie domain %s", erhn, domain)
|
||||
return False
|
||||
- if cookie.version == 0 and not ("."+erhn).endswith(domain):
|
||||
+ if cookie.version == 0 and not ("."+erhn).endswith(dotdomain):
|
||||
_debug(" request-host %s does not match Netscape cookie domain "
|
||||
"%s", req_host, domain)
|
||||
return False
|
||||
@@ -1174,7 +1179,11 @@ class DefaultCookiePolicy(CookiePolicy):
|
||||
req_host = "."+req_host
|
||||
if not erhn.startswith("."):
|
||||
erhn = "."+erhn
|
||||
- if not (req_host.endswith(domain) or erhn.endswith(domain)):
|
||||
+ if domain and not domain.startswith("."):
|
||||
+ dotdomain = "." + domain
|
||||
+ else:
|
||||
+ dotdomain = domain
|
||||
+ if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)):
|
||||
#_debug(" request domain %s does not match cookie domain %s",
|
||||
# req_host, domain)
|
||||
return False
|
||||
diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py
|
||||
index abc625d..6e1b308 100644
|
||||
--- a/Lib/test/test_http_cookiejar.py
|
||||
+++ b/Lib/test/test_http_cookiejar.py
|
||||
@@ -415,6 +415,7 @@ class CookieTests(unittest.TestCase):
|
||||
("http://foo.bar.com/", ".foo.bar.com", True),
|
||||
("http://foo.bar.com/", "foo.bar.com", True),
|
||||
("http://foo.bar.com/", ".bar.com", True),
|
||||
+ ("http://foo.bar.com/", "bar.com", True),
|
||||
("http://foo.bar.com/", "com", True),
|
||||
("http://foo.com/", "rhubarb.foo.com", False),
|
||||
("http://foo.com/", ".foo.com", True),
|
||||
@@ -425,6 +426,8 @@ class CookieTests(unittest.TestCase):
|
||||
("http://foo/", "foo", True),
|
||||
("http://foo/", "foo.local", True),
|
||||
("http://foo/", ".local", True),
|
||||
+ ("http://barfoo.com", ".foo.com", False),
|
||||
+ ("http://barfoo.com", "foo.com", False),
|
||||
]:
|
||||
request = urllib.request.Request(url)
|
||||
r = pol.domain_return_ok(domain, request)
|
||||
@@ -959,6 +962,33 @@ class CookieTests(unittest.TestCase):
|
||||
c.add_cookie_header(req)
|
||||
self.assertFalse(req.has_header("Cookie"))
|
||||
|
||||
+ c.clear()
|
||||
+
|
||||
+ pol.set_blocked_domains([])
|
||||
+ req = urllib.request.Request("http://acme.com/")
|
||||
+ res = FakeResponse(headers, "http://acme.com/")
|
||||
+ cookies = c.make_cookies(res, req)
|
||||
+ c.extract_cookies(res, req)
|
||||
+ self.assertEqual(len(c), 1)
|
||||
+
|
||||
+ req = urllib.request.Request("http://acme.com/")
|
||||
+ c.add_cookie_header(req)
|
||||
+ self.assertTrue(req.has_header("Cookie"))
|
||||
+
|
||||
+ req = urllib.request.Request("http://badacme.com/")
|
||||
+ c.add_cookie_header(req)
|
||||
+ self.assertFalse(pol.return_ok(cookies[0], req))
|
||||
+ self.assertFalse(req.has_header("Cookie"))
|
||||
+
|
||||
+ p = pol.set_blocked_domains(["acme.com"])
|
||||
+ req = urllib.request.Request("http://acme.com/")
|
||||
+ c.add_cookie_header(req)
|
||||
+ self.assertFalse(req.has_header("Cookie"))
|
||||
+
|
||||
+ req = urllib.request.Request("http://badacme.com/")
|
||||
+ c.add_cookie_header(req)
|
||||
+ self.assertFalse(req.has_header("Cookie"))
|
||||
+
|
||||
def test_secure(self):
|
||||
for ns in True, False:
|
||||
for whitespace in " ", "":
|
95
SOURCES/00332-CVE-2019-16056.patch
Normal file
95
SOURCES/00332-CVE-2019-16056.patch
Normal file
@ -0,0 +1,95 @@
|
||||
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
|
||||
index 737951e4b1b1..bc9c9b6241d4 100644
|
||||
--- a/Lib/email/_header_value_parser.py
|
||||
+++ b/Lib/email/_header_value_parser.py
|
||||
@@ -1561,6 +1561,8 @@ def get_domain(value):
|
||||
token, value = get_dot_atom(value)
|
||||
except errors.HeaderParseError:
|
||||
token, value = get_atom(value)
|
||||
+ if value and value[0] == '@':
|
||||
+ raise errors.HeaderParseError('Invalid Domain')
|
||||
if leader is not None:
|
||||
token[:0] = [leader]
|
||||
domain.append(token)
|
||||
diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py
|
||||
index cdfa3729adc7..41ff6f8c000d 100644
|
||||
--- a/Lib/email/_parseaddr.py
|
||||
+++ b/Lib/email/_parseaddr.py
|
||||
@@ -379,7 +379,12 @@ def getaddrspec(self):
|
||||
aslist.append('@')
|
||||
self.pos += 1
|
||||
self.gotonext()
|
||||
- return EMPTYSTRING.join(aslist) + self.getdomain()
|
||||
+ domain = self.getdomain()
|
||||
+ if not domain:
|
||||
+ # Invalid domain, return an empty address instead of returning a
|
||||
+ # local part to denote failed parsing.
|
||||
+ return EMPTYSTRING
|
||||
+ return EMPTYSTRING.join(aslist) + domain
|
||||
|
||||
def getdomain(self):
|
||||
"""Get the complete domain name from an address."""
|
||||
@@ -394,6 +399,10 @@ def getdomain(self):
|
||||
elif self.field[self.pos] == '.':
|
||||
self.pos += 1
|
||||
sdlist.append('.')
|
||||
+ elif self.field[self.pos] == '@':
|
||||
+ # bpo-34155: Don't parse domains with two `@` like
|
||||
+ # `a@malicious.org@important.com`.
|
||||
+ return EMPTYSTRING
|
||||
elif self.field[self.pos] in self.atomends:
|
||||
break
|
||||
else:
|
||||
diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py
|
||||
index a2c900fa7fd2..02ef3e1006c6 100644
|
||||
--- a/Lib/test/test_email/test__header_value_parser.py
|
||||
+++ b/Lib/test/test_email/test__header_value_parser.py
|
||||
@@ -1418,6 +1418,16 @@ def test_get_addr_spec_dot_atom(self):
|
||||
self.assertEqual(addr_spec.domain, 'example.com')
|
||||
self.assertEqual(addr_spec.addr_spec, 'star.a.star@example.com')
|
||||
|
||||
+ def test_get_addr_spec_multiple_domains(self):
|
||||
+ with self.assertRaises(errors.HeaderParseError):
|
||||
+ parser.get_addr_spec('star@a.star@example.com')
|
||||
+
|
||||
+ with self.assertRaises(errors.HeaderParseError):
|
||||
+ parser.get_addr_spec('star@a@example.com')
|
||||
+
|
||||
+ with self.assertRaises(errors.HeaderParseError):
|
||||
+ parser.get_addr_spec('star@172.17.0.1@example.com')
|
||||
+
|
||||
# get_obs_route
|
||||
|
||||
def test_get_obs_route_simple(self):
|
||||
diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py
|
||||
index f97ccc6711cc..68d052279987 100644
|
||||
--- a/Lib/test/test_email/test_email.py
|
||||
+++ b/Lib/test/test_email/test_email.py
|
||||
@@ -3035,6 +3035,20 @@ def test_parseaddr_empty(self):
|
||||
self.assertEqual(utils.parseaddr('<>'), ('', ''))
|
||||
self.assertEqual(utils.formataddr(utils.parseaddr('<>')), '')
|
||||
|
||||
+ def test_parseaddr_multiple_domains(self):
|
||||
+ self.assertEqual(
|
||||
+ utils.parseaddr('a@b@c'),
|
||||
+ ('', '')
|
||||
+ )
|
||||
+ self.assertEqual(
|
||||
+ utils.parseaddr('a@b.c@c'),
|
||||
+ ('', '')
|
||||
+ )
|
||||
+ self.assertEqual(
|
||||
+ utils.parseaddr('a@172.17.0.1@c'),
|
||||
+ ('', '')
|
||||
+ )
|
||||
+
|
||||
def test_noquote_dump(self):
|
||||
self.assertEqual(
|
||||
utils.formataddr(('A Silly Person', 'person@dom.ain')),
|
||||
diff --git a/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst
|
||||
new file mode 100644
|
||||
index 000000000000..50292e29ed1d
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Fix parsing of invalid email addresses with more than one ``@`` (e.g. a@b@c.com.) to not return the part before 2nd ``@`` as valid email address. Patch by maxking & jpic.
|
296
SOURCES/00333-reduce-pgo-tests.patch
Normal file
296
SOURCES/00333-reduce-pgo-tests.patch
Normal file
@ -0,0 +1,296 @@
|
||||
diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py
|
||||
index 538ff05..e7f2013 100644
|
||||
--- a/Lib/test/libregrtest/cmdline.py
|
||||
+++ b/Lib/test/libregrtest/cmdline.py
|
||||
@@ -263,7 +263,9 @@ def _create_parser():
|
||||
help='only write the name of test cases that will be run'
|
||||
' , don\'t execute them')
|
||||
group.add_argument('-P', '--pgo', dest='pgo', action='store_true',
|
||||
- help='enable Profile Guided Optimization training')
|
||||
+ help='enable Profile Guided Optimization (PGO) training')
|
||||
+ group.add_argument('--pgo-extended', action='store_true',
|
||||
+ help='enable extended PGO training (slower training)')
|
||||
group.add_argument('--fail-env-changed', action='store_true',
|
||||
help='if a test file alters the environment, mark '
|
||||
'the test as failed')
|
||||
@@ -339,6 +341,8 @@ def _parse_args(args, **kwargs):
|
||||
parser.error("-G/--failfast needs either -v or -W")
|
||||
if ns.pgo and (ns.verbose or ns.verbose2 or ns.verbose3):
|
||||
parser.error("--pgo/-v don't go together!")
|
||||
+ if ns.pgo_extended:
|
||||
+ ns.pgo = True # pgo_extended implies pgo
|
||||
|
||||
if ns.nowindows:
|
||||
print("Warning: the --nowindows (-n) option is deprecated. "
|
||||
diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py
|
||||
index b6d05f6..524dbfa 100644
|
||||
--- a/Lib/test/libregrtest/main.py
|
||||
+++ b/Lib/test/libregrtest/main.py
|
||||
@@ -17,6 +17,7 @@ from test.libregrtest.runtest import (
|
||||
INTERRUPTED, CHILD_ERROR, TEST_DID_NOT_RUN,
|
||||
PROGRESS_MIN_TIME, format_test_result)
|
||||
from test.libregrtest.setup import setup_tests
|
||||
+from test.libregrtest.pgo import setup_pgo_tests
|
||||
from test.libregrtest.utils import removepy, count, format_duration, printlist
|
||||
from test import support
|
||||
try:
|
||||
@@ -214,6 +215,10 @@ class Regrtest:
|
||||
|
||||
removepy(self.tests)
|
||||
|
||||
+ if self.ns.pgo:
|
||||
+ # add default PGO tests if no tests are specified
|
||||
+ setup_pgo_tests(self.ns)
|
||||
+
|
||||
stdtests = STDTESTS[:]
|
||||
nottests = NOTTESTS.copy()
|
||||
if self.ns.exclude:
|
||||
@@ -601,6 +606,7 @@ class Regrtest:
|
||||
input("Press any key to continue...")
|
||||
|
||||
support.PGO = self.ns.pgo
|
||||
+ support.PGO_EXTENDED = self.ns.pgo_extended
|
||||
|
||||
setup_tests(self.ns)
|
||||
|
||||
diff --git a/Lib/test/libregrtest/pgo.py b/Lib/test/libregrtest/pgo.py
|
||||
new file mode 100644
|
||||
index 0000000..379ff05
|
||||
--- /dev/null
|
||||
+++ b/Lib/test/libregrtest/pgo.py
|
||||
@@ -0,0 +1,55 @@
|
||||
+# Set of tests run by default if --pgo is specified. The tests below were
|
||||
+# chosen based on the following criteria: either they exercise a commonly used
|
||||
+# C extension module or type, or they run some relatively typical Python code.
|
||||
+# Long running tests should be avoided because the PGO instrumented executable
|
||||
+# runs slowly.
|
||||
+PGO_TESTS = [
|
||||
+ 'test_array',
|
||||
+ 'test_base64',
|
||||
+ 'test_binascii',
|
||||
+ 'test_binop',
|
||||
+ 'test_bisect',
|
||||
+ 'test_bytes',
|
||||
+ 'test_bz2',
|
||||
+ 'test_cmath',
|
||||
+ 'test_codecs',
|
||||
+ 'test_collections',
|
||||
+ 'test_complex',
|
||||
+ 'test_dataclasses',
|
||||
+ 'test_datetime',
|
||||
+ 'test_decimal',
|
||||
+ 'test_difflib',
|
||||
+ 'test_embed',
|
||||
+ 'test_float',
|
||||
+ 'test_fstring',
|
||||
+ 'test_functools',
|
||||
+ 'test_generators',
|
||||
+ 'test_hashlib',
|
||||
+ 'test_heapq',
|
||||
+ 'test_int',
|
||||
+ 'test_itertools',
|
||||
+ 'test_json',
|
||||
+ 'test_long',
|
||||
+ 'test_lzma',
|
||||
+ 'test_math',
|
||||
+ 'test_memoryview',
|
||||
+ 'test_operator',
|
||||
+ 'test_ordered_dict',
|
||||
+ 'test_pickle',
|
||||
+ 'test_pprint',
|
||||
+ 'test_re',
|
||||
+ 'test_set',
|
||||
+ 'test_sqlite',
|
||||
+ 'test_statistics',
|
||||
+ 'test_struct',
|
||||
+ 'test_tabnanny',
|
||||
+ 'test_time',
|
||||
+ 'test_unicode',
|
||||
+ 'test_xml_etree',
|
||||
+ 'test_xml_etree_c',
|
||||
+]
|
||||
+
|
||||
+def setup_pgo_tests(ns):
|
||||
+ if not ns.args and not ns.pgo_extended:
|
||||
+ # run default set of tests for PGO training
|
||||
+ ns.args = PGO_TESTS[:]
|
||||
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
|
||||
index 764057a..468ee46 100644
|
||||
--- a/Lib/test/pickletester.py
|
||||
+++ b/Lib/test/pickletester.py
|
||||
@@ -2039,6 +2039,7 @@ class AbstractPickleTests(unittest.TestCase):
|
||||
|
||||
FRAME_SIZE_TARGET = 64 * 1024
|
||||
|
||||
+ @support.skip_if_pgo_task
|
||||
def check_frame_opcodes(self, pickled):
|
||||
"""
|
||||
Check the arguments of FRAME opcodes in a protocol 4+ pickle.
|
||||
@@ -2059,6 +2060,7 @@ class AbstractPickleTests(unittest.TestCase):
|
||||
frame_size = len(pickled) - last_pos - frame_opcode_size
|
||||
self.assertEqual(frame_size, last_arg)
|
||||
|
||||
+ @support.skip_if_pgo_task
|
||||
def test_framing_many_objects(self):
|
||||
obj = list(range(10**5))
|
||||
for proto in range(4, pickle.HIGHEST_PROTOCOL + 1):
|
||||
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
|
||||
index 66c0fed..e80a819 100644
|
||||
--- a/Lib/test/support/__init__.py
|
||||
+++ b/Lib/test/support/__init__.py
|
||||
@@ -953,6 +953,10 @@ SAVEDCWD = os.getcwd()
|
||||
# useful for PGO
|
||||
PGO = False
|
||||
|
||||
+# Set by libregrtest/main.py if we are running the extended (time consuming)
|
||||
+# PGO task. If this is True, PGO is also True.
|
||||
+PGO_EXTENDED = False
|
||||
+
|
||||
@contextlib.contextmanager
|
||||
def temp_dir(path=None, quiet=False):
|
||||
"""Return a context manager that creates a temporary directory.
|
||||
@@ -2442,6 +2446,11 @@ def skip_unless_xattr(test):
|
||||
msg = "no non-broken extended attribute support"
|
||||
return test if ok else unittest.skip(msg)(test)
|
||||
|
||||
+def skip_if_pgo_task(test):
|
||||
+ """Skip decorator for tests not run in (non-extended) PGO task"""
|
||||
+ ok = not PGO or PGO_EXTENDED
|
||||
+ msg = "Not run for (non-extended) PGO task"
|
||||
+ return test if ok else unittest.skip(msg)(test)
|
||||
|
||||
def fs_is_case_insensitive(directory):
|
||||
"""Detects if the file system for the specified directory is case-insensitive."""
|
||||
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
|
||||
index f340f23..ebb151c 100644
|
||||
--- a/Lib/test/test_bz2.py
|
||||
+++ b/Lib/test/test_bz2.py
|
||||
@@ -654,6 +654,7 @@ class BZ2CompressorTest(BaseTest):
|
||||
data += bz2c.flush()
|
||||
self.assertEqual(ext_decompress(data), self.TEXT)
|
||||
|
||||
+ @support.skip_if_pgo_task
|
||||
@bigmemtest(size=_4G + 100, memuse=2)
|
||||
def testCompress4G(self, size):
|
||||
# "Test BZ2Compressor.compress()/flush() with >4GiB input"
|
||||
@@ -712,6 +713,7 @@ class BZ2DecompressorTest(BaseTest):
|
||||
self.assertRaises(EOFError, bz2d.decompress, b"anything")
|
||||
self.assertRaises(EOFError, bz2d.decompress, b"")
|
||||
|
||||
+ @support.skip_if_pgo_task
|
||||
@bigmemtest(size=_4G + 100, memuse=3.3)
|
||||
def testDecompress4G(self, size):
|
||||
# "Test BZ2Decompressor.decompress() with >4GiB input"
|
||||
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
|
||||
index 9317951..8c1d016 100644
|
||||
--- a/Lib/test/test_itertools.py
|
||||
+++ b/Lib/test/test_itertools.py
|
||||
@@ -2023,6 +2023,7 @@ class RegressionTests(unittest.TestCase):
|
||||
self.assertRaises(AssertionError, list, cycle(gen1()))
|
||||
self.assertEqual(hist, [0,1])
|
||||
|
||||
+ @support.skip_if_pgo_task
|
||||
def test_long_chain_of_empty_iterables(self):
|
||||
# Make sure itertools.chain doesn't run into recursion limits when
|
||||
# dealing with long chains of empty iterables. Even with a high
|
||||
diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py
|
||||
index 3dc2c1e..117de0a 100644
|
||||
--- a/Lib/test/test_lzma.py
|
||||
+++ b/Lib/test/test_lzma.py
|
||||
@@ -333,6 +333,7 @@ class CompressorDecompressorTestCase(unittest.TestCase):
|
||||
|
||||
# Test with inputs larger than 4GiB.
|
||||
|
||||
+ @support.skip_if_pgo_task
|
||||
@bigmemtest(size=_4G + 100, memuse=2)
|
||||
def test_compressor_bigmem(self, size):
|
||||
lzc = LZMACompressor()
|
||||
@@ -344,6 +345,7 @@ class CompressorDecompressorTestCase(unittest.TestCase):
|
||||
finally:
|
||||
ddata = None
|
||||
|
||||
+ @support.skip_if_pgo_task
|
||||
@bigmemtest(size=_4G + 100, memuse=3)
|
||||
def test_decompressor_bigmem(self, size):
|
||||
lzd = LZMADecompressor()
|
||||
diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py
|
||||
index 5347bb1..9d83217 100644
|
||||
--- a/Lib/test/test_regrtest.py
|
||||
+++ b/Lib/test/test_regrtest.py
|
||||
@@ -6,6 +6,7 @@ Note: test_regrtest cannot be run twice in parallel.
|
||||
|
||||
import contextlib
|
||||
import faulthandler
|
||||
+import glob
|
||||
import io
|
||||
import os.path
|
||||
import platform
|
||||
@@ -532,6 +533,31 @@ class BaseTestCase(unittest.TestCase):
|
||||
return proc.stdout
|
||||
|
||||
|
||||
+class CheckActualTests(BaseTestCase):
|
||||
+ """
|
||||
+ Check that regrtest appears to find the expected set of tests.
|
||||
+ """
|
||||
+
|
||||
+ def test_finds_expected_number_of_tests(self):
|
||||
+ args = ['-Wd', '-E', '-bb', '-m', 'test.regrtest', '--list-tests']
|
||||
+ output = self.run_python(args)
|
||||
+ rough_number_of_tests_found = len(output.splitlines())
|
||||
+ actual_testsuite_glob = os.path.join(os.path.dirname(__file__),
|
||||
+ 'test*.py')
|
||||
+ rough_counted_test_py_files = len(glob.glob(actual_testsuite_glob))
|
||||
+ # We're not trying to duplicate test finding logic in here,
|
||||
+ # just give a rough estimate of how many there should be and
|
||||
+ # be near that. This is a regression test to prevent mishaps
|
||||
+ # such as https://bugs.python.org/issue37667 in the future.
|
||||
+ # If you need to change the values in here during some
|
||||
+ # mythical future test suite reorganization, don't go
|
||||
+ # overboard with logic and keep that goal in mind.
|
||||
+ self.assertGreater(rough_number_of_tests_found,
|
||||
+ rough_counted_test_py_files*9//10,
|
||||
+ msg='Unexpectedly low number of tests found in:\n'
|
||||
+ f'{", ".join(output.splitlines())}')
|
||||
+
|
||||
+
|
||||
class ProgramsTestCase(BaseTestCase):
|
||||
"""
|
||||
Test various ways to run the Python test suite. Use options close
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index b452289..cc428ac 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -247,9 +247,10 @@ TCLTK_INCLUDES= @TCLTK_INCLUDES@
|
||||
TCLTK_LIBS= @TCLTK_LIBS@
|
||||
|
||||
# The task to run while instrumented when building the profile-opt target.
|
||||
-# We exclude unittests with -x that take a rediculious amount of time to
|
||||
-# run in the instrumented training build or do not provide much value.
|
||||
-PROFILE_TASK=-m test.regrtest --pgo
|
||||
+# To speed up profile generation, we don't run the full unit test suite
|
||||
+# by default. The default is "-m test --pgo". To run more tests, use
|
||||
+# PROFILE_TASK="-m test --pgo-extended"
|
||||
+PROFILE_TASK= @PROFILE_TASK@
|
||||
|
||||
# report files for gcov / lcov coverage report
|
||||
COVERAGE_INFO= $(abs_builddir)/coverage.info
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index c071ec3..816fc5a 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -1308,6 +1308,14 @@ else
|
||||
DEF_MAKE_RULE="all"
|
||||
fi
|
||||
|
||||
+AC_ARG_VAR(PROFILE_TASK, Python args for PGO generation task)
|
||||
+AC_MSG_CHECKING(PROFILE_TASK)
|
||||
+if test -z "$PROFILE_TASK"
|
||||
+then
|
||||
+ PROFILE_TASK='-m test --pgo'
|
||||
+fi
|
||||
+AC_MSG_RESULT($PROFILE_TASK)
|
||||
+
|
||||
# Make llvm-relatec checks work on systems where llvm tools are not installed with their
|
||||
# normal names in the default $PATH (ie: Ubuntu). They exist under the
|
||||
# non-suffixed name in their versioned llvm directory.
|
22
SOURCES/00338-fix-test_gdb-for-LTO.patch
Normal file
22
SOURCES/00338-fix-test_gdb-for-LTO.patch
Normal file
@ -0,0 +1,22 @@
|
||||
diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py
|
||||
index 9c15fca..c972409 100644
|
||||
--- a/Lib/test/test_gdb.py
|
||||
+++ b/Lib/test/test_gdb.py
|
||||
@@ -279,8 +279,15 @@ class DebuggerTests(unittest.TestCase):
|
||||
# gdb can insert additional '\n' and space characters in various places
|
||||
# in its output, depending on the width of the terminal it's connected
|
||||
# to (using its "wrap_here" function)
|
||||
- m = re.match(r'.*#0\s+builtin_id\s+\(self\=.*,\s+v=\s*(.*?)\)\s+at\s+\S*Python/bltinmodule.c.*',
|
||||
- gdb_output, re.DOTALL)
|
||||
+ m = re.search(
|
||||
+ # Match '#0 builtin_id(self=..., v=...)'
|
||||
+ r'#0\s+builtin_id\s+\(self\=.*,\s+v=\s*(.*?)?\)'
|
||||
+ # Match ' at Python/bltinmodule.c'.
|
||||
+ # bpo-38239: builtin_id() is defined in Python/bltinmodule.c,
|
||||
+ # but accept any "Directory\file.c" to support Link Time
|
||||
+ # Optimization (LTO).
|
||||
+ r'\s+at\s+\S*[A-Za-z]+/[A-Za-z0-9_-]+\.c',
|
||||
+ gdb_output, re.DOTALL)
|
||||
if not m:
|
||||
self.fail('Unexpected gdb output: %r\n%s' % (gdb_output, gdb_output))
|
||||
return m.group(1), gdb_output
|
54
SOURCES/00344-CVE-2019-16935.patch
Normal file
54
SOURCES/00344-CVE-2019-16935.patch
Normal file
@ -0,0 +1,54 @@
|
||||
diff --git a/Lib/test/test_docxmlrpc.py b/Lib/test/test_docxmlrpc.py
|
||||
index 0090333..d2adb21 100644
|
||||
--- a/Lib/test/test_docxmlrpc.py
|
||||
+++ b/Lib/test/test_docxmlrpc.py
|
||||
@@ -1,5 +1,6 @@
|
||||
from xmlrpc.server import DocXMLRPCServer
|
||||
import http.client
|
||||
+import re
|
||||
import sys
|
||||
from test import support
|
||||
threading = support.import_module('threading')
|
||||
@@ -193,6 +194,21 @@ class DocXMLRPCHTTPGETServer(unittest.TestCase):
|
||||
b'method_annotation</strong></a>(x: bytes)</dt></dl>'),
|
||||
response.read())
|
||||
|
||||
+ def test_server_title_escape(self):
|
||||
+ # bpo-38243: Ensure that the server title and documentation
|
||||
+ # are escaped for HTML.
|
||||
+ self.serv.set_server_title('test_title<script>')
|
||||
+ self.serv.set_server_documentation('test_documentation<script>')
|
||||
+ self.assertEqual('test_title<script>', self.serv.server_title)
|
||||
+ self.assertEqual('test_documentation<script>',
|
||||
+ self.serv.server_documentation)
|
||||
+
|
||||
+ generated = self.serv.generate_html_documentation()
|
||||
+ title = re.search(r'<title>(.+?)</title>', generated).group()
|
||||
+ documentation = re.search(r'<p><tt>(.+?)</tt></p>', generated).group()
|
||||
+ self.assertEqual('<title>Python: test_title<script></title>', title)
|
||||
+ self.assertEqual('<p><tt>test_documentation<script></tt></p>', documentation)
|
||||
+
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
diff --git a/Lib/xmlrpc/server.py b/Lib/xmlrpc/server.py
|
||||
index 3e0dca0..efe5937 100644
|
||||
--- a/Lib/xmlrpc/server.py
|
||||
+++ b/Lib/xmlrpc/server.py
|
||||
@@ -106,6 +106,7 @@ server.handle_request()
|
||||
|
||||
from xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode
|
||||
from http.server import BaseHTTPRequestHandler
|
||||
+import html
|
||||
import http.server
|
||||
import socketserver
|
||||
import sys
|
||||
@@ -904,7 +905,7 @@ class XMLRPCDocGenerator:
|
||||
methods
|
||||
)
|
||||
|
||||
- return documenter.page(self.server_title, documentation)
|
||||
+ return documenter.page(html.escape(self.server_title), documentation)
|
||||
|
||||
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
|
||||
"""XML-RPC and documentation request handler class.
|
44
SOURCES/00345-fix-test_site-with-extra-pth-files.patch
Normal file
44
SOURCES/00345-fix-test_site-with-extra-pth-files.patch
Normal file
@ -0,0 +1,44 @@
|
||||
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
|
||||
index d0cd84f..9d2c28c 100644
|
||||
--- a/Lib/test/test_site.py
|
||||
+++ b/Lib/test/test_site.py
|
||||
@@ -10,6 +10,7 @@ from test import support
|
||||
from test.support import (captured_stderr, TESTFN, EnvironmentVarGuard,
|
||||
change_cwd)
|
||||
import builtins
|
||||
+import glob
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
@@ -500,6 +501,23 @@ class ImportSideEffectTests(unittest.TestCase):
|
||||
class StartupImportTests(unittest.TestCase):
|
||||
|
||||
def test_startup_imports(self):
|
||||
+ # Get sys.path in isolated mode (python3 -I)
|
||||
+ popen = subprocess.Popen([sys.executable, '-I', '-c',
|
||||
+ 'import sys; print(repr(sys.path))'],
|
||||
+ stdout=subprocess.PIPE,
|
||||
+ encoding='utf-8')
|
||||
+ stdout = popen.communicate()[0]
|
||||
+ self.assertEqual(popen.returncode, 0, repr(stdout))
|
||||
+ isolated_paths = eval(stdout)
|
||||
+
|
||||
+ # bpo-27807: Even with -I, the site module executes all .pth files
|
||||
+ # found in sys.path (see site.addpackage()). Skip the test if at least
|
||||
+ # one .pth file is found.
|
||||
+ for path in isolated_paths:
|
||||
+ pth_files = glob.glob(os.path.join(path, "*.pth"))
|
||||
+ if pth_files:
|
||||
+ self.skipTest(f"found {len(pth_files)} .pth files in: {path}")
|
||||
+
|
||||
# This tests checks which modules are loaded by Python when it
|
||||
# initially starts upon startup.
|
||||
popen = subprocess.Popen([sys.executable, '-I', '-v', '-c',
|
||||
@@ -508,6 +526,7 @@ class StartupImportTests(unittest.TestCase):
|
||||
stderr=subprocess.PIPE,
|
||||
encoding='utf-8')
|
||||
stdout, stderr = popen.communicate()
|
||||
+ self.assertEqual(popen.returncode, 0, (stdout, stderr))
|
||||
modules = eval(stdout)
|
||||
|
||||
self.assertIn('site', modules)
|
193
SOURCES/00346-CVE-2020-8492.patch
Normal file
193
SOURCES/00346-CVE-2020-8492.patch
Normal file
@ -0,0 +1,193 @@
|
||||
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
|
||||
index 876fcd4..fe9a32b 100644
|
||||
--- a/Lib/test/test_urllib2.py
|
||||
+++ b/Lib/test/test_urllib2.py
|
||||
@@ -1445,40 +1445,64 @@ class HandlerTests(unittest.TestCase):
|
||||
bypass = {'exclude_simple': True, 'exceptions': []}
|
||||
self.assertTrue(_proxy_bypass_macosx_sysconf('test', bypass))
|
||||
|
||||
- def test_basic_auth(self, quote_char='"'):
|
||||
- opener = OpenerDirector()
|
||||
- password_manager = MockPasswordManager()
|
||||
- auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
|
||||
- realm = "ACME Widget Store"
|
||||
- http_handler = MockHTTPHandler(
|
||||
- 401, 'WWW-Authenticate: Basic realm=%s%s%s\r\n\r\n' %
|
||||
- (quote_char, realm, quote_char))
|
||||
- opener.add_handler(auth_handler)
|
||||
- opener.add_handler(http_handler)
|
||||
- self._test_basic_auth(opener, auth_handler, "Authorization",
|
||||
- realm, http_handler, password_manager,
|
||||
- "http://acme.example.com/protected",
|
||||
- "http://acme.example.com/protected",
|
||||
- )
|
||||
-
|
||||
- def test_basic_auth_with_single_quoted_realm(self):
|
||||
- self.test_basic_auth(quote_char="'")
|
||||
-
|
||||
- def test_basic_auth_with_unquoted_realm(self):
|
||||
- opener = OpenerDirector()
|
||||
- password_manager = MockPasswordManager()
|
||||
- auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
|
||||
- realm = "ACME Widget Store"
|
||||
- http_handler = MockHTTPHandler(
|
||||
- 401, 'WWW-Authenticate: Basic realm=%s\r\n\r\n' % realm)
|
||||
- opener.add_handler(auth_handler)
|
||||
- opener.add_handler(http_handler)
|
||||
- with self.assertWarns(UserWarning):
|
||||
+ def check_basic_auth(self, headers, realm):
|
||||
+ with self.subTest(realm=realm, headers=headers):
|
||||
+ opener = OpenerDirector()
|
||||
+ password_manager = MockPasswordManager()
|
||||
+ auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
|
||||
+ body = '\r\n'.join(headers) + '\r\n\r\n'
|
||||
+ http_handler = MockHTTPHandler(401, body)
|
||||
+ opener.add_handler(auth_handler)
|
||||
+ opener.add_handler(http_handler)
|
||||
self._test_basic_auth(opener, auth_handler, "Authorization",
|
||||
- realm, http_handler, password_manager,
|
||||
- "http://acme.example.com/protected",
|
||||
- "http://acme.example.com/protected",
|
||||
- )
|
||||
+ realm, http_handler, password_manager,
|
||||
+ "http://acme.example.com/protected",
|
||||
+ "http://acme.example.com/protected")
|
||||
+
|
||||
+ def test_basic_auth(self):
|
||||
+ realm = "realm2@example.com"
|
||||
+ realm2 = "realm2@example.com"
|
||||
+ basic = f'Basic realm="{realm}"'
|
||||
+ basic2 = f'Basic realm="{realm2}"'
|
||||
+ other_no_realm = 'Otherscheme xxx'
|
||||
+ digest = (f'Digest realm="{realm2}", '
|
||||
+ f'qop="auth, auth-int", '
|
||||
+ f'nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", '
|
||||
+ f'opaque="5ccc069c403ebaf9f0171e9517f40e41"')
|
||||
+ for realm_str in (
|
||||
+ # test "quote" and 'quote'
|
||||
+ f'Basic realm="{realm}"',
|
||||
+ f"Basic realm='{realm}'",
|
||||
+
|
||||
+ # charset is ignored
|
||||
+ f'Basic realm="{realm}", charset="UTF-8"',
|
||||
+
|
||||
+ # Multiple challenges per header
|
||||
+ f'{basic}, {basic2}',
|
||||
+ f'{basic}, {other_no_realm}',
|
||||
+ f'{other_no_realm}, {basic}',
|
||||
+ f'{basic}, {digest}',
|
||||
+ f'{digest}, {basic}',
|
||||
+ ):
|
||||
+ headers = [f'WWW-Authenticate: {realm_str}']
|
||||
+ self.check_basic_auth(headers, realm)
|
||||
+
|
||||
+ # no quote: expect a warning
|
||||
+ with support.check_warnings(("Basic Auth Realm was unquoted",
|
||||
+ UserWarning)):
|
||||
+ headers = [f'WWW-Authenticate: Basic realm={realm}']
|
||||
+ self.check_basic_auth(headers, realm)
|
||||
+
|
||||
+ # Multiple headers: one challenge per header.
|
||||
+ # Use the first Basic realm.
|
||||
+ for challenges in (
|
||||
+ [basic, basic2],
|
||||
+ [basic, digest],
|
||||
+ [digest, basic],
|
||||
+ ):
|
||||
+ headers = [f'WWW-Authenticate: {challenge}'
|
||||
+ for challenge in challenges]
|
||||
+ self.check_basic_auth(headers, realm)
|
||||
|
||||
def test_proxy_basic_auth(self):
|
||||
opener = OpenerDirector()
|
||||
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
|
||||
index c9945d9..6624e04 100644
|
||||
--- a/Lib/urllib/request.py
|
||||
+++ b/Lib/urllib/request.py
|
||||
@@ -945,8 +945,15 @@ class AbstractBasicAuthHandler:
|
||||
|
||||
# allow for double- and single-quoted realm values
|
||||
# (single quotes are a violation of the RFC, but appear in the wild)
|
||||
- rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
|
||||
- 'realm=(["\']?)([^"\']*)\\2', re.I)
|
||||
+ rx = re.compile('(?:^|,)' # start of the string or ','
|
||||
+ '[ \t]*' # optional whitespaces
|
||||
+ '([^ \t]+)' # scheme like "Basic"
|
||||
+ '[ \t]+' # mandatory whitespaces
|
||||
+ # realm=xxx
|
||||
+ # realm='xxx'
|
||||
+ # realm="xxx"
|
||||
+ 'realm=(["\']?)([^"\']*)\\2',
|
||||
+ re.I)
|
||||
|
||||
# XXX could pre-emptively send auth info already accepted (RFC 2617,
|
||||
# end of section 2, and section 1.2 immediately after "credentials"
|
||||
@@ -958,27 +965,51 @@ class AbstractBasicAuthHandler:
|
||||
self.passwd = password_mgr
|
||||
self.add_password = self.passwd.add_password
|
||||
|
||||
+ def _parse_realm(self, header):
|
||||
+ # parse WWW-Authenticate header: accept multiple challenges per header
|
||||
+ found_challenge = False
|
||||
+ for mo in AbstractBasicAuthHandler.rx.finditer(header):
|
||||
+ scheme, quote, realm = mo.groups()
|
||||
+ if quote not in ['"', "'"]:
|
||||
+ warnings.warn("Basic Auth Realm was unquoted",
|
||||
+ UserWarning, 3)
|
||||
+
|
||||
+ yield (scheme, realm)
|
||||
+
|
||||
+ found_challenge = True
|
||||
+
|
||||
+ if not found_challenge:
|
||||
+ if header:
|
||||
+ scheme = header.split()[0]
|
||||
+ else:
|
||||
+ scheme = ''
|
||||
+ yield (scheme, None)
|
||||
+
|
||||
def http_error_auth_reqed(self, authreq, host, req, headers):
|
||||
# host may be an authority (without userinfo) or a URL with an
|
||||
# authority
|
||||
- # XXX could be multiple headers
|
||||
- authreq = headers.get(authreq, None)
|
||||
+ headers = headers.get_all(authreq)
|
||||
+ if not headers:
|
||||
+ # no header found
|
||||
+ return
|
||||
|
||||
- if authreq:
|
||||
- scheme = authreq.split()[0]
|
||||
- if scheme.lower() != 'basic':
|
||||
- raise ValueError("AbstractBasicAuthHandler does not"
|
||||
- " support the following scheme: '%s'" %
|
||||
- scheme)
|
||||
- else:
|
||||
- mo = AbstractBasicAuthHandler.rx.search(authreq)
|
||||
- if mo:
|
||||
- scheme, quote, realm = mo.groups()
|
||||
- if quote not in ['"',"'"]:
|
||||
- warnings.warn("Basic Auth Realm was unquoted",
|
||||
- UserWarning, 2)
|
||||
- if scheme.lower() == 'basic':
|
||||
- return self.retry_http_basic_auth(host, req, realm)
|
||||
+ unsupported = None
|
||||
+ for header in headers:
|
||||
+ for scheme, realm in self._parse_realm(header):
|
||||
+ if scheme.lower() != 'basic':
|
||||
+ unsupported = scheme
|
||||
+ continue
|
||||
+
|
||||
+ if realm is not None:
|
||||
+ # Use the first matching Basic challenge.
|
||||
+ # Ignore following challenges even if they use the Basic
|
||||
+ # scheme.
|
||||
+ return self.retry_http_basic_auth(host, req, realm)
|
||||
+
|
||||
+ if unsupported is not None:
|
||||
+ raise ValueError("AbstractBasicAuthHandler does not "
|
||||
+ "support the following scheme: %r"
|
||||
+ % (scheme,))
|
||||
|
||||
def retry_http_basic_auth(self, host, req, realm):
|
||||
user, pw = self.passwd.find_user_password(realm, host)
|
@ -0,0 +1,67 @@
|
||||
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||
From: "Miss Islington (bot)"
|
||||
<31488909+miss-islington@users.noreply.github.com>
|
||||
Date: Wed, 15 Jul 2020 05:36:36 -0700
|
||||
Subject: [PATCH] 00351: Avoid infinite loop in the tarfile module
|
||||
|
||||
Avoid infinite loop when reading specially crafted TAR files using the tarfile module
|
||||
(CVE-2019-20907).
|
||||
Fixed upstream: https://bugs.python.org/issue39017
|
||||
---
|
||||
Lib/tarfile.py | 2 ++
|
||||
Lib/test/recursion.tar | Bin 0 -> 516 bytes
|
||||
Lib/test/test_tarfile.py | 7 +++++++
|
||||
.../2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst | 1 +
|
||||
4 files changed, 10 insertions(+)
|
||||
create mode 100644 Lib/test/recursion.tar
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
|
||||
|
||||
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
|
||||
index 62d22150f5..2ea47978ff 100755
|
||||
--- a/Lib/tarfile.py
|
||||
+++ b/Lib/tarfile.py
|
||||
@@ -1231,6 +1231,8 @@ class TarInfo(object):
|
||||
|
||||
length, keyword = match.groups()
|
||||
length = int(length)
|
||||
+ if length == 0:
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
value = buf[match.end(2) + 1:match.start(1) + length - 1]
|
||||
|
||||
# Normally, we could just use "utf-8" as the encoding and "strict"
|
||||
diff --git a/Lib/test/recursion.tar b/Lib/test/recursion.tar
|
||||
new file mode 100644
|
||||
index 0000000000000000000000000000000000000000..b8237251964983f54ed1966297e887636cd0c5f4
|
||||
GIT binary patch
|
||||
literal 516
|
||||
zcmYdFPRz+kEn=W0Fn}74P8%Xw3X=l~85kIuo0>8xq$A1Gm}!7)KUsFc41m#O8A5+e
|
||||
I1_}|j06>QaCIA2c
|
||||
|
||||
literal 0
|
||||
HcmV?d00001
|
||||
|
||||
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
|
||||
index 4cd7d5370f..573be812ea 100644
|
||||
--- a/Lib/test/test_tarfile.py
|
||||
+++ b/Lib/test/test_tarfile.py
|
||||
@@ -395,6 +395,13 @@ class CommonReadTest(ReadTest):
|
||||
with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"):
|
||||
tar.extractfile(t).read()
|
||||
|
||||
+ def test_length_zero_header(self):
|
||||
+ # bpo-39017 (CVE-2019-20907): reading a zero-length header should fail
|
||||
+ # with an exception
|
||||
+ with self.assertRaisesRegex(tarfile.ReadError, "file could not be opened successfully"):
|
||||
+ with tarfile.open(support.findfile('recursion.tar')) as tar:
|
||||
+ pass
|
||||
+
|
||||
class MiscReadTestBase(CommonReadTest):
|
||||
def requires_name_attribute(self):
|
||||
pass
|
||||
diff --git a/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst b/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
|
||||
new file mode 100644
|
||||
index 0000000000..ad26676f8b
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Avoid infinite loop when reading specially crafted TAR files using the tarfile module (CVE-2019-20907).
|
@ -0,0 +1,70 @@
|
||||
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||
From: Tapas Kundu <39723251+tapakund@users.noreply.github.com>
|
||||
Date: Wed, 1 Jul 2020 01:00:22 +0530
|
||||
Subject: [PATCH] 00352: Resolve hash collisions for IPv4Interface and
|
||||
IPv6Interface
|
||||
|
||||
CVE-2020-14422
|
||||
The hash() methods of classes IPv4Interface and IPv6Interface had issue
|
||||
of generating constant hash values of 32 and 128 respectively causing hash collisions.
|
||||
The fix uses the hash() function to generate hash values for the objects
|
||||
instead of XOR operation.
|
||||
Fixed upstream: https://bugs.python.org/issue41004
|
||||
---
|
||||
Lib/ipaddress.py | 4 ++--
|
||||
Lib/test/test_ipaddress.py | 11 +++++++++++
|
||||
.../Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst | 1 +
|
||||
3 files changed, 14 insertions(+), 2 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst
|
||||
|
||||
diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py
|
||||
index 583f02ad54..98492136ca 100644
|
||||
--- a/Lib/ipaddress.py
|
||||
+++ b/Lib/ipaddress.py
|
||||
@@ -1418,7 +1418,7 @@ class IPv4Interface(IPv4Address):
|
||||
return False
|
||||
|
||||
def __hash__(self):
|
||||
- return self._ip ^ self._prefixlen ^ int(self.network.network_address)
|
||||
+ return hash((self._ip, self._prefixlen, int(self.network.network_address)))
|
||||
|
||||
__reduce__ = _IPAddressBase.__reduce__
|
||||
|
||||
@@ -2092,7 +2092,7 @@ class IPv6Interface(IPv6Address):
|
||||
return False
|
||||
|
||||
def __hash__(self):
|
||||
- return self._ip ^ self._prefixlen ^ int(self.network.network_address)
|
||||
+ return hash((self._ip, self._prefixlen, int(self.network.network_address)))
|
||||
|
||||
__reduce__ = _IPAddressBase.__reduce__
|
||||
|
||||
diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py
|
||||
index 1cef4217bc..7de444af4a 100644
|
||||
--- a/Lib/test/test_ipaddress.py
|
||||
+++ b/Lib/test/test_ipaddress.py
|
||||
@@ -1990,6 +1990,17 @@ class IpaddrUnitTest(unittest.TestCase):
|
||||
sixtofouraddr.sixtofour)
|
||||
self.assertFalse(bad_addr.sixtofour)
|
||||
|
||||
+ # issue41004 Hash collisions in IPv4Interface and IPv6Interface
|
||||
+ def testV4HashIsNotConstant(self):
|
||||
+ ipv4_address1 = ipaddress.IPv4Interface("1.2.3.4")
|
||||
+ ipv4_address2 = ipaddress.IPv4Interface("2.3.4.5")
|
||||
+ self.assertNotEqual(ipv4_address1.__hash__(), ipv4_address2.__hash__())
|
||||
+
|
||||
+ # issue41004 Hash collisions in IPv4Interface and IPv6Interface
|
||||
+ def testV6HashIsNotConstant(self):
|
||||
+ ipv6_address1 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:1")
|
||||
+ ipv6_address2 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:2")
|
||||
+ self.assertNotEqual(ipv6_address1.__hash__(), ipv6_address2.__hash__())
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
diff --git a/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst
|
||||
new file mode 100644
|
||||
index 0000000000..f5a9db52ff
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst
|
||||
@@ -0,0 +1 @@
|
||||
+CVE-2020-14422: The __hash__() methods of ipaddress.IPv4Interface and ipaddress.IPv6Interface incorrectly generated constant hash values of 32 and 128 respectively. This resulted in always causing hash collisions. The fix uses hash() to generate hash values for the tuple of (address, mask length, network address).
|
97
SOURCES/00353-architecture-names-upstream-downstream.patch
Normal file
97
SOURCES/00353-architecture-names-upstream-downstream.patch
Normal file
@ -0,0 +1,97 @@
|
||||
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||
From: Lumir Balhar <lbalhar@redhat.com>
|
||||
Date: Tue, 4 Aug 2020 12:04:03 +0200
|
||||
Subject: [PATCH] 00353: Original names for architectures with different names
|
||||
downstream
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
https://fedoraproject.org/wiki/Changes/Python_Upstream_Architecture_Names
|
||||
|
||||
Pythons in RHEL/Fedora used different names for some architectures
|
||||
than upstream and other distros (for example ppc64 vs. powerpc64).
|
||||
This was patched in patch 274, now it is sedded if %with legacy_archnames.
|
||||
|
||||
That meant that an extension built with the default upstream settings
|
||||
(on other distro or as an manylinux wheel) could not been found by Python
|
||||
on RHEL/Fedora because it had a different suffix.
|
||||
This patch adds the legacy names to importlib so Python is able
|
||||
to import extensions with a legacy architecture name in its
|
||||
file name.
|
||||
It work both ways, so it support both %with and %without legacy_archnames.
|
||||
|
||||
WARNING: This patch has no effect on Python built with bootstrap
|
||||
enabled because Python/importlib_external.h is not regenerated
|
||||
and therefore Python during bootstrap contains importlib from
|
||||
upstream without this feature. It's possible to include
|
||||
Python/importlib_external.h to this patch but it'd make rebasing
|
||||
a nightmare because it's basically a binary file.
|
||||
|
||||
Co-authored-by: Miro Hrončok <miro@hroncok.cz>
|
||||
---
|
||||
Lib/importlib/_bootstrap_external.py | 40 ++++++++++++++++++++++++++--
|
||||
1 file changed, 38 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py
|
||||
index 9feec50842..5bb2454a5c 100644
|
||||
--- a/Lib/importlib/_bootstrap_external.py
|
||||
+++ b/Lib/importlib/_bootstrap_external.py
|
||||
@@ -1361,7 +1361,7 @@ def _get_supported_file_loaders():
|
||||
|
||||
Each item is a tuple (loader, suffixes).
|
||||
"""
|
||||
- extensions = ExtensionFileLoader, _imp.extension_suffixes()
|
||||
+ extensions = ExtensionFileLoader, _alternative_architectures(_imp.extension_suffixes())
|
||||
source = SourceFileLoader, SOURCE_SUFFIXES
|
||||
bytecode = SourcelessFileLoader, BYTECODE_SUFFIXES
|
||||
return [extensions, source, bytecode]
|
||||
@@ -1428,7 +1428,7 @@ def _setup(_bootstrap_module):
|
||||
|
||||
# Constants
|
||||
setattr(self_module, '_relax_case', _make_relax_case())
|
||||
- EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())
|
||||
+ EXTENSION_SUFFIXES.extend(_alternative_architectures(_imp.extension_suffixes()))
|
||||
if builtin_os == 'nt':
|
||||
SOURCE_SUFFIXES.append('.pyw')
|
||||
if '_d.pyd' in EXTENSION_SUFFIXES:
|
||||
@@ -1441,3 +1441,39 @@ def _install(_bootstrap_module):
|
||||
supported_loaders = _get_supported_file_loaders()
|
||||
sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])
|
||||
sys.meta_path.append(PathFinder)
|
||||
+
|
||||
+
|
||||
+_ARCH_MAP = {
|
||||
+ "-arm-linux-gnueabi.": "-arm-linux-gnueabihf.",
|
||||
+ "-armeb-linux-gnueabi.": "-armeb-linux-gnueabihf.",
|
||||
+ "-mips64-linux-gnu.": "-mips64-linux-gnuabi64.",
|
||||
+ "-mips64el-linux-gnu.": "-mips64el-linux-gnuabi64.",
|
||||
+ "-ppc-linux-gnu.": "-powerpc-linux-gnu.",
|
||||
+ "-ppc-linux-gnuspe.": "-powerpc-linux-gnuspe.",
|
||||
+ "-ppc64-linux-gnu.": "-powerpc64-linux-gnu.",
|
||||
+ "-ppc64le-linux-gnu.": "-powerpc64le-linux-gnu.",
|
||||
+ # The above, but the other way around:
|
||||
+ "-arm-linux-gnueabihf.": "-arm-linux-gnueabi.",
|
||||
+ "-armeb-linux-gnueabihf.": "-armeb-linux-gnueabi.",
|
||||
+ "-mips64-linux-gnuabi64.": "-mips64-linux-gnu.",
|
||||
+ "-mips64el-linux-gnuabi64.": "-mips64el-linux-gnu.",
|
||||
+ "-powerpc-linux-gnu.": "-ppc-linux-gnu.",
|
||||
+ "-powerpc-linux-gnuspe.": "-ppc-linux-gnuspe.",
|
||||
+ "-powerpc64-linux-gnu.": "-ppc64-linux-gnu.",
|
||||
+ "-powerpc64le-linux-gnu.": "-ppc64le-linux-gnu.",
|
||||
+}
|
||||
+
|
||||
+
|
||||
+def _alternative_architectures(suffixes):
|
||||
+ """Add a suffix with an alternative architecture name
|
||||
+ to the list of suffixes so an extension built with
|
||||
+ the default (upstream) setting is loadable with our Pythons
|
||||
+ """
|
||||
+
|
||||
+ for suffix in suffixes:
|
||||
+ for original, alternative in _ARCH_MAP.items():
|
||||
+ if original in suffix:
|
||||
+ suffixes.append(suffix.replace(original, alternative))
|
||||
+ return suffixes
|
||||
+
|
||||
+ return suffixes
|
@ -0,0 +1,73 @@
|
||||
diff --git a/Lib/http/client.py b/Lib/http/client.py
|
||||
index f0d2642..0a044e9 100644
|
||||
--- a/Lib/http/client.py
|
||||
+++ b/Lib/http/client.py
|
||||
@@ -151,6 +151,10 @@ _contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
|
||||
# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
|
||||
# We are more lenient for assumed real world compatibility purposes.
|
||||
|
||||
+# These characters are not allowed within HTTP method names
|
||||
+# to prevent http header injection.
|
||||
+_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]')
|
||||
+
|
||||
# We always set the Content-Length header for these methods because some
|
||||
# servers will otherwise respond with a 411
|
||||
_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
|
||||
@@ -1117,6 +1121,8 @@ class HTTPConnection:
|
||||
else:
|
||||
raise CannotSendRequest(self.__state)
|
||||
|
||||
+ self._validate_method(method)
|
||||
+
|
||||
# Save the method we use, we need it later in the response phase
|
||||
self._method = method
|
||||
if not url:
|
||||
@@ -1207,6 +1213,15 @@ class HTTPConnection:
|
||||
# For HTTP/1.0, the server will assume "not chunked"
|
||||
pass
|
||||
|
||||
+ def _validate_method(self, method):
|
||||
+ """Validate a method name for putrequest."""
|
||||
+ # prevent http header injection
|
||||
+ match = _contains_disallowed_method_pchar_re.search(method)
|
||||
+ if match:
|
||||
+ raise ValueError(
|
||||
+ f"method can't contain control characters. {method!r} "
|
||||
+ f"(found at least {match.group()!r})")
|
||||
+
|
||||
def putheader(self, header, *values):
|
||||
"""Send a request header line to the server.
|
||||
|
||||
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
|
||||
index 5795b7a..af0350f 100644
|
||||
--- a/Lib/test/test_httplib.py
|
||||
+++ b/Lib/test/test_httplib.py
|
||||
@@ -359,6 +359,28 @@ class HeaderTests(TestCase):
|
||||
self.assertEqual(lines[2], "header: Second: val")
|
||||
|
||||
|
||||
+class HttpMethodTests(TestCase):
|
||||
+ def test_invalid_method_names(self):
|
||||
+ methods = (
|
||||
+ 'GET\r',
|
||||
+ 'POST\n',
|
||||
+ 'PUT\n\r',
|
||||
+ 'POST\nValue',
|
||||
+ 'POST\nHOST:abc',
|
||||
+ 'GET\nrHost:abc\n',
|
||||
+ 'POST\rRemainder:\r',
|
||||
+ 'GET\rHOST:\n',
|
||||
+ '\nPUT'
|
||||
+ )
|
||||
+
|
||||
+ for method in methods:
|
||||
+ with self.assertRaisesRegex(
|
||||
+ ValueError, "method can't contain control characters"):
|
||||
+ conn = client.HTTPConnection('example.com')
|
||||
+ conn.sock = FakeSocket(None)
|
||||
+ conn.request(method=method, url="/")
|
||||
+
|
||||
+
|
||||
class TransferEncodingTest(TestCase):
|
||||
expected_body = b"It's just a flesh wound"
|
||||
|
42
SOURCES/00355-CVE-2020-27619.patch
Normal file
42
SOURCES/00355-CVE-2020-27619.patch
Normal file
@ -0,0 +1,42 @@
|
||||
diff --git a/Lib/test/multibytecodec_support.py b/Lib/test/multibytecodec_support.py
|
||||
index f9884c6..98feec2 100644
|
||||
--- a/Lib/test/multibytecodec_support.py
|
||||
+++ b/Lib/test/multibytecodec_support.py
|
||||
@@ -300,29 +300,23 @@ class TestBase_Mapping(unittest.TestCase):
|
||||
self._test_mapping_file_plain()
|
||||
|
||||
def _test_mapping_file_plain(self):
|
||||
- unichrs = lambda s: ''.join(map(chr, map(eval, s.split('+'))))
|
||||
+ def unichrs(s):
|
||||
+ return ''.join(chr(int(x, 16)) for x in s.split('+'))
|
||||
+
|
||||
urt_wa = {}
|
||||
|
||||
with self.open_mapping_file() as f:
|
||||
for line in f:
|
||||
if not line:
|
||||
break
|
||||
- data = line.split('#')[0].strip().split()
|
||||
+ data = line.split('#')[0].split()
|
||||
if len(data) != 2:
|
||||
continue
|
||||
|
||||
- csetval = eval(data[0])
|
||||
- if csetval <= 0x7F:
|
||||
- csetch = bytes([csetval & 0xff])
|
||||
- elif csetval >= 0x1000000:
|
||||
- csetch = bytes([(csetval >> 24), ((csetval >> 16) & 0xff),
|
||||
- ((csetval >> 8) & 0xff), (csetval & 0xff)])
|
||||
- elif csetval >= 0x10000:
|
||||
- csetch = bytes([(csetval >> 16), ((csetval >> 8) & 0xff),
|
||||
- (csetval & 0xff)])
|
||||
- elif csetval >= 0x100:
|
||||
- csetch = bytes([(csetval >> 8), (csetval & 0xff)])
|
||||
- else:
|
||||
+ if data[0][:2] != '0x':
|
||||
+ self.fail(f"Invalid line: {line!r}")
|
||||
+ csetch = bytes.fromhex(data[0][2:])
|
||||
+ if len(csetch) == 1 and 0x80 <= csetch[0]:
|
||||
continue
|
||||
|
||||
unich = unichrs(data[1])
|
269
SOURCES/00356-k_and_a_options_for_pathfix.patch
Normal file
269
SOURCES/00356-k_and_a_options_for_pathfix.patch
Normal file
@ -0,0 +1,269 @@
|
||||
From 0cfd9a7f26488567b9a3e5ec192099a8b80ad9df Mon Sep 17 00:00:00 2001
|
||||
From: Lumir Balhar <lbalhar@redhat.com>
|
||||
Date: Tue, 19 Jan 2021 07:55:37 +0100
|
||||
Subject: [PATCH] [PATCH] bpo-37064: Add -k and -a options to pathfix.py tool
|
||||
(GH-16387)
|
||||
|
||||
* bpo-37064: Add option -k to Tools/scripts/pathfix.py (GH-15548)
|
||||
|
||||
Add flag -k to pathscript.py script: preserve shebang flags.
|
||||
|
||||
(cherry picked from commit 50254ac4c179cb412e90682098c97db786143929)
|
||||
|
||||
* bpo-37064: Add option -a to pathfix.py tool (GH-15717)
|
||||
|
||||
Add option -a to Tools/Scripts/pathfix.py script: add flags.
|
||||
|
||||
(cherry picked from commit 1dc1acbd73f05f14c974b7ce1041787d7abef31e)
|
||||
---
|
||||
Lib/test/test_tools/test_pathfix.py | 104 ++++++++++++++++++++++++++++
|
||||
Tools/scripts/pathfix.py | 64 +++++++++++++++--
|
||||
2 files changed, 163 insertions(+), 5 deletions(-)
|
||||
create mode 100644 Lib/test/test_tools/test_pathfix.py
|
||||
|
||||
diff --git a/Lib/test/test_tools/test_pathfix.py b/Lib/test/test_tools/test_pathfix.py
|
||||
new file mode 100644
|
||||
index 0000000..1f0585e
|
||||
--- /dev/null
|
||||
+++ b/Lib/test/test_tools/test_pathfix.py
|
||||
@@ -0,0 +1,104 @@
|
||||
+import os
|
||||
+import subprocess
|
||||
+import sys
|
||||
+import unittest
|
||||
+from test import support
|
||||
+from test.test_tools import import_tool, scriptsdir
|
||||
+
|
||||
+
|
||||
+class TestPathfixFunctional(unittest.TestCase):
|
||||
+ script = os.path.join(scriptsdir, 'pathfix.py')
|
||||
+
|
||||
+ def setUp(self):
|
||||
+ self.temp_file = support.TESTFN
|
||||
+ self.addCleanup(support.unlink, support.TESTFN)
|
||||
+
|
||||
+ def pathfix(self, shebang, pathfix_flags, exitcode=0, stdout='', stderr=''):
|
||||
+ with open(self.temp_file, 'w', encoding='utf8') as f:
|
||||
+ f.write(f'{shebang}\n' + 'print("Hello world")\n')
|
||||
+
|
||||
+ proc = subprocess.run(
|
||||
+ [sys.executable, self.script,
|
||||
+ *pathfix_flags, '-n', self.temp_file],
|
||||
+ universal_newlines=True, stdout=subprocess.PIPE,
|
||||
+ stderr=subprocess.PIPE)
|
||||
+
|
||||
+ if stdout == '' and proc.returncode == 0:
|
||||
+ stdout = f'{self.temp_file}: updating\n'
|
||||
+ self.assertEqual(proc.returncode, exitcode, proc)
|
||||
+ self.assertEqual(proc.stdout, stdout, proc)
|
||||
+ self.assertEqual(proc.stderr, stderr, proc)
|
||||
+
|
||||
+ with open(self.temp_file, 'r', encoding='utf8') as f:
|
||||
+ output = f.read()
|
||||
+
|
||||
+ lines = output.split('\n')
|
||||
+ self.assertEqual(lines[1:], ['print("Hello world")', ''])
|
||||
+ new_shebang = lines[0]
|
||||
+
|
||||
+ if proc.returncode != 0:
|
||||
+ self.assertEqual(shebang, new_shebang)
|
||||
+
|
||||
+ return new_shebang
|
||||
+
|
||||
+ def test_pathfix(self):
|
||||
+ self.assertEqual(
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python',
|
||||
+ ['-i', '/usr/bin/python3']),
|
||||
+ '#! /usr/bin/python3')
|
||||
+ self.assertEqual(
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python -R',
|
||||
+ ['-i', '/usr/bin/python3']),
|
||||
+ '#! /usr/bin/python3')
|
||||
+
|
||||
+ def test_pathfix_keeping_flags(self):
|
||||
+ self.assertEqual(
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python -R',
|
||||
+ ['-i', '/usr/bin/python3', '-k']),
|
||||
+ '#! /usr/bin/python3 -R')
|
||||
+ self.assertEqual(
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python',
|
||||
+ ['-i', '/usr/bin/python3', '-k']),
|
||||
+ '#! /usr/bin/python3')
|
||||
+
|
||||
+ def test_pathfix_adding_flag(self):
|
||||
+ self.assertEqual(
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python',
|
||||
+ ['-i', '/usr/bin/python3', '-a', 's']),
|
||||
+ '#! /usr/bin/python3 -s')
|
||||
+ self.assertEqual(
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python -S',
|
||||
+ ['-i', '/usr/bin/python3', '-a', 's']),
|
||||
+ '#! /usr/bin/python3 -s')
|
||||
+ self.assertEqual(
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python -V',
|
||||
+ ['-i', '/usr/bin/python3', '-a', 'v', '-k']),
|
||||
+ '#! /usr/bin/python3 -vV')
|
||||
+ self.assertEqual(
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python',
|
||||
+ ['-i', '/usr/bin/python3', '-a', 'Rs']),
|
||||
+ '#! /usr/bin/python3 -Rs')
|
||||
+ self.assertEqual(
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python -W default',
|
||||
+ ['-i', '/usr/bin/python3', '-a', 's', '-k']),
|
||||
+ '#! /usr/bin/python3 -sW default')
|
||||
+
|
||||
+ def test_pathfix_adding_errors(self):
|
||||
+ self.pathfix(
|
||||
+ '#! /usr/bin/env python -E',
|
||||
+ ['-i', '/usr/bin/python3', '-a', 'W default', '-k'],
|
||||
+ exitcode=2,
|
||||
+ stderr="-a option doesn't support whitespaces")
|
||||
+
|
||||
+
|
||||
+if __name__ == '__main__':
|
||||
+ unittest.main()
|
||||
diff --git a/Tools/scripts/pathfix.py b/Tools/scripts/pathfix.py
|
||||
index c5bf984..2dfa6e8 100755
|
||||
--- a/Tools/scripts/pathfix.py
|
||||
+++ b/Tools/scripts/pathfix.py
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
-# Change the #! line occurring in Python scripts. The new interpreter
|
||||
+# Change the #! line (shebang) occurring in Python scripts. The new interpreter
|
||||
# pathname must be given with a -i option.
|
||||
#
|
||||
# Command line arguments are files or directories to be processed.
|
||||
@@ -10,7 +10,13 @@
|
||||
# arguments).
|
||||
# The original file is kept as a back-up (with a "~" attached to its name),
|
||||
# -n flag can be used to disable this.
|
||||
-#
|
||||
+
|
||||
+# Sometimes you may find shebangs with flags such as `#! /usr/bin/env python -si`.
|
||||
+# Normally, pathfix overwrites the entire line, including the flags.
|
||||
+# To change interpreter and keep flags from the original shebang line, use -k.
|
||||
+# If you want to keep flags and add to them one single literal flag, use option -a.
|
||||
+
|
||||
+
|
||||
# Undoubtedly you can do this using find and sed or perl, but this is
|
||||
# a nice example of Python code that recurses down a directory tree
|
||||
# and uses regular expressions. Also note several subtleties like
|
||||
@@ -33,16 +39,21 @@ rep = sys.stdout.write
|
||||
new_interpreter = None
|
||||
preserve_timestamps = False
|
||||
create_backup = True
|
||||
+keep_flags = False
|
||||
+add_flags = b''
|
||||
|
||||
|
||||
def main():
|
||||
global new_interpreter
|
||||
global preserve_timestamps
|
||||
global create_backup
|
||||
- usage = ('usage: %s -i /interpreter -p -n file-or-directory ...\n' %
|
||||
+ global keep_flags
|
||||
+ global add_flags
|
||||
+
|
||||
+ usage = ('usage: %s -i /interpreter -p -n -k -a file-or-directory ...\n' %
|
||||
sys.argv[0])
|
||||
try:
|
||||
- opts, args = getopt.getopt(sys.argv[1:], 'i:pn')
|
||||
+ opts, args = getopt.getopt(sys.argv[1:], 'i:a:kpn')
|
||||
except getopt.error as msg:
|
||||
err(str(msg) + '\n')
|
||||
err(usage)
|
||||
@@ -54,6 +65,13 @@ def main():
|
||||
preserve_timestamps = True
|
||||
if o == '-n':
|
||||
create_backup = False
|
||||
+ if o == '-k':
|
||||
+ keep_flags = True
|
||||
+ if o == '-a':
|
||||
+ add_flags = a.encode()
|
||||
+ if b' ' in add_flags:
|
||||
+ err("-a option doesn't support whitespaces")
|
||||
+ sys.exit(2)
|
||||
if not new_interpreter or not new_interpreter.startswith(b'/') or \
|
||||
not args:
|
||||
err('-i option or file-or-directory missing\n')
|
||||
@@ -70,10 +88,14 @@ def main():
|
||||
if fix(arg): bad = 1
|
||||
sys.exit(bad)
|
||||
|
||||
+
|
||||
ispythonprog = re.compile(r'^[a-zA-Z0-9_]+\.py$')
|
||||
+
|
||||
+
|
||||
def ispython(name):
|
||||
return bool(ispythonprog.match(name))
|
||||
|
||||
+
|
||||
def recursedown(dirname):
|
||||
dbg('recursedown(%r)\n' % (dirname,))
|
||||
bad = 0
|
||||
@@ -96,6 +118,7 @@ def recursedown(dirname):
|
||||
if recursedown(fullname): bad = 1
|
||||
return bad
|
||||
|
||||
+
|
||||
def fix(filename):
|
||||
## dbg('fix(%r)\n' % (filename,))
|
||||
try:
|
||||
@@ -166,12 +189,43 @@ def fix(filename):
|
||||
# Return success
|
||||
return 0
|
||||
|
||||
+
|
||||
+def parse_shebang(shebangline):
|
||||
+ shebangline = shebangline.rstrip(b'\n')
|
||||
+ start = shebangline.find(b' -')
|
||||
+ if start == -1:
|
||||
+ return b''
|
||||
+ return shebangline[start:]
|
||||
+
|
||||
+
|
||||
+def populate_flags(shebangline):
|
||||
+ old_flags = b''
|
||||
+ if keep_flags:
|
||||
+ old_flags = parse_shebang(shebangline)
|
||||
+ if old_flags:
|
||||
+ old_flags = old_flags[2:]
|
||||
+ if not (old_flags or add_flags):
|
||||
+ return b''
|
||||
+ # On Linux, the entire string following the interpreter name
|
||||
+ # is passed as a single argument to the interpreter.
|
||||
+ # e.g. "#! /usr/bin/python3 -W Error -s" runs "/usr/bin/python3 "-W Error -s"
|
||||
+ # so shebang should have single '-' where flags are given and
|
||||
+ # flag might need argument for that reasons adding new flags is
|
||||
+ # between '-' and original flags
|
||||
+ # e.g. #! /usr/bin/python3 -sW Error
|
||||
+ return b' -' + add_flags + old_flags
|
||||
+
|
||||
+
|
||||
def fixline(line):
|
||||
if not line.startswith(b'#!'):
|
||||
return line
|
||||
+
|
||||
if b"python" not in line:
|
||||
return line
|
||||
- return b'#! ' + new_interpreter + b'\n'
|
||||
+
|
||||
+ flags = populate_flags(line)
|
||||
+ return b'#! ' + new_interpreter + flags + b'\n'
|
||||
+
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
--
|
||||
2.29.2
|
||||
|
184
SOURCES/00357-CVE-2021-3177.patch
Normal file
184
SOURCES/00357-CVE-2021-3177.patch
Normal file
@ -0,0 +1,184 @@
|
||||
From e92381a0a6a3e1f000956e1f1e70e543b9c2bcd5 Mon Sep 17 00:00:00 2001
|
||||
From: Benjamin Peterson <benjamin@python.org>
|
||||
Date: Mon, 18 Jan 2021 14:47:05 -0600
|
||||
Subject: [PATCH] [3.6] closes bpo-42938: Replace snprintf with Python unicode
|
||||
formatting in ctypes param reprs. (24239). (cherry picked from commit
|
||||
916610ef90a0d0761f08747f7b0905541f0977c7)
|
||||
|
||||
Co-authored-by: Benjamin Peterson <benjamin@python.org>
|
||||
---
|
||||
Lib/ctypes/test/test_parameters.py | 43 +++++++++++++++
|
||||
.../2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst | 2 +
|
||||
Modules/_ctypes/callproc.c | 55 +++++++------------
|
||||
3 files changed, 66 insertions(+), 34 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
|
||||
|
||||
diff --git a/Lib/ctypes/test/test_parameters.py b/Lib/ctypes/test/test_parameters.py
|
||||
index e4c25fd880cef..531894fdec838 100644
|
||||
--- a/Lib/ctypes/test/test_parameters.py
|
||||
+++ b/Lib/ctypes/test/test_parameters.py
|
||||
@@ -201,6 +201,49 @@ def __dict__(self):
|
||||
with self.assertRaises(ZeroDivisionError):
|
||||
WorseStruct().__setstate__({}, b'foo')
|
||||
|
||||
+ def test_parameter_repr(self):
|
||||
+ from ctypes import (
|
||||
+ c_bool,
|
||||
+ c_char,
|
||||
+ c_wchar,
|
||||
+ c_byte,
|
||||
+ c_ubyte,
|
||||
+ c_short,
|
||||
+ c_ushort,
|
||||
+ c_int,
|
||||
+ c_uint,
|
||||
+ c_long,
|
||||
+ c_ulong,
|
||||
+ c_longlong,
|
||||
+ c_ulonglong,
|
||||
+ c_float,
|
||||
+ c_double,
|
||||
+ c_longdouble,
|
||||
+ c_char_p,
|
||||
+ c_wchar_p,
|
||||
+ c_void_p,
|
||||
+ )
|
||||
+ self.assertRegex(repr(c_bool.from_param(True)), r"^<cparam '\?' at 0x[A-Fa-f0-9]+>$")
|
||||
+ self.assertEqual(repr(c_char.from_param(97)), "<cparam 'c' ('a')>")
|
||||
+ self.assertRegex(repr(c_wchar.from_param('a')), r"^<cparam 'u' at 0x[A-Fa-f0-9]+>$")
|
||||
+ self.assertEqual(repr(c_byte.from_param(98)), "<cparam 'b' (98)>")
|
||||
+ self.assertEqual(repr(c_ubyte.from_param(98)), "<cparam 'B' (98)>")
|
||||
+ self.assertEqual(repr(c_short.from_param(511)), "<cparam 'h' (511)>")
|
||||
+ self.assertEqual(repr(c_ushort.from_param(511)), "<cparam 'H' (511)>")
|
||||
+ self.assertRegex(repr(c_int.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
|
||||
+ self.assertRegex(repr(c_uint.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
|
||||
+ self.assertRegex(repr(c_long.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
|
||||
+ self.assertRegex(repr(c_ulong.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
|
||||
+ self.assertRegex(repr(c_longlong.from_param(20000)), r"^<cparam '[liq]' \(20000\)>$")
|
||||
+ self.assertRegex(repr(c_ulonglong.from_param(20000)), r"^<cparam '[LIQ]' \(20000\)>$")
|
||||
+ self.assertEqual(repr(c_float.from_param(1.5)), "<cparam 'f' (1.5)>")
|
||||
+ self.assertEqual(repr(c_double.from_param(1.5)), "<cparam 'd' (1.5)>")
|
||||
+ self.assertEqual(repr(c_double.from_param(1e300)), "<cparam 'd' (1e+300)>")
|
||||
+ self.assertRegex(repr(c_longdouble.from_param(1.5)), r"^<cparam ('d' \(1.5\)|'g' at 0x[A-Fa-f0-9]+)>$")
|
||||
+ self.assertRegex(repr(c_char_p.from_param(b'hihi')), "^<cparam 'z' \(0x[A-Fa-f0-9]+\)>$")
|
||||
+ self.assertRegex(repr(c_wchar_p.from_param('hihi')), "^<cparam 'Z' \(0x[A-Fa-f0-9]+\)>$")
|
||||
+ self.assertRegex(repr(c_void_p.from_param(0x12)), r"^<cparam 'P' \(0x0*12\)>$")
|
||||
+
|
||||
################################################################
|
||||
|
||||
if __name__ == '__main__':
|
||||
diff --git a/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
|
||||
new file mode 100644
|
||||
index 0000000000000..7df65a156feab
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Avoid static buffers when computing the repr of :class:`ctypes.c_double` and
|
||||
+:class:`ctypes.c_longdouble` values.
|
||||
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
|
||||
index d1c190f359108..2bb289bce043f 100644
|
||||
--- a/Modules/_ctypes/callproc.c
|
||||
+++ b/Modules/_ctypes/callproc.c
|
||||
@@ -461,58 +461,47 @@ is_literal_char(unsigned char c)
|
||||
static PyObject *
|
||||
PyCArg_repr(PyCArgObject *self)
|
||||
{
|
||||
- char buffer[256];
|
||||
switch(self->tag) {
|
||||
case 'b':
|
||||
case 'B':
|
||||
- sprintf(buffer, "<cparam '%c' (%d)>",
|
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
|
||||
self->tag, self->value.b);
|
||||
- break;
|
||||
case 'h':
|
||||
case 'H':
|
||||
- sprintf(buffer, "<cparam '%c' (%d)>",
|
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
|
||||
self->tag, self->value.h);
|
||||
- break;
|
||||
case 'i':
|
||||
case 'I':
|
||||
- sprintf(buffer, "<cparam '%c' (%d)>",
|
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
|
||||
self->tag, self->value.i);
|
||||
- break;
|
||||
case 'l':
|
||||
case 'L':
|
||||
- sprintf(buffer, "<cparam '%c' (%ld)>",
|
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%ld)>",
|
||||
self->tag, self->value.l);
|
||||
- break;
|
||||
|
||||
case 'q':
|
||||
case 'Q':
|
||||
- sprintf(buffer,
|
||||
-#ifdef MS_WIN32
|
||||
- "<cparam '%c' (%I64d)>",
|
||||
-#else
|
||||
- "<cparam '%c' (%lld)>",
|
||||
-#endif
|
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%lld)>",
|
||||
self->tag, self->value.q);
|
||||
- break;
|
||||
case 'd':
|
||||
- sprintf(buffer, "<cparam '%c' (%f)>",
|
||||
- self->tag, self->value.d);
|
||||
- break;
|
||||
- case 'f':
|
||||
- sprintf(buffer, "<cparam '%c' (%f)>",
|
||||
- self->tag, self->value.f);
|
||||
- break;
|
||||
-
|
||||
+ case 'f': {
|
||||
+ PyObject *f = PyFloat_FromDouble((self->tag == 'f') ? self->value.f : self->value.d);
|
||||
+ if (f == NULL) {
|
||||
+ return NULL;
|
||||
+ }
|
||||
+ PyObject *result = PyUnicode_FromFormat("<cparam '%c' (%R)>", self->tag, f);
|
||||
+ Py_DECREF(f);
|
||||
+ return result;
|
||||
+ }
|
||||
case 'c':
|
||||
if (is_literal_char((unsigned char)self->value.c)) {
|
||||
- sprintf(buffer, "<cparam '%c' ('%c')>",
|
||||
+ return PyUnicode_FromFormat("<cparam '%c' ('%c')>",
|
||||
self->tag, self->value.c);
|
||||
}
|
||||
else {
|
||||
- sprintf(buffer, "<cparam '%c' ('\\x%02x')>",
|
||||
+ return PyUnicode_FromFormat("<cparam '%c' ('\\x%02x')>",
|
||||
self->tag, (unsigned char)self->value.c);
|
||||
}
|
||||
- break;
|
||||
|
||||
/* Hm, are these 'z' and 'Z' codes useful at all?
|
||||
Shouldn't they be replaced by the functionality of c_string
|
||||
@@ -521,22 +510,20 @@ PyCArg_repr(PyCArgObject *self)
|
||||
case 'z':
|
||||
case 'Z':
|
||||
case 'P':
|
||||
- sprintf(buffer, "<cparam '%c' (%p)>",
|
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%p)>",
|
||||
self->tag, self->value.p);
|
||||
break;
|
||||
|
||||
default:
|
||||
if (is_literal_char((unsigned char)self->tag)) {
|
||||
- sprintf(buffer, "<cparam '%c' at %p>",
|
||||
- (unsigned char)self->tag, self);
|
||||
+ return PyUnicode_FromFormat("<cparam '%c' at %p>",
|
||||
+ (unsigned char)self->tag, (void *)self);
|
||||
}
|
||||
else {
|
||||
- sprintf(buffer, "<cparam 0x%02x at %p>",
|
||||
- (unsigned char)self->tag, self);
|
||||
+ return PyUnicode_FromFormat("<cparam 0x%02x at %p>",
|
||||
+ (unsigned char)self->tag, (void *)self);
|
||||
}
|
||||
- break;
|
||||
}
|
||||
- return PyUnicode_FromString(buffer);
|
||||
}
|
||||
|
||||
static PyMemberDef PyCArgType_members[] = {
|
62
SOURCES/check-pyc-and-pyo-timestamps.py
Normal file
62
SOURCES/check-pyc-and-pyo-timestamps.py
Normal file
@ -0,0 +1,62 @@
|
||||
"""Checks if all *.pyc and *.pyo files have later mtime than their *.py files."""
|
||||
|
||||
import importlib.util
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Test fixtures and helper scripts that are intentionally shipped without
# bytecode (the bad-coding / badsyntax files cannot be byte-compiled at all).
not_compiled = [
    '/usr/bin/pathfix.py',
    'test/bad_coding.py',
    'test/bad_coding2.py',
    'test/badsyntax_3131.py',
    'test/badsyntax_future3.py',
    'test/badsyntax_future4.py',
    'test/badsyntax_future5.py',
    'test/badsyntax_future6.py',
    'test/badsyntax_future7.py',
    'test/badsyntax_future8.py',
    'test/badsyntax_future9.py',
    'test/badsyntax_future10.py',
    'test/badsyntax_async1.py',
    'test/badsyntax_async2.py',
    'test/badsyntax_async3.py',
    'test/badsyntax_async4.py',
    'test/badsyntax_async5.py',
    'test/badsyntax_async6.py',
    'test/badsyntax_async7.py',
    'test/badsyntax_async8.py',
    'test/badsyntax_async9.py',
    'test/badsyntax_pep3120.py',
    'lib2to3/tests/data/bom.py',
    'lib2to3/tests/data/crlf.py',
    'lib2to3/tests/data/different_encoding.py',
    'lib2to3/tests/data/false_encoding.py',
    'lib2to3/tests/data/py2_test_grammar.py',
    '.debug-gdb.py',
]

# Count of files whose bytecode turned out to be older than the source.
failed = 0


def bytecode_expected(source):
    """Return True if *source* is expected to have a bytecode cache file.

    A source file is exempt when its path ends with one of the known
    deliberately-uncompiled entries in ``not_compiled``.
    """
    return not any(source.endswith(suffix) for suffix in not_compiled)
|
||||
|
||||
|
||||
# Walk every source file named on the command line (skipping the known
# uncompiled ones) and verify its bytecode caches are not older than it.
sources_to_check = (source for source in sys.argv[1:] if bytecode_expected(source))
for source in sources_to_check:
    source_mtime = os.path.getmtime(source)
    # Check both cache flavours: debug_override=True (.pyo-era) and False (.pyc).
    caches = [importlib.util.cache_from_source(source, flavour)
              for flavour in (True, False)]
    for cache in caches:
        if os.path.getmtime(cache) < source_mtime:
            sys.stderr.write('Failed bytecompilation timestamps check: ')
            sys.stderr.write('Bytecode file {} is older than source file {}.\n'.format(cache, source))
            failed += 1

if failed:
    sys.stderr.write('\n{} files failed bytecompilation timestamps check.\n'.format(failed))
    sys.exit(1)
|
28
SOURCES/get-source.sh
Executable file
28
SOURCES/get-source.sh
Executable file
@ -0,0 +1,28 @@
|
||||
#! /bin/bash -ex
|
||||
|
||||
# Download a release of Python (if missing) and remove .exe files from it
|
||||
|
||||
version=$1
|
||||
|
||||
if [ -z "${version}" ]; then
|
||||
echo "Usage: $0 VERSION" >& 2
|
||||
echo "" >& 2
|
||||
echo "example: $0 3.6.6" >& 2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
versionedname=Python-${version}
|
||||
orig_archive=${versionedname}.tar.xz
|
||||
new_archive=${versionedname}-noexe.tar.xz
|
||||
|
||||
if [ ! -e ${orig_archive} ]; then
|
||||
wget -N https://www.python.org/ftp/python/${version}/${orig_archive}
|
||||
fi
|
||||
|
||||
deleted_names=$(tar --list -Jf ${orig_archive} | grep '\.exe$')
|
||||
|
||||
# tar --delete does not operate on compressed archives, so do
|
||||
# xz compression/decompression explicitly
|
||||
xz --decompress --stdout ${orig_archive} | \
|
||||
tar --delete -v ${deleted_names} | \
|
||||
xz --compress --stdout -3 -T0 > ${new_archive}
|
35
SOURCES/idle3.appdata.xml
Normal file
35
SOURCES/idle3.appdata.xml
Normal file
@ -0,0 +1,35 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
|
||||
<!-- Copyright 2017 Zbigniew Jędrzejewski-Szmek -->
|
||||
<application>
|
||||
<id type="desktop">idle3.desktop</id>
|
||||
<name>IDLE3</name>
|
||||
<metadata_licence>CC0</metadata_licence>
|
||||
<project_license>Python-2.0</project_license>
|
||||
<summary>Python 3 Integrated Development and Learning Environment</summary>
|
||||
<description>
|
||||
<p>
|
||||
IDLE is Python’s Integrated Development and Learning Environment.
|
||||
The GUI is uniform between Windows, Unix, and Mac OS X.
|
||||
IDLE provides an easy way to start writing, running, and debugging
|
||||
Python code.
|
||||
</p>
|
||||
<p>
|
||||
IDLE is written in pure Python, and uses the tkinter GUI toolkit.
|
||||
It provides:
|
||||
</p>
|
||||
<ul>
|
||||
<li>a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,</li>
|
||||
<li>a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,</li>
|
||||
<li>search within any window, replace within editor windows, and search through multiple files (grep),</li>
|
||||
<li>a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.</li>
|
||||
</ul>
|
||||
</description>
|
||||
<url type="homepage">https://docs.python.org/3/library/idle.html</url>
|
||||
<screenshots>
|
||||
<screenshot type="default">http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png</screenshot>
|
||||
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png</screenshot>
|
||||
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png</screenshot>
|
||||
</screenshots>
|
||||
<update_contact>zbyszek@in.waw.pl</update_contact>
|
||||
</application>
|
11
SOURCES/idle3.desktop
Normal file
11
SOURCES/idle3.desktop
Normal file
@ -0,0 +1,11 @@
|
||||
[Desktop Entry]
|
||||
Version=1.0
|
||||
Name=IDLE 3
|
||||
Comment=Python 3 Integrated Development and Learning Environment
|
||||
Exec=idle3 %F
|
||||
TryExec=idle3
|
||||
Terminal=false
|
||||
Type=Application
|
||||
Icon=idle3
|
||||
Categories=Development;IDE;
|
||||
MimeType=text/x-python;
|
8
SOURCES/no-python
Executable file
8
SOURCES/no-python
Executable file
@ -0,0 +1,8 @@
|
||||
#! /bin/bash
|
||||
|
||||
echo "For more information about this script,"
|
||||
echo "please see the manual page of the same name."
|
||||
|
||||
echo "Run: man unversioned-python"
|
||||
exit 2
|
||||
|
57
SOURCES/unversioned-python.1
Normal file
57
SOURCES/unversioned-python.1
Normal file
@ -0,0 +1,57 @@
|
||||
.\" unversioned-python.1
|
||||
.TH UNVERSIONED-PYTHON 1 "17 September 2018"
|
||||
.SH NAME
|
||||
unversioned-python \- info on how to set up the `python` command.
|
||||
.SH SYNOPSIS
|
||||
.B unversioned-python
|
||||
.SH DESCRIPTION
|
||||
.B unversioned-python
|
||||
The "unversioned" `python` command (/usr/bin/python) is missing by default.
|
||||
We recommend using `python3` or `python2` instead.
|
||||
If using the explicit versioned command is inconvenient,
|
||||
you can use `alternatives` to configure `python` to launch
|
||||
either Python 3 or Python 2.
|
||||
|
||||
Note: The `python3` or `python2` package needs to be installed before its
|
||||
functionality is selected.
|
||||
|
||||
.SH EXAMPLES
|
||||
.B alternatives
|
||||
.B --config
|
||||
.IR python
|
||||
|
||||
Interactively select what the `python` command runs.
|
||||
|
||||
|
||||
.B alternatives
|
||||
.B --set
|
||||
.IR python
|
||||
.IR /usr/bin/python3
|
||||
|
||||
Configure the `python` command to run Python 3
|
||||
|
||||
Note: this is non-standard behavior according to [PEP 394].
|
||||
|
||||
|
||||
.B alternatives
|
||||
.B --set
|
||||
.IR python
|
||||
.IR /usr/bin/python2
|
||||
|
||||
Configure the `python` command to run Python 2
|
||||
|
||||
Note: please review the support lifecycle of python2 before relying on it
|
||||
|
||||
|
||||
.B alternatives
|
||||
.B --auto
|
||||
.IR python
|
||||
|
||||
Undo configuration changes and revert to the default (missing `python` command)
|
||||
|
||||
|
||||
.SH LINKS
|
||||
|
||||
.B [PEP 394]:
|
||||
.IR https://www.python.org/dev/peps/pep-0394/
|
||||
|
3242
SPECS/python3.spec
Normal file
3242
SPECS/python3.spec
Normal file
File diff suppressed because it is too large
Load Diff
Loading…
Reference in New Issue
Block a user