commit 10e0f3ba3c
    import python3-3.6.8-4.el8_0

.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
SOURCES/Python-3.6.8-noexe.tar.xz

.python3.metadata (new file, 1 line)
@@ -0,0 +1 @@
a39802ac8f0c61645c6a50fbdd32e3ca92862ff5 SOURCES/Python-3.6.8-noexe.tar.xz

SOURCES/00001-rpath.patch (new file, 19 lines)
@@ -0,0 +1,19 @@
diff -up Python-3.1.1/Lib/distutils/unixccompiler.py.rpath Python-3.1.1/Lib/distutils/unixccompiler.py
--- Python-3.1.1/Lib/distutils/unixccompiler.py.rpath	2009-09-04 17:29:34.000000000 -0400
+++ Python-3.1.1/Lib/distutils/unixccompiler.py	2009-09-04 17:49:54.000000000 -0400
@@ -141,6 +141,15 @@ class UnixCCompiler(CCompiler):
     if sys.platform == "cygwin":
         exe_extension = ".exe"
 
+    def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+        """Remove standard library path from rpath"""
+        libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args(
+            libraries, library_dirs, runtime_library_dirs)
+        libdir = sysconfig.get_config_var('LIBDIR')
+        if runtime_library_dirs and (libdir in runtime_library_dirs):
+            runtime_library_dirs.remove(libdir)
+        return libraries, library_dirs, runtime_library_dirs
+
     def preprocess(self, source, output_file=None, macros=None,
                    include_dirs=None, extra_preargs=None, extra_postargs=None):
         fixed_args = self._fix_compile_args(None, macros, include_dirs)
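The rpath patch hooks distutils' link step: after the base class normalizes the library arguments, the interpreter's own LIBDIR is dropped from the runtime search path, so extensions built on the distro do not hard-code /usr/lib64 into their DT_RUNPATH. A minimal sketch of the same idea against the public sysconfig API (illustrative only, not part of the patch):

    # Illustrative sketch only: mimics what the patched _fix_lib_args() does,
    # without modifying distutils. Assumes a distro Python where LIBDIR is the
    # system library directory (e.g. /usr/lib64).
    import sysconfig

    def strip_system_libdir(runtime_library_dirs):
        """Return a copy of the rpath list without the system LIBDIR."""
        libdir = sysconfig.get_config_var('LIBDIR')
        return [d for d in runtime_library_dirs if d != libdir]

    # Only the non-system entry survives:
    print(strip_system_libdir(['/usr/lib64', '/opt/acme/lib']))  # ['/opt/acme/lib']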

SOURCES/00102-lib64.patch (new file, 202 lines)
@@ -0,0 +1,202 @@
|
||||
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
|
||||
index 9474e9c..c0ce4c6 100644
|
||||
--- a/Lib/distutils/command/install.py
|
||||
+++ b/Lib/distutils/command/install.py
|
||||
@@ -30,14 +30,14 @@ WINDOWS_SCHEME = {
|
||||
INSTALL_SCHEMES = {
|
||||
'unix_prefix': {
|
||||
'purelib': '$base/lib/python$py_version_short/site-packages',
|
||||
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
|
||||
+ 'platlib': '$platbase/lib64/python$py_version_short/site-packages',
|
||||
'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
|
||||
'scripts': '$base/bin',
|
||||
'data' : '$base',
|
||||
},
|
||||
'unix_home': {
|
||||
'purelib': '$base/lib/python',
|
||||
- 'platlib': '$base/lib/python',
|
||||
+ 'platlib': '$base/lib64/python',
|
||||
'headers': '$base/include/python/$dist_name',
|
||||
'scripts': '$base/bin',
|
||||
'data' : '$base',
|
||||
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
|
||||
index 026cca7..6d3e077 100644
|
||||
--- a/Lib/distutils/sysconfig.py
|
||||
+++ b/Lib/distutils/sysconfig.py
|
||||
@@ -132,8 +132,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
|
||||
prefix = plat_specific and EXEC_PREFIX or PREFIX
|
||||
|
||||
if os.name == "posix":
|
||||
+ if plat_specific or standard_lib:
|
||||
+ lib = "lib64"
|
||||
+ else:
|
||||
+ lib = "lib"
|
||||
libpython = os.path.join(prefix,
|
||||
- "lib", "python" + get_python_version())
|
||||
+ lib, "python" + get_python_version())
|
||||
if standard_lib:
|
||||
return libpython
|
||||
else:
|
||||
diff a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
|
||||
--- a/Lib/distutils/tests/test_install.py
|
||||
+++ b/Lib/distutils/tests/test_install.py
|
||||
@@ -57,8 +57,9 @@
|
||||
self.assertEqual(got, expected)
|
||||
|
||||
libdir = os.path.join(destination, "lib", "python")
|
||||
+ platlibdir = os.path.join(destination, "lib64", "python")
|
||||
check_path(cmd.install_lib, libdir)
|
||||
- check_path(cmd.install_platlib, libdir)
|
||||
+ check_path(cmd.install_platlib, platlibdir)
|
||||
check_path(cmd.install_purelib, libdir)
|
||||
check_path(cmd.install_headers,
|
||||
os.path.join(destination, "include", "python", "foopkg"))
|
||||
diff --git a/Lib/site.py b/Lib/site.py
|
||||
index a84e3bb..ba0d3ea 100644
|
||||
--- a/Lib/site.py
|
||||
+++ b/Lib/site.py
|
||||
@@ -303,11 +303,15 @@ def getsitepackages(prefixes=None):
|
||||
seen.add(prefix)
|
||||
|
||||
if os.sep == '/':
|
||||
+ sitepackages.append(os.path.join(prefix, "lib64",
|
||||
+ "python" + sys.version[:3],
|
||||
+ "site-packages"))
|
||||
sitepackages.append(os.path.join(prefix, "lib",
|
||||
"python%d.%d" % sys.version_info[:2],
|
||||
"site-packages"))
|
||||
else:
|
||||
sitepackages.append(prefix)
|
||||
+ sitepackages.append(os.path.join(prefix, "lib64", "site-packages"))
|
||||
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
|
||||
if sys.platform == "darwin":
|
||||
# for framework builds *only* we add the standard Apple
|
||||
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
|
||||
index b9bbfe5..2a5f29c 100644
|
||||
--- a/Lib/sysconfig.py
|
||||
+++ b/Lib/sysconfig.py
|
||||
@@ -20,10 +20,10 @@ __all__ = [
|
||||
|
||||
_INSTALL_SCHEMES = {
|
||||
'posix_prefix': {
|
||||
- 'stdlib': '{installed_base}/lib/python{py_version_short}',
|
||||
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
|
||||
+ 'stdlib': '{installed_base}/lib64/python{py_version_short}',
|
||||
+ 'platstdlib': '{platbase}/lib64/python{py_version_short}',
|
||||
'purelib': '{base}/lib/python{py_version_short}/site-packages',
|
||||
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
|
||||
+ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages',
|
||||
'include':
|
||||
'{installed_base}/include/python{py_version_short}{abiflags}',
|
||||
'platinclude':
|
||||
@@ -61,10 +61,10 @@ _INSTALL_SCHEMES = {
|
||||
'data': '{userbase}',
|
||||
},
|
||||
'posix_user': {
|
||||
- 'stdlib': '{userbase}/lib/python{py_version_short}',
|
||||
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
|
||||
+ 'stdlib': '{userbase}/lib64/python{py_version_short}',
|
||||
+ 'platstdlib': '{userbase}/lib64/python{py_version_short}',
|
||||
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
|
||||
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
|
||||
+ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages',
|
||||
'include': '{userbase}/include/python{py_version_short}',
|
||||
'scripts': '{userbase}/bin',
|
||||
'data': '{userbase}',
|
||||
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
|
||||
index f698927..bc977b5 100644
|
||||
--- a/Lib/test/test_site.py
|
||||
+++ b/Lib/test/test_site.py
|
||||
@@ -248,8 +248,8 @@ class HelperFunctionsTests(unittest.TestCase):
|
||||
self.assertEqual(dirs[1], wanted)
|
||||
elif os.sep == '/':
|
||||
# OS X non-framework builds, Linux, FreeBSD, etc
|
||||
- self.assertEqual(len(dirs), 1)
|
||||
- wanted = os.path.join('xoxo', 'lib',
|
||||
+ self.assertEqual(len(dirs), 2)
|
||||
+ wanted = os.path.join('xoxo', 'lib64',
|
||||
'python%d.%d' % sys.version_info[:2],
|
||||
'site-packages')
|
||||
self.assertEqual(dirs[0], wanted)
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index 8fa7934..a693917 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -126,7 +126,7 @@ LIBDIR= @libdir@
|
||||
MANDIR= @mandir@
|
||||
INCLUDEDIR= @includedir@
|
||||
CONFINCLUDEDIR= $(exec_prefix)/include
|
||||
-SCRIPTDIR= $(prefix)/lib
|
||||
+SCRIPTDIR= $(prefix)/lib64
|
||||
ABIFLAGS= @ABIFLAGS@
|
||||
|
||||
# Detailed destination directories
|
||||
diff --git a/Modules/getpath.c b/Modules/getpath.c
|
||||
index 65b47a3..eaa756c 100644
|
||||
--- a/Modules/getpath.c
|
||||
+++ b/Modules/getpath.c
|
||||
@@ -494,7 +494,7 @@ calculate_path(void)
|
||||
_pythonpath = Py_DecodeLocale(PYTHONPATH, NULL);
|
||||
_prefix = Py_DecodeLocale(PREFIX, NULL);
|
||||
_exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL);
|
||||
- lib_python = Py_DecodeLocale("lib/python" VERSION, NULL);
|
||||
+ lib_python = Py_DecodeLocale("lib64/python" VERSION, NULL);
|
||||
|
||||
if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) {
|
||||
Py_FatalError(
|
||||
@@ -683,7 +683,7 @@ calculate_path(void)
|
||||
}
|
||||
else
|
||||
wcsncpy(zip_path, _prefix, MAXPATHLEN);
|
||||
- joinpath(zip_path, L"lib/python00.zip");
|
||||
+ joinpath(zip_path, L"lib64/python00.zip");
|
||||
bufsz = wcslen(zip_path); /* Replace "00" with version */
|
||||
zip_path[bufsz - 6] = VERSION[0];
|
||||
zip_path[bufsz - 5] = VERSION[2];
|
||||
@@ -695,7 +695,7 @@ calculate_path(void)
|
||||
fprintf(stderr,
|
||||
"Could not find platform dependent libraries <exec_prefix>\n");
|
||||
wcsncpy(exec_prefix, _exec_prefix, MAXPATHLEN);
|
||||
- joinpath(exec_prefix, L"lib/lib-dynload");
|
||||
+ joinpath(exec_prefix, L"lib64/lib-dynload");
|
||||
}
|
||||
/* If we found EXEC_PREFIX do *not* reduce it! (Yet.) */
|
||||
|
||||
diff --git a/setup.py b/setup.py
|
||||
index 0f2dfc4..da37896 100644
|
||||
--- a/setup.py
|
||||
+++ b/setup.py
|
||||
@@ -492,7 +492,7 @@ class PyBuildExt(build_ext):
|
||||
# directories (i.e. '.' and 'Include') must be first. See issue
|
||||
# 10520.
|
||||
if not cross_compiling:
|
||||
- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
|
||||
+ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64')
|
||||
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
|
||||
# only change this for cross builds for 3.3, issues on Mageia
|
||||
if cross_compiling:
|
||||
@@ -780,11 +780,11 @@ class PyBuildExt(build_ext):
|
||||
elif curses_library:
|
||||
readline_libs.append(curses_library)
|
||||
elif self.compiler.find_library_file(lib_dirs +
|
||||
- ['/usr/lib/termcap'],
|
||||
+ ['/usr/lib64/termcap'],
|
||||
'termcap'):
|
||||
readline_libs.append('termcap')
|
||||
exts.append( Extension('readline', ['readline.c'],
|
||||
- library_dirs=['/usr/lib/termcap'],
|
||||
+ library_dirs=['/usr/lib64/termcap'],
|
||||
extra_link_args=readline_extra_link_args,
|
||||
libraries=readline_libs) )
|
||||
else:
|
||||
@@ -821,8 +821,8 @@ class PyBuildExt(build_ext):
|
||||
if krb5_h:
|
||||
ssl_incs += krb5_h
|
||||
ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
|
||||
- ['/usr/local/ssl/lib',
|
||||
- '/usr/contrib/ssl/lib/'
|
||||
+ ['/usr/local/ssl/lib64',
|
||||
+ '/usr/contrib/ssl/lib64/'
|
||||
] )
|
||||
|
||||
if (ssl_incs is not None and
|
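Taken together, the hunks above switch every platform-specific location (platlib, platstdlib, the zip landmark, lib-dynload, the termcap and OpenSSL search paths) from lib to lib64, while pure-Python packages stay under lib. A quick, hedged way to inspect the resulting layout on an installed interpreter is to query distutils and sysconfig directly; the paths in the comments assume a 64-bit RHEL/Fedora build and are illustrative only:

    # Illustrative check of the patched install layout; on an unpatched
    # upstream build both calls return a path under "lib".
    import sysconfig
    from distutils.sysconfig import get_python_lib

    print(get_python_lib(plat_specific=1))  # e.g. /usr/lib64/python3.6/site-packages
    print(get_python_lib(plat_specific=0))  # e.g. /usr/lib/python3.6/site-packages
    print(sysconfig.get_path('platlib'))    # follows the patched posix_prefix scheme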

SOURCES/00111-no-static-lib.patch (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index 9cd482f..b074b26 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -549,7 +549,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c
|
||||
$(PYTHON_FOR_REGEN) ./Tools/clinic/clinic.py --make
|
||||
|
||||
# Build the interpreter
|
||||
-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
|
||||
+$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY)
|
||||
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
|
||||
|
||||
platform: $(BUILDPYTHON) pybuilddir.txt
|
||||
@@ -597,12 +597,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
|
||||
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
|
||||
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
|
||||
|
||||
-
|
||||
-# Build static library
|
||||
-$(LIBRARY): $(LIBRARY_OBJS)
|
||||
- -rm -f $@
|
||||
- $(AR) $(ARFLAGS) $@ $(LIBRARY_OBJS)
|
||||
-
|
||||
libpython$(LDVERSION).so: $(LIBRARY_OBJS)
|
||||
if test $(INSTSONAME) != $(LDLIBRARY); then \
|
||||
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
|
||||
@@ -692,7 +686,7 @@ Modules/Setup: $(srcdir)/Modules/Setup.dist
|
||||
echo "-----------------------------------------------"; \
|
||||
fi
|
||||
|
||||
-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
|
||||
+Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY)
|
||||
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
|
||||
|
||||
############################################################################
|
||||
@@ -1428,17 +1422,6 @@ libainstall: @DEF_MAKE_RULE@ python-config
|
||||
else true; \
|
||||
fi; \
|
||||
done
|
||||
- @if test -d $(LIBRARY); then :; else \
|
||||
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
|
||||
- if test "$(SHLIB_SUFFIX)" = .dll; then \
|
||||
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
|
||||
- else \
|
||||
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
|
||||
- fi; \
|
||||
- else \
|
||||
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \
|
||||
- fi; \
|
||||
- fi
|
||||
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
|
||||
$(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o
|
||||
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
|
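The Makefile hunks above drop the static libpython archive from the build and install steps, leaving only the shared library. A hedged way to confirm the result on an installed interpreter is to read the build configuration back through sysconfig (the variable names are standard; the values in the comments are what a patched shared-library build would typically report):

    # Illustrative only: inspect how the interpreter was linked. On a build
    # carrying this patch one expects a shared libpython and no installed
    # static archive; exact values are build-dependent.
    import sysconfig

    print(sysconfig.get_config_var('Py_ENABLE_SHARED'))  # 1 on a shared-libpython build
    print(sysconfig.get_config_var('LDLIBRARY'))          # e.g. libpython3.6m.so.1.0
    print(sysconfig.get_config_var('LIBRARY'))            # archive name; not installed here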

SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
diff -up Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/case.py
|
||||
--- Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400
|
||||
+++ Python-3.2.2/Lib/unittest/case.py 2011-09-09 06:35:16.365568382 -0400
|
||||
@@ -3,6 +3,7 @@
|
||||
import sys
|
||||
import functools
|
||||
import difflib
|
||||
+import os
|
||||
import logging
|
||||
import pprint
|
||||
import re
|
||||
@@ -101,5 +102,21 @@ def expectedFailure(func):
|
||||
raise self.test_case.failureException(msg)
|
||||
|
||||
+# Non-standard/downstream-only hooks for handling issues with specific test
|
||||
+# cases:
|
||||
+
|
||||
+def _skipInRpmBuild(reason):
|
||||
+ """
|
||||
+ Non-standard/downstream-only decorator for marking a specific unit test
|
||||
+ to be skipped when run within the %check of an rpmbuild.
|
||||
+
|
||||
+ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within
|
||||
+ the environment, and has no effect otherwise.
|
||||
+ """
|
||||
+ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
|
||||
+ return skip(reason)
|
||||
+ else:
|
||||
+ return _id
|
||||
+
|
||||
class _AssertRaisesBaseContext(_BaseTestCaseContext):
|
||||
|
||||
def __init__(self, expected, test_case, expected_regex=None):
|
||||
diff -up Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/__init__.py
|
||||
--- Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400
|
||||
+++ Python-3.2.2/Lib/unittest/__init__.py 2011-09-09 06:35:16.366568382 -0400
|
||||
@@ -57,7 +57,8 @@ __unittest = True
|
||||
|
||||
from .result import TestResult
|
||||
from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf,
|
||||
- skipUnless, expectedFailure)
|
||||
+ skipUnless, expectedFailure,
|
||||
+ _skipInRpmBuild)
|
||||
from .suite import BaseTestSuite, TestSuite
|
||||
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
|
||||
findTestCases)
|
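_skipInRpmBuild only takes effect when WITHIN_PYTHON_RPM_BUILD is present in the environment (as it is during %check); otherwise it returns the identity decorator and the test runs normally. A small, hypothetical usage sketch, which requires an interpreter carrying this downstream-only patch:

    # Hypothetical example of the downstream-only hook added above. Upstream
    # unittest has no _skipInRpmBuild; the environment is consulted when the
    # class body is executed, so WITHIN_PYTHON_RPM_BUILD must already be set.
    import unittest

    class ChrootSensitiveTests(unittest.TestCase):

        @unittest._skipInRpmBuild('depends on services unavailable in the build chroot')
        def test_something_fragile(self):
            self.assertEqual(2 + 2, 4)

    if __name__ == '__main__':
        unittest.main()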

SOURCES/00155-avoid-ctypes-thunks.patch (new file, 15 lines)
@@ -0,0 +1,15 @@
diff -up Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 Python-3.2.3/Lib/ctypes/__init__.py
--- Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391	2012-04-20 15:12:49.017867692 -0400
+++ Python-3.2.3/Lib/ctypes/__init__.py	2012-04-20 15:15:09.501111408 -0400
@@ -275,11 +275,6 @@ def _reset_cache():
     # _SimpleCData.c_char_p_from_param
     POINTER(c_char).from_param = c_char_p.from_param
     _pointer_type_cache[None] = c_void_p
-    # XXX for whatever reasons, creating the first instance of a callback
-    # function is needed for the unittests on Win64 to succeed. This MAY
-    # be a compiler bug, since the problem occurs only when _ctypes is
-    # compiled with the MS SDK compiler. Or an uninitialized variable?
-    CFUNCTYPE(c_int)(lambda: None)
 
 def create_unicode_buffer(init, size=None):
     """create_unicode_buffer(aString) -> character array

SOURCES/00160-disable-test_fs_holes-in-rpm-build.patch (new file, 11 lines)
@@ -0,0 +1,11 @@
diff -up cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build cpython-59223da36dec/Lib/test/test_posix.py
--- cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build	2012-08-07 17:15:59.000000000 -0400
+++ cpython-59223da36dec/Lib/test/test_posix.py	2012-08-07 17:16:53.528330330 -0400
@@ -973,6 +973,7 @@ class PosixTester(unittest.TestCase):
         posix.RTLD_GLOBAL
         posix.RTLD_LOCAL
 
+    @unittest._skipInRpmBuild('running kernel may not match kernel in chroot')
     @unittest.skipUnless(hasattr(os, 'SEEK_HOLE'),
                          "test needs an OS that reports file holes")
     def test_fs_holes(self):

@@ -0,0 +1,11 @@
diff -up Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds Python-3.3.0b1/Lib/test/test_socket.py
--- Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds	2012-07-24 15:02:30.823355067 -0400
+++ Python-3.3.0b1/Lib/test/test_socket.py	2012-07-24 15:08:13.021354999 -0400
@@ -2188,6 +2188,7 @@ class RecvmsgGenericStreamTests(RecvmsgG
     # Tests which require a stream socket and can use either recvmsg()
     # or recvmsg_into().
 
+    @unittest._skipInRpmBuild('fails intermittently when run within Koji')
    def testRecvmsgEOF(self):
        # Receive end-of-stream indicator (b"", peer socket closed).
        msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, 1024)

SOURCES/00170-gc-assertions.patch (new file, 309 lines)
@@ -0,0 +1,309 @@
|
||||
diff --git a/Include/object.h b/Include/object.h
|
||||
index 63e37b8..613b26c 100644
|
||||
--- a/Include/object.h
|
||||
+++ b/Include/object.h
|
||||
@@ -1071,6 +1071,49 @@ PyAPI_FUNC(void)
|
||||
_PyObject_DebugTypeStats(FILE *out);
|
||||
#endif /* ifndef Py_LIMITED_API */
|
||||
|
||||
+/*
|
||||
+ Define a pair of assertion macros.
|
||||
+
|
||||
+ These work like the regular C assert(), in that they will abort the
|
||||
+ process with a message on stderr if the given condition fails to hold,
|
||||
+ but compile away to nothing if NDEBUG is defined.
|
||||
+
|
||||
+ However, before aborting, Python will also try to call _PyObject_Dump() on
|
||||
+ the given object. This may be of use when investigating bugs in which a
|
||||
+ particular object is corrupt (e.g. buggy a tp_visit method in an extension
|
||||
+ module breaking the garbage collector), to help locate the broken objects.
|
||||
+
|
||||
+ The WITH_MSG variant allows you to supply an additional message that Python
|
||||
+ will attempt to print to stderr, after the object dump.
|
||||
+*/
|
||||
+#ifdef NDEBUG
|
||||
+/* No debugging: compile away the assertions: */
|
||||
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0)
|
||||
+#else
|
||||
+/* With debugging: generate checks: */
|
||||
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \
|
||||
+ ((expr) \
|
||||
+ ? (void)(0) \
|
||||
+ : _PyObject_AssertFailed((obj), \
|
||||
+ (msg), \
|
||||
+ (__STRING(expr)), \
|
||||
+ (__FILE__), \
|
||||
+ (__LINE__), \
|
||||
+ (__PRETTY_FUNCTION__)))
|
||||
+#endif
|
||||
+
|
||||
+#define PyObject_ASSERT(obj, expr) \
|
||||
+ PyObject_ASSERT_WITH_MSG(obj, expr, NULL)
|
||||
+
|
||||
+/*
|
||||
+ Declare and define the entrypoint even when NDEBUG is defined, to avoid
|
||||
+ causing compiler/linker errors when building extensions without NDEBUG
|
||||
+ against a Python built with NDEBUG defined
|
||||
+*/
|
||||
+PyAPI_FUNC(void) _PyObject_AssertFailed(PyObject *, const char *,
|
||||
+ const char *, const char *, int,
|
||||
+ const char *);
|
||||
+
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
|
||||
index 7e82b24..8ecc3d9 100644
|
||||
--- a/Lib/test/test_gc.py
|
||||
+++ b/Lib/test/test_gc.py
|
||||
@@ -2,9 +2,11 @@ import unittest
|
||||
from test.support import (verbose, refcount_test, run_unittest,
|
||||
strip_python_stderr, cpython_only, start_threads,
|
||||
temp_dir, requires_type_collecting, TESTFN, unlink)
|
||||
+from test.support import import_module
|
||||
from test.support.script_helper import assert_python_ok, make_script
|
||||
|
||||
import sys
|
||||
+import sysconfig
|
||||
import time
|
||||
import gc
|
||||
import weakref
|
||||
@@ -50,6 +52,8 @@ class GC_Detector(object):
|
||||
# gc collects it.
|
||||
self.wr = weakref.ref(C1055820(666), it_happened)
|
||||
|
||||
+BUILD_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS'])
|
||||
+
|
||||
@with_tp_del
|
||||
class Uncollectable(object):
|
||||
"""Create a reference cycle with multiple __del__ methods.
|
||||
@@ -877,6 +881,50 @@ class GCCallbackTests(unittest.TestCase):
|
||||
self.assertEqual(len(gc.garbage), 0)
|
||||
|
||||
|
||||
+ @unittest.skipIf(BUILD_WITH_NDEBUG,
|
||||
+ 'built with -NDEBUG')
|
||||
+ def test_refcount_errors(self):
|
||||
+ self.preclean()
|
||||
+ # Verify the "handling" of objects with broken refcounts
|
||||
+ import_module("ctypes") #skip if not supported
|
||||
+
|
||||
+ import subprocess
|
||||
+ code = '''if 1:
|
||||
+ a = []
|
||||
+ b = [a]
|
||||
+
|
||||
+ # Simulate the refcount of "a" being too low (compared to the
|
||||
+ # references held on it by live data), but keeping it above zero
|
||||
+ # (to avoid deallocating it):
|
||||
+ import ctypes
|
||||
+ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a))
|
||||
+
|
||||
+ # The garbage collector should now have a fatal error when it reaches
|
||||
+ # the broken object:
|
||||
+ import gc
|
||||
+ gc.collect()
|
||||
+ '''
|
||||
+ p = subprocess.Popen([sys.executable, "-c", code],
|
||||
+ stdout=subprocess.PIPE,
|
||||
+ stderr=subprocess.PIPE)
|
||||
+ stdout, stderr = p.communicate()
|
||||
+ p.stdout.close()
|
||||
+ p.stderr.close()
|
||||
+ # Verify that stderr has a useful error message:
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "\(\(gc\)->gc.gc_refs >> \(1\)\) != 0" failed.')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'refcount was too small')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'object : \[\]')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'type : list')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'refcount: 1')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'address : 0x[0-9a-f]+')
|
||||
+
|
||||
+
|
||||
class GCTogglingTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
gc.enable()
|
||||
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
|
||||
index 3bddc40..0cc24f7 100644
|
||||
--- a/Modules/gcmodule.c
|
||||
+++ b/Modules/gcmodule.c
|
||||
@@ -342,7 +342,8 @@ update_refs(PyGC_Head *containers)
|
||||
{
|
||||
PyGC_Head *gc = containers->gc.gc_next;
|
||||
for (; gc != containers; gc = gc->gc.gc_next) {
|
||||
- assert(_PyGCHead_REFS(gc) == GC_REACHABLE);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) == GC_REACHABLE);
|
||||
_PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc)));
|
||||
/* Python's cyclic gc should never see an incoming refcount
|
||||
* of 0: if something decref'ed to 0, it should have been
|
||||
@@ -362,7 +363,8 @@ update_refs(PyGC_Head *containers)
|
||||
* so serious that maybe this should be a release-build
|
||||
* check instead of an assert?
|
||||
*/
|
||||
- assert(_PyGCHead_REFS(gc) != 0);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) != 0);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -377,7 +379,9 @@ visit_decref(PyObject *op, void *data)
|
||||
* generation being collected, which can be recognized
|
||||
* because only they have positive gc_refs.
|
||||
*/
|
||||
- assert(_PyGCHead_REFS(gc) != 0); /* else refcount was too small */
|
||||
+ PyObject_ASSERT_WITH_MSG(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) != 0,
|
||||
+ "refcount was too small"); /* else refcount was too small */
|
||||
if (_PyGCHead_REFS(gc) > 0)
|
||||
_PyGCHead_DECREF(gc);
|
||||
}
|
||||
@@ -437,9 +441,10 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
|
||||
* If gc_refs == GC_UNTRACKED, it must be ignored.
|
||||
*/
|
||||
else {
|
||||
- assert(gc_refs > 0
|
||||
- || gc_refs == GC_REACHABLE
|
||||
- || gc_refs == GC_UNTRACKED);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ gc_refs > 0
|
||||
+ || gc_refs == GC_REACHABLE
|
||||
+ || gc_refs == GC_UNTRACKED);
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
@@ -481,7 +486,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
|
||||
*/
|
||||
PyObject *op = FROM_GC(gc);
|
||||
traverseproc traverse = Py_TYPE(op)->tp_traverse;
|
||||
- assert(_PyGCHead_REFS(gc) > 0);
|
||||
+ PyObject_ASSERT(op, _PyGCHead_REFS(gc) > 0);
|
||||
_PyGCHead_SET_REFS(gc, GC_REACHABLE);
|
||||
(void) traverse(op,
|
||||
(visitproc)visit_reachable,
|
||||
@@ -544,7 +549,7 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)
|
||||
for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) {
|
||||
PyObject *op = FROM_GC(gc);
|
||||
|
||||
- assert(IS_TENTATIVELY_UNREACHABLE(op));
|
||||
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
|
||||
next = gc->gc.gc_next;
|
||||
|
||||
if (has_legacy_finalizer(op)) {
|
||||
@@ -620,7 +625,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
PyWeakReference **wrlist;
|
||||
|
||||
op = FROM_GC(gc);
|
||||
- assert(IS_TENTATIVELY_UNREACHABLE(op));
|
||||
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
|
||||
next = gc->gc.gc_next;
|
||||
|
||||
if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op)))
|
||||
@@ -641,9 +646,9 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
* the callback pointer intact. Obscure: it also
|
||||
* changes *wrlist.
|
||||
*/
|
||||
- assert(wr->wr_object == op);
|
||||
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == op);
|
||||
_PyWeakref_ClearRef(wr);
|
||||
- assert(wr->wr_object == Py_None);
|
||||
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None);
|
||||
if (wr->wr_callback == NULL)
|
||||
continue; /* no callback */
|
||||
|
||||
@@ -677,7 +682,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
*/
|
||||
if (IS_TENTATIVELY_UNREACHABLE(wr))
|
||||
continue;
|
||||
- assert(IS_REACHABLE(wr));
|
||||
+ PyObject_ASSERT(op, IS_REACHABLE(wr));
|
||||
|
||||
/* Create a new reference so that wr can't go away
|
||||
* before we can process it again.
|
||||
@@ -686,7 +691,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
|
||||
/* Move wr to wrcb_to_call, for the next pass. */
|
||||
wrasgc = AS_GC(wr);
|
||||
- assert(wrasgc != next); /* wrasgc is reachable, but
|
||||
+ PyObject_ASSERT(op, wrasgc != next);
|
||||
+ /* wrasgc is reachable, but
|
||||
next isn't, so they can't
|
||||
be the same */
|
||||
gc_list_move(wrasgc, &wrcb_to_call);
|
||||
@@ -702,11 +708,11 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
|
||||
gc = wrcb_to_call.gc.gc_next;
|
||||
op = FROM_GC(gc);
|
||||
- assert(IS_REACHABLE(op));
|
||||
- assert(PyWeakref_Check(op));
|
||||
+ PyObject_ASSERT(op, IS_REACHABLE(op));
|
||||
+ PyObject_ASSERT(op, PyWeakref_Check(op));
|
||||
wr = (PyWeakReference *)op;
|
||||
callback = wr->wr_callback;
|
||||
- assert(callback != NULL);
|
||||
+ PyObject_ASSERT(op, callback != NULL);
|
||||
|
||||
/* copy-paste of weakrefobject.c's handle_callback() */
|
||||
temp = PyObject_CallFunctionObjArgs(callback, wr, NULL);
|
||||
@@ -820,12 +826,14 @@ check_garbage(PyGC_Head *collectable)
|
||||
for (gc = collectable->gc.gc_next; gc != collectable;
|
||||
gc = gc->gc.gc_next) {
|
||||
_PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc)));
|
||||
- assert(_PyGCHead_REFS(gc) != 0);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) != 0);
|
||||
}
|
||||
subtract_refs(collectable);
|
||||
for (gc = collectable->gc.gc_next; gc != collectable;
|
||||
gc = gc->gc.gc_next) {
|
||||
- assert(_PyGCHead_REFS(gc) >= 0);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) >= 0);
|
||||
if (_PyGCHead_REFS(gc) != 0)
|
||||
return -1;
|
||||
}
|
||||
diff --git a/Objects/object.c b/Objects/object.c
|
||||
index fdd41a6..bfe806c 100644
|
||||
--- a/Objects/object.c
|
||||
+++ b/Objects/object.c
|
||||
@@ -2031,6 +2031,35 @@ _PyTrash_thread_destroy_chain(void)
|
||||
}
|
||||
}
|
||||
|
||||
+PyAPI_FUNC(void)
|
||||
+_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr,
|
||||
+ const char *file, int line, const char *function)
|
||||
+{
|
||||
+ fprintf(stderr,
|
||||
+ "%s:%d: %s: Assertion \"%s\" failed.\n",
|
||||
+ file, line, function, expr);
|
||||
+ if (msg) {
|
||||
+ fprintf(stderr, "%s\n", msg);
|
||||
+ }
|
||||
+
|
||||
+ fflush(stderr);
|
||||
+
|
||||
+ if (obj) {
|
||||
+ /* This might succeed or fail, but we're about to abort, so at least
|
||||
+ try to provide any extra info we can: */
|
||||
+ _PyObject_Dump(obj);
|
||||
+ }
|
||||
+ else {
|
||||
+ fprintf(stderr, "NULL object\n");
|
||||
+ }
|
||||
+
|
||||
+ fflush(stdout);
|
||||
+ fflush(stderr);
|
||||
+
|
||||
+ /* Terminate the process: */
|
||||
+ abort();
|
||||
+}
|
||||
+
|
||||
#ifndef Py_TRACE_REFS
|
||||
/* For Py_LIMITED_API, we need an out-of-line version of _Py_Dealloc.
|
||||
Define this here, so we can undefine the macro. */
|
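The added PyObject_ASSERT machinery is exercised by test_refcount_errors above, which deliberately lowers an object's refcount from ctypes and then forces a collection in a subprocess, so the aborting interpreter cannot take the test runner down with it. The core of that technique looks roughly like this (condensed sketch of the test's approach; only a build with assertions enabled, i.e. without NDEBUG, prints the new diagnostics, and the child process aborts by design):

    # Sketch of the technique used by the new test: corrupt a refcount in a
    # child interpreter and inspect its stderr from the parent.
    import subprocess, sys

    code = """if 1:
        import ctypes, gc
        a = []
        b = [a]
        # Drop a's refcount below the number of live references to it.
        ctypes.pythonapi.Py_DecRef(ctypes.py_object(a))
        gc.collect()   # the collector should now hit the broken object
    """
    p = subprocess.run([sys.executable, "-c", code],
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    print(p.returncode)             # non-zero: the child aborted
    print(p.stderr.decode()[:200])  # assertion message and object dump, if enabled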

SOURCES/00178-dont-duplicate-flags-in-sysconfig.patch (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
diff -r 39b9b05c3085 Lib/distutils/sysconfig.py
|
||||
--- a/Lib/distutils/sysconfig.py Wed Apr 10 00:27:23 2013 +0200
|
||||
+++ b/Lib/distutils/sysconfig.py Wed Apr 10 10:14:18 2013 +0200
|
||||
@@ -362,7 +362,10 @@
|
||||
done[n] = item = ""
|
||||
if found:
|
||||
after = value[m.end():]
|
||||
- value = value[:m.start()] + item + after
|
||||
+ value = value[:m.start()]
|
||||
+ if item.strip() not in value:
|
||||
+ value += item
|
||||
+ value += after
|
||||
if "$" in after:
|
||||
notdone[name] = value
|
||||
else:
|
||||
diff -r 39b9b05c3085 Lib/sysconfig.py
|
||||
--- a/Lib/sysconfig.py Wed Apr 10 00:27:23 2013 +0200
|
||||
+++ b/Lib/sysconfig.py Wed Apr 10 10:14:18 2013 +0200
|
||||
@@ -296,7 +296,10 @@
|
||||
|
||||
if found:
|
||||
after = value[m.end():]
|
||||
- value = value[:m.start()] + item + after
|
||||
+ value = value[:m.start()]
|
||||
+ if item.strip() not in value:
|
||||
+ value += item
|
||||
+ value += after
|
||||
if "$" in after:
|
||||
notdone[name] = value
|
||||
else:
|
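The two hunks above change the Makefile-variable expansion in distutils.sysconfig and sysconfig so that an expanded fragment (typically the distro CFLAGS) is appended only if it is not already present, preventing flags from being duplicated every time $(...) references are resolved. The effect of one expansion step can be sketched with a standalone toy function (not the real parser, just the patched check):

    # Toy stand-in for the patched substitution logic: append the expanded
    # item only when it is not already part of the value built so far.
    def expand_once(value, start, end, item):
        after = value[end:]
        value = value[:start]
        if item.strip() not in value:   # the check added by the patch
            value += item
        return value + after

    cflags = "-O2 -g $(OPT)"
    ref = "$(OPT)"
    start = cflags.index(ref)
    # $(OPT) also expands to "-O2 -g"; the flags are not appended a second time.
    print(expand_once(cflags, start, start + len(ref), "-O2 -g"))  # "-O2 -g "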

SOURCES/00189-add-rewheel-module.patch (new file, 245 lines)
@@ -0,0 +1,245 @@
|
||||
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
|
||||
index 4748ba4..986d5e9 100644
|
||||
--- a/Lib/ensurepip/__init__.py
|
||||
+++ b/Lib/ensurepip/__init__.py
|
||||
@@ -1,8 +1,10 @@
|
||||
import os
|
||||
import os.path
|
||||
import pkgutil
|
||||
+import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
+from ensurepip import rewheel
|
||||
|
||||
|
||||
__all__ = ["version", "bootstrap"]
|
||||
@@ -24,8 +26,15 @@ def _run_pip(args, additional_paths=None):
|
||||
sys.path = additional_paths + sys.path
|
||||
|
||||
# Install the bundled software
|
||||
- import pip._internal
|
||||
- return pip._internal.main(args)
|
||||
+ try:
|
||||
+ # pip 10
|
||||
+ from pip._internal import main
|
||||
+ except ImportError:
|
||||
+ # pip 9
|
||||
+ from pip import main
|
||||
+ if args[0] in ["install", "list", "wheel"]:
|
||||
+ args.append('--pre')
|
||||
+ return main(args)
|
||||
|
||||
|
||||
def version():
|
||||
@@ -88,20 +97,39 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
|
||||
# omit pip and easy_install
|
||||
os.environ["ENSUREPIP_OPTIONS"] = "install"
|
||||
|
||||
+ whls = []
|
||||
+ rewheel_dir = None
|
||||
+ # try to see if we have system-wide versions of _PROJECTS
|
||||
+ dep_records = rewheel.find_system_records([p[0] for p in _PROJECTS])
|
||||
+ # TODO: check if system-wide versions are the newest ones
|
||||
+ # if --upgrade is used?
|
||||
+ if all(dep_records):
|
||||
+ # if we have all _PROJECTS installed system-wide, we'll recreate
|
||||
+ # wheels from them and install those
|
||||
+ rewheel_dir = tempfile.TemporaryDirectory()
|
||||
+ for dr in dep_records:
|
||||
+ new_whl = rewheel.rewheel_from_record(dr, rewheel_dir.name)
|
||||
+ whls.append(os.path.join(rewheel_dir.name, new_whl))
|
||||
+ else:
|
||||
+ # if we don't have all the _PROJECTS installed system-wide,
|
||||
+ # let's just fall back to bundled wheels
|
||||
+ for project, version in _PROJECTS:
|
||||
+ whl = os.path.join(
|
||||
+ os.path.dirname(__file__),
|
||||
+ "_bundled",
|
||||
+ "{}-{}-py2.py3-none-any.whl".format(project, version)
|
||||
+ )
|
||||
+ whls.append(whl)
|
||||
+
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
# Put our bundled wheels into a temporary directory and construct the
|
||||
# additional paths that need added to sys.path
|
||||
additional_paths = []
|
||||
- for project, version in _PROJECTS:
|
||||
- wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
|
||||
- whl = pkgutil.get_data(
|
||||
- "ensurepip",
|
||||
- "_bundled/{}".format(wheel_name),
|
||||
- )
|
||||
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
|
||||
- fp.write(whl)
|
||||
-
|
||||
- additional_paths.append(os.path.join(tmpdir, wheel_name))
|
||||
+ for whl in whls:
|
||||
+ shutil.copy(whl, tmpdir)
|
||||
+ additional_paths.append(os.path.join(tmpdir, os.path.basename(whl)))
|
||||
+ if rewheel_dir:
|
||||
+ rewheel_dir.cleanup()
|
||||
|
||||
# Construct the arguments to be passed to the pip command
|
||||
args = ["install", "--no-index", "--find-links", tmpdir]
|
||||
diff --git a/Lib/ensurepip/rewheel/__init__.py b/Lib/ensurepip/rewheel/__init__.py
|
||||
new file mode 100644
|
||||
index 0000000..753c764
|
||||
--- /dev/null
|
||||
+++ b/Lib/ensurepip/rewheel/__init__.py
|
||||
@@ -0,0 +1,143 @@
|
||||
+import argparse
|
||||
+import codecs
|
||||
+import csv
|
||||
+import email.parser
|
||||
+import os
|
||||
+import io
|
||||
+import re
|
||||
+import site
|
||||
+import subprocess
|
||||
+import sys
|
||||
+import zipfile
|
||||
+
|
||||
+def run():
|
||||
+ parser = argparse.ArgumentParser(description='Recreate wheel of package with given RECORD.')
|
||||
+ parser.add_argument('record_path',
|
||||
+ help='Path to RECORD file')
|
||||
+ parser.add_argument('-o', '--output-dir',
|
||||
+ help='Dir where to place the wheel, defaults to current working dir.',
|
||||
+ dest='outdir',
|
||||
+ default=os.path.curdir)
|
||||
+
|
||||
+ ns = parser.parse_args()
|
||||
+ retcode = 0
|
||||
+ try:
|
||||
+ print(rewheel_from_record(**vars(ns)))
|
||||
+ except BaseException as e:
|
||||
+ print('Failed: {}'.format(e))
|
||||
+ retcode = 1
|
||||
+ sys.exit(1)
|
||||
+
|
||||
+def find_system_records(projects):
|
||||
+ """Return list of paths to RECORD files for system-installed projects.
|
||||
+
|
||||
+ If a project is not installed, the resulting list contains None instead
|
||||
+ of a path to its RECORD
|
||||
+ """
|
||||
+ records = []
|
||||
+ # get system site-packages dirs
|
||||
+ sys_sitepack = site.getsitepackages([sys.base_prefix, sys.base_exec_prefix])
|
||||
+ sys_sitepack = [sp for sp in sys_sitepack if os.path.exists(sp)]
|
||||
+ # try to find all projects in all system site-packages
|
||||
+ for project in projects:
|
||||
+ path = None
|
||||
+ for sp in sys_sitepack:
|
||||
+ dist_info_re = os.path.join(sp, project) + r'-[^\{0}]+\.dist-info'.format(os.sep)
|
||||
+ candidates = [os.path.join(sp, p) for p in os.listdir(sp)]
|
||||
+ # filter out candidate dirs based on the above regexp
|
||||
+ filtered = [c for c in candidates if re.match(dist_info_re, c)]
|
||||
+ # if we have 0 or 2 or more dirs, something is wrong...
|
||||
+ if len(filtered) == 1:
|
||||
+ path = filtered[0]
|
||||
+ if path is not None:
|
||||
+ records.append(os.path.join(path, 'RECORD'))
|
||||
+ else:
|
||||
+ records.append(None)
|
||||
+ return records
|
||||
+
|
||||
+def rewheel_from_record(record_path, outdir):
|
||||
+ """Recreates a whee of package with given record_path and returns path
|
||||
+ to the newly created wheel."""
|
||||
+ site_dir = os.path.dirname(os.path.dirname(record_path))
|
||||
+ record_relpath = record_path[len(site_dir):].strip(os.path.sep)
|
||||
+ to_write, to_omit = get_records_to_pack(site_dir, record_relpath)
|
||||
+ new_wheel_name = get_wheel_name(record_path)
|
||||
+ new_wheel_path = os.path.join(outdir, new_wheel_name + '.whl')
|
||||
+
|
||||
+ new_wheel = zipfile.ZipFile(new_wheel_path, mode='w', compression=zipfile.ZIP_DEFLATED)
|
||||
+ # we need to write a new record with just the files that we will write,
|
||||
+ # e.g. not binaries and *.pyc/*.pyo files
|
||||
+ new_record = io.StringIO()
|
||||
+ writer = csv.writer(new_record)
|
||||
+
|
||||
+ # handle files that we can write straight away
|
||||
+ for f, sha_hash, size in to_write:
|
||||
+ new_wheel.write(os.path.join(site_dir, f), arcname=f)
|
||||
+ writer.writerow([f, sha_hash,size])
|
||||
+
|
||||
+ # rewrite the old wheel file with a new computed one
|
||||
+ writer.writerow([record_relpath, '', ''])
|
||||
+ new_wheel.writestr(record_relpath, new_record.getvalue())
|
||||
+
|
||||
+ new_wheel.close()
|
||||
+
|
||||
+ return new_wheel.filename
|
||||
+
|
||||
+def get_wheel_name(record_path):
|
||||
+ """Return proper name of the wheel, without .whl."""
|
||||
+
|
||||
+ wheel_info_path = os.path.join(os.path.dirname(record_path), 'WHEEL')
|
||||
+ with codecs.open(wheel_info_path, encoding='utf-8') as wheel_info_file:
|
||||
+ wheel_info = email.parser.Parser().parsestr(wheel_info_file.read())
|
||||
+
|
||||
+ metadata_path = os.path.join(os.path.dirname(record_path), 'METADATA')
|
||||
+ with codecs.open(metadata_path, encoding='utf-8') as metadata_file:
|
||||
+ metadata = email.parser.Parser().parsestr(metadata_file.read())
|
||||
+
|
||||
+ # construct name parts according to wheel spec
|
||||
+ distribution = metadata.get('Name')
|
||||
+ version = metadata.get('Version')
|
||||
+ build_tag = '' # nothing for now
|
||||
+ lang_tag = []
|
||||
+ for t in wheel_info.get_all('Tag'):
|
||||
+ lang_tag.append(t.split('-')[0])
|
||||
+ lang_tag = '.'.join(lang_tag)
|
||||
+ abi_tag, plat_tag = wheel_info.get('Tag').split('-')[1:3]
|
||||
+ # leave out build tag, if it is empty
|
||||
+ to_join = filter(None, [distribution, version, build_tag, lang_tag, abi_tag, plat_tag])
|
||||
+ return '-'.join(list(to_join))
|
||||
+
|
||||
+def get_records_to_pack(site_dir, record_relpath):
|
||||
+ """Accepts path of sitedir and path of RECORD file relative to it.
|
||||
+ Returns two lists:
|
||||
+ - list of files that can be written to new RECORD straight away
|
||||
+ - list of files that shouldn't be written or need some processing
|
||||
+ (pyc and pyo files, scripts)
|
||||
+ """
|
||||
+ record_file_path = os.path.join(site_dir, record_relpath)
|
||||
+ with codecs.open(record_file_path, encoding='utf-8') as record_file:
|
||||
+ record_contents = record_file.read()
|
||||
+ # temporary fix for https://github.com/pypa/pip/issues/1376
|
||||
+ # we need to ignore files under ".data" directory
|
||||
+ data_dir = os.path.dirname(record_relpath).strip(os.path.sep)
|
||||
+ data_dir = data_dir[:-len('dist-info')] + 'data'
|
||||
+
|
||||
+ to_write = []
|
||||
+ to_omit = []
|
||||
+ for l in record_contents.splitlines():
|
||||
+ spl = l.split(',')
|
||||
+ if len(spl) == 3:
|
||||
+ # new record will omit (or write differently):
|
||||
+ # - abs paths, paths with ".." (entry points),
|
||||
+ # - pyc+pyo files
|
||||
+ # - the old RECORD file
|
||||
+ # TODO: is there any better way to recognize an entry point?
|
||||
+ if os.path.isabs(spl[0]) or spl[0].startswith('..') or \
|
||||
+ spl[0].endswith('.pyc') or spl[0].endswith('.pyo') or \
|
||||
+ spl[0] == record_relpath or spl[0].startswith(data_dir):
|
||||
+ to_omit.append(spl)
|
||||
+ else:
|
||||
+ to_write.append(spl)
|
||||
+ else:
|
||||
+ pass # bad RECORD or empty line
|
||||
+ return to_write, to_omit
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index 85e2ee3..4d34130 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -1256,7 +1256,7 @@ LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \
|
||||
test/test_asyncio \
|
||||
collections concurrent concurrent/futures encodings \
|
||||
email email/mime test/test_email test/test_email/data \
|
||||
- ensurepip ensurepip/_bundled \
|
||||
+ ensurepip ensurepip/_bundled ensurepip/rewheel \
|
||||
html json test/test_json http dbm xmlrpc \
|
||||
sqlite3 sqlite3/test \
|
||||
logging csv wsgiref urllib \
|
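The rewheel helper added above lets ensurepip rebuild wheels for pip and setuptools from the RECORD files of the system-wide RPM installs instead of relying on the bundled wheels. Driven on its own, its two entry points could be used roughly like this (illustrative; the module exists only on interpreters carrying this patch, and results depend on what is installed system-wide):

    # Illustrative driver for the downstream-only ensurepip.rewheel module.
    # find_system_records() yields one RECORD path (or None) per project name;
    # rewheel_from_record() then rebuilds a wheel from an installed project.
    import tempfile
    from ensurepip import rewheel  # downstream-only module

    records = rewheel.find_system_records(["setuptools", "pip"])
    with tempfile.TemporaryDirectory() as outdir:
        for record in records:
            if record is None:
                continue  # not installed system-wide; ensurepip falls back to bundled wheels
            print(rewheel.rewheel_from_record(record, outdir))  # path of the rebuilt .whl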

SOURCES/00205-make-libpl-respect-lib64.patch (new file, 12 lines)
@@ -0,0 +1,12 @@
diff -up Python-3.5.0/Makefile.pre.in.lib Python-3.5.0/Makefile.pre.in
--- Python-3.5.0/Makefile.pre.in.lib	2015-09-21 15:39:47.928286620 +0200
+++ Python-3.5.0/Makefile.pre.in	2015-09-21 15:42:58.004042762 +0200
@@ -1340,7 +1340,7 @@ inclinstall:
 
 # Install the library and miscellaneous stuff needed for extending/embedding
 # This goes into $(exec_prefix)
-LIBPL=		@LIBPL@
+LIBPL=		$(LIBDEST)/config-$(LDVERSION)-$(MULTIARCH)
 
 # pkgconfig directory
 LIBPC=		$(LIBDIR)/pkgconfig
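With this change LIBPL is derived from LIBDEST, which already honours lib64 and the multiarch tuple, instead of the configure-time @LIBPL@ value. The effective directory can be read back from an installed interpreter (illustrative; the concrete path depends on the platform tuple):

    # Illustrative: where the config directory used for embedding/extending
    # ends up, e.g. /usr/lib64/python3.6/config-3.6m-x86_64-linux-gnu on x86_64.
    import sysconfig
    print(sysconfig.get_config_var('LIBPL'))
    print(sysconfig.get_config_var('LIBPC'))  # pkgconfig directory under LIBDIR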

SOURCES/00251-change-user-install-location.patch (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
|
||||
index 0258d3d..4ebf50a 100644
|
||||
--- a/Lib/distutils/command/install.py
|
||||
+++ b/Lib/distutils/command/install.py
|
||||
@@ -418,8 +418,19 @@ class install(Command):
|
||||
raise DistutilsOptionError(
|
||||
"must not supply exec-prefix without prefix")
|
||||
|
||||
- self.prefix = os.path.normpath(sys.prefix)
|
||||
- self.exec_prefix = os.path.normpath(sys.exec_prefix)
|
||||
+ # self.prefix is set to sys.prefix + /local/
|
||||
+ # if neither RPM build nor virtual environment is
|
||||
+ # detected to make pip and distutils install packages
|
||||
+ # into the separate location.
|
||||
+ if (not (hasattr(sys, 'real_prefix') or
|
||||
+ sys.prefix != sys.base_prefix) and
|
||||
+ 'RPM_BUILD_ROOT' not in os.environ):
|
||||
+ addition = "/local"
|
||||
+ else:
|
||||
+ addition = ""
|
||||
+
|
||||
+ self.prefix = os.path.normpath(sys.prefix) + addition
|
||||
+ self.exec_prefix = os.path.normpath(sys.exec_prefix) + addition
|
||||
|
||||
else:
|
||||
if self.exec_prefix is None:
|
||||
diff --git a/Lib/site.py b/Lib/site.py
|
||||
index 0fc9200..c95202e 100644
|
||||
--- a/Lib/site.py
|
||||
+++ b/Lib/site.py
|
||||
@@ -322,7 +322,14 @@ def getsitepackages(prefixes=None):
|
||||
return sitepackages
|
||||
|
||||
def addsitepackages(known_paths, prefixes=None):
|
||||
- """Add site-packages to sys.path"""
|
||||
+ """Add site-packages to sys.path
|
||||
+
|
||||
+ '/usr/local' is included in PREFIXES if RPM build is not detected
|
||||
+ to make packages installed into this location visible.
|
||||
+
|
||||
+ """
|
||||
+ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ:
|
||||
+ PREFIXES.insert(0, "/usr/local")
|
||||
for sitedir in getsitepackages(prefixes):
|
||||
if os.path.isdir(sitedir):
|
||||
addsitedir(sitedir, known_paths)
|
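The distutils hunk above chooses between sys.prefix and sys.prefix plus /local by checking for a virtual environment and for an rpmbuild environment, and the site.py hunk makes /usr/local visible again at runtime. The decision itself can be restated outside distutils as a short sketch of the same condition (illustrative only):

    # Sketch of the prefix selection introduced by this patch: a plain
    # "pip install" outside a virtualenv and outside rpmbuild targets
    # /usr/local, while virtualenvs and RPM builds keep the prefix unchanged.
    import os, sys

    def effective_prefix():
        in_venv = hasattr(sys, 'real_prefix') or sys.prefix != sys.base_prefix
        in_rpm_build = 'RPM_BUILD_ROOT' in os.environ
        addition = '' if (in_venv or in_rpm_build) else '/local'
        return os.path.normpath(sys.prefix) + addition

    print(effective_prefix())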

SOURCES/00262-pep538_coerce_legacy_c_locale.patch (new file, 901 lines)
@@ -0,0 +1,901 @@
|
||||
diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst
|
||||
index d14793a..65aa3ad 100644
|
||||
--- a/Doc/using/cmdline.rst
|
||||
+++ b/Doc/using/cmdline.rst
|
||||
@@ -728,6 +728,45 @@ conflict.
|
||||
|
||||
.. versionadded:: 3.6
|
||||
|
||||
+
|
||||
+.. envvar:: PYTHONCOERCECLOCALE
|
||||
+
|
||||
+ If set to the value ``0``, causes the main Python command line application
|
||||
+ to skip coercing the legacy ASCII-based C locale to a more capable UTF-8
|
||||
+ based alternative. Note that this setting is checked even when the
|
||||
+ :option:`-E` or :option:`-I` options are used, as it is handled prior to
|
||||
+ the processing of command line options.
|
||||
+
|
||||
+ If this variable is *not* set, or is set to a value other than ``0``, and
|
||||
+ the current locale reported for the ``LC_CTYPE`` category is the default
|
||||
+ ``C`` locale, then the Python CLI will attempt to configure one of the
|
||||
+ following locales for the given locale categories before loading the
|
||||
+ interpreter runtime:
|
||||
+
|
||||
+ * ``C.UTF-8`` (``LC_ALL``)
|
||||
+ * ``C.utf8`` (``LC_ALL``)
|
||||
+ * ``UTF-8`` (``LC_CTYPE``)
|
||||
+
|
||||
+ If setting one of these locale categories succeeds, then the matching
|
||||
+ environment variables will be set (both ``LC_ALL`` and ``LANG`` for the
|
||||
+ ``LC_ALL`` category, and ``LC_CTYPE`` for the ``LC_CTYPE`` category) in
|
||||
+ the current process environment before the Python runtime is initialized.
|
||||
+
|
||||
+ Configuring one of these locales (either explicitly or via the above
|
||||
+ implicit locale coercion) will automatically set the error handler for
|
||||
+ :data:`sys.stdin` and :data:`sys.stdout` to ``surrogateescape``. This
|
||||
+ behavior can be overridden using :envvar:`PYTHONIOENCODING` as usual.
|
||||
+
|
||||
+ For debugging purposes, setting ``PYTHONCOERCECLOCALE=warn`` will cause
|
||||
+ Python to emit warning messages on ``stderr`` if either the locale coercion
|
||||
+ activates, or else if a locale that *would* have triggered coercion is
|
||||
+ still active when the Python runtime is initialized.
|
||||
+
|
||||
+ Availability: \*nix
|
||||
+
|
||||
+ .. versionadded:: 3.7
|
||||
+ See :pep:`538` for more details.
|
||||
+
|
||||
Debug-mode variables
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
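The documentation hunk above describes the new switch; its observable effect is that a child interpreter started in the C locale reports UTF-8 standard streams unless PYTHONCOERCECLOCALE=0 disables the coercion. The test module added later in this patch verifies exactly that through subprocesses, roughly as follows (condensed sketch; assumes a build configured with PY_COERCE_C_LOCALE and an available C.UTF-8 locale):

    # Condensed sketch of what the new test_c_locale_coercion checks: start a
    # child in the C locale with and without PYTHONCOERCECLOCALE=0 and compare
    # the reported stdout encodings. Expected values assume a patched build.
    import subprocess, sys

    def child_stdout_encoding(extra_env):
        env = {"LANG": "C", **extra_env}  # other locale variables left unset
        out = subprocess.run(
            [sys.executable, "-c", "import sys; print(sys.stdout.encoding)"],
            stdout=subprocess.PIPE, env=env)
        return out.stdout.decode().strip()

    print(child_stdout_encoding({}))                            # UTF-8 if coercion triggers
    print(child_stdout_encoding({"PYTHONCOERCECLOCALE": "0"}))  # ANSI_X3.4-1968 (ASCII)
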
diff --git a/Lib/test/support/script_helper.py b/Lib/test/support/script_helper.py
|
||||
index 507dc48..c3cb720 100644
|
||||
--- a/Lib/test/support/script_helper.py
|
||||
+++ b/Lib/test/support/script_helper.py
|
||||
@@ -56,8 +56,35 @@ def interpreter_requires_environment():
|
||||
return __cached_interp_requires_environment
|
||||
|
||||
|
||||
-_PythonRunResult = collections.namedtuple("_PythonRunResult",
|
||||
- ("rc", "out", "err"))
|
||||
+class _PythonRunResult(collections.namedtuple("_PythonRunResult",
|
||||
+ ("rc", "out", "err"))):
|
||||
+ """Helper for reporting Python subprocess run results"""
|
||||
+ def fail(self, cmd_line):
|
||||
+ """Provide helpful details about failed subcommand runs"""
|
||||
+ # Limit to 80 lines to ASCII characters
|
||||
+ maxlen = 80 * 100
|
||||
+ out, err = self.out, self.err
|
||||
+ if len(out) > maxlen:
|
||||
+ out = b'(... truncated stdout ...)' + out[-maxlen:]
|
||||
+ if len(err) > maxlen:
|
||||
+ err = b'(... truncated stderr ...)' + err[-maxlen:]
|
||||
+ out = out.decode('ascii', 'replace').rstrip()
|
||||
+ err = err.decode('ascii', 'replace').rstrip()
|
||||
+ raise AssertionError("Process return code is %d\n"
|
||||
+ "command line: %r\n"
|
||||
+ "\n"
|
||||
+ "stdout:\n"
|
||||
+ "---\n"
|
||||
+ "%s\n"
|
||||
+ "---\n"
|
||||
+ "\n"
|
||||
+ "stderr:\n"
|
||||
+ "---\n"
|
||||
+ "%s\n"
|
||||
+ "---"
|
||||
+ % (self.rc, cmd_line,
|
||||
+ out,
|
||||
+ err))
|
||||
|
||||
|
||||
# Executing the interpreter in a subprocess
|
||||
@@ -115,30 +142,7 @@ def run_python_until_end(*args, **env_vars):
|
||||
def _assert_python(expected_success, *args, **env_vars):
|
||||
res, cmd_line = run_python_until_end(*args, **env_vars)
|
||||
if (res.rc and expected_success) or (not res.rc and not expected_success):
|
||||
- # Limit to 80 lines to ASCII characters
|
||||
- maxlen = 80 * 100
|
||||
- out, err = res.out, res.err
|
||||
- if len(out) > maxlen:
|
||||
- out = b'(... truncated stdout ...)' + out[-maxlen:]
|
||||
- if len(err) > maxlen:
|
||||
- err = b'(... truncated stderr ...)' + err[-maxlen:]
|
||||
- out = out.decode('ascii', 'replace').rstrip()
|
||||
- err = err.decode('ascii', 'replace').rstrip()
|
||||
- raise AssertionError("Process return code is %d\n"
|
||||
- "command line: %r\n"
|
||||
- "\n"
|
||||
- "stdout:\n"
|
||||
- "---\n"
|
||||
- "%s\n"
|
||||
- "---\n"
|
||||
- "\n"
|
||||
- "stderr:\n"
|
||||
- "---\n"
|
||||
- "%s\n"
|
||||
- "---"
|
||||
- % (res.rc, cmd_line,
|
||||
- out,
|
||||
- err))
|
||||
+ res.fail(cmd_line)
|
||||
return res
|
||||
|
||||
def assert_python_ok(*args, **env_vars):
|
||||
diff --git a/Lib/test/test_c_locale_coercion.py b/Lib/test/test_c_locale_coercion.py
|
||||
new file mode 100644
|
||||
index 0000000..635c98f
|
||||
--- /dev/null
|
||||
+++ b/Lib/test/test_c_locale_coercion.py
|
||||
@@ -0,0 +1,371 @@
|
||||
+# Tests the attempted automatic coercion of the C locale to a UTF-8 locale
|
||||
+
|
||||
+import unittest
|
||||
+import locale
|
||||
+import os
|
||||
+import sys
|
||||
+import sysconfig
|
||||
+import shutil
|
||||
+import subprocess
|
||||
+from collections import namedtuple
|
||||
+
|
||||
+import test.support
|
||||
+from test.support.script_helper import (
|
||||
+ run_python_until_end,
|
||||
+ interpreter_requires_environment,
|
||||
+)
|
||||
+
|
||||
+# Set our expectation for the default encoding used in the C locale
|
||||
+# for the filesystem encoding and the standard streams
|
||||
+
|
||||
+# AIX uses iso8859-1 in the C locale, other *nix platforms use ASCII
|
||||
+if sys.platform.startswith("aix"):
|
||||
+ C_LOCALE_STREAM_ENCODING = "iso8859-1"
|
||||
+else:
|
||||
+ C_LOCALE_STREAM_ENCODING = "ascii"
|
||||
+
|
||||
+# FS encoding is UTF-8 on macOS, other *nix platforms use the locale encoding
|
||||
+if sys.platform == "darwin":
|
||||
+ C_LOCALE_FS_ENCODING = "utf-8"
|
||||
+else:
|
||||
+ C_LOCALE_FS_ENCODING = C_LOCALE_STREAM_ENCODING
|
||||
+
|
||||
+# Note that the above is probably still wrong in some cases, such as:
|
||||
+# * Windows when PYTHONLEGACYWINDOWSFSENCODING is set
|
||||
+# * AIX and any other platforms that use latin-1 in the C locale
|
||||
+#
|
||||
+# Options for dealing with this:
|
||||
+# * Don't set PYTHON_COERCE_C_LOCALE on such platforms (e.g. Windows doesn't)
|
||||
+# * Fix the test expectations to match the actual platform behaviour
|
||||
+
|
||||
+# In order to get the warning messages to match up as expected, the candidate
|
||||
+# order here must much the target locale order in Python/pylifecycle.c
|
||||
+_C_UTF8_LOCALES = ("C.UTF-8", "C.utf8", "UTF-8")
|
||||
+
|
||||
+# There's no reliable cross-platform way of checking locale alias
|
||||
+# lists, so the only way of knowing which of these locales will work
|
||||
+# is to try them with locale.setlocale(). We do that in a subprocess
|
||||
+# to avoid altering the locale of the test runner.
|
||||
+#
|
||||
+# If the relevant locale module attributes exist, and we're not on a platform
|
||||
+# where we expect it to always succeed, we also check that
|
||||
+# `locale.nl_langinfo(locale.CODESET)` works, as if it fails, the interpreter
|
||||
+# will skip locale coercion for that particular target locale
|
||||
+_check_nl_langinfo_CODESET = bool(
|
||||
+ sys.platform not in ("darwin", "linux") and
|
||||
+ hasattr(locale, "nl_langinfo") and
|
||||
+ hasattr(locale, "CODESET")
|
||||
+)
|
||||
+
|
||||
+def _set_locale_in_subprocess(locale_name):
|
||||
+ cmd_fmt = "import locale; print(locale.setlocale(locale.LC_CTYPE, '{}'))"
|
||||
+ if _check_nl_langinfo_CODESET:
|
||||
+ # If there's no valid CODESET, we expect coercion to be skipped
|
||||
+ cmd_fmt += "; import sys; sys.exit(not locale.nl_langinfo(locale.CODESET))"
|
||||
+ cmd = cmd_fmt.format(locale_name)
|
||||
+ result, py_cmd = run_python_until_end("-c", cmd, __isolated=True)
|
||||
+ return result.rc == 0
|
||||
+
|
||||
+
|
||||
+
|
||||
+_fields = "fsencoding stdin_info stdout_info stderr_info lang lc_ctype lc_all"
|
||||
+_EncodingDetails = namedtuple("EncodingDetails", _fields)
|
||||
+
|
||||
+class EncodingDetails(_EncodingDetails):
|
||||
+ # XXX (ncoghlan): Using JSON for child state reporting may be less fragile
|
||||
+ CHILD_PROCESS_SCRIPT = ";".join([
|
||||
+ "import sys, os",
|
||||
+ "print(sys.getfilesystemencoding())",
|
||||
+ "print(sys.stdin.encoding + ':' + sys.stdin.errors)",
|
||||
+ "print(sys.stdout.encoding + ':' + sys.stdout.errors)",
|
||||
+ "print(sys.stderr.encoding + ':' + sys.stderr.errors)",
|
||||
+ "print(os.environ.get('LANG', 'not set'))",
|
||||
+ "print(os.environ.get('LC_CTYPE', 'not set'))",
|
||||
+ "print(os.environ.get('LC_ALL', 'not set'))",
|
||||
+ ])
|
||||
+
|
||||
+ @classmethod
|
||||
+ def get_expected_details(cls, coercion_expected, fs_encoding, stream_encoding, env_vars):
|
||||
+ """Returns expected child process details for a given encoding"""
|
||||
+ _stream = stream_encoding + ":{}"
|
||||
+ # stdin and stdout should use surrogateescape either because the
|
||||
+ # coercion triggered, or because the C locale was detected
|
||||
+ stream_info = 2*[_stream.format("surrogateescape")]
|
||||
+ # stderr should always use backslashreplace
|
||||
+ stream_info.append(_stream.format("backslashreplace"))
|
||||
+ expected_lang = env_vars.get("LANG", "not set").lower()
|
||||
+ if coercion_expected:
|
||||
+ expected_lc_ctype = CLI_COERCION_TARGET.lower()
|
||||
+ else:
|
||||
+ expected_lc_ctype = env_vars.get("LC_CTYPE", "not set").lower()
|
||||
+ expected_lc_all = env_vars.get("LC_ALL", "not set").lower()
|
||||
+ env_info = expected_lang, expected_lc_ctype, expected_lc_all
|
||||
+ return dict(cls(fs_encoding, *stream_info, *env_info)._asdict())
|
||||
+
|
||||
+ @staticmethod
|
||||
+ def _handle_output_variations(data):
|
||||
+ """Adjust the output to handle platform specific idiosyncrasies
|
||||
+
|
||||
+ * Some platforms report ASCII as ANSI_X3.4-1968
|
||||
+ * Some platforms report ASCII as US-ASCII
|
||||
+ * Some platforms report UTF-8 instead of utf-8
|
||||
+ """
|
||||
+ data = data.replace(b"ANSI_X3.4-1968", b"ascii")
|
||||
+ data = data.replace(b"US-ASCII", b"ascii")
|
||||
+ data = data.lower()
|
||||
+ return data
|
||||
+
|
||||
+ @classmethod
|
||||
+ def get_child_details(cls, env_vars):
|
||||
+ """Retrieves fsencoding and standard stream details from a child process
|
||||
+
|
||||
+ Returns (encoding_details, stderr_lines):
|
||||
+
|
||||
+ - encoding_details: EncodingDetails for eager decoding
|
||||
+ - stderr_lines: result of calling splitlines() on the stderr output
|
||||
+
|
||||
+ The child is run in isolated mode if the current interpreter supports
|
||||
+ that.
|
||||
+ """
|
||||
+ result, py_cmd = run_python_until_end(
|
||||
+ "-c", cls.CHILD_PROCESS_SCRIPT,
|
||||
+ __isolated=True,
|
||||
+ **env_vars
|
||||
+ )
|
||||
+ if not result.rc == 0:
|
||||
+ result.fail(py_cmd)
|
||||
+ # All subprocess outputs in this test case should be pure ASCII
|
||||
+ adjusted_output = cls._handle_output_variations(result.out)
|
||||
+ stdout_lines = adjusted_output.decode("ascii").splitlines()
|
||||
+ child_encoding_details = dict(cls(*stdout_lines)._asdict())
|
||||
+ stderr_lines = result.err.decode("ascii").rstrip().splitlines()
|
||||
+ return child_encoding_details, stderr_lines
|
||||
+
|
||||
+
|
||||
+# Details of the shared library warning emitted at runtime
|
||||
+LEGACY_LOCALE_WARNING = (
|
||||
+ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII "
|
||||
+ "encoding), which may cause Unicode compatibility problems. Using C.UTF-8, "
|
||||
+ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible "
|
||||
+ "locales is recommended."
|
||||
+)
|
||||
+
|
||||
+# Details of the CLI locale coercion warning emitted at runtime
|
||||
+CLI_COERCION_WARNING_FMT = (
|
||||
+ "Python detected LC_CTYPE=C: LC_CTYPE coerced to {} (set another locale "
|
||||
+ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior)."
|
||||
+)
|
||||
+
|
||||
+
|
||||
+AVAILABLE_TARGETS = None
|
||||
+CLI_COERCION_TARGET = None
|
||||
+CLI_COERCION_WARNING = None
|
||||
+
|
||||
+def setUpModule():
|
||||
+ global AVAILABLE_TARGETS
|
||||
+ global CLI_COERCION_TARGET
|
||||
+ global CLI_COERCION_WARNING
|
||||
+
|
||||
+ if AVAILABLE_TARGETS is not None:
|
||||
+ # initialization already done
|
||||
+ return
|
||||
+ AVAILABLE_TARGETS = []
|
||||
+
|
||||
+ # Find the target locales available in the current system
|
||||
+ for target_locale in _C_UTF8_LOCALES:
|
||||
+ if _set_locale_in_subprocess(target_locale):
|
||||
+ AVAILABLE_TARGETS.append(target_locale)
|
||||
+
|
||||
+ if AVAILABLE_TARGETS:
|
||||
+ # Coercion is expected to use the first available target locale
|
||||
+ CLI_COERCION_TARGET = AVAILABLE_TARGETS[0]
|
||||
+ CLI_COERCION_WARNING = CLI_COERCION_WARNING_FMT.format(CLI_COERCION_TARGET)
|
||||
+
|
||||
+
|
||||
+class _LocaleHandlingTestCase(unittest.TestCase):
|
||||
+ # Base class to check expected locale handling behaviour
|
||||
+
|
||||
+ def _check_child_encoding_details(self,
|
||||
+ env_vars,
|
||||
+ expected_fs_encoding,
|
||||
+ expected_stream_encoding,
|
||||
+ expected_warnings,
|
||||
+ coercion_expected):
|
||||
+ """Check the C locale handling for the given process environment
|
||||
+
|
||||
+ Parameters:
|
||||
+ expected_fs_encoding: expected sys.getfilesystemencoding() result
|
||||
+ expected_stream_encoding: expected encoding for standard streams
|
||||
+ expected_warnings: stderr output to expect (if any)
|
||||
+ """
|
||||
+ result = EncodingDetails.get_child_details(env_vars)
|
||||
+ encoding_details, stderr_lines = result
|
||||
+ expected_details = EncodingDetails.get_expected_details(
|
||||
+ coercion_expected,
|
||||
+ expected_fs_encoding,
|
||||
+ expected_stream_encoding,
|
||||
+ env_vars
|
||||
+ )
|
||||
+ self.assertEqual(encoding_details, expected_details)
|
||||
+ if expected_warnings is None:
|
||||
+ expected_warnings = []
|
||||
+ self.assertEqual(stderr_lines, expected_warnings)
|
||||
+
|
||||
+
|
||||
+class LocaleConfigurationTests(_LocaleHandlingTestCase):
|
||||
+ # Test explicit external configuration via the process environment
|
||||
+
|
||||
+ def setUpClass():
|
||||
+ # This relies on setupModule() having been run, so it can't be
|
||||
+ # handled via the @unittest.skipUnless decorator
|
||||
+ if not AVAILABLE_TARGETS:
|
||||
+ raise unittest.SkipTest("No C-with-UTF-8 locale available")
|
||||
+
|
||||
+ def test_external_target_locale_configuration(self):
|
||||
+
|
||||
+ # Explicitly setting a target locale should give the same behaviour as
|
||||
+ # is seen when implicitly coercing to that target locale
|
||||
+ self.maxDiff = None
|
||||
+
|
||||
+ expected_fs_encoding = "utf-8"
|
||||
+ expected_stream_encoding = "utf-8"
|
||||
+
|
||||
+ base_var_dict = {
|
||||
+ "LANG": "",
|
||||
+ "LC_CTYPE": "",
|
||||
+ "LC_ALL": "",
|
||||
+ }
|
||||
+ for env_var in ("LANG", "LC_CTYPE"):
|
||||
+ for locale_to_set in AVAILABLE_TARGETS:
|
||||
+ # XXX (ncoghlan): LANG=UTF-8 doesn't appear to work as
|
||||
+ # expected, so skip that combination for now
|
||||
+ # See https://bugs.python.org/issue30672 for discussion
|
||||
+ if env_var == "LANG" and locale_to_set == "UTF-8":
|
||||
+ continue
|
||||
+
|
||||
+ with self.subTest(env_var=env_var,
|
||||
+ configured_locale=locale_to_set):
|
||||
+ var_dict = base_var_dict.copy()
|
||||
+ var_dict[env_var] = locale_to_set
|
||||
+ self._check_child_encoding_details(var_dict,
|
||||
+ expected_fs_encoding,
|
||||
+ expected_stream_encoding,
|
||||
+ expected_warnings=None,
|
||||
+ coercion_expected=False)
|
||||
+
|
||||
+
|
||||
+
|
||||
+@test.support.cpython_only
|
||||
+@unittest.skipUnless(sysconfig.get_config_var("PY_COERCE_C_LOCALE"),
|
||||
+ "C locale coercion disabled at build time")
|
||||
+class LocaleCoercionTests(_LocaleHandlingTestCase):
|
||||
+ # Test implicit reconfiguration of the environment during CLI startup
|
||||
+
|
||||
+ def _check_c_locale_coercion(self,
|
||||
+ fs_encoding, stream_encoding,
|
||||
+ coerce_c_locale,
|
||||
+ expected_warnings=None,
|
||||
+ coercion_expected=True,
|
||||
+ **extra_vars):
|
||||
+ """Check the C locale handling for various configurations
|
||||
+
|
||||
+ Parameters:
|
||||
+ fs_encoding: expected sys.getfilesystemencoding() result
|
||||
+ stream_encoding: expected encoding for standard streams
|
||||
+ coerce_c_locale: setting to use for PYTHONCOERCECLOCALE
|
||||
+ None: don't set the variable at all
|
||||
+ str: the value set in the child's environment
|
||||
+ expected_warnings: expected warning lines on stderr
|
||||
+ extra_vars: additional environment variables to set in subprocess
|
||||
+ """
|
||||
+ self.maxDiff = None
|
||||
+
|
||||
+ if not AVAILABLE_TARGETS:
|
||||
+ # Locale coercion is disabled when there aren't any target locales
|
||||
+ fs_encoding = C_LOCALE_FS_ENCODING
|
||||
+ stream_encoding = C_LOCALE_STREAM_ENCODING
|
||||
+ coercion_expected = False
|
||||
+ if expected_warnings:
|
||||
+ expected_warnings = [LEGACY_LOCALE_WARNING]
|
||||
+
|
||||
+ base_var_dict = {
|
||||
+ "LANG": "",
|
||||
+ "LC_CTYPE": "",
|
||||
+ "LC_ALL": "",
|
||||
+ }
|
||||
+ base_var_dict.update(extra_vars)
|
||||
+ for env_var in ("LANG", "LC_CTYPE"):
|
||||
+ for locale_to_set in ("", "C", "POSIX", "invalid.ascii"):
|
||||
+ # XXX (ncoghlan): *BSD platforms don't behave as expected in the
|
||||
+ # POSIX locale, so we skip that for now
|
||||
+ # See https://bugs.python.org/issue30672 for discussion
|
||||
+ if locale_to_set == "POSIX":
|
||||
+ continue
|
||||
+ with self.subTest(env_var=env_var,
|
||||
+ nominal_locale=locale_to_set,
|
||||
+ PYTHONCOERCECLOCALE=coerce_c_locale):
|
||||
+ var_dict = base_var_dict.copy()
|
||||
+ var_dict[env_var] = locale_to_set
|
||||
+ if coerce_c_locale is not None:
|
||||
+ var_dict["PYTHONCOERCECLOCALE"] = coerce_c_locale
|
||||
+ # Check behaviour on successful coercion
|
||||
+ self._check_child_encoding_details(var_dict,
|
||||
+ fs_encoding,
|
||||
+ stream_encoding,
|
||||
+ expected_warnings,
|
||||
+ coercion_expected)
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_not_set(self):
|
||||
+ # This should coerce to the first available target locale by default
|
||||
+ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=None)
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_not_zero(self):
|
||||
+ # *Any* string other than "0" is considered "set" for our purposes
|
||||
+ # and hence should result in the locale coercion being enabled
|
||||
+ for setting in ("", "1", "true", "false"):
|
||||
+ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=setting)
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_set_to_warn(self):
|
||||
+ # PYTHONCOERCECLOCALE=warn enables runtime warnings for legacy locales
|
||||
+ self._check_c_locale_coercion("utf-8", "utf-8",
|
||||
+ coerce_c_locale="warn",
|
||||
+ expected_warnings=[CLI_COERCION_WARNING])
|
||||
+
|
||||
+
|
||||
+ def test_PYTHONCOERCECLOCALE_set_to_zero(self):
|
||||
+ # The setting "0" should result in the locale coercion being disabled
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale="0",
|
||||
+ coercion_expected=False)
|
||||
+ # Setting LC_ALL=C shouldn't make any difference to the behaviour
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale="0",
|
||||
+ LC_ALL="C",
|
||||
+ coercion_expected=False)
|
||||
+
|
||||
+ def test_LC_ALL_set_to_C(self):
|
||||
+ # Setting LC_ALL should render the locale coercion ineffective
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale=None,
|
||||
+ LC_ALL="C",
|
||||
+ coercion_expected=False)
|
||||
+ # And result in a warning about a lack of locale compatibility
|
||||
+ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING,
|
||||
+ C_LOCALE_STREAM_ENCODING,
|
||||
+ coerce_c_locale="warn",
|
||||
+ LC_ALL="C",
|
||||
+ expected_warnings=[LEGACY_LOCALE_WARNING],
|
||||
+ coercion_expected=False)
|
||||
+
|
||||
+def test_main():
|
||||
+ test.support.run_unittest(
|
||||
+ LocaleConfigurationTests,
|
||||
+ LocaleCoercionTests
|
||||
+ )
|
||||
+ test.support.reap_children()
|
||||
+
|
||||
+if __name__ == "__main__":
|
||||
+ test_main()
|
||||
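
The test module above drives a child interpreter with various LANG/LC_CTYPE/LC_ALL/PYTHONCOERCECLOCALE combinations and compares the resulting encodings and warnings against the expected values. Once the patched interpreter is built, the suite can be run on its own; a minimal sketch, assuming the module is installed under the upstream name test_c_locale_coercion and the usual regrtest entry point is available:

    # Run only the locale coercion tests against a local build
    ./python -m test -v test_c_locale_coercion
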
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
|
||||
index 38156b4..5922ed9 100644
|
||||
--- a/Lib/test/test_cmd_line.py
|
||||
+++ b/Lib/test/test_cmd_line.py
|
||||
@@ -153,6 +153,7 @@ class CmdLineTest(unittest.TestCase):
|
||||
env = os.environ.copy()
|
||||
# Use C locale to get ascii for the locale encoding
|
||||
env['LC_ALL'] = 'C'
|
||||
+ env['PYTHONCOERCECLOCALE'] = '0'
|
||||
code = (
|
||||
b'import locale; '
|
||||
b'print(ascii("' + undecodable + b'"), '
|
||||
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
|
||||
index 7866a5c..b41239a 100644
|
||||
--- a/Lib/test/test_sys.py
|
||||
+++ b/Lib/test/test_sys.py
|
||||
@@ -680,6 +680,7 @@ class SysModuleTest(unittest.TestCase):
|
||||
# Force the POSIX locale
|
||||
env = os.environ.copy()
|
||||
env["LC_ALL"] = "C"
|
||||
+ env["PYTHONCOERCECLOCALE"] = "0"
|
||||
code = '\n'.join((
|
||||
'import sys',
|
||||
'def dump(name):',
|
||||
diff --git a/Modules/main.c b/Modules/main.c
|
||||
index 585d696..96d8be4 100644
|
||||
--- a/Modules/main.c
|
||||
+++ b/Modules/main.c
|
||||
@@ -107,7 +107,11 @@ static const char usage_6[] =
|
||||
" predictable seed.\n"
|
||||
"PYTHONMALLOC: set the Python memory allocators and/or install debug hooks\n"
|
||||
" on Python memory allocators. Use PYTHONMALLOC=debug to install debug\n"
|
||||
-" hooks.\n";
|
||||
+" hooks.\n"
|
||||
+
|
||||
+"PYTHONCOERCECLOCALE: if this variable is set to 0, it disables the locale\n"
|
||||
+" coercion behavior. Use PYTHONCOERCECLOCALE=warn to request display of\n"
|
||||
+" locale coercion and locale compatibility warnings on stderr.\n";
|
||||
|
||||
static int
|
||||
usage(int exitcode, const wchar_t* program)
|
||||
diff --git a/Programs/_testembed.c b/Programs/_testembed.c
|
||||
index 813cf30..2a64092 100644
|
||||
--- a/Programs/_testembed.c
|
||||
+++ b/Programs/_testembed.c
|
||||
@@ -1,4 +1,5 @@
|
||||
#include <Python.h>
|
||||
+#include "pyconfig.h"
|
||||
#include "pythread.h"
|
||||
#include <stdio.h>
|
||||
|
||||
diff --git a/Programs/python.c b/Programs/python.c
|
||||
index a7afbc7..03f8295 100644
|
||||
--- a/Programs/python.c
|
||||
+++ b/Programs/python.c
|
||||
@@ -15,6 +15,21 @@ wmain(int argc, wchar_t **argv)
|
||||
}
|
||||
#else
|
||||
|
||||
+/* Access private pylifecycle helper API to better handle the legacy C locale
|
||||
+ *
|
||||
+ * The legacy C locale assumes ASCII as the default text encoding, which
|
||||
+ * causes problems not only for the CPython runtime, but also other
|
||||
+ * components like GNU readline.
|
||||
+ *
|
||||
+ * Accordingly, when the CLI detects it, it attempts to coerce it to a
|
||||
+ * more capable UTF-8 based alternative.
|
||||
+ *
|
||||
+ * See the documentation of the PYTHONCOERCECLOCALE setting for more details.
|
||||
+ *
|
||||
+ */
|
||||
+extern int _Py_LegacyLocaleDetected(void);
|
||||
+extern void _Py_CoerceLegacyLocale(void);
|
||||
+
|
||||
int
|
||||
main(int argc, char **argv)
|
||||
{
|
||||
@@ -25,7 +40,11 @@ main(int argc, char **argv)
|
||||
char *oldloc;
|
||||
|
||||
/* Force malloc() allocator to bootstrap Python */
|
||||
+#ifdef Py_DEBUG
|
||||
+ (void)_PyMem_SetupAllocators("malloc_debug");
|
||||
+# else
|
||||
(void)_PyMem_SetupAllocators("malloc");
|
||||
+# endif
|
||||
|
||||
argv_copy = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1));
|
||||
argv_copy2 = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1));
|
||||
@@ -49,7 +68,21 @@ main(int argc, char **argv)
|
||||
return 1;
|
||||
}
|
||||
|
||||
+#ifdef __ANDROID__
|
||||
+ /* Passing "" to setlocale() on Android requests the C locale rather
|
||||
+ * than checking environment variables, so request C.UTF-8 explicitly
|
||||
+ */
|
||||
+ setlocale(LC_ALL, "C.UTF-8");
|
||||
+#else
|
||||
+ /* Reconfigure the locale to the default for this process */
|
||||
setlocale(LC_ALL, "");
|
||||
+#endif
|
||||
+
|
||||
+ if (_Py_LegacyLocaleDetected()) {
|
||||
+ _Py_CoerceLegacyLocale();
|
||||
+ }
|
||||
+
|
||||
+ /* Convert from char to wchar_t based on the locale settings */
|
||||
for (i = 0; i < argc; i++) {
|
||||
argv_copy[i] = Py_DecodeLocale(argv[i], NULL);
|
||||
if (!argv_copy[i]) {
|
||||
@@ -70,7 +103,11 @@ main(int argc, char **argv)
|
||||
|
||||
/* Force again malloc() allocator to release memory blocks allocated
|
||||
before Py_Main() */
|
||||
+#ifdef Py_DEBUG
|
||||
+ (void)_PyMem_SetupAllocators("malloc_debug");
|
||||
+# else
|
||||
(void)_PyMem_SetupAllocators("malloc");
|
||||
+# endif
|
||||
|
||||
for (i = 0; i < argc; i++) {
|
||||
PyMem_RawFree(argv_copy2[i]);
|
||||
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
|
||||
index ecfdfee..4fee178 100644
|
||||
--- a/Python/pylifecycle.c
|
||||
+++ b/Python/pylifecycle.c
|
||||
@@ -167,6 +167,7 @@ Py_SetStandardStreamEncoding(const char *encoding, const char *errors)
|
||||
return 0;
|
||||
}
|
||||
|
||||
+
|
||||
/* Global initializations. Can be undone by Py_FinalizeEx(). Don't
|
||||
call this twice without an intervening Py_FinalizeEx() call. When
|
||||
initializations fail, a fatal error is issued and the function does
|
||||
@@ -301,6 +302,183 @@ import_init(PyInterpreterState *interp, PyObject *sysmod)
|
||||
}
|
||||
|
||||
|
||||
+/* Helper functions to better handle the legacy C locale
|
||||
+ *
|
||||
+ * The legacy C locale assumes ASCII as the default text encoding, which
|
||||
+ * causes problems not only for the CPython runtime, but also other
|
||||
+ * components like GNU readline.
|
||||
+ *
|
||||
+ * Accordingly, when the CLI detects it, it attempts to coerce it to a
|
||||
+ * more capable UTF-8 based alternative as follows:
|
||||
+ *
|
||||
+ * if (_Py_LegacyLocaleDetected()) {
|
||||
+ * _Py_CoerceLegacyLocale();
|
||||
+ * }
|
||||
+ *
|
||||
+ * See the documentation of the PYTHONCOERCECLOCALE setting for more details.
|
||||
+ *
|
||||
+ * Locale coercion also impacts the default error handler for the standard
|
||||
+ * streams: while the usual default is "strict", the default for the legacy
|
||||
+ * C locale and for any of the coercion target locales is "surrogateescape".
|
||||
+ */
|
||||
+
|
||||
+int
|
||||
+_Py_LegacyLocaleDetected(void)
|
||||
+{
|
||||
+#ifndef MS_WINDOWS
|
||||
+ /* On non-Windows systems, the C locale is considered a legacy locale */
|
||||
+ /* XXX (ncoghlan): some platforms (notably Mac OS X) don't appear to treat
|
||||
+ * the POSIX locale as a simple alias for the C locale, so
|
||||
+ * we may also want to check for that explicitly.
|
||||
+ */
|
||||
+ const char *ctype_loc = setlocale(LC_CTYPE, NULL);
|
||||
+ return ctype_loc != NULL && strcmp(ctype_loc, "C") == 0;
|
||||
+#else
|
||||
+ /* Windows uses code pages instead of locales, so no locale is legacy */
|
||||
+ return 0;
|
||||
+#endif
|
||||
+}
|
||||
+
|
||||
+
|
||||
+static const char *_C_LOCALE_WARNING =
|
||||
+ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII "
|
||||
+ "encoding), which may cause Unicode compatibility problems. Using C.UTF-8, "
|
||||
+ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible "
|
||||
+ "locales is recommended.\n";
|
||||
+
|
||||
+static int
|
||||
+_legacy_locale_warnings_enabled(void)
|
||||
+{
|
||||
+ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE");
|
||||
+ return (coerce_c_locale != NULL &&
|
||||
+ strncmp(coerce_c_locale, "warn", 5) == 0);
|
||||
+}
|
||||
+
|
||||
+static void
|
||||
+_emit_stderr_warning_for_legacy_locale(void)
|
||||
+{
|
||||
+ if (_legacy_locale_warnings_enabled()) {
|
||||
+ if (_Py_LegacyLocaleDetected()) {
|
||||
+ fprintf(stderr, "%s", _C_LOCALE_WARNING);
|
||||
+ }
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+typedef struct _CandidateLocale {
|
||||
+ const char *locale_name; /* The locale to try as a coercion target */
|
||||
+} _LocaleCoercionTarget;
|
||||
+
|
||||
+static _LocaleCoercionTarget _TARGET_LOCALES[] = {
|
||||
+ {"C.UTF-8"},
|
||||
+ {"C.utf8"},
|
||||
+ {"UTF-8"},
|
||||
+ {NULL}
|
||||
+};
|
||||
+
|
||||
+static char *
|
||||
+get_default_standard_stream_error_handler(void)
|
||||
+{
|
||||
+ const char *ctype_loc = setlocale(LC_CTYPE, NULL);
|
||||
+ if (ctype_loc != NULL) {
|
||||
+ /* "surrogateescape" is the default in the legacy C locale */
|
||||
+ if (strcmp(ctype_loc, "C") == 0) {
|
||||
+ return "surrogateescape";
|
||||
+ }
|
||||
+
|
||||
+#ifdef PY_COERCE_C_LOCALE
|
||||
+ /* "surrogateescape" is the default in locale coercion target locales */
|
||||
+ const _LocaleCoercionTarget *target = NULL;
|
||||
+ for (target = _TARGET_LOCALES; target->locale_name; target++) {
|
||||
+ if (strcmp(ctype_loc, target->locale_name) == 0) {
|
||||
+ return "surrogateescape";
|
||||
+ }
|
||||
+ }
|
||||
+#endif
|
||||
+ }
|
||||
+
|
||||
+ /* Otherwise return NULL to request the typical default error handler */
|
||||
+ return NULL;
|
||||
+}
|
||||
+
|
||||
+#ifdef PY_COERCE_C_LOCALE
|
||||
+static const char *_C_LOCALE_COERCION_WARNING =
|
||||
+ "Python detected LC_CTYPE=C: LC_CTYPE coerced to %.20s (set another locale "
|
||||
+ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior).\n";
|
||||
+
|
||||
+static void
|
||||
+_coerce_default_locale_settings(const _LocaleCoercionTarget *target)
|
||||
+{
|
||||
+
|
||||
+ const char *newloc = target->locale_name;
|
||||
+
|
||||
+ /* Reset locale back to currently configured defaults */
|
||||
+ setlocale(LC_ALL, "");
|
||||
+
|
||||
+ /* Set the relevant locale environment variable */
|
||||
+ if (setenv("LC_CTYPE", newloc, 1)) {
|
||||
+ fprintf(stderr,
|
||||
+ "Error setting LC_CTYPE, skipping C locale coercion\n");
|
||||
+ return;
|
||||
+ }
|
||||
+ if (_legacy_locale_warnings_enabled()) {
|
||||
+ fprintf(stderr, _C_LOCALE_COERCION_WARNING, newloc);
|
||||
+ }
|
||||
+
|
||||
+ /* Reconfigure with the overridden environment variables */
|
||||
+ setlocale(LC_ALL, "");
|
||||
+}
|
||||
+#endif
|
||||
+
|
||||
+
|
||||
+void
|
||||
+_Py_CoerceLegacyLocale(void)
|
||||
+{
|
||||
+#ifdef PY_COERCE_C_LOCALE
|
||||
+ /* We ignore the Python -E and -I flags here, as the CLI needs to sort out
|
||||
+ * the locale settings *before* we try to do anything with the command
|
||||
+ * line arguments. For cross-platform debugging purposes, we also need
|
||||
+ * to give end users a way to force even scripts that are otherwise
|
||||
+ * isolated from their environment to use the legacy ASCII-centric C
|
||||
+ * locale.
|
||||
+ *
|
||||
+ * Ignoring -E and -I is safe from a security perspective, as we only use
|
||||
+ * the setting to turn *off* the implicit locale coercion, and anyone with
|
||||
+ * access to the process environment already has the ability to set
|
||||
+ * `LC_ALL=C` to override the C level locale settings anyway.
|
||||
+ */
|
||||
+ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE");
|
||||
+ if (coerce_c_locale == NULL || strncmp(coerce_c_locale, "0", 2) != 0) {
|
||||
+ /* PYTHONCOERCECLOCALE is not set, or is set to something other than "0" */
|
||||
+ const char *locale_override = getenv("LC_ALL");
|
||||
+ if (locale_override == NULL || *locale_override == '\0') {
|
||||
+ /* LC_ALL is also not set (or is set to an empty string) */
|
||||
+ const _LocaleCoercionTarget *target = NULL;
|
||||
+ for (target = _TARGET_LOCALES; target->locale_name; target++) {
|
||||
+ const char *new_locale = setlocale(LC_CTYPE,
|
||||
+ target->locale_name);
|
||||
+ if (new_locale != NULL) {
|
||||
+#if !defined(__APPLE__) && defined(HAVE_LANGINFO_H) && defined(CODESET)
|
||||
+ /* Also ensure that nl_langinfo works in this locale */
|
||||
+ char *codeset = nl_langinfo(CODESET);
|
||||
+ if (!codeset || *codeset == '\0') {
|
||||
+ /* CODESET is not set or empty, so skip coercion */
|
||||
+ new_locale = NULL;
|
||||
+ setlocale(LC_CTYPE, "");
|
||||
+ continue;
|
||||
+ }
|
||||
+#endif
|
||||
+ /* Successfully configured locale, so make it the default */
|
||||
+ _coerce_default_locale_settings(target);
|
||||
+ return;
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ /* No C locale warning here, as Py_Initialize will emit one later */
|
||||
+#endif
|
||||
+}
|
||||
+
|
||||
+
|
||||
void
|
||||
_Py_InitializeEx_Private(int install_sigs, int install_importlib)
|
||||
{
|
||||
@@ -315,11 +493,19 @@ _Py_InitializeEx_Private(int install_sigs, int install_importlib)
|
||||
initialized = 1;
|
||||
_Py_Finalizing = NULL;
|
||||
|
||||
-#ifdef HAVE_SETLOCALE
|
||||
+#ifdef __ANDROID__
|
||||
+ /* Passing "" to setlocale() on Android requests the C locale rather
|
||||
+ * than checking environment variables, so request C.UTF-8 explicitly
|
||||
+ */
|
||||
+ setlocale(LC_CTYPE, "C.UTF-8");
|
||||
+#else
|
||||
+#ifndef MS_WINDOWS
|
||||
/* Set up the LC_CTYPE locale, so we can obtain
|
||||
the locale's charset without having to switch
|
||||
locales. */
|
||||
setlocale(LC_CTYPE, "");
|
||||
+ _emit_stderr_warning_for_legacy_locale();
|
||||
+#endif
|
||||
#endif
|
||||
|
||||
if ((p = Py_GETENV("PYTHONDEBUG")) && *p != '\0')
|
||||
@@ -1247,12 +1433,8 @@ initstdio(void)
|
||||
}
|
||||
}
|
||||
if (!errors && !(pythonioencoding && *pythonioencoding)) {
|
||||
- /* When the LC_CTYPE locale is the POSIX locale ("C locale"),
|
||||
- stdin and stdout use the surrogateescape error handler by
|
||||
- default, instead of the strict error handler. */
|
||||
- char *loc = setlocale(LC_CTYPE, NULL);
|
||||
- if (loc != NULL && strcmp(loc, "C") == 0)
|
||||
- errors = "surrogateescape";
|
||||
+ /* Choose the default error handler based on the current locale */
|
||||
+ errors = get_default_standard_stream_error_handler();
|
||||
}
|
||||
}
|
||||
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 3f2459a..7444486 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -3360,6 +3360,40 @@ then
|
||||
fi
|
||||
AC_MSG_RESULT($with_pymalloc)
|
||||
|
||||
+# Check for --with-c-locale-coercion
|
||||
+AC_MSG_CHECKING(for --with-c-locale-coercion)
|
||||
+AC_ARG_WITH(c-locale-coercion,
|
||||
+ AS_HELP_STRING([--with(out)-c-locale-coercion],
|
||||
+ [disable/enable C locale coercion to a UTF-8 based locale]))
|
||||
+
|
||||
+if test -z "$with_c_locale_coercion"
|
||||
+then
|
||||
+ with_c_locale_coercion="yes"
|
||||
+fi
|
||||
+if test "$with_c_locale_coercion" != "no"
|
||||
+then
|
||||
+ AC_DEFINE(PY_COERCE_C_LOCALE, 1,
|
||||
+ [Define if you want to coerce the C locale to a UTF-8 based locale])
|
||||
+fi
|
||||
+AC_MSG_RESULT($with_c_locale_coercion)
|
||||
+
|
||||
+# Check for --with-c-locale-warning
|
||||
+AC_MSG_CHECKING(for --with-c-locale-warning)
|
||||
+AC_ARG_WITH(c-locale-warning,
|
||||
+ AS_HELP_STRING([--with(out)-c-locale-warning],
|
||||
+ [disable/enable locale compatibility warning in the C locale]))
|
||||
+
|
||||
+if test -z "$with_c_locale_warning"
|
||||
+then
|
||||
+ with_c_locale_warning="yes"
|
||||
+fi
|
||||
+if test "$with_c_locale_warning" != "no"
|
||||
+then
|
||||
+ AC_DEFINE(PY_WARN_ON_C_LOCALE, 1,
|
||||
+ [Define to emit a locale compatibility warning in the C locale])
|
||||
+fi
|
||||
+AC_MSG_RESULT($with_c_locale_warning)
|
||||
+
|
||||
# Check for Valgrind support
|
||||
AC_MSG_CHECKING([for --with-valgrind])
|
||||
AC_ARG_WITH([valgrind],
|
58
SOURCES/00274-fix-arch-names.patch
Normal file
58
SOURCES/00274-fix-arch-names.patch
Normal file
@ -0,0 +1,58 @@
|
||||
diff -up Python-3.5.0/configure.ac.than Python-3.5.0/configure.ac
|
||||
--- Python-3.5.0/configure.ac.than 2015-11-13 11:51:32.039560172 -0500
|
||||
+++ Python-3.5.0/configure.ac 2015-11-13 11:52:11.670168157 -0500
|
||||
@@ -788,9 +788,9 @@ cat >> conftest.c <<EOF
|
||||
alpha-linux-gnu
|
||||
# elif defined(__ARM_EABI__) && defined(__ARM_PCS_VFP)
|
||||
# if defined(__ARMEL__)
|
||||
- arm-linux-gnueabihf
|
||||
+ arm-linux-gnueabi
|
||||
# else
|
||||
- armeb-linux-gnueabihf
|
||||
+ armeb-linux-gnueabi
|
||||
# endif
|
||||
# elif defined(__ARM_EABI__) && !defined(__ARM_PCS_VFP)
|
||||
# if defined(__ARMEL__)
|
||||
@@ -810,7 +810,7 @@ cat >> conftest.c <<EOF
|
||||
# elif _MIPS_SIM == _ABIN32
|
||||
mips64el-linux-gnuabin32
|
||||
# elif _MIPS_SIM == _ABI64
|
||||
- mips64el-linux-gnuabi64
|
||||
+ mips64el-linux-gnu
|
||||
# else
|
||||
# error unknown platform triplet
|
||||
# endif
|
||||
@@ -820,7 +820,7 @@ cat >> conftest.c <<EOF
|
||||
# elif _MIPS_SIM == _ABIN32
|
||||
mips64-linux-gnuabin32
|
||||
# elif _MIPS_SIM == _ABI64
|
||||
- mips64-linux-gnuabi64
|
||||
+ mips64-linux-gnu
|
||||
# else
|
||||
# error unknown platform triplet
|
||||
# endif
|
||||
@@ -830,9 +830,9 @@ cat >> conftest.c <<EOF
|
||||
powerpc-linux-gnuspe
|
||||
# elif defined(__powerpc64__)
|
||||
# if defined(__LITTLE_ENDIAN__)
|
||||
- powerpc64le-linux-gnu
|
||||
+ ppc64le-linux-gnu
|
||||
# else
|
||||
- powerpc64-linux-gnu
|
||||
+ ppc64-linux-gnu
|
||||
# endif
|
||||
# elif defined(__powerpc__)
|
||||
powerpc-linux-gnu
|
||||
diff --git a/config.sub b/config.sub
|
||||
index 40ea5df..932128b 100755
|
||||
--- a/config.sub
|
||||
+++ b/config.sub
|
||||
@@ -1045,7 +1045,7 @@ case $basic_machine in
|
||||
;;
|
||||
ppc64) basic_machine=powerpc64-unknown
|
||||
;;
|
||||
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
|
||||
+ ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
|
||||
;;
|
||||
ppc64le | powerpc64little)
|
||||
basic_machine=powerpc64le-unknown
|
228
SOURCES/00294-define-TLS-cipher-suite-on-build-time.patch
Normal file
228
SOURCES/00294-define-TLS-cipher-suite-on-build-time.patch
Normal file
@ -0,0 +1,228 @@
|
||||
diff --git a/Lib/ssl.py b/Lib/ssl.py
|
||||
index 1f3a31a..b54a684 100644
|
||||
--- a/Lib/ssl.py
|
||||
+++ b/Lib/ssl.py
|
||||
@@ -116,6 +116,7 @@ except ImportError:
|
||||
|
||||
|
||||
from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN, HAS_TLSv1_3
|
||||
+from _ssl import _DEFAULT_CIPHERS
|
||||
from _ssl import _OPENSSL_API_VERSION
|
||||
|
||||
|
||||
@@ -174,48 +175,7 @@ else:
|
||||
CHANNEL_BINDING_TYPES = []
|
||||
|
||||
|
||||
-# Disable weak or insecure ciphers by default
|
||||
-# (OpenSSL's default setting is 'DEFAULT:!aNULL:!eNULL')
|
||||
-# Enable a better set of ciphers by default
|
||||
-# This list has been explicitly chosen to:
|
||||
-# * TLS 1.3 ChaCha20 and AES-GCM cipher suites
|
||||
-# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE)
|
||||
-# * Prefer ECDHE over DHE for better performance
|
||||
-# * Prefer AEAD over CBC for better performance and security
|
||||
-# * Prefer AES-GCM over ChaCha20 because most platforms have AES-NI
|
||||
-# (ChaCha20 needs OpenSSL 1.1.0 or patched 1.0.2)
|
||||
-# * Prefer any AES-GCM and ChaCha20 over any AES-CBC for better
|
||||
-# performance and security
|
||||
-# * Then Use HIGH cipher suites as a fallback
|
||||
-# * Disable NULL authentication, NULL encryption, 3DES and MD5 MACs
|
||||
-# for security reasons
|
||||
-_DEFAULT_CIPHERS = (
|
||||
- 'TLS13-AES-256-GCM-SHA384:TLS13-CHACHA20-POLY1305-SHA256:'
|
||||
- 'TLS13-AES-128-GCM-SHA256:'
|
||||
- 'ECDH+AESGCM:ECDH+CHACHA20:DH+AESGCM:DH+CHACHA20:ECDH+AES256:DH+AES256:'
|
||||
- 'ECDH+AES128:DH+AES:ECDH+HIGH:DH+HIGH:RSA+AESGCM:RSA+AES:RSA+HIGH:'
|
||||
- '!aNULL:!eNULL:!MD5:!3DES'
|
||||
- )
|
||||
-
|
||||
-# Restricted and more secure ciphers for the server side
|
||||
-# This list has been explicitly chosen to:
|
||||
-# * TLS 1.3 ChaCha20 and AES-GCM cipher suites
|
||||
-# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE)
|
||||
-# * Prefer ECDHE over DHE for better performance
|
||||
-# * Prefer AEAD over CBC for better performance and security
|
||||
-# * Prefer AES-GCM over ChaCha20 because most platforms have AES-NI
|
||||
-# * Prefer any AES-GCM and ChaCha20 over any AES-CBC for better
|
||||
-# performance and security
|
||||
-# * Then Use HIGH cipher suites as a fallback
|
||||
-# * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, RC4, and
|
||||
-# 3DES for security reasons
|
||||
-_RESTRICTED_SERVER_CIPHERS = (
|
||||
- 'TLS13-AES-256-GCM-SHA384:TLS13-CHACHA20-POLY1305-SHA256:'
|
||||
- 'TLS13-AES-128-GCM-SHA256:'
|
||||
- 'ECDH+AESGCM:ECDH+CHACHA20:DH+AESGCM:DH+CHACHA20:ECDH+AES256:DH+AES256:'
|
||||
- 'ECDH+AES128:DH+AES:ECDH+HIGH:DH+HIGH:RSA+AESGCM:RSA+AES:RSA+HIGH:'
|
||||
- '!aNULL:!eNULL:!MD5:!DSS:!RC4:!3DES'
|
||||
-)
|
||||
+_RESTRICTED_SERVER_CIPHERS = _DEFAULT_CIPHERS
|
||||
|
||||
|
||||
class CertificateError(ValueError):
|
||||
@@ -389,8 +349,6 @@ class SSLContext(_SSLContext):
|
||||
|
||||
def __new__(cls, protocol=PROTOCOL_TLS, *args, **kwargs):
|
||||
self = _SSLContext.__new__(cls, protocol)
|
||||
- if protocol != _SSLv2_IF_EXISTS:
|
||||
- self.set_ciphers(_DEFAULT_CIPHERS)
|
||||
return self
|
||||
|
||||
def __init__(self, protocol=PROTOCOL_TLS):
|
||||
@@ -505,8 +463,6 @@ def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None,
|
||||
# verify certs and host name in client mode
|
||||
context.verify_mode = CERT_REQUIRED
|
||||
context.check_hostname = True
|
||||
- elif purpose == Purpose.CLIENT_AUTH:
|
||||
- context.set_ciphers(_RESTRICTED_SERVER_CIPHERS)
|
||||
|
||||
if cafile or capath or cadata:
|
||||
context.load_verify_locations(cafile, capath, cadata)
|
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
|
||||
index 9785a59..34a7ec2 100644
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -18,6 +18,7 @@ import asyncore
|
||||
import weakref
|
||||
import platform
|
||||
import functools
|
||||
+import sysconfig
|
||||
try:
|
||||
import ctypes
|
||||
except ImportError:
|
||||
@@ -36,7 +37,7 @@ PROTOCOLS = sorted(ssl._PROTOCOL_NAMES)
|
||||
HOST = support.HOST
|
||||
IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL')
|
||||
IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0)
|
||||
-
|
||||
+PY_SSL_DEFAULT_CIPHERS = sysconfig.get_config_var('PY_SSL_DEFAULT_CIPHERS')
|
||||
|
||||
def data_file(*name):
|
||||
return os.path.join(os.path.dirname(__file__), *name)
|
||||
@@ -889,6 +890,19 @@ class ContextTests(unittest.TestCase):
|
||||
with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"):
|
||||
ctx.set_ciphers("^$:,;?*'dorothyx")
|
||||
|
||||
+ @unittest.skipUnless(PY_SSL_DEFAULT_CIPHERS == 1,
|
||||
+ "Test applies only to Python default ciphers")
|
||||
+ def test_python_ciphers(self):
|
||||
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
+ ciphers = ctx.get_ciphers()
|
||||
+ for suite in ciphers:
|
||||
+ name = suite['name']
|
||||
+ self.assertNotIn("PSK", name)
|
||||
+ self.assertNotIn("SRP", name)
|
||||
+ self.assertNotIn("MD5", name)
|
||||
+ self.assertNotIn("RC4", name)
|
||||
+ self.assertNotIn("3DES", name)
|
||||
+
|
||||
@unittest.skipIf(ssl.OPENSSL_VERSION_INFO < (1, 0, 2, 0, 0), 'OpenSSL too old')
|
||||
def test_get_ciphers(self):
|
||||
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
|
||||
index 5e007da..130f006 100644
|
||||
--- a/Modules/_ssl.c
|
||||
+++ b/Modules/_ssl.c
|
||||
@@ -237,6 +237,31 @@ SSL_SESSION_get_ticket_lifetime_hint(const SSL_SESSION *s)
|
||||
|
||||
#endif /* OpenSSL < 1.1.0 or LibreSSL < 2.7.0 */
|
||||
|
||||
+/* Default cipher suites */
|
||||
+#ifndef PY_SSL_DEFAULT_CIPHERS
|
||||
+#define PY_SSL_DEFAULT_CIPHERS 1
|
||||
+#endif
|
||||
+
|
||||
+#if PY_SSL_DEFAULT_CIPHERS == 0
|
||||
+ #ifndef PY_SSL_DEFAULT_CIPHER_STRING
|
||||
+ #error "Py_SSL_DEFAULT_CIPHERS 0 needs Py_SSL_DEFAULT_CIPHER_STRING"
|
||||
+ #endif
|
||||
+#elif PY_SSL_DEFAULT_CIPHERS == 1
|
||||
+/* Python custom selection of sensible cipher suites
|
||||
+ * DEFAULT: OpenSSL's default cipher list. Since 1.0.2 the list is in sensible order.
|
||||
+ * !aNULL:!eNULL: really no NULL ciphers
|
||||
+ * !MD5:!3DES:!DES:!RC4:!IDEA:!SEED: no weak or broken algorithms on old OpenSSL versions.
|
||||
+ * !aDSS: no authentication with discrete logarithm DSA algorithm
|
||||
+ * !SRP:!PSK: no secure remote password or pre-shared key authentication
|
||||
+ */
|
||||
+ #define PY_SSL_DEFAULT_CIPHER_STRING "DEFAULT:!aNULL:!eNULL:!MD5:!3DES:!DES:!RC4:!IDEA:!SEED:!aDSS:!SRP:!PSK"
|
||||
+#elif PY_SSL_DEFAULT_CIPHERS == 2
|
||||
+/* Ignored in SSLContext constructor, only used as _ssl._DEFAULT_CIPHERS */
|
||||
+ #define PY_SSL_DEFAULT_CIPHER_STRING SSL_DEFAULT_CIPHER_LIST
|
||||
+#else
|
||||
+ #error "Unsupported PY_SSL_DEFAULT_CIPHERS"
|
||||
+#endif
|
||||
+
|
||||
|
||||
enum py_ssl_error {
|
||||
/* these mirror ssl.h */
|
||||
@@ -2803,7 +2828,12 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version)
|
||||
/* A bare minimum cipher list without completely broken cipher suites.
|
||||
* It's far from perfect but gives users a better head start. */
|
||||
if (proto_version != PY_SSL_VERSION_SSL2) {
|
||||
- result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL:!MD5");
|
||||
+#if PY_SSL_DEFAULT_CIPHERS == 2
|
||||
+ /* stick to OpenSSL's default settings */
|
||||
+ result = 1;
|
||||
+#else
|
||||
+ result = SSL_CTX_set_cipher_list(ctx, PY_SSL_DEFAULT_CIPHER_STRING);
|
||||
+#endif
|
||||
} else {
|
||||
/* SSLv2 needs MD5 */
|
||||
result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL");
|
||||
@@ -5343,6 +5373,9 @@ PyInit__ssl(void)
|
||||
(PyObject *)&PySSLSession_Type) != 0)
|
||||
return NULL;
|
||||
|
||||
+ PyModule_AddStringConstant(m, "_DEFAULT_CIPHERS",
|
||||
+ PY_SSL_DEFAULT_CIPHER_STRING);
|
||||
+
|
||||
PyModule_AddIntConstant(m, "SSL_ERROR_ZERO_RETURN",
|
||||
PY_SSL_ERROR_ZERO_RETURN);
|
||||
PyModule_AddIntConstant(m, "SSL_ERROR_WANT_READ",
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 3703701..2eff514 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -5598,6 +5598,42 @@ if test "$have_getrandom" = yes; then
|
||||
[Define to 1 if the getrandom() function is available])
|
||||
fi
|
||||
|
||||
+# ssl module default cipher suite string
|
||||
+AH_TEMPLATE(PY_SSL_DEFAULT_CIPHERS,
|
||||
+ [Default cipher suites list for ssl module.
|
||||
+ 1: Python's preferred selection, 2: leave OpenSSL defaults untouched, 0: custom string])
|
||||
+AH_TEMPLATE(PY_SSL_DEFAULT_CIPHER_STRING,
|
||||
+ [Cipher suite string for PY_SSL_DEFAULT_CIPHERS=0]
|
||||
+)
|
||||
+AC_MSG_CHECKING(for --with-ssl-default-suites)
|
||||
+AC_ARG_WITH(ssl-default-suites,
|
||||
+ AS_HELP_STRING([--with-ssl-default-suites=@<:@python|openssl|STRING@:>@],
|
||||
+ [Override default cipher suites string,
|
||||
+ python: use Python's preferred selection (default),
|
||||
+ openssl: leave OpenSSL's defaults untouched,
|
||||
+ STRING: use a custom string,
|
||||
+ PROTOCOL_SSLv2 ignores the setting]),
|
||||
+[
|
||||
+AC_MSG_RESULT($withval)
|
||||
+case "$withval" in
|
||||
+ python)
|
||||
+ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1)
|
||||
+ ;;
|
||||
+ openssl)
|
||||
+ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 2)
|
||||
+ ;;
|
||||
+ *)
|
||||
+ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 0)
|
||||
+ AC_DEFINE_UNQUOTED(PY_SSL_DEFAULT_CIPHER_STRING, "$withval")
|
||||
+ ;;
|
||||
+esac
|
||||
+],
|
||||
+[
|
||||
+AC_MSG_RESULT(python)
|
||||
+AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1)
|
||||
+])
|
||||
+
|
||||
+
|
||||
# generate output files
|
||||
AC_CONFIG_FILES(Makefile.pre Modules/Setup.config Misc/python.pc Misc/python-config.sh)
|
||||
AC_CONFIG_FILES([Modules/ld_so_aix], [chmod +x Modules/ld_so_aix])
|
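
The effect of this patch is that the ssl module's default cipher list becomes a build-time choice, surfaced to Python code as the _ssl._DEFAULT_CIPHERS string, with --with-ssl-default-suites selecting between Python's curated string, OpenSSL's defaults, or a custom string. A quick sketch of how a build could be inspected or reconfigured (interpreter and source-tree paths are illustrative):

    # Show the cipher string this interpreter was built with
    python3 -c 'import ssl; print(ssl._DEFAULT_CIPHERS)'

    # Configure a rebuild that leaves OpenSSL's own defaults untouched
    ./configure --with-ssl-default-suites=openssl
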
137
SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch
Normal file
137
SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch
Normal file
@ -0,0 +1,137 @@
|
||||
diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst
|
||||
index d991254..647af61 100644
|
||||
--- a/Doc/library/urllib.parse.rst
|
||||
+++ b/Doc/library/urllib.parse.rst
|
||||
@@ -121,6 +121,11 @@ or on combining URL components into a URL string.
|
||||
Unmatched square brackets in the :attr:`netloc` attribute will raise a
|
||||
:exc:`ValueError`.
|
||||
|
||||
+ Characters in the :attr:`netloc` attribute that decompose under NFKC
|
||||
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``,
|
||||
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is
|
||||
+ decomposed before parsing, no error will be raised.
|
||||
+
|
||||
.. versionchanged:: 3.2
|
||||
Added IPv6 URL parsing capabilities.
|
||||
|
||||
@@ -133,6 +138,10 @@ or on combining URL components into a URL string.
|
||||
Out-of-range port numbers now raise :exc:`ValueError`, instead of
|
||||
returning :const:`None`.
|
||||
|
||||
+ .. versionchanged:: 3.6.9
|
||||
+ Characters that affect netloc parsing under NFKC normalization will
|
||||
+ now raise :exc:`ValueError`.
|
||||
+
|
||||
|
||||
.. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None)
|
||||
|
||||
@@ -256,10 +265,19 @@ or on combining URL components into a URL string.
|
||||
Unmatched square brackets in the :attr:`netloc` attribute will raise a
|
||||
:exc:`ValueError`.
|
||||
|
||||
+ Characters in the :attr:`netloc` attribute that decompose under NFKC
|
||||
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``,
|
||||
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is
|
||||
+ decomposed before parsing, no error will be raised.
|
||||
+
|
||||
.. versionchanged:: 3.6
|
||||
Out-of-range port numbers now raise :exc:`ValueError`, instead of
|
||||
returning :const:`None`.
|
||||
|
||||
+ .. versionchanged:: 3.6.9
|
||||
+ Characters that affect netloc parsing under NFKC normalization will
|
||||
+ now raise :exc:`ValueError`.
|
||||
+
|
||||
|
||||
.. function:: urlunsplit(parts)
|
||||
|
||||
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
|
||||
index be50b47..68f633c 100644
|
||||
--- a/Lib/test/test_urlparse.py
|
||||
+++ b/Lib/test/test_urlparse.py
|
||||
@@ -1,3 +1,5 @@
|
||||
+import sys
|
||||
+import unicodedata
|
||||
import unittest
|
||||
import urllib.parse
|
||||
|
||||
@@ -984,6 +986,34 @@ class UrlParseTestCase(unittest.TestCase):
|
||||
expected.append(name)
|
||||
self.assertCountEqual(urllib.parse.__all__, expected)
|
||||
|
||||
+ def test_urlsplit_normalization(self):
|
||||
+ # Certain characters should never occur in the netloc,
|
||||
+ # including under normalization.
|
||||
+ # Ensure that ALL of them are detected and cause an error
|
||||
+ illegal_chars = '/:#?@'
|
||||
+ hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars}
|
||||
+ denorm_chars = [
|
||||
+ c for c in map(chr, range(128, sys.maxunicode))
|
||||
+ if (hex_chars & set(unicodedata.decomposition(c).split()))
|
||||
+ and c not in illegal_chars
|
||||
+ ]
|
||||
+ # Sanity check that we found at least one such character
|
||||
+ self.assertIn('\u2100', denorm_chars)
|
||||
+ self.assertIn('\uFF03', denorm_chars)
|
||||
+
|
||||
+ # bpo-36742: Verify port separators are ignored when they
|
||||
+ # existed prior to decomposition
|
||||
+ urllib.parse.urlsplit('http://\u30d5\u309a:80')
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ urllib.parse.urlsplit('http://\u30d5\u309a\ufe1380')
|
||||
+
|
||||
+ for scheme in ["http", "https", "ftp"]:
|
||||
+ for netloc in ["netloc{}false.netloc", "n{}user@netloc"]:
|
||||
+ for c in denorm_chars:
|
||||
+ url = "{}://{}/path".format(scheme, netloc.format(c))
|
||||
+ with self.subTest(url=url, char='{:04X}'.format(ord(c))):
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ urllib.parse.urlsplit(url)
|
||||
|
||||
class Utility_Tests(unittest.TestCase):
|
||||
"""Testcase to test the various utility functions in the urllib."""
|
||||
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
|
||||
index 85e68c8..fa8827a 100644
|
||||
--- a/Lib/urllib/parse.py
|
||||
+++ b/Lib/urllib/parse.py
|
||||
@@ -391,6 +391,24 @@ def _splitnetloc(url, start=0):
|
||||
delim = min(delim, wdelim) # use earliest delim position
|
||||
return url[start:delim], url[delim:] # return (domain, rest)
|
||||
|
||||
+def _checknetloc(netloc):
|
||||
+ if not netloc or not any(ord(c) > 127 for c in netloc):
|
||||
+ return
|
||||
+ # looking for characters like \u2100 that expand to 'a/c'
|
||||
+ # IDNA uses NFKC equivalence, so normalize for this check
|
||||
+ import unicodedata
|
||||
+ n = netloc.replace('@', '') # ignore characters already included
|
||||
+ n = n.replace(':', '') # but not the surrounding text
|
||||
+ n = n.replace('#', '')
|
||||
+ n = n.replace('?', '')
|
||||
+ netloc2 = unicodedata.normalize('NFKC', n)
|
||||
+ if n == netloc2:
|
||||
+ return
|
||||
+ for c in '/?#@:':
|
||||
+ if c in netloc2:
|
||||
+ raise ValueError("netloc '" + netloc + "' contains invalid " +
|
||||
+ "characters under NFKC normalization")
|
||||
+
|
||||
def urlsplit(url, scheme='', allow_fragments=True):
|
||||
"""Parse a URL into 5 components:
|
||||
<scheme>://<netloc>/<path>?<query>#<fragment>
|
||||
@@ -420,6 +438,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
|
||||
url, fragment = url.split('#', 1)
|
||||
if '?' in url:
|
||||
url, query = url.split('?', 1)
|
||||
+ _checknetloc(netloc)
|
||||
v = SplitResult(scheme, netloc, url, query, fragment)
|
||||
_parse_cache[key] = v
|
||||
return _coerce_result(v)
|
||||
@@ -443,6 +462,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
|
||||
url, fragment = url.split('#', 1)
|
||||
if '?' in url:
|
||||
url, query = url.split('?', 1)
|
||||
+ _checknetloc(netloc)
|
||||
v = SplitResult(scheme, netloc, url, query, fragment)
|
||||
_parse_cache[key] = v
|
||||
return _coerce_result(v)
|
62
SOURCES/check-pyc-and-pyo-timestamps.py
Normal file
62
SOURCES/check-pyc-and-pyo-timestamps.py
Normal file
@ -0,0 +1,62 @@
|
||||
"""Checks if all *.pyc and *.pyo files have later mtime than their *.py files."""
|
||||
|
||||
import importlib.util
|
||||
import os
|
||||
import sys
|
||||
|
||||
# list of test and other files that we expect not to have bytecode
|
||||
not_compiled = [
|
||||
'/usr/bin/pathfix.py',
|
||||
'test/bad_coding.py',
|
||||
'test/bad_coding2.py',
|
||||
'test/badsyntax_3131.py',
|
||||
'test/badsyntax_future3.py',
|
||||
'test/badsyntax_future4.py',
|
||||
'test/badsyntax_future5.py',
|
||||
'test/badsyntax_future6.py',
|
||||
'test/badsyntax_future7.py',
|
||||
'test/badsyntax_future8.py',
|
||||
'test/badsyntax_future9.py',
|
||||
'test/badsyntax_future10.py',
|
||||
'test/badsyntax_async1.py',
|
||||
'test/badsyntax_async2.py',
|
||||
'test/badsyntax_async3.py',
|
||||
'test/badsyntax_async4.py',
|
||||
'test/badsyntax_async5.py',
|
||||
'test/badsyntax_async6.py',
|
||||
'test/badsyntax_async7.py',
|
||||
'test/badsyntax_async8.py',
|
||||
'test/badsyntax_async9.py',
|
||||
'test/badsyntax_pep3120.py',
|
||||
'lib2to3/tests/data/bom.py',
|
||||
'lib2to3/tests/data/crlf.py',
|
||||
'lib2to3/tests/data/different_encoding.py',
|
||||
'lib2to3/tests/data/false_encoding.py',
|
||||
'lib2to3/tests/data/py2_test_grammar.py',
|
||||
'.debug-gdb.py',
|
||||
]
|
||||
failed = 0
|
||||
|
||||
|
||||
def bytecode_expected(source):
|
||||
for f in not_compiled:
|
||||
if source.endswith(f):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
compiled = filter(lambda f: bytecode_expected(f), sys.argv[1:])
|
||||
for f in compiled:
|
||||
# check both pyo and pyc
|
||||
to_check = map(lambda b: importlib.util.cache_from_source(f, b), (True, False))
|
||||
f_mtime = os.path.getmtime(f)
|
||||
for c in to_check:
|
||||
c_mtime = os.path.getmtime(c)
|
||||
if c_mtime < f_mtime:
|
||||
sys.stderr.write('Failed bytecompilation timestamps check: ')
|
||||
sys.stderr.write('Bytecode file {} is older than source file {}.\n'.format(c, f))
|
||||
failed += 1
|
||||
|
||||
if failed:
|
||||
sys.stderr.write('\n{} files failed bytecompilation timestamps check.\n'.format(failed))
|
||||
sys.exit(1)
|
28
SOURCES/get-source.sh
Executable file
28
SOURCES/get-source.sh
Executable file
@ -0,0 +1,28 @@
|
||||
#! /bin/bash -ex
|
||||
|
||||
# Download a release of Python (if missing) and remove .exe files from it
|
||||
|
||||
version=$1
|
||||
|
||||
if [ -z "${version}" ]; then
|
||||
echo "Usage: $0 VERSION" >& 2
|
||||
echo "" >& 2
|
||||
echo "example: $0 3.6.6" >& 2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
versionedname=Python-${version}
|
||||
orig_archive=${versionedname}.tar.xz
|
||||
new_archive=${versionedname}-noexe.tar.xz
|
||||
|
||||
if [ ! -e ${orig_archive} ]; then
|
||||
wget -N https://www.python.org/ftp/python/${version}/${orig_archive}
|
||||
fi
|
||||
|
||||
deleted_names=$(tar --list -Jf ${orig_archive} | grep '\.exe$')
|
||||
|
||||
# tar --delete does not operate on compressed archives, so do
|
||||
# xz compression/decompression explicitly
|
||||
xz --decompress --stdout ${orig_archive} | \
|
||||
tar --delete -v ${deleted_names} | \
|
||||
xz --compress --stdout -3 -T0 > ${new_archive}
|
35
SOURCES/idle3.appdata.xml
Normal file
35
SOURCES/idle3.appdata.xml
Normal file
@ -0,0 +1,35 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
|
||||
<!-- Copyright 2017 Zbigniew Jędrzejewski-Szmek -->
|
||||
<application>
|
||||
<id type="desktop">idle3.desktop</id>
|
||||
<name>IDLE3</name>
|
||||
<metadata_license>CC0</metadata_license>
|
||||
<project_license>Python-2.0</project_license>
|
||||
<summary>Python 3 Integrated Development and Learning Environment</summary>
|
||||
<description>
|
||||
<p>
|
||||
IDLE is Python’s Integrated Development and Learning Environment.
|
||||
The GUI is uniform between Windows, Unix, and Mac OS X.
|
||||
IDLE provides an easy way to start writing, running, and debugging
|
||||
Python code.
|
||||
</p>
|
||||
<p>
|
||||
IDLE is written in pure Python, and uses the tkinter GUI toolkit.
|
||||
It provides:
|
||||
</p>
|
||||
<ul>
|
||||
<li>a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,</li>
|
||||
<li>a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,</li>
|
||||
<li>search within any window, replace within editor windows, and search through multiple files (grep),</li>
|
||||
<li>a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.</li>
|
||||
</ul>
|
||||
</description>
|
||||
<url type="homepage">https://docs.python.org/3/library/idle.html</url>
|
||||
<screenshots>
|
||||
<screenshot type="default">http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png</screenshot>
|
||||
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png</screenshot>
|
||||
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png</screenshot>
|
||||
</screenshots>
|
||||
<update_contact>zbyszek@in.waw.pl</update_contact>
|
||||
</application>
|
11
SOURCES/idle3.desktop
Normal file
11
SOURCES/idle3.desktop
Normal file
@ -0,0 +1,11 @@
|
||||
[Desktop Entry]
|
||||
Version=1.0
|
||||
Name=IDLE 3
|
||||
Comment=Python 3 Integrated Development and Learning Environment
|
||||
Exec=idle3 %F
|
||||
TryExec=idle3
|
||||
Terminal=false
|
||||
Type=Application
|
||||
Icon=idle3
|
||||
Categories=Development;IDE;
|
||||
MimeType=text/x-python;
|
8
SOURCES/no-python
Executable file
8
SOURCES/no-python
Executable file
@ -0,0 +1,8 @@
|
||||
#! /bin/bash
|
||||
|
||||
echo "For more information about this script,"
|
||||
echo "please see the manual page of the same name."
|
||||
|
||||
echo "Run: man unversioned-python"
|
||||
exit 2
|
||||
|
57
SOURCES/unversioned-python.1
Normal file
57
SOURCES/unversioned-python.1
Normal file
@ -0,0 +1,57 @@
|
||||
.\" unversioned-python.8
|
||||
.TH UNVERSIONED-PYTHON 8 "17 September 2018"
|
||||
.SH NAME
|
||||
unversioned-python \- info on how to set up the `python` command.
|
||||
.SH SYNOPSIS
|
||||
.B unversioned-python
|
||||
.SH DESCRIPTION
|
||||
.B unversioned-python
|
||||
The "unversioned" `python` command (/usr/bin/python) is missing by default.
|
||||
We recommend using `python3` or `python2` instead.
|
||||
If using the explicit versioned command is inconvenient,
|
||||
you can use `alternatives` to configure `python` to launch
|
||||
either Python 3 or Python 2.
|
||||
|
||||
Note: The `python3` or `python2` package needs to be installed before its
|
||||
functionality is selected.
|
||||
|
||||
.SH EXAMPLES
|
||||
.B alternatives
|
||||
.B --config
|
||||
.IR python
|
||||
|
||||
Interactively select what the `python` command runs.
|
||||
|
||||
|
||||
.B alternatives
|
||||
.B --set
|
||||
.IR python
|
||||
.IR /usr/bin/python3
|
||||
|
||||
Configure the `python` command to run Python 3
|
||||
|
||||
Note: this is non-standard behavior according to [PEP 394].
|
||||
|
||||
|
||||
.B alternatives
|
||||
.B --set
|
||||
.IR python
|
||||
.IR /usr/bin/python2
|
||||
|
||||
Configure the `python` command to run Python 2
|
||||
|
||||
Note: please review the support lifecycle of python2 before relying on it
|
||||
|
||||
|
||||
.B alternatives
|
||||
.B --auto
|
||||
.IR python
|
||||
|
||||
Undo configuration changes and revert to the default (missing `python` command)
|
||||
|
||||
|
||||
.SH LINKS
|
||||
|
||||
.B [PEP 394]:
|
||||
.IR https://www.python.org/dev/peps/pep-0394/
|
||||
|
2832
SPECS/python3.spec
Normal file
2832
SPECS/python3.spec
Normal file
File diff suppressed because it is too large