Compare commits


No commits in common. "imports/c8s-stream-2.7/python2-2.7.18-6.module+el8.5.0+11014+88fc0d0b" and "c8" have entirely different histories.

15 changed files with 1040 additions and 2091 deletions

.gitignore

@@ -1 +1 @@
SOURCES/Python-2.7.18-noexe.tar.xz
SOURCES/Python-2.7.17-noexe.tar.xz


@@ -1 +1 @@
ce5e27d588d635469bdec487c4b1def2ffa84ba2 SOURCES/Python-2.7.18-noexe.tar.xz
e63124a9a86b4b52c09384915a0842adf00b9d45 SOURCES/Python-2.7.17-noexe.tar.xz

File diff suppressed because it is too large.


@@ -0,0 +1,41 @@
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -41,6 +41,10 @@
# A very generous timeout when it comes to local connections...
CONNECTION_TIMEOUT = 20.
+# The hmac module implicitly defaults to using MD5.
+# Support using a stronger algorithm for the challenge/response code:
+HMAC_DIGEST_NAME='sha256'
+
_mmap_counter = itertools.count()
default_family = 'AF_INET'
@@ -700,12 +704,16 @@
WELCOME = b'#WELCOME#'
FAILURE = b'#FAILURE#'
+def get_digestmod_for_hmac():
+ import hashlib
+ return getattr(hashlib, HMAC_DIGEST_NAME)
+
def deliver_challenge(connection, authkey):
import hmac
assert isinstance(authkey, bytes)
message = os.urandom(MESSAGE_LENGTH)
connection.send_bytes(CHALLENGE + message)
- digest = hmac.new(authkey, message).digest()
+ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest()
response = connection.recv_bytes(256) # reject large message
if response == digest:
connection.send_bytes(WELCOME)
@@ -719,7 +727,7 @@
message = connection.recv_bytes(256) # reject large message
assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message
message = message[len(CHALLENGE):]
- digest = hmac.new(authkey, message).digest()
+ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest()
connection.send_bytes(digest)
response = connection.recv_bytes(256) # reject large message
if response != WELCOME:
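
The hunk above swaps multiprocessing's implicit MD5 HMAC for a configurable digest. A minimal standalone sketch of the same challenge/response round trip with SHA-256 (the key below is a made-up placeholder, not a value from the patch):

import hmac
import hashlib
import os

authkey = b'example-authkey'          # placeholder; both ends must share the same key
challenge = os.urandom(20)            # server side: random challenge bytes
answer = hmac.new(authkey, challenge, hashlib.sha256).digest()   # client side
# server side: recompute and compare before sending WELCOME
assert hmac.new(authkey, challenge, hashlib.sha256).digest() == answer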


@@ -1,70 +0,0 @@
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 5021ebf..29a7d1b 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -1,9 +1,10 @@
#!/usr/bin/env python2
from __future__ import print_function
+import distutils.version
+import glob
import os
import os.path
-import pkgutil
import shutil
import sys
import tempfile
@@ -12,9 +13,19 @@ import tempfile
__all__ = ["version", "bootstrap"]
-_SETUPTOOLS_VERSION = "41.2.0"
+_WHEEL_DIR = "/usr/share/python{}-wheels/".format(sys.version_info[0])
-_PIP_VERSION = "19.2.3"
+def _get_most_recent_wheel_version(pkg):
+ prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg))
+ suffix = "-py2.py3-none-any.whl"
+ pattern = "{}*{}".format(prefix, suffix)
+ versions = (p[len(prefix):-len(suffix)] for p in glob.glob(pattern))
+ return str(max(versions, key=distutils.version.LooseVersion))
+
+
+_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools")
+
+_PIP_VERSION = _get_most_recent_wheel_version("pip")
_PROJECTS = [
("setuptools", _SETUPTOOLS_VERSION),
@@ -28,8 +39,13 @@ def _run_pip(args, additional_paths=None):
sys.path = additional_paths + sys.path
# Install the bundled software
- import pip._internal
- return pip._internal.main(args)
+ try:
+ # pip 10
+ from pip._internal import main
+ except ImportError:
+ # pip 9
+ from pip import main
+ return main(args)
def version():
@@ -100,12 +116,9 @@ def _bootstrap(root=None, upgrade=False, user=False,
additional_paths = []
for project, version in _PROJECTS:
wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
- whl = pkgutil.get_data(
- "ensurepip",
- "_bundled/{}".format(wheel_name),
- )
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
- fp.write(whl)
+ with open(os.path.join(_WHEEL_DIR, wheel_name), "rb") as sfp:
+ with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
+ fp.write(sfp.read())
additional_paths.append(os.path.join(tmpdir, wheel_name))
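
The replacement _get_most_recent_wheel_version above leans on distutils.version.LooseVersion to order whatever wheel versions the glob finds on disk. A self-contained sketch of that ordering (the version strings are invented examples, not the packaged wheels):

import distutils.version

versions = ["9.0.1", "40.8.0", "41.2.0"]                 # hypothetical versions pulled from wheel file names
newest = max(versions, key=distutils.version.LooseVersion)
print(newest)                                            # -> 41.2.0 (numeric, not lexicographic, ordering)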


@@ -0,0 +1,249 @@
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 5021ebf..63c763a 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -7,6 +7,7 @@ import pkgutil
import shutil
import sys
import tempfile
+from ensurepip import rewheel
__all__ = ["version", "bootstrap"]
@@ -29,6 +30,8 @@ def _run_pip(args, additional_paths=None):
# Install the bundled software
import pip._internal
+ if args[0] in ["install", "list", "wheel"]:
+ args.append('--pre')
return pip._internal.main(args)
@@ -93,21 +96,40 @@ def _bootstrap(root=None, upgrade=False, user=False,
# omit pip and easy_install
os.environ["ENSUREPIP_OPTIONS"] = "install"
+ whls = []
+ rewheel_dir = None
+ # try to see if we have system-wide versions of _PROJECTS
+ dep_records = rewheel.find_system_records([p[0] for p in _PROJECTS])
+ # TODO: check if system-wide versions are the newest ones
+ # if --upgrade is used?
+ if all(dep_records):
+ # if we have all _PROJECTS installed system-wide, we'll recreate
+ # wheels from them and install those
+ rewheel_dir = tempfile.mkdtemp()
+ for dr in dep_records:
+ new_whl = rewheel.rewheel_from_record(dr, rewheel_dir)
+ whls.append(os.path.join(rewheel_dir, new_whl))
+ else:
+ # if we don't have all the _PROJECTS installed system-wide,
+ # let's just fall back to bundled wheels
+ for project, version in _PROJECTS:
+ whl = os.path.join(
+ os.path.dirname(__file__),
+ "_bundled",
+ "{}-{}-py2.py3-none-any.whl".format(project, version)
+ )
+ whls.append(whl)
+
tmpdir = tempfile.mkdtemp()
try:
# Put our bundled wheels into a temporary directory and construct the
# additional paths that need added to sys.path
additional_paths = []
- for project, version in _PROJECTS:
- wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
- whl = pkgutil.get_data(
- "ensurepip",
- "_bundled/{}".format(wheel_name),
- )
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
- fp.write(whl)
-
- additional_paths.append(os.path.join(tmpdir, wheel_name))
+ for whl in whls:
+ shutil.copy(whl, tmpdir)
+ additional_paths.append(os.path.join(tmpdir, os.path.basename(whl)))
+ if rewheel_dir:
+ shutil.rmtree(rewheel_dir)
# Construct the arguments to be passed to the pip command
args = ["install", "--no-index", "--find-links", tmpdir]
diff --git a/Lib/ensurepip/rewheel/__init__.py b/Lib/ensurepip/rewheel/__init__.py
new file mode 100644
index 0000000..75c2094
--- /dev/null
+++ b/Lib/ensurepip/rewheel/__init__.py
@@ -0,0 +1,158 @@
+import argparse
+import codecs
+import csv
+import email.parser
+import os
+import io
+import re
+import site
+import subprocess
+import sys
+import zipfile
+
+def run():
+ parser = argparse.ArgumentParser(description='Recreate wheel of package with given RECORD.')
+ parser.add_argument('record_path',
+ help='Path to RECORD file')
+ parser.add_argument('-o', '--output-dir',
+ help='Dir where to place the wheel, defaults to current working dir.',
+ dest='outdir',
+ default=os.path.curdir)
+
+ ns = parser.parse_args()
+ retcode = 0
+ try:
+ print(rewheel_from_record(**vars(ns)))
+ except BaseException as e:
+ print('Failed: {}'.format(e))
+ retcode = 1
+ sys.exit(1)
+
+def find_system_records(projects):
+ """Return list of paths to RECORD files for system-installed projects.
+
+ If a project is not installed, the resulting list contains None instead
+ of a path to its RECORD
+ """
+ records = []
+ # get system site-packages dirs
+ if hasattr(sys, 'real_prefix'):
+ #we are in python2 virtualenv and sys.real_prefix is the original sys.prefix
+ _orig_prefixes = site.PREFIXES
+ setattr(site, 'PREFIXES', [sys.real_prefix]*2)
+ sys_sitepack = site.getsitepackages()
+ setattr(site, 'PREFIXES', _orig_prefixes)
+ elif hasattr(sys, 'base_prefix'): # python3 venv doesn't inject real_prefix to sys
+ # we are on python3 and base(_exec)_prefix is unchanged in venv
+ sys_sitepack = site.getsitepackages([sys.base_prefix, sys.base_exec_prefix])
+ else:
+ # we are in python2 without virtualenv
+ sys_sitepack = site.getsitepackages()
+
+ sys_sitepack = [sp for sp in sys_sitepack if os.path.exists(sp)]
+ # try to find all projects in all system site-packages
+ for project in projects:
+ path = None
+ for sp in sys_sitepack:
+ dist_info_re = os.path.join(sp, project) + '-[^\{0}]+\.dist-info'.format(os.sep)
+ candidates = [os.path.join(sp, p) for p in os.listdir(sp)]
+ # filter out candidate dirs based on the above regexp
+ filtered = [c for c in candidates if re.match(dist_info_re, c)]
+ # if we have 0 or 2 or more dirs, something is wrong...
+ if len(filtered) == 1:
+ path = filtered[0]
+ if path is not None:
+ records.append(os.path.join(path, 'RECORD'))
+ else:
+ records.append(None)
+ return records
+
+def rewheel_from_record(record_path, outdir):
+ """Recreates a wheel of a package with given record_path and returns path
+ to the newly created wheel."""
+ site_dir = os.path.dirname(os.path.dirname(record_path))
+ record_relpath = record_path[len(site_dir):].strip(os.path.sep)
+ to_write, to_omit = get_records_to_pack(site_dir, record_relpath)
+ new_wheel_name = get_wheel_name(record_path)
+ new_wheel_path = os.path.join(outdir, new_wheel_name + '.whl')
+
+ new_wheel = zipfile.ZipFile(new_wheel_path, mode='w', compression=zipfile.ZIP_DEFLATED)
+ # we need to write a new record with just the files that we will write,
+ # e.g. not binaries and *.pyc/*.pyo files
+ if sys.version_info[0] < 3:
+ new_record = io.BytesIO()
+ else:
+ new_record = io.StringIO()
+ writer = csv.writer(new_record)
+
+ # handle files that we can write straight away
+ for f, sha_hash, size in to_write:
+ new_wheel.write(os.path.join(site_dir, f), arcname=f)
+ writer.writerow([f, sha_hash,size])
+
+ # rewrite the old wheel file with a new computed one
+ writer.writerow([record_relpath, '', ''])
+ new_wheel.writestr(record_relpath, new_record.getvalue())
+
+ new_wheel.close()
+
+ return new_wheel.filename
+
+def get_wheel_name(record_path):
+ """Return proper name of the wheel, without .whl."""
+
+ wheel_info_path = os.path.join(os.path.dirname(record_path), 'WHEEL')
+ with codecs.open(wheel_info_path, encoding='utf-8') as wheel_info_file:
+ wheel_info = email.parser.Parser().parsestr(wheel_info_file.read().encode('utf-8'))
+
+ metadata_path = os.path.join(os.path.dirname(record_path), 'METADATA')
+ with codecs.open(metadata_path, encoding='utf-8') as metadata_file:
+ metadata = email.parser.Parser().parsestr(metadata_file.read().encode('utf-8'))
+
+ # construct name parts according to wheel spec
+ distribution = metadata.get('Name')
+ version = metadata.get('Version')
+ build_tag = '' # nothing for now
+ lang_tag = []
+ for t in wheel_info.get_all('Tag'):
+ lang_tag.append(t.split('-')[0])
+ lang_tag = '.'.join(lang_tag)
+ abi_tag, plat_tag = wheel_info.get('Tag').split('-')[1:3]
+ # leave out build tag, if it is empty
+ to_join = filter(None, [distribution, version, build_tag, lang_tag, abi_tag, plat_tag])
+ return '-'.join(list(to_join))
+
+def get_records_to_pack(site_dir, record_relpath):
+ """Accepts path of sitedir and path of RECORD file relative to it.
+ Returns two lists:
+ - list of files that can be written to new RECORD straight away
+ - list of files that shouldn't be written or need some processing
+ (pyc and pyo files, scripts)
+ """
+ record_file_path = os.path.join(site_dir, record_relpath)
+ with codecs.open(record_file_path, encoding='utf-8') as record_file:
+ record_contents = record_file.read()
+ # temporary fix for https://github.com/pypa/pip/issues/1376
+ # we need to ignore files under ".data" directory
+ data_dir = os.path.dirname(record_relpath).strip(os.path.sep)
+ data_dir = data_dir[:-len('dist-info')] + 'data'
+
+ to_write = []
+ to_omit = []
+ for l in record_contents.splitlines():
+ spl = l.split(',')
+ if len(spl) == 3:
+ # new record will omit (or write differently):
+ # - abs paths, paths with ".." (entry points),
+ # - pyc+pyo files
+ # - the old RECORD file
+ # TODO: is there any better way to recognize an entry point?
+ if os.path.isabs(spl[0]) or spl[0].startswith('..') or \
+ spl[0].endswith('.pyc') or spl[0].endswith('.pyo') or \
+ spl[0] == record_relpath or spl[0].startswith(data_dir):
+ to_omit.append(spl)
+ else:
+ to_write.append(spl)
+ else:
+ pass # bad RECORD or empty line
+ return to_write, to_omit
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 877698c..2c43611 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -1065,7 +1065,7 @@ LIBSUBDIRS= lib-tk lib-tk/test lib-tk/test/test_tkinter \
test/tracedmodules \
encodings compiler hotshot \
email email/mime email/test email/test/data \
- ensurepip ensurepip/_bundled \
+ ensurepip ensurepip/_bundled ensurepip/rewheel\
json json/tests \
sqlite3 sqlite3/test \
logging bsddb bsddb/test csv importlib wsgiref \
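
For reference, a hedged usage sketch of the rewheel helpers added above. It only works on an interpreter that ships this ensurepip.rewheel module and has setuptools and pip installed system-wide; the output directory is just an example:

from ensurepip import rewheel

records = rewheel.find_system_records(["setuptools", "pip"])
if all(records):                        # a None entry means that project is not installed system-wide
    for record in records:
        # rebuild a .whl from the installed package's RECORD and print the new wheel's path
        print(rewheel.rewheel_from_record(record, "/tmp"))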


@@ -1,53 +0,0 @@
diff -U3 -r Python-2.7.14.orig/Lib/site.py Python-2.7.14/Lib/site.py
--- Python-2.7.14.orig/Lib/site.py 2018-01-29 15:05:04.517599815 +0100
+++ Python-2.7.14/Lib/site.py 2018-01-30 09:13:17.305270500 +0100
@@ -515,6 +515,41 @@
"'import usercustomize' failed; use -v for traceback"
+def handle_ambiguous_python_version():
+ """Warn or fail if /usr/bin/python is used
+
+ Behavior depends on the value of PYTHON_DISALLOW_AMBIGUOUS_VERSION:
+ - "warn" - print warning to stderr
+ - "1" - print error and exit with positive exit code
+ - otherwise: do nothing
+
+ This is a Fedora modification, see the Change page for details:
+ See https://fedoraproject.org/wiki/Changes/Avoid_usr_bin_python_in_RPM_Build
+ """
+ if sys.executable == "/usr/bin/python":
+ setting = os.environ.get("PYTHON_DISALLOW_AMBIGUOUS_VERSION")
+ if setting == 'warn':
+ print>>sys.stderr, (
+ "DEPRECATION WARNING: python2 invoked with /usr/bin/python.\n"
+ " Use /usr/bin/python3 or /usr/bin/python2\n"
+ " /usr/bin/python will be removed or switched to Python 3"
+ " in the future.\n"
+ " If you cannot make the switch now, please follow"
+ " instructions at"
+ " https://fedoraproject.org/wiki/Changes/"
+ "Avoid_usr_bin_python_in_RPM_Build#Quick_Opt-Out")
+ elif setting == '1':
+ print>>sys.stderr, (
+ "ERROR: python2 invoked with /usr/bin/python.\n"
+ " Use /usr/bin/python3 or /usr/bin/python2\n"
+ " /usr/bin/python will be switched to Python 3"
+ " in the future.\n"
+ " More details are at"
+ " https://fedoraproject.org/wiki/Changes/"
+ "Avoid_usr_bin_python_in_RPM_Build#Quick_Opt-Out")
+ exit(1)
+
+
def main():
global ENABLE_USER_SITE
@@ -543,6 +578,7 @@
# this module is run as a script, because this code is executed twice.
if hasattr(sys, "setdefaultencoding"):
del sys.setdefaultencoding
+ handle_ambiguous_python_version()
main()
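
A small sketch of how the knob above is exercised; it assumes an interpreter carrying this site.py patch is installed as /usr/bin/python, and it only prints the deprecation warning to stderr:

import os
import subprocess

env = dict(os.environ, PYTHON_DISALLOW_AMBIGUOUS_VERSION="warn")
# With "warn" the interpreter still runs; with "1" the patched site.py exits non-zero instead.
subprocess.call(["/usr/bin/python", "-c", "pass"], env=env)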


@@ -0,0 +1,52 @@
--- Python-2.7.15-orig/Python/pythonrun.c
+++ Python-2.7.15/Python/pythonrun.c
@@ -180,6 +182,49 @@
char buf[128];
#endif
extern void _Py_ReadyTypes(void);
+ char *py2_allow_flag = getenv("RHEL_ALLOW_PYTHON2_FOR_BUILD");
+
+ // Fail unless a specific workaround is applied
+ if ((!py2_allow_flag || strcmp(py2_allow_flag, "1") != 0)
+ && (strstr(Py_GetProgramName(), "for-tests") == NULL)
+ ) {
+ fprintf(stderr,
+ "\n"
+ "ERROR: Python 2 is disabled in RHEL8.\n"
+ "\n"
+ "- For guidance on porting to Python 3, see the\n"
+ " Conservative Python3 Porting Guide:\n"
+ " http://portingguide.readthedocs.io/\n"
+ "\n"
+ "- If you need Python 2 at runtime:\n"
+ " - Use the python27 module\n"
+ "\n"
+ "- If you do not have access to BZ#1533919:\n"
+ " - Use the python27 module\n"
+ "\n"
+ "- If you need to use Python 2 only at RPM build time:\n"
+ " - File a bug blocking BZ#1533919:\n"
+ " https://bugzilla.redhat.com/show_bug.cgi?id=1533919\n"
+ " - Set the environment variable RHEL_ALLOW_PYTHON2_FOR_BUILD=1\n"
+ " (Note that if you do not file the bug as above,\n"
+ " this workaround will break without warning in the future.)\n"
+ "\n"
+ "- If you need to use Python 2 only for tests:\n"
+ " - File a bug blocking BZ#1533919:\n"
+ " https://bugzilla.redhat.com/show_bug.cgi?id=1533919\n"
+ " (If your test tool does not have a Bugzilla component,\n"
+ " feel free to use `python2`.)\n"
+ " - Use /usr/bin/python2-for-tests instead of python2 to run\n"
+ " your tests.\n"
+ " (Note that if you do not file the bug as above,\n"
+ " this workaround will break without warning in the future.)\n"
+ "\n"
+ "For details, see https://hurl.corp.redhat.com/rhel8-py2\n"
+ "\n"
+ );
+ fflush(stderr);
+ Py_FatalError("Python 2 is disabled");
+ }
if (initialized)
return;


@@ -1,70 +0,0 @@
From b099ce737f6e6cc9f3a1bf756af78eaa1c1480cd Mon Sep 17 00:00:00 2001
From: Rishi <rishi_devan@mail.com>
Date: Wed, 15 Jul 2020 13:51:00 +0200
Subject: [PATCH] 00351-cve-2019-20907-fix-infinite-loop-in-tarfile.patch
00351 #
Avoid infinite loop when reading specially crafted TAR files using the tarfile module
(CVE-2019-20907).
See: https://bugs.python.org/issue39017
---
Lib/tarfile.py | 2 ++
Lib/test/recursion.tar | Bin 0 -> 516 bytes
Lib/test/test_tarfile.py | 7 +++++++
.../2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst | 1 +
4 files changed, 10 insertions(+)
create mode 100644 Lib/test/recursion.tar
create mode 100644 Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index adf91d5..574a6bb 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -1400,6 +1400,8 @@ class TarInfo(object):
length, keyword = match.groups()
length = int(length)
+ if length == 0:
+ raise InvalidHeaderError("invalid header")
value = buf[match.end(2) + 1:match.start(1) + length - 1]
keyword = keyword.decode("utf8")
diff --git a/Lib/test/recursion.tar b/Lib/test/recursion.tar
new file mode 100644
index 0000000000000000000000000000000000000000..b8237251964983f54ed1966297e887636cd0c5f4
GIT binary patch
literal 516
zcmYdFPRz+kEn=W0Fn}74P8%Xw3X=l~85kIuo0>8xq$A1Gm}!7)KUsFc41m#O8A5+e
I1_}|j06>QaCIA2c
literal 0
HcmV?d00001
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 89bd738..4592156 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -325,6 +325,13 @@ class CommonReadTest(ReadTest):
class MiscReadTest(CommonReadTest):
taropen = tarfile.TarFile.taropen
+ def test_length_zero_header(self):
+ # bpo-39017 (CVE-2019-20907): reading a zero-length header should fail
+ # with an exception
+ with self.assertRaisesRegexp(tarfile.ReadError, "file could not be opened successfully"):
+ with tarfile.open(support.findfile('recursion.tar')) as tar:
+ pass
+
def test_no_name_argument(self):
with open(self.tarname, "rb") as fobj:
tar = tarfile.open(fileobj=fobj, mode=self.mode)
diff --git a/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst b/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
new file mode 100644
index 0000000..ad26676
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
@@ -0,0 +1 @@
+Avoid infinite loop when reading specially crafted TAR files using the tarfile module (CVE-2019-20907).
--
2.25.4
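
A hedged illustration of the behaviour this fix restores: on a patched interpreter, opening the crafted archive bundled with the patch fails cleanly instead of looping forever (the path assumes recursion.tar from Lib/test is available in the current directory):

import tarfile

try:
    tarfile.open("recursion.tar").close()
except tarfile.ReadError as exc:
    print("rejected malformed archive:", exc)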


@@ -1,88 +0,0 @@
diff --git a/Lib/httplib.py b/Lib/httplib.py
index fcc4152..a636774 100644
--- a/Lib/httplib.py
+++ b/Lib/httplib.py
@@ -257,6 +257,10 @@ _contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f-\xff]')
# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
# We are more lenient for assumed real world compatibility purposes.
+# These characters are not allowed within HTTP method names
+# to prevent http header injection.
+_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]')
+
# We always set the Content-Length header for these methods because some
# servers will otherwise respond with a 411
_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
@@ -935,6 +939,8 @@ class HTTPConnection:
else:
raise CannotSendRequest()
+ self._validate_method(method)
+
# Save the method for use later in the response phase
self._method = method
@@ -1020,6 +1026,16 @@ class HTTPConnection:
# On Python 2, request is already encoded (default)
return request
+ def _validate_method(self, method):
+ """Validate a method name for putrequest."""
+ # prevent http header injection
+ match = _contains_disallowed_method_pchar_re.search(method)
+ if match:
+ raise ValueError(
+ "method can't contain control characters. %r "
+ "(found at least %r)"
+ % (method, match.group()))
+
def _validate_path(self, url):
"""Validate a url for putrequest."""
# Prevent CVE-2019-9740.
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index d8a57f7..96a61dd 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -385,6 +385,29 @@ class HeaderTests(TestCase):
conn.putheader(name, value)
+class HttpMethodTests(TestCase):
+ def test_invalid_method_names(self):
+ methods = (
+ 'GET\r',
+ 'POST\n',
+ 'PUT\n\r',
+ 'POST\nValue',
+ 'POST\nHOST:abc',
+ 'GET\nrHost:abc\n',
+ 'POST\rRemainder:\r',
+ 'GET\rHOST:\n',
+ '\nPUT'
+ )
+
+ for method in methods:
+ with self.assertRaisesRegexp(
+ ValueError, "method can't contain control characters"):
+ conn = httplib.HTTPConnection('example.com')
+ conn.sock = FakeSocket(None)
+ conn.request(method=method, url="/")
+
+
+
class BasicTest(TestCase):
def test_status_lines(self):
# Test HTTP status lines
@@ -1009,9 +1032,9 @@ class TunnelTests(TestCase):
@test_support.reap_threads
def test_main(verbose=None):
- test_support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest,
- HTTPTest, HTTPSTest, SourceAddressTest,
- TunnelTests)
+ test_support.run_unittest(HeaderTests, OfflineTest, HttpMethodTests,
+ BasicTest, TimeoutTest, HTTPTest, HTTPSTest,
+ SourceAddressTest, TunnelTests)
if __name__ == '__main__':
test_main()
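
A short sketch of the new guard in action. It needs the patched httplib, and the ValueError is raised before any connection attempt, so the placeholder host below is never contacted:

import httplib

conn = httplib.HTTPConnection("server.example")      # placeholder host, never reached
try:
    conn.request("GET\r\nHost: injected", "/")
except ValueError as exc:
    print(exc)    # "method can't contain control characters. ..."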


@@ -1,42 +0,0 @@
diff --git a/Lib/test/multibytecodec_support.py b/Lib/test/multibytecodec_support.py
index 5b2329b6d84..53b5d64d453 100644
--- a/Lib/test/multibytecodec_support.py
+++ b/Lib/test/multibytecodec_support.py
@@ -279,30 +279,22 @@ class TestBase_Mapping(unittest.TestCase):
self._test_mapping_file_plain()
def _test_mapping_file_plain(self):
- _unichr = lambda c: eval("u'\\U%08x'" % int(c, 16))
- unichrs = lambda s: u''.join(_unichr(c) for c in s.split('+'))
+ def unichrs(s):
+ return ''.join(unichr(int(x, 16)) for x in s.split('+'))
urt_wa = {}
with self.open_mapping_file() as f:
for line in f:
if not line:
break
- data = line.split('#')[0].strip().split()
+ data = line.split('#')[0].split()
if len(data) != 2:
continue
- csetval = eval(data[0])
- if csetval <= 0x7F:
- csetch = chr(csetval & 0xff)
- elif csetval >= 0x1000000:
- csetch = chr(csetval >> 24) + chr((csetval >> 16) & 0xff) + \
- chr((csetval >> 8) & 0xff) + chr(csetval & 0xff)
- elif csetval >= 0x10000:
- csetch = chr(csetval >> 16) + \
- chr((csetval >> 8) & 0xff) + chr(csetval & 0xff)
- elif csetval >= 0x100:
- csetch = chr(csetval >> 8) + chr(csetval & 0xff)
- else:
+ if data[0][:2] != '0x':
+ self.fail("Invalid line: {!r}".format(line))
+ csetch = bytes.fromhex(data[0][2:])
+ if len(csetch) == 1 and 0x80 <= csetch[0]:
continue
unich = unichrs(data[1])


@@ -1,181 +0,0 @@
commit 30e41798f40c684be57d7ccfebf5c6ad94c0ff97
Author: Petr Viktorin <pviktori@redhat.com>
Date: Wed Jan 20 15:21:43 2021 +0100
CVE-2021-3177: Replace snprintf with Python unicode formatting in ctypes param reprs
Backport of Python3 commit 916610ef90a0d0761f08747f7b0905541f0977c7:
https://bugs.python.org/issue42938
https://github.com/python/cpython/pull/24239
diff --git a/Lib/ctypes/test/test_parameters.py b/Lib/ctypes/test/test_parameters.py
index 23c1b6e2259..77300d71ae1 100644
--- a/Lib/ctypes/test/test_parameters.py
+++ b/Lib/ctypes/test/test_parameters.py
@@ -206,6 +206,49 @@ class SimpleTypesTestCase(unittest.TestCase):
with self.assertRaises(ZeroDivisionError):
WorseStruct().__setstate__({}, b'foo')
+ def test_parameter_repr(self):
+ from ctypes import (
+ c_bool,
+ c_char,
+ c_wchar,
+ c_byte,
+ c_ubyte,
+ c_short,
+ c_ushort,
+ c_int,
+ c_uint,
+ c_long,
+ c_ulong,
+ c_longlong,
+ c_ulonglong,
+ c_float,
+ c_double,
+ c_longdouble,
+ c_char_p,
+ c_wchar_p,
+ c_void_p,
+ )
+ self.assertRegexpMatches(repr(c_bool.from_param(True)), r"^<cparam '\?' at 0x[A-Fa-f0-9]+>$")
+ self.assertEqual(repr(c_char.from_param('a')), "<cparam 'c' ('a')>")
+ self.assertRegexpMatches(repr(c_wchar.from_param('a')), r"^<cparam 'u' at 0x[A-Fa-f0-9]+>$")
+ self.assertEqual(repr(c_byte.from_param(98)), "<cparam 'b' (98)>")
+ self.assertEqual(repr(c_ubyte.from_param(98)), "<cparam 'B' (98)>")
+ self.assertEqual(repr(c_short.from_param(511)), "<cparam 'h' (511)>")
+ self.assertEqual(repr(c_ushort.from_param(511)), "<cparam 'H' (511)>")
+ self.assertRegexpMatches(repr(c_int.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
+ self.assertRegexpMatches(repr(c_uint.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
+ self.assertRegexpMatches(repr(c_long.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
+ self.assertRegexpMatches(repr(c_ulong.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
+ self.assertRegexpMatches(repr(c_longlong.from_param(20000)), r"^<cparam '[liq]' \(20000\)>$")
+ self.assertRegexpMatches(repr(c_ulonglong.from_param(20000)), r"^<cparam '[LIQ]' \(20000\)>$")
+ self.assertEqual(repr(c_float.from_param(1.5)), "<cparam 'f' (1.5)>")
+ self.assertEqual(repr(c_double.from_param(1.5)), "<cparam 'd' (1.5)>")
+ self.assertEqual(repr(c_double.from_param(1e300)), "<cparam 'd' (1e+300)>")
+ self.assertRegexpMatches(repr(c_longdouble.from_param(1.5)), r"^<cparam ('d' \(1.5\)|'g' at 0x[A-Fa-f0-9]+)>$")
+ self.assertRegexpMatches(repr(c_char_p.from_param(b'hihi')), "^<cparam 'z' \(0x[A-Fa-f0-9]+\)>$")
+ self.assertRegexpMatches(repr(c_wchar_p.from_param('hihi')), "^<cparam 'Z' \(0x[A-Fa-f0-9]+\)>$")
+ self.assertRegexpMatches(repr(c_void_p.from_param(0x12)), r"^<cparam 'P' \(0x0*12\)>$")
+
################################################################
if __name__ == '__main__':
diff --git a/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
new file mode 100644
index 00000000000..7df65a156fe
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
@@ -0,0 +1,2 @@
+Avoid static buffers when computing the repr of :class:`ctypes.c_double` and
+:class:`ctypes.c_longdouble` values.
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
index 066fefc0cca..5cc3c4cf685 100644
--- a/Modules/_ctypes/callproc.c
+++ b/Modules/_ctypes/callproc.c
@@ -460,50 +460,62 @@ PyCArg_dealloc(PyCArgObject *self)
static PyObject *
PyCArg_repr(PyCArgObject *self)
{
- char buffer[256];
switch(self->tag) {
case 'b':
case 'B':
- sprintf(buffer, "<cparam '%c' (%d)>",
+ return PyString_FromFormat("<cparam '%c' (%d)>",
self->tag, self->value.b);
- break;
case 'h':
case 'H':
- sprintf(buffer, "<cparam '%c' (%d)>",
+ return PyString_FromFormat("<cparam '%c' (%d)>",
self->tag, self->value.h);
- break;
case 'i':
case 'I':
- sprintf(buffer, "<cparam '%c' (%d)>",
+ return PyString_FromFormat("<cparam '%c' (%d)>",
self->tag, self->value.i);
- break;
case 'l':
case 'L':
- sprintf(buffer, "<cparam '%c' (%ld)>",
+ return PyString_FromFormat("<cparam '%c' (%ld)>",
self->tag, self->value.l);
- break;
#ifdef HAVE_LONG_LONG
case 'q':
case 'Q':
- sprintf(buffer,
- "<cparam '%c' (%" PY_FORMAT_LONG_LONG "d)>",
+ return PyString_FromFormat("<cparam '%c' (%lld)>",
self->tag, self->value.q);
- break;
#endif
case 'd':
- sprintf(buffer, "<cparam '%c' (%f)>",
- self->tag, self->value.d);
- break;
- case 'f':
- sprintf(buffer, "<cparam '%c' (%f)>",
- self->tag, self->value.f);
- break;
-
+ case 'f': {
+ PyObject *s = PyString_FromFormat("<cparam '%c' (", self->tag);
+ if (s == NULL) {
+ return NULL;
+ }
+ PyObject *f = PyFloat_FromDouble((self->tag == 'f') ? self->value.f : self->value.d);
+ if (f == NULL) {
+ Py_DECREF(s);
+ return NULL;
+ }
+ PyObject *r = PyObject_Repr(f);
+ Py_DECREF(f);
+ if (r == NULL) {
+ Py_DECREF(s);
+ return NULL;
+ }
+ PyString_ConcatAndDel(&s, r);
+ if (s == NULL) {
+ return NULL;
+ }
+ r = PyString_FromString(")>");
+ if (r == NULL) {
+ Py_DECREF(s);
+ return NULL;
+ }
+ PyString_ConcatAndDel(&s, r);
+ return s;
+ }
case 'c':
- sprintf(buffer, "<cparam '%c' (%c)>",
+ return PyString_FromFormat("<cparam '%c' ('%c')>",
self->tag, self->value.c);
- break;
/* Hm, are these 'z' and 'Z' codes useful at all?
Shouldn't they be replaced by the functionality of c_string
@@ -512,16 +524,13 @@ PyCArg_repr(PyCArgObject *self)
case 'z':
case 'Z':
case 'P':
- sprintf(buffer, "<cparam '%c' (%p)>",
+ return PyUnicode_FromFormat("<cparam '%c' (%p)>",
self->tag, self->value.p);
- break;
default:
- sprintf(buffer, "<cparam '%c' at %p>",
- self->tag, self);
- break;
+ return PyString_FromFormat("<cparam '%c' at %p>",
+ (unsigned char)self->tag, (void *)self);
}
- return PyString_FromString(buffer);
}
static PyMemberDef PyCArgType_members[] = {
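
The net effect of the callproc.c rewrite above can be seen from Python: the repr of float parameters is now produced by Python's own formatting rather than a fixed 256-byte sprintf buffer, so extreme values are safe. A minimal check, with the output expected on a patched build:

from ctypes import c_double, c_longdouble

print(repr(c_double.from_param(1e300)))        # <cparam 'd' (1e+300)>
print(repr(c_longdouble.from_param(1.5)))      # 'd' or 'g' form, depending on the platform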


@@ -1,707 +0,0 @@
From 976a4010aa4e450855dce5fa4c865bcbdc86cccd Mon Sep 17 00:00:00 2001
From: Charalampos Stratakis <cstratak@redhat.com>
Date: Fri, 16 Apr 2021 18:02:00 +0200
Subject: [PATCH] CVE-2021-23336: Add `separator` argument to parse_qs; warn
with default
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Partially backports https://bugs.python.org/issue42967 : [security] Address a web cache-poisoning issue reported in urllib.parse.parse_qsl().
Backported from the python3 branch.
However, this solution is different from the upstream solution in Python 3.
Based on the downstream solution for python 3.6.13 by Petr Viktorin.
An optional argument separator is added to specify the separator.
It is recommended to set it to '&' or ';' to match the application or proxy in use.
The default can be set with an env variable or a config file.
If neither the argument, env var nor config file specifies a separator, "&" is used,
but a warning is raised if parse_qs is used on input that contains ';'.
Co-authors of the downstream change:
Co-authored-by: Petr Viktorin <pviktori@redhat.com>
Co-authors of the upstream change (who do not necessarily agree with this):
Co-authored-by: Adam Goldschmidt <adamgold7@gmail.com>
Co-authored-by: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
Co-authored-by: Éric Araujo <merwok@netwok.org>
---
Doc/library/cgi.rst | 5 +-
Doc/library/urlparse.rst | 15 ++-
Lib/cgi.py | 34 +++---
Lib/test/test_cgi.py | 59 ++++++++++-
Lib/test/test_urlparse.py | 210 +++++++++++++++++++++++++++++++++++++-
Lib/urlparse.py | 78 +++++++++++++-
6 files changed, 369 insertions(+), 32 deletions(-)
diff --git a/Doc/library/cgi.rst b/Doc/library/cgi.rst
index ecd62c8c019..a96cd38717b 100644
--- a/Doc/library/cgi.rst
+++ b/Doc/library/cgi.rst
@@ -285,10 +285,10 @@ These are useful if you want more control, or if you want to employ some of the
algorithms implemented in this module in other circumstances.
-.. function:: parse(fp[, environ[, keep_blank_values[, strict_parsing]]])
+.. function:: parse(fp[, environ[, keep_blank_values[, strict_parsing[, separator]]]])
Parse a query in the environment or from a file (the file defaults to
- ``sys.stdin`` and environment defaults to ``os.environ``). The *keep_blank_values* and *strict_parsing* parameters are
+ ``sys.stdin`` and environment defaults to ``os.environ``). The *keep_blank_values*, *strict_parsing* and *separator* parameters are
passed to :func:`urlparse.parse_qs` unchanged.
@@ -316,7 +316,6 @@ algorithms implemented in this module in other circumstances.
Note that this does not parse nested multipart parts --- use
:class:`FieldStorage` for that.
-
.. function:: parse_header(string)
Parse a MIME header (such as :mailheader:`Content-Type`) into a main value and a
diff --git a/Doc/library/urlparse.rst b/Doc/library/urlparse.rst
index 0989c88c302..97d1119257c 100644
--- a/Doc/library/urlparse.rst
+++ b/Doc/library/urlparse.rst
@@ -136,7 +136,7 @@ The :mod:`urlparse` module defines the following functions:
now raise :exc:`ValueError`.
-.. function:: parse_qs(qs[, keep_blank_values[, strict_parsing[, max_num_fields]]])
+.. function:: parse_qs(qs[, keep_blank_values[, strict_parsing[, max_num_fields[, separator]]]])
Parse a query string given as a string argument (data of type
:mimetype:`application/x-www-form-urlencoded`). Data are returned as a
@@ -157,6 +157,15 @@ The :mod:`urlparse` module defines the following functions:
read. If set, then throws a :exc:`ValueError` if there are more than
*max_num_fields* fields read.
+ The optional argument *separator* is the symbol to use for separating the
+ query arguments. It is recommended to set it to ``'&'`` or ``';'``.
+ It defaults to ``'&'``; a warning is raised if this default is used.
+ This default may be changed with the following environment variable settings:
+
+ - ``PYTHON_URLLIB_QS_SEPARATOR='&'``: use only ``&`` as separator, without warning (as in Python 3.6.13+ or 3.10)
+ - ``PYTHON_URLLIB_QS_SEPARATOR=';'``: use only ``;`` as separator
+ - ``PYTHON_URLLIB_QS_SEPARATOR=legacy``: use both ``&`` and ``;`` (as in previous versions of Python)
+
Use the :func:`urllib.urlencode` function to convert such dictionaries into
query strings.
@@ -186,6 +195,9 @@ The :mod:`urlparse` module defines the following functions:
read. If set, then throws a :exc:`ValueError` if there are more than
*max_num_fields* fields read.
+ The optional argument *separator* is the symbol to use for separating the
+ query arguments. It works as in :py:func:`parse_qs`.
+
Use the :func:`urllib.urlencode` function to convert such lists of pairs into
query strings.
@@ -195,6 +207,7 @@ The :mod:`urlparse` module defines the following functions:
.. versionchanged:: 2.7.16
Added *max_num_fields* parameter.
+
.. function:: urlunparse(parts)
Construct a URL from a tuple as returned by ``urlparse()``. The *parts* argument
diff --git a/Lib/cgi.py b/Lib/cgi.py
index 5b903e03477..1421f2d90e0 100755
--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -121,7 +121,8 @@ log = initlog # The current logging function
# 0 ==> unlimited input
maxlen = 0
-def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
+def parse(fp=None, environ=os.environ, keep_blank_values=0,
+ strict_parsing=0, separator=None):
"""Parse a query in the environment or from a file (default stdin)
Arguments, all optional:
@@ -140,6 +141,8 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored.
If true, errors raise a ValueError exception.
+
+ separator: str. The symbol to use for separating the query arguments.
"""
if fp is None:
fp = sys.stdin
@@ -171,25 +174,26 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
else:
qs = ""
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
- return urlparse.parse_qs(qs, keep_blank_values, strict_parsing)
+ return urlparse.parse_qs(qs, keep_blank_values, strict_parsing, separator=separator)
# parse query string function called from urlparse,
# this is done in order to maintain backward compatibility.
-def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
+def parse_qs(qs, keep_blank_values=0, strict_parsing=0, separator=None):
"""Parse a query given as a string argument."""
warn("cgi.parse_qs is deprecated, use urlparse.parse_qs instead",
PendingDeprecationWarning, 2)
- return urlparse.parse_qs(qs, keep_blank_values, strict_parsing)
+ return urlparse.parse_qs(qs, keep_blank_values, strict_parsing,
+ separator=separator)
-def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
+def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None, separator=None):
"""Parse a query given as a string argument."""
warn("cgi.parse_qsl is deprecated, use urlparse.parse_qsl instead",
PendingDeprecationWarning, 2)
return urlparse.parse_qsl(qs, keep_blank_values, strict_parsing,
- max_num_fields)
+ max_num_fields, separator=separator)
def parse_multipart(fp, pdict):
"""Parse multipart input.
@@ -288,7 +292,6 @@ def parse_multipart(fp, pdict):
return partdict
-
def _parseparam(s):
while s[:1] == ';':
s = s[1:]
@@ -395,7 +398,7 @@ class FieldStorage:
def __init__(self, fp=None, headers=None, outerboundary="",
environ=os.environ, keep_blank_values=0, strict_parsing=0,
- max_num_fields=None):
+ max_num_fields=None, separator=None):
"""Constructor. Read multipart/* until last part.
Arguments, all optional:
@@ -430,6 +433,7 @@ class FieldStorage:
self.keep_blank_values = keep_blank_values
self.strict_parsing = strict_parsing
self.max_num_fields = max_num_fields
+ self.separator = separator
if 'REQUEST_METHOD' in environ:
method = environ['REQUEST_METHOD'].upper()
self.qs_on_post = None
@@ -613,7 +617,8 @@ class FieldStorage:
if self.qs_on_post:
qs += '&' + self.qs_on_post
query = urlparse.parse_qsl(qs, self.keep_blank_values,
- self.strict_parsing, self.max_num_fields)
+ self.strict_parsing, self.max_num_fields,
+ self.separator)
self.list = [MiniFieldStorage(key, value) for key, value in query]
self.skip_lines()
@@ -629,7 +634,8 @@ class FieldStorage:
query = urlparse.parse_qsl(self.qs_on_post,
self.keep_blank_values,
self.strict_parsing,
- self.max_num_fields)
+ self.max_num_fields,
+ self.separator)
self.list.extend(MiniFieldStorage(key, value)
for key, value in query)
FieldStorageClass = None
@@ -649,7 +655,8 @@ class FieldStorage:
headers = rfc822.Message(self.fp)
part = klass(self.fp, headers, ib,
environ, keep_blank_values, strict_parsing,
- max_num_fields)
+ max_num_fields,
+ separator=self.separator)
if max_num_fields is not None:
max_num_fields -= 1
@@ -817,10 +824,11 @@ class FormContentDict(UserDict.UserDict):
form.dict == {key: [val, val, ...], ...}
"""
- def __init__(self, environ=os.environ, keep_blank_values=0, strict_parsing=0):
+ def __init__(self, environ=os.environ, keep_blank_values=0, strict_parsing=0, separator=None):
self.dict = self.data = parse(environ=environ,
keep_blank_values=keep_blank_values,
- strict_parsing=strict_parsing)
+ strict_parsing=strict_parsing,
+ separator=separator)
self.query_string = environ['QUERY_STRING']
diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py
index 743c2afbd4c..9956ea9d4e8 100644
--- a/Lib/test/test_cgi.py
+++ b/Lib/test/test_cgi.py
@@ -61,12 +61,9 @@ parse_strict_test_cases = [
("", ValueError("bad query field: ''")),
("&", ValueError("bad query field: ''")),
("&&", ValueError("bad query field: ''")),
- (";", ValueError("bad query field: ''")),
- (";&;", ValueError("bad query field: ''")),
# Should the next few really be valid?
("=", {}),
("=&=", {}),
- ("=;=", {}),
# This rest seem to make sense
("=a", {'': ['a']}),
("&=a", ValueError("bad query field: ''")),
@@ -81,8 +78,6 @@ parse_strict_test_cases = [
("a=a+b&b=b+c", {'a': ['a b'], 'b': ['b c']}),
("a=a+b&a=b+a", {'a': ['a b', 'b a']}),
("x=1&y=2.0&z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
- ("x=1;y=2.0&z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
- ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
("Hbc5161168c542333633315dee1182227:key_store_seqid=400006&cuyer=r&view=bustomer&order_id=0bb2e248638833d48cb7fed300000f1b&expire=964546263&lobale=en-US&kid=130003.300038&ss=env",
{'Hbc5161168c542333633315dee1182227:key_store_seqid': ['400006'],
'cuyer': ['r'],
@@ -177,6 +172,60 @@ class CgiTests(unittest.TestCase):
self.assertItemsEqual(sd.items(),
first_second_elts(expect.items()))
+ def test_separator(self):
+ parse_semicolon = [
+ ("x=1;y=2.0", {'x': ['1'], 'y': ['2.0']}),
+ ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
+ (";", ValueError("bad query field: ''")),
+ (";;", ValueError("bad query field: ''")),
+ ("=;a", ValueError("bad query field: 'a'")),
+ (";b=a", ValueError("bad query field: ''")),
+ ("b;=a", ValueError("bad query field: 'b'")),
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
+ ("a=a+b;a=b+a", {'a': ['a b', 'b a']}),
+ ]
+ for orig, expect in parse_semicolon:
+ env = {'QUERY_STRING': orig}
+ fcd = cgi.FormContentDict(env, separator=';')
+ sd = cgi.SvFormContentDict(env, separator=';')
+ fs = cgi.FieldStorage(environ=env, separator=';')
+ if isinstance(expect, dict):
+ # test dict interface
+ self.assertEqual(len(expect), len(fcd))
+ self.assertItemsEqual(expect.keys(), fcd.keys())
+ self.assertItemsEqual(expect.values(), fcd.values())
+ self.assertItemsEqual(expect.items(), fcd.items())
+ self.assertEqual(fcd.get("nonexistent field", "default"), "default")
+ self.assertEqual(len(sd), len(fs))
+ self.assertItemsEqual(sd.keys(), fs.keys())
+ self.assertEqual(fs.getvalue("nonexistent field", "default"), "default")
+ # test individual fields
+ for key in expect.keys():
+ expect_val = expect[key]
+ self.assertTrue(fcd.has_key(key))
+ self.assertItemsEqual(fcd[key], expect[key])
+ self.assertEqual(fcd.get(key, "default"), fcd[key])
+ self.assertTrue(fs.has_key(key))
+ if len(expect_val) > 1:
+ single_value = 0
+ else:
+ single_value = 1
+ try:
+ val = sd[key]
+ except IndexError:
+ self.assertFalse(single_value)
+ self.assertEqual(fs.getvalue(key), expect_val)
+ else:
+ self.assertTrue(single_value)
+ self.assertEqual(val, expect_val[0])
+ self.assertEqual(fs.getvalue(key), expect_val[0])
+ self.assertItemsEqual(sd.getlist(key), expect_val)
+ if single_value:
+ self.assertItemsEqual(sd.values(),
+ first_elts(expect.values()))
+ self.assertItemsEqual(sd.items(),
+ first_second_elts(expect.items()))
+
def test_weird_formcontentdict(self):
# Test the weird FormContentDict classes
env = {'QUERY_STRING': "x=1&y=2.0&z=2-3.%2b0&1=1abc"}
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 86c4a0595c4..21875bb2991 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -3,6 +3,12 @@ import sys
import unicodedata
import unittest
import urlparse
+from test.support import EnvironmentVarGuard
+from warnings import catch_warnings, filterwarnings
+import tempfile
+import contextlib
+import os.path
+import shutil
RFC1808_BASE = "http://a/b/c/d;p?q#f"
RFC2396_BASE = "http://a/b/c/d;p?q"
@@ -24,16 +30,29 @@ parse_qsl_test_cases = [
("&a=b", [('a', 'b')]),
("a=a+b&b=b+c", [('a', 'a b'), ('b', 'b c')]),
("a=1&a=2", [('a', '1'), ('a', '2')]),
+]
+
+parse_qsl_test_cases_semicolon = [
(";", []),
(";;", []),
(";a=b", [('a', 'b')]),
("a=a+b;b=b+c", [('a', 'a b'), ('b', 'b c')]),
("a=1;a=2", [('a', '1'), ('a', '2')]),
- (b";", []),
- (b";;", []),
- (b";a=b", [(b'a', b'b')]),
- (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
- (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]),
+]
+
+parse_qsl_test_cases_legacy = [
+ ("a=1;a=2&a=3", [('a', '1'), ('a', '2'), ('a', '3')]),
+ ("a=1;b=2&c=3", [('a', '1'), ('b', '2'), ('c', '3')]),
+ ("a=1&b=2&c=3;", [('a', '1'), ('b', '2'), ('c', '3')]),
+]
+
+parse_qsl_test_cases_warn = [
+ (";a=b", [(';a', 'b')]),
+ ("a=a+b;b=b+c", [('a', 'a b;b=b c')]),
+ (b";a=b", [(b';a', b'b')]),
+ (b"a=a+b;b=b+c", [(b'a', b'a b;b=b c')]),
+ ("a=1;a=2&a=3", [('a', '1;a=2'), ('a', '3')]),
+ (b"a=1;a=2&a=3", [(b'a', b'1;a=2'), (b'a', b'3')]),
]
parse_qs_test_cases = [
@@ -57,6 +76,9 @@ parse_qs_test_cases = [
(b"&a=b", {b'a': [b'b']}),
(b"a=a+b&b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
(b"a=1&a=2", {b'a': [b'1', b'2']}),
+]
+
+parse_qs_test_cases_semicolon = [
(";", {}),
(";;", {}),
(";a=b", {'a': ['b']}),
@@ -69,6 +91,24 @@ parse_qs_test_cases = [
(b"a=1;a=2", {b'a': [b'1', b'2']}),
]
+parse_qs_test_cases_legacy = [
+ ("a=1;a=2&a=3", {'a': ['1', '2', '3']}),
+ ("a=1;b=2&c=3", {'a': ['1'], 'b': ['2'], 'c': ['3']}),
+ ("a=1&b=2&c=3;", {'a': ['1'], 'b': ['2'], 'c': ['3']}),
+ (b"a=1;a=2&a=3", {b'a': [b'1', b'2', b'3']}),
+ (b"a=1;b=2&c=3", {b'a': [b'1'], b'b': [b'2'], b'c': [b'3']}),
+ (b"a=1&b=2&c=3;", {b'a': [b'1'], b'b': [b'2'], b'c': [b'3']}),
+]
+
+parse_qs_test_cases_warn = [
+ (";a=b", {';a': ['b']}),
+ ("a=a+b;b=b+c", {'a': ['a b;b=b c']}),
+ (b";a=b", {b';a': [b'b']}),
+ (b"a=a+b;b=b+c", {b'a':[ b'a b;b=b c']}),
+ ("a=1;a=2&a=3", {'a': ['1;a=2', '3']}),
+ (b"a=1;a=2&a=3", {b'a': [b'1;a=2', b'3']}),
+]
+
class UrlParseTestCase(unittest.TestCase):
def checkRoundtrips(self, url, parsed, split):
@@ -141,6 +181,40 @@ class UrlParseTestCase(unittest.TestCase):
self.assertEqual(result, expect_without_blanks,
"Error parsing %r" % orig)
+ def test_qs_default_warn(self):
+ for orig, expect in parse_qs_test_cases_warn:
+ with catch_warnings(record=True) as w:
+ filterwarnings(action='always',
+ category=urlparse._QueryStringSeparatorWarning)
+ result = urlparse.parse_qs(orig, keep_blank_values=True)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 1)
+ self.assertEqual(w[0].category, urlparse._QueryStringSeparatorWarning)
+
+ def test_qsl_default_warn(self):
+ for orig, expect in parse_qsl_test_cases_warn:
+ with catch_warnings(record=True) as w:
+ filterwarnings(action='always',
+ category=urlparse._QueryStringSeparatorWarning)
+ result = urlparse.parse_qsl(orig, keep_blank_values=True)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 1)
+ self.assertEqual(w[0].category, urlparse._QueryStringSeparatorWarning)
+
+ def test_default_qs_no_warnings(self):
+ for orig, expect in parse_qs_test_cases:
+ with catch_warnings(record=True) as w:
+ result = urlparse.parse_qs(orig, keep_blank_values=True)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_default_qsl_no_warnings(self):
+ for orig, expect in parse_qsl_test_cases:
+ with catch_warnings(record=True) as w:
+ result = urlparse.parse_qsl(orig, keep_blank_values=True)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
def test_roundtrips(self):
testcases = [
('file:///tmp/junk.txt',
@@ -626,6 +700,132 @@ class UrlParseTestCase(unittest.TestCase):
self.assertEqual(urlparse.urlparse("http://www.python.org:80"),
('http','www.python.org:80','','','',''))
+ def test_parse_qs_separator_bytes(self):
+ expected = {b'a': [b'1'], b'b': [b'2']}
+
+ result = urlparse.parse_qs(b'a=1;b=2', separator=b';')
+ self.assertEqual(result, expected)
+ result = urlparse.parse_qs(b'a=1;b=2', separator=';')
+ self.assertEqual(result, expected)
+ result = urlparse.parse_qs('a=1;b=2', separator=';')
+ self.assertEqual(result, {'a': ['1'], 'b': ['2']})
+
+ @contextlib.contextmanager
+ def _qsl_sep_config(self, sep):
+ """Context for the given parse_qsl default separator configured in config file"""
+ old_filename = urlparse._QS_SEPARATOR_CONFIG_FILENAME
+ urlparse._default_qs_separator = None
+ try:
+ tmpdirname = tempfile.mkdtemp()
+ filename = os.path.join(tmpdirname, 'conf.cfg')
+ with open(filename, 'w') as file:
+ file.write('[parse_qs]\n')
+ file.write('PYTHON_URLLIB_QS_SEPARATOR = {}'.format(sep))
+ urlparse._QS_SEPARATOR_CONFIG_FILENAME = filename
+ yield
+ finally:
+ urlparse._QS_SEPARATOR_CONFIG_FILENAME = old_filename
+ urlparse._default_qs_separator = None
+ shutil.rmtree(tmpdirname)
+
+ def test_parse_qs_separator_semicolon(self):
+ for orig, expect in parse_qs_test_cases_semicolon:
+ result = urlparse.parse_qs(orig, separator=';')
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = ';'
+ result = urlparse.parse_qs(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+ with self._qsl_sep_config(';'), catch_warnings(record=True) as w:
+ result = urlparse.parse_qs(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_parse_qsl_separator_semicolon(self):
+ for orig, expect in parse_qsl_test_cases_semicolon:
+ result = urlparse.parse_qsl(orig, separator=';')
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = ';'
+ result = urlparse.parse_qsl(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+ with self._qsl_sep_config(';'), catch_warnings(record=True) as w:
+ result = urlparse.parse_qsl(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_parse_qs_separator_legacy(self):
+ for orig, expect in parse_qs_test_cases_legacy:
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = 'legacy'
+ result = urlparse.parse_qs(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+ with self._qsl_sep_config('legacy'), catch_warnings(record=True) as w:
+ result = urlparse.parse_qs(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_parse_qsl_separator_legacy(self):
+ for orig, expect in parse_qsl_test_cases_legacy:
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = 'legacy'
+ result = urlparse.parse_qsl(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+ with self._qsl_sep_config('legacy'), catch_warnings(record=True) as w:
+ result = urlparse.parse_qsl(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_parse_qs_separator_bad_value_env_or_config(self):
+ for bad_sep in '', 'abc', 'safe', '&;', 'SEP':
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = bad_sep
+ with self.assertRaises(ValueError):
+ urlparse.parse_qsl('a=1;b=2')
+ with self._qsl_sep_config('bad_sep'), catch_warnings(record=True) as w:
+ with self.assertRaises(ValueError):
+ urlparse.parse_qsl('a=1;b=2')
+
+ def test_parse_qs_separator_bad_value_arg(self):
+ for bad_sep in True, {}, '':
+ with self.assertRaises(ValueError):
+ urlparse.parse_qsl('a=1;b=2', separator=bad_sep)
+
+ def test_parse_qs_separator_num_fields(self):
+ for qs, sep in (
+ ('a&b&c', '&'),
+ ('a;b;c', ';'),
+ ('a&b;c', 'legacy'),
+ ):
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ if sep != 'legacy':
+ with self.assertRaises(ValueError):
+ urlparse.parse_qsl(qs, separator=sep, max_num_fields=2)
+ if sep:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = sep
+ with self.assertRaises(ValueError):
+ urlparse.parse_qsl(qs, max_num_fields=2)
+
+ def test_parse_qs_separator_priority(self):
+ # env variable trumps config file
+ with self._qsl_sep_config('~'), EnvironmentVarGuard() as environ:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = '!'
+ result = urlparse.parse_qs('a=1!b=2~c=3')
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
+ # argument trumps config file
+ with self._qsl_sep_config('~'):
+ result = urlparse.parse_qs('a=1$b=2~c=3', separator='$')
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
+ # argument trumps env variable
+ with EnvironmentVarGuard() as environ:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = '~'
+ result = urlparse.parse_qs('a=1$b=2~c=3', separator='$')
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
+
def test_urlsplit_normalization(self):
# Certain characters should never occur in the netloc,
# including under normalization.
diff --git a/Lib/urlparse.py b/Lib/urlparse.py
index 798b467b605..69504d8fd93 100644
--- a/Lib/urlparse.py
+++ b/Lib/urlparse.py
@@ -29,6 +29,7 @@ test_urlparse.py provides a good indicator of parsing behavior.
"""
import re
+import os
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
"urlsplit", "urlunsplit", "parse_qs", "parse_qsl"]
@@ -382,7 +383,8 @@ def unquote(s):
append(item)
return ''.join(res)
-def parse_qs(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
+def parse_qs(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None,
+ separator=None):
"""Parse a query given as a string argument.
Arguments:
@@ -405,14 +407,23 @@ def parse_qs(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
"""
dict = {}
for name, value in parse_qsl(qs, keep_blank_values, strict_parsing,
- max_num_fields):
+ max_num_fields, separator):
if name in dict:
dict[name].append(value)
else:
dict[name] = [value]
return dict
-def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
+class _QueryStringSeparatorWarning(RuntimeWarning):
+ """Warning for using default `separator` in parse_qs or parse_qsl"""
+
+# The default "separator" for parse_qsl can be specified in a config file.
+# It's cached after first read.
+_QS_SEPARATOR_CONFIG_FILENAME = '/etc/python/urllib.cfg'
+_default_qs_separator = None
+
+def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None,
+ separator=None):
"""Parse a query given as a string argument.
Arguments:
@@ -434,15 +445,72 @@ def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
Returns a list, as G-d intended.
"""
+
+ if (not separator or (not isinstance(separator, (str, bytes)))) and separator is not None:
+ raise ValueError("Separator must be of type string or bytes.")
+
+ # Used when both "&" and ";" act as separators. (Need a non-string value.)
+ _legacy = object()
+
+ if separator is None:
+ global _default_qs_separator
+ separator = _default_qs_separator
+ envvar_name = 'PYTHON_URLLIB_QS_SEPARATOR'
+ if separator is None:
+ # Set default separator from environment variable
+ separator = os.environ.get(envvar_name)
+ config_source = 'environment variable'
+ if separator is None:
+ # Set default separator from the configuration file
+ try:
+ file = open(_QS_SEPARATOR_CONFIG_FILENAME)
+ except EnvironmentError:
+ pass
+ else:
+ with file:
+ import ConfigParser
+ config = ConfigParser.ConfigParser()
+ config.readfp(file)
+ separator = config.get('parse_qs', envvar_name)
+ _default_qs_separator = separator
+ config_source = _QS_SEPARATOR_CONFIG_FILENAME
+ if separator is None:
+ # The default is '&', but warn if not specified explicitly
+ if ';' in qs:
+ from warnings import warn
+ warn("The default separator of urlparse.parse_qsl and "
+ + "parse_qs was changed to '&' to avoid a web cache "
+ + "poisoning issue (CVE-2021-23336). "
+ + "By default, semicolons no longer act as query field "
+ + "separators. "
+ + "See https://access.redhat.com/articles/5860431 for "
+ + "more details.",
+ _QueryStringSeparatorWarning, stacklevel=2)
+ separator = '&'
+ elif separator == 'legacy':
+ separator = _legacy
+ elif len(separator) != 1:
+ raise ValueError(
+ '{} (from {}) must contain '.format(envvar_name, config_source)
+ + '1 character, or "legacy". See '
+ + 'https://access.redhat.com/articles/5860431 for more details.'
+ )
+
# If max_num_fields is defined then check that the number of fields
# is less than max_num_fields. This prevents a memory exhaustion DOS
# attack via post bodies with many fields.
if max_num_fields is not None:
- num_fields = 1 + qs.count('&') + qs.count(';')
+ if separator is _legacy:
+ num_fields = 1 + qs.count('&') + qs.count(';')
+ else:
+ num_fields = 1 + qs.count(separator)
if max_num_fields < num_fields:
raise ValueError('Max number of fields exceeded')
- pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
+ if separator is _legacy:
+ pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
+ else:
+ pairs = [s1 for s1 in qs.split(separator)]
r = []
for name_value in pairs:
if not name_value and not strict_parsing:
--
2.30.2
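
A hedged example of the downstream-only API introduced by this patch; the separator keyword and the PYTHON_URLLIB_QS_SEPARATOR environment variable exist only in interpreters carrying it:

import urlparse

print(urlparse.parse_qs("a=1;b=2", separator=";"))   # {'a': ['1'], 'b': ['2']}
print(urlparse.parse_qs("a=1&b=2"))                  # default '&'; no ';' in the input, so no warning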


@@ -0,0 +1,21 @@
diff -urN Python-2.7.13/Modules/Setup.dist Python-2.7.13_modul/Modules/Setup.dist
--- Python-2.7.13/Modules/Setup.dist 2017-04-21 14:57:13.767444374 +0200
+++ Python-2.7.13_modul/Modules/Setup.dist 2017-04-21 14:56:49.658953833 +0200
@@ -326,7 +326,7 @@
# every system.
# *** Always uncomment this (leave the leading underscore in!):
-_tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \
+#_tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \
# *** Uncomment and edit to reflect where your Tcl/Tk libraries are:
# -L/usr/local/lib \
# *** Uncomment and edit to reflect where your Tcl/Tk headers are:
@@ -345,7 +345,7 @@
# *** Uncomment and edit for TOGL extension only:
# -DWITH_TOGL togl.c \
# *** Uncomment and edit to reflect your Tcl/Tk versions:
- -ltk -ltcl \
+# -ltk -ltcl \
# *** Uncomment and edit to reflect where your X11 libraries are:
# -L/usr/X11R6/lib \
# *** Or uncomment this for Solaris:


@@ -5,17 +5,15 @@
# Note that the bcond macros are named for the CLI option they create.
# "%%bcond_without" means "ENABLE by default and create a --without option"
# Whether to use RPM build wheels from the python2-{pip,setuptools}-wheel package
# Uses upstream bundled prebuilt wheels otherwise
%bcond_without rpmwheels
# Ability to reuse RPM-installed pip using rewheel
%bcond_with rewheel
# Extra build for debugging the interpreter or C-API extensions
# (the -debug subpackages)
%bcond_without debug_build
%bcond_with debug_build
# Only use this when bootstrapping python3
# Needed to build setuptools for the first time
%bcond_with python3_bootstrap
# Remove extra packages
%bcond_with tk_and_tools
%global unicode ucs4
@@ -48,7 +46,7 @@
%global with_systemtap 1
# some arches don't have valgrind so we need to disable its support on them
%ifnarch s390 %{mips} riscv64
%ifarch %{valgrind_arches}
%global with_valgrind 1
%else
%global with_valgrind 0
@@ -56,16 +54,25 @@
%global with_gdbm 1
%if 0%{?_module_build}
%global with_valgrind 0
%global with_systemtap 0
# (Don't) Run the test suite in %%check
%bcond_with tests
%else
# Run the test suite in %%check
%bcond_without tests
%endif
# Disable automatic bytecompilation. The python2.7 binary is not yet
# available in /usr/bin when Python is built. Also, the bytecompilation fails
# on files that test invalid syntax.
# This doesn't work now, see https://bugzilla.redhat.com/show_bug.cgi?id=1597664
# %%undefine __brp_python_bytecompile
# … and this is a working workaround
%define __brp_python_bytecompile %{nil}
%undefine __brp_python_bytecompile
# The above is broken now
# https://bugzilla.redhat.com/show_bug.cgi?id=1597664
# This is an older non-standard way to disable the brp script, as a workaround
%undefine py_auto_byte_compile
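One reason given above for disabling byte-compilation is that some shipped files intentionally contain invalid syntax. An illustrative sketch (not part of the spec) of how such a file breaks byte-compilation:

    # Illustrative only: byte-compiling a deliberately broken file fails,
    # which is what happens for the invalid-syntax files under Lib/test/.
    import os
    import py_compile
    import tempfile

    handle = tempfile.NamedTemporaryFile(suffix=".py", delete=False)
    handle.write("def broken(:\n")   # deliberately invalid syntax
    handle.close()
    try:
        py_compile.compile(handle.name, doraise=True)
    except py_compile.PyCompileError as err:
        print("byte-compilation failed as expected:")
        print(err.msg)
    finally:
        os.unlink(handle.name)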
# We need to get a newer configure generated out of configure.in for the following
# patches:
@@ -103,11 +110,12 @@
Summary: An interpreted, interactive, object-oriented programming language
Name: %{python}
# Remember to also rebase python2-docs when changing this:
Version: 2.7.18
Release: 6%{?dist}
Version: 2.7.17
Release: 1%{?dist}
License: Python
Group: Development/Languages
Requires: %{python}-libs%{?_isa} = %{version}-%{release}
Requires: %{python}-for-tests%{?_isa} = %{version}-%{release}
Provides: python(abi) = %{pybasever}
@@ -118,10 +126,11 @@ Provides: python(abi) = %{pybasever}
# (keep this list alphabetized)
BuildRequires: autoconf
%if ! 0%{?_module_build}
BuildRequires: bluez-libs-devel
%endif
BuildRequires: bzip2
BuildRequires: bzip2-devel
BuildRequires: git-core
BuildRequires: glibc-devel
BuildRequires: gmp-devel
BuildRequires: libdb-devel
@@ -131,7 +140,9 @@ BuildRequires: openssl-devel
BuildRequires: pkgconfig
BuildRequires: readline-devel
BuildRequires: sqlite-devel
%if %{with tk_and_tools}
BuildRequires: tcl-devel
%endif
# For the nis module
BuildRequires: libnsl2-devel
@@ -147,8 +158,10 @@ BuildRequires: gcc-c++
# ABI change without soname bump, reverted
BuildRequires: gdbm-devel >= 1:1.13
%endif
%if %{with tk_and_tools}
BuildRequires: libGL-devel
BuildRequires: libX11-devel
%endif #{with tk_and_tools}
%if 0%{?with_systemtap}
BuildRequires: systemtap-sdt-devel
@@ -158,8 +171,10 @@ BuildRequires: systemtap-sdt-devel
%endif # with_systemtap
BuildRequires: tar
%if %{with tk_and_tools}
BuildRequires: tix-devel
BuildRequires: tk-devel
%endif #{with tk_and_tools}
%if 0%{?with_valgrind}
BuildRequires: valgrind-devel
@@ -167,21 +182,16 @@ BuildRequires: valgrind-devel
BuildRequires: zlib-devel
%if %{with rpmwheels}
BuildRequires: python2-setuptools-wheel
BuildRequires: python2-pip-wheel
%endif
%if %{with rewheel}
BuildRequires: python2-setuptools
Requires: python2-setuptools
# Runtime require alternatives
Requires: /usr/sbin/alternatives
Requires(post): /usr/sbin/alternatives
Requires(postun): /usr/sbin/alternatives
%if ! 0%{?_module_build}
BuildRequires: python2-pip
Requires: python2-pip
%endif # !module_build
%endif # rewheel
# Previously, this was required for our rewheel patch to work.
# This is technically no longer needed, but we keep it recommended
# for the developer experience.
Recommends: python2-setuptools
Recommends: python2-pip
# =======================
@@ -567,15 +577,20 @@ Patch144: 00144-no-gdbm.patch
# 00146 #
# Support OpenSSL FIPS mode (e.g. when OPENSSL_FORCE_FIPS_MODE=1 is set)
# - handle some failures from OpenSSL (e.g. on attempts to use MD5 in a
# - handle failures from OpenSSL (e.g. on attempts to use MD5 in a
# FIPS-enforcing environment)
# - add a new "usedforsecurity" keyword argument to the various digest
# algorithms in hashlib so that you can whitelist a callsite with
# "usedforsecurity=False"
# (sent upstream for python 3 as http://bugs.python.org/issue9216; this is a
# backport to python 2.7; see RHEL6 patch 119)
# - enforce usage of the _hashlib implementation: don't fall back to the _md5
# and _sha* modules (leading to clearer error messages if fips selftests
# fail)
# - don't build the _md5 and _sha* modules; rely on the _hashlib implementation
# of hashlib (for example, md5.py will use _hashlib's implementation of MD5,
# if permitted by the FIPS setting)
# Resolves: rhbz#1734126
# (rhbz#563986)
Patch146: 00146-hashlib-fips.patch
# 00147 #
@@ -615,6 +630,14 @@ Patch165: 00165-crypt-module-salt-backport.patch
# Not yet sent upstream
Patch167: 00167-disable-stack-navigation-tests-when-optimized-in-test_gdb.patch
# 00169 #
# Use SHA-256 rather than implicitly using MD5 within the challenge handling
# in multiprocessing.connection
#
# Sent upstream as http://bugs.python.org/issue17258
# (rhbz#879695)
Patch169: 00169-avoid-implicit-usage-of-md5-in-multiprocessing.patch
# 00170 #
# In debug builds, try to print repr() when a C-level assert fails in the
# garbage collector (typically indicating a reference-counting error
@@ -661,11 +684,6 @@ Patch185: 00185-urllib2-honors-noproxy-for-ftp.patch
# symbol)
Patch187: 00187-add-RPATH-to-pyexpat.patch
# 00189 #
# Instead of bundled wheels, use our RPM packaged wheels from
# /usr/share/python2-wheels
Patch189: 00189-use-rpm-wheels.patch
# 00191 #
# Disabling NOOP test as it fails without internet connection
Patch191: 00191-disable-NOOP.patch
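The "usedforsecurity" keyword described for Patch146 above is a downstream-only extension of hashlib. A minimal sketch of how a caller would use it (illustrative, assuming a build that carries the FIPS patch; on an unpatched Python the keyword does not exist):

    # Minimal sketch, assuming the downstream FIPS patch is applied.
    # MD5 used for a non-security purpose (e.g. a cache key) is
    # whitelisted explicitly; without the keyword the constructor can
    # fail in FIPS-enforcing mode.
    import hashlib

    digest = hashlib.new("md5", usedforsecurity=False)
    digest.update("some cached payload")
    print(digest.hexdigest())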
@@ -677,58 +695,28 @@ Patch191: 00191-disable-NOOP.patch
# Patch provided by John C. Peterson
Patch193: 00193-enable-loading-sqlite-extensions.patch
# 00198 #
Patch198: 00198-add-rewheel-module.patch
# 00257 #
# Python's threading library doesn't use the monotonic clock when handling wait timeouts,
# so when the system clock is set backwards, the wait doesn't return after the timeout,
# causing deadlocks.
# This patch works around the issue.
# Resolves: rhbz#1653754
# Resolves: rhbz#1565560
# DOWNSTREAM ONLY PATCH
Patch257: 00257-threading-wait-clamp-remaining-time.patch
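To make the clock problem behind Patch257 concrete, here is a small illustration (not the patch itself) of why wall-clock based waits misbehave:

    # Illustration only, not the downstream patch: Python 2 computes
    # threading timeouts from time.time(), a wall clock.
    import threading
    import time

    event = threading.Event()
    start = time.time()
    event.wait(2.0)   # expected to return after roughly 2 seconds
    print("waited %.1f s" % (time.time() - start))
    # If the system clock were set back one hour during the wait, the
    # deadline (computed as time.time() + 2.0 up front) would move one
    # hour into the future and the wait would block that much longer.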
# 00288 #
# Adds a warning when /usr/bin/python is invoked during rpmbuild
# See https://fedoraproject.org/wiki/Changes/Avoid_usr_bin_python_in_RPM_Build
Patch288: 00288-ambiguous-python-version-rpmbuild-warn.patch
Patch288: 00288-disable-python2.patch
# 00289 #
# Disable automatic detection for the nis module
# (we handle it in Setup.dist, see Patch0)
Patch289: 00289-disable-nis-detection.patch
# 00351 #
# Avoid infinite loop when reading specially crafted TAR files using the tarfile module
# (CVE-2019-20907).
# See: https://bugs.python.org/issue39017
Patch351: 00351-cve-2019-20907-fix-infinite-loop-in-tarfile.patch
# 00354 #
# Reject control chars in HTTP method in httplib.putrequest to prevent
# HTTP header injection
#
# Backported from Python 3.5-3.10 (and adjusted for py2's single-module httplib):
# - https://bugs.python.org/issue39603
Patch354: 00354-cve-2020-26116-http-request-method-crlf-injection-in-httplib.patch
# 00355 #
# No longer call eval() on content received via HTTP in the CJK codec tests
# Backported from the python3 branches upstream: https://bugs.python.org/issue41944
# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1889886
Patch355: 00355-CVE-2020-27619.patch
# 00357 #
# Security fix for CVE-2021-3177
# Stack-based buffer overflow in PyCArg_repr in _ctypes/callproc.c
# Backported from the upstream python3 branches: https://bugs.python.org/issue42938
Patch357: 00357-CVE-2021-3177.patch
# 00359 #
# CVE-2021-23336 python: Web Cache Poisoning via urllib.parse.parse_qsl and
# urllib.parse.parse_qs by using a semicolon in query parameters
# Upstream: https://bugs.python.org/issue42967
# Main BZ: https://bugzilla.redhat.com/show_bug.cgi?id=1928904
Patch359: 00359-CVE-2021-23336.patch
# (New patches go here ^^^)
#
# When adding new patches to "python2" and "python3" in Fedora, EL, etc.,
@@ -742,6 +730,9 @@ Patch359: 00359-CVE-2021-23336.patch
# %%{regenerate_autotooling_patch}
# above:
# Disable tk for modularity builds to break up build dependencies
Patch04000: 04000-modularity-disable-tk.patch
Patch5000: 05000-autotool-intermediates.patch
# ======================================================
@@ -752,10 +743,6 @@ Patch5000: 05000-autotool-intermediates.patch
# alongside the system one e.g. python26, python33 etc
Provides: python27 = %{version}-%{release}
# When the user tries to `yum install python`, yum will list this package among
# the possible alternatives
Provides: alternative-for(python)
URL: https://www.python.org/
@@ -771,7 +758,19 @@ package.
This package provides the "python2" executable; most of the actual
implementation is within the "python2-libs" package.
For the unversioned "python" executable, see manual page "unversioned-python".
%package for-tests
Summary: The python2-for-tests command
Requires: %{python}-libs%{?_isa} = %{version}-%{release}
%description for-tests
This package provides the "python2-for-tests" executable, a working
Python 2 interpreter intended for use only in test harnesses that
were not ported to Python 3 yet.
Install this package, but not "python2", to approximate a system that
lacks Python 2 entirely.
%package libs
Summary: Runtime libraries for Python 2
@@ -794,13 +793,8 @@ Requires: glibc%{?_isa} >= 2.24.90-26
Requires: gdbm%{?_isa} >= 1:1.13
%endif
%if %{with rpmwheels}
Requires: python2-setuptools-wheel
Requires: python2-pip-wheel
%else
Provides: bundled(python2-pip) = 18.1
Provides: bundled(python2-setuptools) = 40.6.2
%endif
Provides: python-libs = %{version}-%{release}
Provides: python-libs%{?_isa} = %{version}-%{release}
%description libs
This package contains files used to embed Python 2 into applications.
@@ -811,32 +805,8 @@ Group: Development/Libraries
Requires: %{python}%{?_isa} = %{version}-%{release}
Requires: python-rpm-macros
Requires: python2-rpm-macros
Requires: pkgconfig
%if %{without python3_bootstrap}
# When bootstrapping python3, we need to build setuptools
# But setuptools BR python2-devel and that brings in python3-rpm-generators
# python3-rpm-generators needs python3-setuptools, so we cannot have it yet
Requires: python3-rpm-generators
%endif
# This is not "API" (packages that need setuptools should still BuildRequire it)
# However some packages apparently can build both with and without setuptools
# producing egg-info as file or directory (depending on setuptools presence).
# Directory-to-file updates are problematic in RPM, so we ensure setuptools is
# installed when -devel is required.
# See https://bugzilla.redhat.com/show_bug.cgi?id=1623922
# See https://fedoraproject.org/wiki/Packaging:Directory_Replacement
#
# This is not necessary when rebuilding while bundling the python2 stack
# into a Flatpak container with prefix=/app, because we never upgrade packages
# in the Flatpak context. We want to avoid
# python2-setuptools => BuildRequires => python2-devel => Requires python2-setuptools
# since the old python2-setuptools will be the /usr version not the /app version.
%if !0%{?flatpak}
Requires: python2-setuptools
%endif
Requires: pkgconfig
# https://bugzilla.redhat.com/show_bug.cgi?id=1217376
# https://bugzilla.redhat.com/show_bug.cgi?id=1496757
@@ -848,16 +818,23 @@ Requires: redhat-rpm-config
# package
Conflicts: %{python} < %{version}-%{release}
Provides: python-devel = %{version}-%{release}
Provides: python-devel%{?_isa} = %{version}-%{release}
%description devel
This package contains libraries and header files used to build applications
with, and native libraries for, Python 2.
%if %{with tk_and_tools}
%package tools
Summary: A collection of development tools included with Python 2
Group: Development/Tools
Requires: %{name} = %{version}-%{release}
Requires: %{python}-tkinter = %{version}-%{release}
Provides: python-tools = %{version}-%{release}
Provides: python-tools%{?_isa} = %{version}-%{release}
%description tools
This package includes several tools to help with the development of Python 2
programs, including IDLE (an IDE with editing and debugging facilities), a
@@ -868,8 +845,12 @@ Summary: A graphical user interface for the Python 2 scripting language
Group: Development/Languages
Requires: %{name} = %{version}-%{release}
Provides: tkinter = %{version}-%{release}
Provides: tkinter%{?_isa} = %{version}-%{release}
Provides: tkinter2 = %{version}-%{release}
Provides: tkinter2%{?_isa} = %{version}-%{release}
Provides: python-tkinter = %{version}-%{release}
Provides: python-tkinter%{?_isa} = %{version}-%{release}
%description tkinter
@@ -878,12 +859,16 @@ the Python 2 scripting language.
You should install the python2-tkinter package if you'd like to use a graphical
user interface for Python 2 programming.
%endif %{with tk_and_tools}
%package test
Summary: The test modules from the main python2 package
Group: Development/Languages
Requires: %{name} = %{version}-%{release}
Provides: python-test = %{version}-%{release}
Provides: python-test%{?_isa} = %{version}-%{release}
%description test
The test modules from the main python2 package: %{name}
@@ -905,8 +890,13 @@ Requires: %{name}%{?_isa} = %{version}-%{release}
Requires: %{name}-libs%{?_isa} = %{version}-%{release}
Requires: %{name}-devel%{?_isa} = %{version}-%{release}
Requires: %{name}-test%{?_isa} = %{version}-%{release}
%if %{with tk_and_tools}
Requires: %{python}-tkinter%{?_isa} = %{version}-%{release}
Requires: %{name}-tools%{?_isa} = %{version}-%{release}
%endif #{with tk_and_tools}
Provides: python-debug = %{version}-%{release}
Provides: python-debug%{?_isa} = %{version}-%{release}
%description debug
python2-debug provides a version of the Python 2 runtime with numerous debugging
@@ -1024,38 +1014,32 @@ rm -r Modules/zlib || exit 1
%if !%{with_gdbm}
%patch144 -p1
%endif
%patch146 -p1
#patch146 -p1
%patch147 -p1
%patch155 -p1
%patch156 -p1
%patch165 -p1
mv Modules/cryptmodule.c Modules/_cryptmodule.c
%patch167 -p1
%patch169 -p1
%patch170 -p1
%patch174 -p1 -b .fix-for-usr-move
%patch180 -p1
%patch181 -p1
%patch185 -p1
%patch187 -p1
%if %{with rpmwheels}
%patch189 -p1
rm Lib/ensurepip/_bundled/*.whl
%endif
%patch191 -p1
%patch193 -p1
%if %{with rewheel}
%patch198 -p1
%endif
%patch257 -p1
%patch288 -p1
%patch289 -p1
# Patch 351 adds a binary file for testing. We need to apply it using Git.
git apply %{PATCH351}
%patch354 -p1
%patch355 -p1
%patch357 -p1
%patch359 -p1
%if ! %{with tk_and_tools}
%patch4000 -p1
%endif
# This shouldn't be necessary, but is right now (2.2a3)
find -name "*~" |xargs rm -f
@@ -1072,6 +1056,7 @@ find -name "*~" |xargs rm -f
# ======================================================
%build
export RHEL_ALLOW_PYTHON2_FOR_BUILD=1
topdir=$(pwd)
export CFLAGS="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv"
export CXXFLAGS="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv"
@@ -1208,6 +1193,7 @@ BuildPython optimized \
# ======================================================
%install
export RHEL_ALLOW_PYTHON2_FOR_BUILD=1
topdir=$(pwd)
rm -rf %{buildroot}
mkdir -p %{buildroot}%{_prefix} %{buildroot}%{_mandir}
@@ -1250,8 +1236,8 @@ make install DESTDIR=%{buildroot}
# but doing so generated noise when ldconfig was rerun (rhbz:562980)
#
%if 0%{?with_gdb_hooks}
DirHoldingGdbPy=%{_usr}/lib/debug/%{_libdir}
PathOfGdbPy=$DirHoldingGdbPy/$PyInstSoName-%{version}-%{release}.%{_arch}.debug-gdb.py
DirHoldingGdbPy=%{_prefix}/lib/debug/%{_libdir}
PathOfGdbPy=$DirHoldingGdbPy/$PyInstSoName.debug-gdb.py
mkdir -p %{buildroot}$DirHoldingGdbPy
cp $topdir/Tools/gdb/libpython.py %{buildroot}$PathOfGdbPy
@@ -1362,6 +1348,7 @@ ln -s ./pynche2 %{buildroot}%{_bindir}/pynche
mv %{buildroot}%{_bindir}/pydoc %{buildroot}%{_bindir}/pydoc%{pybasever}
ln -s ./pydoc%{pybasever} %{buildroot}%{_bindir}/pydoc2
ln -s ./pydoc2 %{buildroot}%{_bindir}/pydoc
mv %{buildroot}%{_bindir}/pygettext.py %{buildroot}%{_bindir}/pygettext%{pybasever}.py
ln -s ./pygettext%{pybasever}.py %{buildroot}%{_bindir}/pygettext2.py
@@ -1492,34 +1479,19 @@ find %{buildroot} -type f -a -name "*.py" -print0 | \
/usr/bin/chmod 755 %{buildroot}%{_libdir}/libpython%{pybasever}.so.1.0
%if %{with debug_build}
/usr/bin/chmod 755 %{buildroot}%{_libdir}/libpython%{pybasever}_d.so.1.0
%endif
%endif # with debug_build
# Remove pyc/pyo files from /usr/bin
# They are not needed, and due to them, the resulting RPM is not multilib-clean
# https://bugzilla.redhat.com/show_bug.cgi?id=1703575
rm %{buildroot}%{_bindir}/*.py{c,o}
# Remove the /usr/bin/python executable so that the user can choose where it
# points to by installing streams of the @python module
# Add an executable for tests
cp %{buildroot}%{_bindir}/python%{pybasever} %{buildroot}%{_bindir}/python2-for-tests
# Remove unversioned executables
rm %{buildroot}%{_bindir}/python
# The same with the symlink to man page
rm %{buildroot}%{_mandir}/man1/python.1
# Remove unversioned executables in /usr/bin/ so that there is no "default"
# Python 2 or Python 3 version
rm %{buildroot}%{_bindir}/idle
rm %{buildroot}%{_bindir}/msgfmt.py
rm %{buildroot}%{_bindir}/pygettext.py
rm %{buildroot}%{_bindir}/pydoc
rm %{buildroot}%{_bindir}/pynche
rm %{buildroot}%{_bindir}/smtpd.py
rm %{buildroot}%{_bindir}/python-config
# All ghost files controlled by alternatives need to exist for the files
# section check to succeed
# - Don't list /usr/bin/python as a ghost file so `yum install /usr/bin/python`
# doesn't install this package
touch %{buildroot}%{_bindir}/unversioned-python
touch %{buildroot}%{_mandir}/man1/python.1.gz
# Remove unversioned manpage
rm %{buildroot}%{_mandir}/man1/python.*
# ======================================================
@@ -1527,6 +1499,7 @@ touch %{buildroot}%{_mandir}/man1/python.1.gz
# ======================================================
%check
export RHEL_ALLOW_PYTHON2_FOR_BUILD=1
topdir=$(pwd)
CheckPython() {
ConfName=$1
@@ -1594,39 +1567,20 @@ CheckPython \
# Cleaning up
# ======================================================
%post
# Alternative for /usr/bin/python -> /usr/bin/python2 + man page
alternatives --install %{_bindir}/unversioned-python \
python \
%{_bindir}/python2 \
200 \
--slave %{_bindir}/python \
unversioned-python \
%{_bindir}/python2 \
--slave %{_mandir}/man1/python.1.gz \
unversioned-python-man \
%{_mandir}/man1/python2.1.gz
%postun
# Do this only during uninstall process (not during update)
if [ $1 -eq 0 ]; then
alternatives --remove python \
%{_bindir}/python2
fi
%files
%defattr(-, root, root, -)
%{!?_licensedir:%global license %%doc}
%license LICENSE
%doc README
%{_bindir}/pydoc2*
%{_bindir}/pydoc*
%{_bindir}/%{python}
%{_bindir}/python%{pybasever}
%{_mandir}/man1/python2.1*
%{_mandir}/man1/python2.7.1*
%ghost %{_bindir}/unversioned-python
%ghost %{_mandir}/man1/python.1*
%{_mandir}/*/*
%files for-tests
%license LICENSE
%{_bindir}/python2-for-tests
%files libs
%defattr(-,root,root,-)
@@ -1727,6 +1681,7 @@ fi
%{pylibdir}/distutils/*.py*
%{pylibdir}/distutils/README
%{pylibdir}/distutils/command
%exclude %{pylibdir}/distutils/command/wininst-*.exe
%dir %{pylibdir}/email
%{pylibdir}/email/*.py*
%{pylibdir}/email/mime
@@ -1779,11 +1734,11 @@ fi
%dir %{pylibdir}/ensurepip/
%{pylibdir}/ensurepip/*.py*
%if %{with rpmwheels}
%exclude %{pylibdir}/ensurepip/_bundled
%else
%dir %{pylibdir}/ensurepip/_bundled
%{pylibdir}/ensurepip/_bundled/*.whl
%if %{with rewheel}
%dir %{pylibdir}/ensurepip/rewheel/
%{pylibdir}/ensurepip/rewheel/*.py*
%endif
@@ -1794,6 +1749,7 @@ fi
%{_libdir}/pkgconfig/python2.pc
%{pylibdir}/config/*
%exclude %{pylibdir}/config/Makefile
%exclude %{pylibdir}/distutils/command/wininst-*.exe
%{_includedir}/python%{pybasever}/*.h
%exclude %{_includedir}/python%{pybasever}/%{_pyconfig_h}
%doc Misc/README.valgrind Misc/valgrind-python.supp Misc/gdbinit
@@ -1801,19 +1757,20 @@ fi
%{_bindir}/python%{pybasever}-config
%{_libdir}/libpython%{pybasever}.so
%if %{with tk_and_tools}
%files tools
%defattr(-,root,root,755)
%doc Tools/pynche/README.pynche
%{site_packages}/pynche
%{_bindir}/smtpd2*.py*
%{_bindir}/smtpd*.py*
# https://bugzilla.redhat.com/show_bug.cgi?id=1111275
%exclude %{_bindir}/2to3*
%{_bindir}/idle2*
%{_bindir}/pynche2*
%{_bindir}/pygettext2*.py*
%{_bindir}/msgfmt2*.py*
%{_bindir}/idle*
%{_bindir}/pynche*
%{_bindir}/pygettext*.py*
%{_bindir}/msgfmt*.py*
%{tools_dir}
%{demo_dir}
%{pylibdir}/Doc
@@ -1823,6 +1780,27 @@ fi
%{pylibdir}/lib-tk
%{dynload_dir}/_tkinter.so
%else #{with tk_and_tools}
%exclude %{site_packages}/pynche
%exclude %{_bindir}/smtpd*.py*
# https://bugzilla.redhat.com/show_bug.cgi?id=1111275
%exclude %{_bindir}/2to3*
%exclude %{_bindir}/idle*
%exclude %{_bindir}/pynche*
%exclude %{_bindir}/pygettext*.py*
%exclude %{_bindir}/msgfmt*.py*
%exclude %{tools_dir}
%exclude %{demo_dir}
%exclude %{pylibdir}/Doc
%exclude %{pylibdir}/lib-tk
%exclude %{dynload_dir}/_tkinter.so
%endif
%files test
%defattr(-, root, root, -)
%{pylibdir}/bsddb/test
@@ -1964,8 +1942,10 @@ fi
# None for now; we could build precanned versions that have the appropriate
# shebang if needed
%if ! 0%{?_module_build}
# Analog of the tkinter subpackage's files:
%{dynload_dir}/_tkinter_d.so
%endif
# Analog of the -test subpackage's files:
%{dynload_dir}/_ctypes_test_d.so
@@ -1994,127 +1974,58 @@ fi
# ======================================================
%changelog
* Wed May 12 2021 Charalampos Stratakis <cstratak@redhat.com> - 2.7.18-6
- Security fix for CVE-2020-27619: eval() call on content received via HTTP in the CJK codec tests
Resolves: rhbz#1889886
* Fri Apr 16 2021 Charalampos Stratakis <cstratak@redhat.com> - 2.7.18-5
- Fix for CVE-2021-23336
Resolves: rhbz#1928904
* Fri Jan 22 2021 Charalampos Stratakis <cstratak@redhat.com> - 2.7.18-4
- Security fix for CVE-2021-3177
Resolves: rhbz#1919163
* Wed Jan 13 2021 Charalampos Stratakis <cstratak@redhat.com> - 2.7.18-3
- Fixes for bundling prefix=/app build in gimp/inkscape containers
Resolves: rhbz#1907592
* Fri Oct 09 2020 Charalampos Stratakis <cstratak@redhat.com> - 2.7.18-2
- Security fix for CVE-2020-26116: Reject control chars in HTTP method in httplib.putrequest
Resolves: rhbz#1883258
* Fri Oct 09 2020 Charalampos Stratakis <cstratak@redhat.com> - 2.7.18-1
- Update to 2.7.18
Resolves: rhbz#1886754
* Mon Aug 17 2020 Tomas Orsava <torsava@redhat.com> - 2.7.17-2
- Avoid infinite loop when reading specially crafted TAR files (CVE-2019-20907)
Resolves: rhbz#1856481
* Wed Oct 23 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.17-1
- Update to 2.7.17
Resolves: rhbz#1759944
Resolves: rhbz#1759946
* Tue Sep 03 2019 Tomas Orsava <torsava@redhat.com> - 2.7.16-12
- Adding FIPS compliance to Python 2 in RHEL8:
- Updated patch 146 with a new version of the FIPS patch
- Patch 169 has been removed, obsoleted by the updated patch 146
Resolves: rhbz#1734126
* Tue Jul 02 2019 Miro Hrončok <mhroncok@redhat.com> - 2.7.16-11
- Use RPM built wheels of pip and setuptools in ensurepip instead of our rewheel patch
- Require python2-setuptools from python2-devel to prevent packaging errors
Resolves: rhbz#1718398
* Tue Jun 11 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-10
* Tue Jun 11 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-8
- Fix urlparse.urlsplit() error message for Unicode URL
Resolves: rhbz#1689327
Resolves: rhbz#1689328
* Fri Jun 07 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-9
* Fri Jun 07 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-7
- Security fix for CVE-2019-10160
Resolves: rhbz#1689327
Resolves: rhbz#1689328
* Thu May 30 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-8
* Thu May 30 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-6
- Security fix for CVE-2019-9948
Resolves: rhbz#1704176
Resolves: rhbz#1704177
* Thu May 30 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-7
* Thu May 30 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-5
- Disallow control chars in http URLs
- Fixes CVE-2019-9740 and CVE-2019-9947
Resolves: rhbz#1703539 and rhbz#1704367
Resolves: rhbz#1704369 and rhbz#1703537
* Tue May 21 2019 Tomas Orsava <torsava@redhat.com> - 2.7.16-6
- Remove pyc/pyo files from /usr/bin
Resolves: rhbz#1696741
* Fri May 03 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-5
* Fri May 03 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-4
- Updated fix for CVE-2019-9636
Resolves: rhbz#1689327
* Thu Apr 25 2019 Tomas Orsava <torsava@redhat.com> - 2.7.16-4
- Bumping due to problems with modular RPM upgrade path
- Resolves: rhbz#1695587
Resolves: rhbz#1689328
* Fri Apr 12 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-3
- Fix coverity scan static analysis issues
Resolves: rhbz#1690919
Resolves: rhbz#1602667
* Wed Apr 3 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-2
- Security fix for CVE-2019-9636 (rhbz#1689327)
- Security fix for CVE-2019-9636 (rhbz#1689328)
* Tue Feb 19 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-1
* Mon Apr 1 2019 Charalampos Stratakis <cstratak@redhat.com> - 2.7.16-1
- Update to 2.7.16
Resolves: rhbz#1680967
Resolves: rhbz#1680964
* Wed Dec 12 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-21
- Fix Tkinter
- Remove wininst exe files, that are no longer included, from the files section
- Resolves: rhbz#1656488
* Thu Nov 29 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-16
- Bump NVR to redo CI gating tests, because the "update test" was
malfunctioning and had to be fixed
Resolves: rhbz#1565560
* Wed Dec 12 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-20
- Fix launcher of pynche
- Resolves: rhbz#1656479
* Tue Dec 11 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-19
- Remove remaining unversioned executables (idle, msgfmt.py, pygettext.py,
pynche, smtpd.py, python-config)
Resolves: rhbz#1656511
* Wed Nov 28 2018 Charalampos Stratakis <cstratak@redhat.com> - 2.7.15-18
* Wed Nov 28 2018 Charalampos Stratakis <cstratak@redhat.com> - 2.7.15-15
- Workaround Python's threading library issue with non returning wait, for signals with timeout
Resolves: rhbz#1653754
Resolves: rhbz#1565560
* Tue Nov 13 2018 Charalampos Stratakis <cstratak@redhat.com> - 2.7.15-17
* Tue Nov 13 2018 Charalampos Stratakis <cstratak@redhat.com> - 2.7.15-14
- Add choices for sort option of cProfile for better output
Resolves: rhbz#1649473
Resolves: rhbz#1565101
* Wed Nov 07 2018 Lumír Balhar <lbalhar@redhat.com> - 2.7.15-16
* Wed Nov 07 2018 Lumír Balhar <lbalhar@redhat.com> - 2.7.15-13
- Bring audiotest.au back to package. It's not copyrighted anymore.
- Resolves: rhbz#1647692
* Tue Oct 16 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-15
- Slightly edit the description
- Related: rhbz#1633537
* Sun Oct 14 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-14
- Add Requires (/post/postun) on /usr/sbin/alternatives
- Resolves: rhbz#1633537
* Fri Oct 12 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-13
- Don't list /usr/bin/python as a ghost file so `yum install /usr/bin/python`
doesn't install this package
- Resolves: rhbz#1633537
- Resolves: rhbz#1643970
* Fri Oct 12 2018 Petr Viktorin <pviktori@redhat.com> - 2.7.15-12
- Remove Windows binaries from the source archive
@@ -2122,43 +2033,41 @@ Resolves: rhbz#1649473
* Fri Oct 12 2018 Charalampos Stratakis <cstratak@redhat.com> - 2.7.15-11
- Fix test_dbm_gnu for gdbm 1.15 which fails on ppc64le
Resolves: rhbz#1638716
Resolves: rhbz#1638710
* Thu Oct 11 2018 Miro Hrončok <mhroncok@redhat.com> - 2.7.15-10
* Mon Sep 24 2018 Miro Hrončok <mhroncok@redhat.com> - 2.7.15-10
- Security fix for CVE-2018-14647
Resolves: rhbz#1632095
* Mon Oct 08 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-9
- Set a special Provides tag that advertises the `python2` package as an
alternative to the non-existing `python` package
- Resolves: rhbz#1633559
* Sat Aug 04 2018 Petr Viktorin <pviktori@redhat.com> - 2.7.15-9
- Disable Python 2 at the C level
* Thu Oct 04 2018 Lumír Balhar <lbalhar@redhat.com> - 2.7.15-8
- Remove unversioned provides
- Resolves: rhbz#1628242
* Fri Aug 3 2018 Florian Weimer <fweimer@redhat.com> - 2.7.15-8
- Honor %%{valgrind_arches}
* Tue Oct 02 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-7
- Implement the alternatives system for Python in RHEL8
- Resolves: rhbz#1633537
* Thu Aug 09 2018 Lumír Balhar <lbalhar@redhat.com> - 2.7.15-6
- Remove unversioned symlink to manual page and to pydoc binary.
They are both available in streams of `python` module.
- Resolves: rhbz#1613343
* Thu Aug 02 2018 Charalampos Stratakis <cstratak@redhat.com> - 2.7.15-5
* Thu Aug 02 2018 Charalampos Stratakis <cstratak@redhat.com> - 2.7.15-7
- Disable optimizations
- Disable ssl related tests for now
* Wed Aug 01 2018 Lumír Balhar <lbalhar@redhat.com> - 2.7.15-4
- Hotfix issue with byte compilation macro - rhbz#1597664
* Wed Jul 25 2018 Petr Kubat <pkubat@redhat.com> - 2.7.15-6
- Rebuilt for gdbm
* Thu May 24 2018 Tomas Orsava <torsava@redhat.com> - 2.7.15-3
- Remove the /usr/bin/python executable so that the user can choose where it
points to by installing various streams of the `python` module
* Mon Jul 09 2018 Petr Viktorin <pviktori@redhat.com> - 2.7.15-5
- Don't build the tkinter and tools subpackages
* Tue May 15 2018 Charalampos Stratakis <cstratak@redhat.com> - 2.7.15-2
- Fix loading of the gdb python plugin (rhbz#1578001)
* Thu Jun 28 2018 Petr Viktorin <pviktori@redhat.com> - 2.7.15-4
- Disable Python 2
- Exclude the unversioned commands in /usr/bin
- Exclude the unversioned man page
- No longer Provide unversioned "python"
* Tue Jun 26 2018 Petr Viktorin <pviktori@redhat.com> - 2.7.15-3
- Bump release
* Fri Jun 22 2018 Petr Viktorin <pviktori@redhat.com> - 2.7.15-2
- Provide the python2-for-tests package
- Disable rewheel & ensurepip
- Disable the debug build
* Tue May 01 2018 Miro Hrončok <mhroncok@redhat.com> - 2.7.15-1
- Update to version 2.7.15