import python3-3.6.8-23.el8

parent 7df068181d
commit 15200b22d6

@@ -1,245 +0,0 @@ (deleted file, 245 lines)
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 4748ba4..986d5e9 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -1,8 +1,10 @@
 import os
 import os.path
 import pkgutil
+import shutil
 import sys
 import tempfile
+from ensurepip import rewheel


 __all__ = ["version", "bootstrap"]
@@ -24,8 +26,15 @@ def _run_pip(args, additional_paths=None):
         sys.path = additional_paths + sys.path

     # Install the bundled software
-    import pip._internal
-    return pip._internal.main(args)
+    try:
+        # pip 10
+        from pip._internal import main
+    except ImportError:
+        # pip 9
+        from pip import main
+    if args[0] in ["install", "list", "wheel"]:
+        args.append('--pre')
+    return main(args)


 def version():
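For readers unfamiliar with the pip 9 / pip 10 split, the hunk above boils down to the following version-agnostic fallback. This is an illustrative sketch, not part of the patch; only the two import paths (pip.main in pip 9, pip._internal.main in pip 10) come from the change itself.

# Illustrative only: the same entry-point fallback used in _run_pip() above.
def invoke_pip(args):
    try:
        from pip._internal import main   # pip 10+
    except ImportError:
        from pip import main             # pip 9 and older
    return main(list(args))

# e.g. invoke_pip(["install", "--no-index", "--find-links", "/tmp/wheels", "pip"])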
@@ -88,20 +97,39 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
         # omit pip and easy_install
         os.environ["ENSUREPIP_OPTIONS"] = "install"

+    whls = []
+    rewheel_dir = None
+    # try to see if we have system-wide versions of _PROJECTS
+    dep_records = rewheel.find_system_records([p[0] for p in _PROJECTS])
+    # TODO: check if system-wide versions are the newest ones
+    # if --upgrade is used?
+    if all(dep_records):
+        # if we have all _PROJECTS installed system-wide, we'll recreate
+        # wheels from them and install those
+        rewheel_dir = tempfile.TemporaryDirectory()
+        for dr in dep_records:
+            new_whl = rewheel.rewheel_from_record(dr, rewheel_dir.name)
+            whls.append(os.path.join(rewheel_dir.name, new_whl))
+    else:
+        # if we don't have all the _PROJECTS installed system-wide,
+        # let's just fall back to bundled wheels
+        for project, version in _PROJECTS:
+            whl = os.path.join(
+                os.path.dirname(__file__),
+                "_bundled",
+                "{}-{}-py2.py3-none-any.whl".format(project, version)
+            )
+            whls.append(whl)
+
     with tempfile.TemporaryDirectory() as tmpdir:
         # Put our bundled wheels into a temporary directory and construct the
         # additional paths that need added to sys.path
         additional_paths = []
-        for project, version in _PROJECTS:
-            wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
-            whl = pkgutil.get_data(
-                "ensurepip",
-                "_bundled/{}".format(wheel_name),
-            )
-            with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
-                fp.write(whl)
-
-            additional_paths.append(os.path.join(tmpdir, wheel_name))
+        for whl in whls:
+            shutil.copy(whl, tmpdir)
+            additional_paths.append(os.path.join(tmpdir, os.path.basename(whl)))
+        if rewheel_dir:
+            rewheel_dir.cleanup()

         # Construct the arguments to be passed to the pip command
         args = ["install", "--no-index", "--find-links", tmpdir]
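The _bootstrap() hunk above chooses between wheels rebuilt from system-wide RECORD files and the wheels bundled with ensurepip. A minimal sketch of that decision, using only the functions introduced by this patch (the project list and paths are illustrative, not part of the patch):

# Sketch of the selection logic in _bootstrap(); illustrative only.
import os
import tempfile

def select_wheels(projects, rewheel):
    """Return (wheel_paths, tmp_dir_or_None) following the patch's logic."""
    records = rewheel.find_system_records([name for name, _ in projects])
    if all(records):
        # every project has a system-wide RECORD: rebuild wheels from them
        tmp = tempfile.TemporaryDirectory()
        whls = [os.path.join(tmp.name, rewheel.rewheel_from_record(r, tmp.name))
                for r in records]
        return whls, tmp
    # otherwise fall back to the wheels bundled with ensurepip
    bundled = os.path.join(os.path.dirname(__file__), "_bundled")
    return [os.path.join(bundled, "{}-{}-py2.py3-none-any.whl".format(n, v))
            for n, v in projects], None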
diff --git a/Lib/ensurepip/rewheel/__init__.py b/Lib/ensurepip/rewheel/__init__.py
new file mode 100644
index 0000000..753c764
--- /dev/null
+++ b/Lib/ensurepip/rewheel/__init__.py
@@ -0,0 +1,143 @@
+import argparse
+import codecs
+import csv
+import email.parser
+import os
+import io
+import re
+import site
+import subprocess
+import sys
+import zipfile
+
+def run():
+    parser = argparse.ArgumentParser(description='Recreate wheel of package with given RECORD.')
+    parser.add_argument('record_path',
+                        help='Path to RECORD file')
+    parser.add_argument('-o', '--output-dir',
+                        help='Dir where to place the wheel, defaults to current working dir.',
+                        dest='outdir',
+                        default=os.path.curdir)
+
+    ns = parser.parse_args()
+    retcode = 0
+    try:
+        print(rewheel_from_record(**vars(ns)))
+    except BaseException as e:
+        print('Failed: {}'.format(e))
+        retcode = 1
+    sys.exit(retcode)
+
+def find_system_records(projects):
+    """Return list of paths to RECORD files for system-installed projects.
+
+    If a project is not installed, the resulting list contains None instead
+    of a path to its RECORD
+    """
+    records = []
+    # get system site-packages dirs
+    sys_sitepack = site.getsitepackages([sys.base_prefix, sys.base_exec_prefix])
+    sys_sitepack = [sp for sp in sys_sitepack if os.path.exists(sp)]
+    # try to find all projects in all system site-packages
+    for project in projects:
+        path = None
+        for sp in sys_sitepack:
+            dist_info_re = os.path.join(sp, project) + r'-[^\{0}]+\.dist-info'.format(os.sep)
+            candidates = [os.path.join(sp, p) for p in os.listdir(sp)]
+            # filter out candidate dirs based on the above regexp
+            filtered = [c for c in candidates if re.match(dist_info_re, c)]
+            # if we have 0 or 2 or more dirs, something is wrong...
+            if len(filtered) == 1:
+                path = filtered[0]
+        if path is not None:
+            records.append(os.path.join(path, 'RECORD'))
+        else:
+            records.append(None)
+    return records
+
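A hedged usage example of find_system_records(); the site-packages path and versions below are hypothetical and will differ per system:

# Hypothetical usage; paths and versions are illustrative.
records = find_system_records(["setuptools", "pip"])
# e.g. ['/usr/lib/python3.6/site-packages/setuptools-39.2.0.dist-info/RECORD',
#       '/usr/lib/python3.6/site-packages/pip-9.0.3.dist-info/RECORD']
# A project that is not installed system-wide yields None in its slot:
if not all(records):
    print("falling back to bundled wheels")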
+def rewheel_from_record(record_path, outdir):
+    """Recreates a wheel of a package with given record_path and returns path
+    to the newly created wheel."""
+    site_dir = os.path.dirname(os.path.dirname(record_path))
+    record_relpath = record_path[len(site_dir):].strip(os.path.sep)
+    to_write, to_omit = get_records_to_pack(site_dir, record_relpath)
+    new_wheel_name = get_wheel_name(record_path)
+    new_wheel_path = os.path.join(outdir, new_wheel_name + '.whl')
+
+    new_wheel = zipfile.ZipFile(new_wheel_path, mode='w', compression=zipfile.ZIP_DEFLATED)
+    # we need to write a new record with just the files that we will write,
+    # e.g. not binaries and *.pyc/*.pyo files
+    new_record = io.StringIO()
+    writer = csv.writer(new_record)
+
+    # handle files that we can write straight away
+    for f, sha_hash, size in to_write:
+        new_wheel.write(os.path.join(site_dir, f), arcname=f)
+        writer.writerow([f, sha_hash, size])
+
+    # rewrite the old wheel file with a new computed one
+    writer.writerow([record_relpath, '', ''])
+    new_wheel.writestr(record_relpath, new_record.getvalue())
+
+    new_wheel.close()
+
+    return new_wheel.filename
+
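A hedged usage example of rewheel_from_record(); the RECORD path and resulting file name are hypothetical:

# Hypothetical example: rebuild a wheel for an RPM-installed pip.
record = "/usr/lib/python3.6/site-packages/pip-9.0.3.dist-info/RECORD"
wheel_path = rewheel_from_record(record, "/tmp")
print(wheel_path)   # e.g. /tmp/pip-9.0.3-py2.py3-none-any.whl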
+def get_wheel_name(record_path):
+    """Return proper name of the wheel, without .whl."""
+
+    wheel_info_path = os.path.join(os.path.dirname(record_path), 'WHEEL')
+    with codecs.open(wheel_info_path, encoding='utf-8') as wheel_info_file:
+        wheel_info = email.parser.Parser().parsestr(wheel_info_file.read())
+
+    metadata_path = os.path.join(os.path.dirname(record_path), 'METADATA')
+    with codecs.open(metadata_path, encoding='utf-8') as metadata_file:
+        metadata = email.parser.Parser().parsestr(metadata_file.read())
+
+    # construct name parts according to wheel spec
+    distribution = metadata.get('Name')
+    version = metadata.get('Version')
+    build_tag = ''  # nothing for now
+    lang_tag = []
+    for t in wheel_info.get_all('Tag'):
+        lang_tag.append(t.split('-')[0])
+    lang_tag = '.'.join(lang_tag)
+    abi_tag, plat_tag = wheel_info.get('Tag').split('-')[1:3]
+    # leave out build tag, if it is empty
+    to_join = filter(None, [distribution, version, build_tag, lang_tag, abi_tag, plat_tag])
+    return '-'.join(list(to_join))
+
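A worked example of the name construction above, assuming METADATA reports Name: pip and Version: 9.0.3 and WHEEL carries a single "Tag: py2.py3-none-any" (values are illustrative):

# Worked example of get_wheel_name()'s output for the assumed metadata.
distribution, version, build_tag = "pip", "9.0.3", ""
lang_tag, abi_tag, plat_tag = "py2.py3", "none", "any"
parts = filter(None, [distribution, version, build_tag, lang_tag, abi_tag, plat_tag])
print('-'.join(parts) + ".whl")   # pip-9.0.3-py2.py3-none-any.whl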
+def get_records_to_pack(site_dir, record_relpath):
+    """Accepts path of sitedir and path of RECORD file relative to it.
+    Returns two lists:
+    - list of files that can be written to new RECORD straight away
+    - list of files that shouldn't be written or need some processing
+      (pyc and pyo files, scripts)
+    """
+    record_file_path = os.path.join(site_dir, record_relpath)
+    with codecs.open(record_file_path, encoding='utf-8') as record_file:
+        record_contents = record_file.read()
+    # temporary fix for https://github.com/pypa/pip/issues/1376
+    # we need to ignore files under ".data" directory
+    data_dir = os.path.dirname(record_relpath).strip(os.path.sep)
+    data_dir = data_dir[:-len('dist-info')] + 'data'
+
+    to_write = []
+    to_omit = []
+    for l in record_contents.splitlines():
+        spl = l.split(',')
+        if len(spl) == 3:
+            # new record will omit (or write differently):
+            # - abs paths, paths with ".." (entry points),
+            # - pyc+pyo files
+            # - the old RECORD file
+            # TODO: is there any better way to recognize an entry point?
+            if os.path.isabs(spl[0]) or spl[0].startswith('..') or \
+               spl[0].endswith('.pyc') or spl[0].endswith('.pyo') or \
+               spl[0] == record_relpath or spl[0].startswith(data_dir):
+                to_omit.append(spl)
+            else:
+                to_write.append(spl)
+        else:
+            pass  # bad RECORD or empty line
+    return to_write, to_omit
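To make the filtering above concrete, here are illustrative RECORD rows and how get_records_to_pack() would classify them (the file names and hashes are made up for the example):

# Illustrative classification of RECORD rows; paths and hashes are invented.
#
#   pip/__init__.py,sha256=abc123,1307            -> to_write (regular file)
#   pip/__pycache__/__init__.cpython-36.pyc,,     -> to_omit  (*.pyc)
#   ../../../usr/bin/pip3,sha256=def456,398       -> to_omit  (entry point, "..")
#   pip-9.0.3.dist-info/RECORD,,                  -> to_omit  (the RECORD itself)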
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 85e2ee3..4d34130 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -1256,7 +1256,7 @@ LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \
 		test/test_asyncio \
 		collections concurrent concurrent/futures encodings \
 		email email/mime test/test_email test/test_email/data \
-		ensurepip ensurepip/_bundled \
+		ensurepip ensurepip/_bundled ensurepip/rewheel \
 		html json test/test_json http dbm xmlrpc \
 		sqlite3 sqlite3/test \
 		logging csv wsgiref urllib \
SOURCES/00189-use-rpm-wheels.patch (new file, 70 lines)
@@ -0,0 +1,70 @@
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 09c572d..167d27b 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -1,16 +1,27 @@
+import distutils.version
+import glob
 import os
 import os.path
-import pkgutil
 import sys
 import tempfile


 __all__ = ["version", "bootstrap"]

+_WHEEL_DIR = "/usr/share/python{}-wheels/".format(sys.version_info[0])

-_SETUPTOOLS_VERSION = "40.6.2"
+def _get_most_recent_wheel_version(pkg):
+    prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg))
+    suffix = "-py2.py3-none-any.whl"
+    pattern = "{}*{}".format(prefix, suffix)
+    versions = (p[len(prefix):-len(suffix)] for p in glob.glob(pattern))
+    return str(max(versions, key=distutils.version.LooseVersion))
+
+
+_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools")
+
+_PIP_VERSION = _get_most_recent_wheel_version("pip")

-_PIP_VERSION = "18.1"

 _PROJECTS = [
     ("setuptools", _SETUPTOOLS_VERSION),
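The helper above picks the highest-versioned wheel found in the system wheel directory. A short sketch of the same version comparison, assuming the directory contains e.g. pip-18.1-py2.py3-none-any.whl and pip-19.0.3-py2.py3-none-any.whl (file names are illustrative):

# Sketch of the version selection in _get_most_recent_wheel_version().
import distutils.version

versions = ["18.1", "19.0.3"]
print(max(versions, key=distutils.version.LooseVersion))   # 19.0.3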
@@ -23,9 +34,15 @@ def _run_pip(args, additional_paths=None):
     if additional_paths is not None:
         sys.path = additional_paths + sys.path

-    # Install the bundled software
-    import pip._internal
-    return pip._internal.main(args)
+    try:
+        # pip 10
+        from pip._internal import main
+    except ImportError:
+        # pip 9
+        from pip import main
+    if args[0] in ["install", "list", "wheel"]:
+        args.append('--pre')
+    return main(args)


 def version():
@@ -94,12 +111,9 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
         additional_paths = []
         for project, version in _PROJECTS:
             wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
-            whl = pkgutil.get_data(
-                "ensurepip",
-                "_bundled/{}".format(wheel_name),
-            )
-            with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
-                fp.write(whl)
+            with open(os.path.join(_WHEEL_DIR, wheel_name), "rb") as sfp:
+                with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
+                    fp.write(sfp.read())

             additional_paths.append(os.path.join(tmpdir, wheel_name))
SOURCES/00316-mark-bdist_wininst-unsupported.patch (new file, 14 lines)
@@ -0,0 +1,14 @@
diff --git a/Lib/distutils/command/bdist_wininst.py b/Lib/distutils/command/bdist_wininst.py
index fde5675..15434c3 100644
--- a/Lib/distutils/command/bdist_wininst.py
+++ b/Lib/distutils/command/bdist_wininst.py
@@ -55,6 +55,9 @@ class bdist_wininst(Command):
     boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
                        'skip-build']

+    # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows
+    _unsupported = (sys.platform != "win32")
+
     def initialize_options(self):
         self.bdist_dir = None
         self.plat_name = None
SOURCES/00318-fixes-for-tls-13.patch (new file, 949 lines)
@@ -0,0 +1,949 @@
From 412ccf4c6f8c417006c0a93392a8274a425074c0 Mon Sep 17 00:00:00 2001
From: Victor Stinner <vstinner@redhat.com>
Date: Wed, 29 May 2019 04:04:54 +0200
Subject: [PATCH 1/5] bpo-32947: test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1
 (GH-11612)

Backport partially commit 529525fb5a8fd9b96ab4021311a598c77588b918:
complete the previous partial backport (commit
2a4ee8aa01d61b6a9c8e9c65c211e61bdb471826.

Co-Authored-By: Christian Heimes <christian@python.org>
---
 Lib/test/test_ssl.py                          | 15 +++++++++++++++
 .../2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst  |  1 +
 2 files changed, 16 insertions(+)
 create mode 100644 Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst

diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index cb0acda..639109f 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -2043,6 +2043,16 @@ if _have_threads:
                         sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n"
                                          % (msg, ctype, msg.lower(), ctype))
                     self.write(msg.lower())
+                except ConnectionResetError:
+                    # XXX: OpenSSL 1.1.1 sometimes raises ConnectionResetError
+                    # when connection is not shut down gracefully.
+                    if self.server.chatty and support.verbose:
+                        sys.stdout.write(
+                            " Connection reset by peer: {}\n".format(
+                                self.addr)
+                        )
+                    self.close()
+                    self.running = False
                 except OSError:
                     if self.server.chatty:
                         handle_error("Test server failure:\n")
@@ -2122,6 +2132,11 @@ if _have_threads:
                 pass
             except KeyboardInterrupt:
                 self.stop()
+            except BaseException as e:
+                if support.verbose and self.chatty:
+                    sys.stdout.write(
+                        ' connection handling failed: ' + repr(e) + '\n')
+
             self.sock.close()

         def stop(self):
diff --git a/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst b/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst
new file mode 100644
index 0000000..f508504
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst
@@ -0,0 +1 @@
+test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1.
--
2.21.0
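The hunk above teaches the test echo server to treat an abrupt peer close as a normal end of connection, since OpenSSL 1.1.1 / TLS 1.3 can surface it as ConnectionResetError. A minimal sketch of the same pattern, outside the test suite; `conn` is assumed to be an already-wrapped SSLSocket:

# Minimal sketch of tolerant shutdown handling; illustrative only.
def serve_echo(conn):
    try:
        while True:
            data = conn.recv(1024)
            if not data:
                break
            conn.sendall(data.lower())
    except ConnectionResetError:
        pass          # peer closed without a graceful TLS shutdown
    finally:
        conn.close()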
From 6b728ec778067849dd1f0d9b73cf1ac47dafa270 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
 <31488909+miss-islington@users.noreply.github.com>
Date: Wed, 25 Sep 2019 09:12:59 -0700
Subject: [PATCH 2/5] bpo-38271: encrypt private key test files with AES256
 (GH-16385)

The private keys for test_ssl were encrypted with 3DES in traditional
PKCS#5 format. 3DES and the digest algorithm of PKCS#5 are blocked by
some strict crypto policies. Use PKCS#8 format with AES256 encryption
instead.

Signed-off-by: Christian Heimes <christian@python.org>

https://bugs.python.org/issue38271

Automerge-Triggered-By: @tiran
(cherry picked from commit bfd0c963d88f3df69489ee250655e2b8f3d235bd)

Co-authored-by: Christian Heimes <christian@python.org>
---
 Lib/test/keycert.passwd.pem                   | 85 ++++++++++---------
 Lib/test/make_ssl_certs.py                    |  4 +-
 Lib/test/ssl_key.passwd.pem                   | 84 +++++++++---------
 .../2019-09-25-13-11-29.bpo-38271.iHXNIg.rst  |  4 +
 4 files changed, 91 insertions(+), 86 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst
|
||||
diff --git a/Lib/test/keycert.passwd.pem b/Lib/test/keycert.passwd.pem
index cbb3c3b..c330c36 100644
--- a/Lib/test/keycert.passwd.pem
+++ b/Lib/test/keycert.passwd.pem
@@ -1,45 +1,45 @@
||||
------BEGIN RSA PRIVATE KEY-----
|
||||
-Proc-Type: 4,ENCRYPTED
|
||||
-DEK-Info: DES-EDE3-CBC,D134E931C96D9DEC
|
||||
-
|
||||
-nuGFEej7vIjkYWSMz5OJeVTNntDRQi6ZM4DBm3g8T7i/0odr3WFqGMMKZcIhLYQf
|
||||
-rgRq7RSKtrJ1y5taVucMV+EuCjyfzDo0TsYt+ZrXv/D08eZhjRmkhoHnGVF0TqQm
|
||||
-nQEXM/ERT4J2RM78dnG+homMkI76qOqxgGbRqQqJo6AiVRcAZ45y8s96bru2TAB8
|
||||
-+pWjO/v0Je7AFVdwSU52N8OOY6uoSAygW+0UY1WVxbVGJF2XfRsNpPX+YQHYl6e+
|
||||
-3xM5XBVCgr6kmdAyub5qUJ38X3TpdVGoR0i+CVS9GTr2pSRib1zURAeeHnlqiUZM
|
||||
-4m0Gn9s72nJevU1wxED8pwOhR8fnHEmMKGD2HPhKoOCbzDhwwBZO27TNa1uWeM3f
|
||||
-M5oixKDi2PqMn3y2cDx1NjJtP661688EcJ5a2Ih9BgO9xpnhSyzBWEKcAn0tJB0H
|
||||
-/56M0FW6cdOOIzMveGGL7sHW5E+iOdI1n5e7C6KJUzew78Y9qJnhS53EdI6qTz9R
|
||||
-wsIsj1i070Fk6RbPo6zpLlF6w7Zj8GlZaZA7OZZv9wo5VEV/0ST8gmiiBOBc4C6Y
|
||||
-u9hyLIIu4dFEBKyQHRvBnQSLNpKx6or1OGFDVBay2In9Yh2BHh1+vOj/OIz/wq48
|
||||
-EHOIV27fRJxLu4jeK5LIGDhuPnMJ8AJYQ0bQOUP6fd7p+TxWkAQZPB/Dx/cs3hxr
|
||||
-nFEdzx+eO+IAsObx/b1EGZyEJyETBslu4GwYX7/KK3HsJhDJ1bdZ//28jOCaoir6
|
||||
-ZOMT72GRwmVoQTJ0XpccfjHfKJDRLT7C1xvzo4Eibth0hpTZkA75IUYUp6qK/PuJ
|
||||
-kH/qdiC7QIkRKtsrawW4vEDna3YtxIYhQqz9+KwO6u/0gzooZtv1RU4U3ifMDB5u
|
||||
-5P5GAzACRqlY8QYBkM869lvWqzQPHvybC4ak9Yx6/heMO9ddjdIW9BaK8BLxvN/6
|
||||
-UCD936Y4fWltt09jHZIoxWFykouBwmd7bXooNYXmDRNmjTdVhKJuOEOQw8hDzx7e
|
||||
-pWFJ9Z/V4Qm1tvXbCD7QFqMCDoY3qFvVG8DBqXpmxe1yPfz21FWrT7IuqDXAD3ns
|
||||
-vxfN/2a+Cy04U9FBNVCvWqWIs5AgNpdCMJC2FlXKTy+H3/7rIjNyFyvbX0vxIXtK
|
||||
-liOVNXiyVM++KZXqktqMUDlsJENmIHV9B046luqbgW018fHkyEYlL3iRZGbYegwr
|
||||
-XO9VVIKVPw1BEvJ8VNdGFGuZGepd8qX2ezfYADrNR+4t85HDm8inbjTobSjWuljs
|
||||
-ftUNkOeCHqAvWCFQTLCfdykvV08EJfVY79y7yFPtfRV2gxYokXFifjo3su9sVQr1
|
||||
-UiIS5ZAsIC1hBXWeXoBN7QVTkFi7Yto6E1q2k10LiT3obpUUUQ/oclhrJOCJVjrS
|
||||
-oRcj2QBy8OT4T9slJr5maTWdgd7Lt6+I6cGQXPaDvjGOJl0eBYM14vhx4rRQWytJ
|
||||
-k07hhHFO4+9CGCuHS8AAy2gR6acYFWt2ZiiNZ0z/iPIHNK4YEyy9aLf6uZH/KQjE
|
||||
-jmHToo7XD6QvCAEC5qTHby3o3LfHIhyZi/4L+AhS4FKUHF6M0peeyYt4z3HaK2d2
|
||||
-N6mHLPdjwNjra7GOmcns4gzcrdfoF+R293KpPal4PjknvR3dZL4kKP/ougTAM5zv
|
||||
-qDIvRbkHzjP8ChTpoLcJsNVXykNcNkjcSi0GHtIpYjh6QX6P2uvR/S4+Bbb9p9rn
|
||||
-hIy/ovu9tWN2hiPxGPe6torF6BulAxsTYlDercC204AyzsrdA0pr6HBgJH9C6ML1
|
||||
-TchwodbFJqn9rSv91i1liusAGoOvE81AGBdrXY7LxfSNhYY1IK6yR/POJPTd53sA
|
||||
-uX2/j6Rtoksd/2BHPM6AUnI/2B9slhuzWX2aCtWLeuwvXDS6rYuTigaQmLkzTRfM
|
||||
-dlMI3s9KLXxgi5YVumUZleJWXwBNP7KiKajd+VTSD+7WAhyhM5FIG5wVOaxmy4G2
|
||||
-TyqZ/Ax9d2VEjTQHWvQlLPQ4Mp0EIz0aEl94K/S8CK8bJRH6+PRkar+dJi1xqlL+
|
||||
-BYb42At9mEJ8odLlFikvNi1+t7jqXk5jRi5C0xFKx3nTtzoH2zNUeuA3R6vSocVK
|
||||
-45jnze9IkKmxMlJ4loR5sgszdpDCD3kXqjtCcbMTmcrGyzJek3HSOTpiEORoTFOe
|
||||
-Rhg6jH5lm+QcC263oipojS0qEQcnsWJP2CylNYMYHR9O/9NQxT3o2lsRHqZTMELV
|
||||
-uQa/SFH+paQNbZOj8MRwPSqqiIxJFuLswKte1R+W7LKn1yBSM7Pp39lNbzGvJD2E
|
||||
-YRfnCwFpJ54voVAuQ4jXJvigCW2qeCjXlxeD6K2j4eGJEEOmIjIW1wjubyBY6OI3
|
||||
------END RSA PRIVATE KEY-----
|
||||
+-----BEGIN ENCRYPTED PRIVATE KEY-----
|
||||
+MIIHbTBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQIhD+rJdxqb6ECAggA
|
||||
+MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBDTdyjCP3riOSUfxix4aXEvBIIH
|
||||
+ECGkbsFabrcFMZcplw5jHMaOlG7rYjUzwDJ80JM8uzbv2Jb8SvNlns2+xmnEvH/M
|
||||
+mNvRmnXmplbVjH3XBMK8o2Psnr2V/a0j7/pgqpRxHykG+koOY4gzdt3MAg8JPbS2
|
||||
+hymSl+Y5EpciO3xLfz4aFL1ZNqspQbO/TD13Ij7DUIy7xIRBMp4taoZCrP0cEBAZ
|
||||
++wgu9m23I4dh3E8RUBzWyFFNic2MVVHrui6JbHc4dIHfyKLtXJDhUcS0vIC9PvcV
|
||||
+jhorh3UZC4lM+/jjXV5AhzQ0VrJ2tXAUX2dA144XHzkSH2QmwfnajPsci7BL2CGC
|
||||
+rjyTy4NfB/lDwU+55dqJZQSKXMxAapJMrtgw7LD5CKQcN6zmfhXGssJ7HQUXKkaX
|
||||
+I1YOFzuUD7oo56BVCnVswv0jX9RxrE5QYNreMlOP9cS+kIYH65N+PAhlURuQC14K
|
||||
+PgDkHn5knSa2UQA5tc5f7zdHOZhGRUfcjLP+KAWA3nh+/2OKw/X3zuPx75YT/FKe
|
||||
+tACPw5hjEpl62m9Xa0eWepZXwqkIOkzHMmCyNCsbC0mmRoEjmvfnslfsmnh4Dg/c
|
||||
+4YsTYMOLLIeCa+WIc38aA5W2lNO9lW0LwLhX1rP+GRVPv+TVHXlfoyaI+jp0iXrJ
|
||||
+t3xxT0gaiIR/VznyS7Py68QV/zB7VdqbsNzS7LdquHK1k8+7OYiWjY3gqyU40Iu2
|
||||
+d1eSnIoDvQJwyYp7XYXbOlXNLY+s1Qb7yxcW3vXm0Bg3gKT8r1XHWJ9rj+CxAn5r
|
||||
+ysfkPs1JsesxzzQjwTiDNvHnBnZnwxuxfBr26ektEHmuAXSl8V6dzLN/aaPjpTj4
|
||||
+CkE7KyqX3U9bLkp+ztl4xWKEmW44nskzm0+iqrtrxMyTfvvID4QrABjZL4zmWIqc
|
||||
+e3ZfA3AYk9VDIegk/YKGC5VZ8YS7ZXQ0ASK652XqJ7QlMKTxxV7zda6Fp4uW6/qN
|
||||
+ezt5wgbGGhZQXj2wDQmWNQYyG/juIgYTpCUA54U5XBIjuR6pg+Ytm0UrvNjsUoAC
|
||||
+wGelyqaLDq8U8jdIFYVTJy9aJjQOYXjsUJ0dZN2aGHSlju0ZGIZc49cTIVQ9BTC5
|
||||
+Yc0Vlwzpl+LuA25DzKZNSb/ci0lO/cQGJ2uXQQgaNgdsHlu8nukENGJhnIzx4fzK
|
||||
+wEh3yHxhTRCzPPwDfXmx0IHXrPqJhSpAgaXBVIm8OjvmMxO+W75W4uLfNY/B7e2H
|
||||
+3cjklGuvkofOf7sEOrGUYf4cb6Obg8FpvHgpKo5Twwmoh/qvEKckBFqNhZXDDl88
|
||||
+GbGlSEgyaAV1Ig8s1NJKBolWFa0juyPAwJ8vT1T4iwW7kQ7KXKt2UNn96K/HxkLu
|
||||
+pikvukz8oRHMlfVHa0R48UB1fFHwZLzPmwkpu6ancIxk3uO3yfhf6iDk3bmnyMlz
|
||||
+g3k/b6MrLYaOVByRxay85jH3Vvgqfgn6wa6BJ7xQ81eZ8B45gFuTH0J5JtLL7SH8
|
||||
+darRPLCYfA+Ums9/H6pU5EXfd3yfjMIbvhCXHkJrrljkZ+th3p8dyto6wmYqIY6I
|
||||
+qR9sU+o6DhRaiP8tCICuhHxQpXylUM6WeJkJwduTJ8KWIvzsj4mReIKOl/oC2jSd
|
||||
+gIdKhb9Q3zj9ce4N5m6v66tyvjxGZ+xf3BvUPDD+LwZeXgf7OBsNVbXzQbzto594
|
||||
+nbCzPocFi3gERE50ru4K70eQCy08TPG5NpOz+DDdO5vpAuMLYEuI7O3L+3GjW40Q
|
||||
+G5bu7H5/i7o/RWR67qhG/7p9kPw3nkUtYgnvnWaPMIuTfb4c2d069kjlfgWjIbbI
|
||||
+tpSKmm5DHlqTE4/ECAbIEDtSaw9dXHCdL3nh5+n428xDdGbjN4lT86tfu17EYKzl
|
||||
+ydH1RJ1LX3o3TEj9UkmDPt7LnftvwybMFEcP7hM2xD4lC++wKQs7Alg6dTkBnJV4
|
||||
+5xU78WRntJkJTU7kFkpPKA0QfyCuSF1fAMoukDBkqUdOj6jE0BlJQlHk5iwgnJlt
|
||||
+uEdkTjHZEjIUxWC6llPcAzaPNlmnD45AgfEW+Jn21IvutmJiQAz5lm9Z9PXaR0C8
|
||||
+hXB6owRY67C0YKQwXhoNf6xQun2xGBGYy5rPEEezX1S1tUH5GR/KW1Lh+FzFqHXI
|
||||
+ZEb5avfDqHKehGAjPON+Br7akuQ125M9LLjKuSyPaQzeeCAy356Xd7XzVwbPddbm
|
||||
+9S9WSPqzaPgh10chIHoNoC8HMd33dB5j9/Q6jrbU/oPlptu/GlorWblvJdcTuBGI
|
||||
+IVn45RFnkG8hCz0GJSNzW7+70YdESQbfJW79vssWMaiSjFE0pMyFXrFR5lBywBTx
|
||||
+PiGEUWtvrKG94X1TMlGUzDzDJOQNZ9dT94bonNe9pVmP5BP4/DzwwiWh6qrzWk6p
|
||||
+j8OE4cfCSh2WvHnhJbH7/N0v+JKjtxeIeJ16jx/K2oK5
|
||||
+-----END ENCRYPTED PRIVATE KEY-----
|
||||
 -----BEGIN CERTIFICATE-----
 MIIEWTCCAsGgAwIBAgIJAJinz4jHSjLtMA0GCSqGSIb3DQEBCwUAMF8xCzAJBgNV
 BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u
@@ -66,3 +66,4 @@ jMqTFlmO7kpf/jpCSmamp3/JSEE1BJKHwQ6Ql4nzRA2N1mnvWH7Zxcv043gkHeAu
 9Wc2uXpw9xF8itV4Uvcdr3dwqByvIqn7iI/gB+4l41e0u8OmH2MKOx4Nxlly5TNW
 HcVKQHyOeyvnINuBAQ==
 -----END CERTIFICATE-----
+
||||
diff --git a/Lib/test/make_ssl_certs.py b/Lib/test/make_ssl_certs.py
index 3622765..41b5f46 100644
--- a/Lib/test/make_ssl_certs.py
+++ b/Lib/test/make_ssl_certs.py
@@ -206,8 +206,8 @@ if __name__ == '__main__':
     with open('ssl_key.pem', 'w') as f:
         f.write(key)
     print("password protecting ssl_key.pem in ssl_key.passwd.pem")
-    check_call(['openssl','rsa','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-des3','-passout','pass:somepass'])
-    check_call(['openssl','rsa','-in','ssl_key.pem','-out','keycert.passwd.pem','-des3','-passout','pass:somepass'])
+    check_call(['openssl','pkey','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-aes256','-passout','pass:somepass'])
+    check_call(['openssl','pkey','-in','ssl_key.pem','-out','keycert.passwd.pem','-aes256','-passout','pass:somepass'])

     with open('keycert.pem', 'w') as f:
        f.write(key)
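The change above switches the key re-encryption from 3DES ("openssl rsa ... -des3") to a password-protected PKCS#8 file using AES-256. The same step shown on its own, with the file names and password taken directly from the patched script:

# Re-encrypt the test key with AES-256 (PKCS#8), as the patched
# make_ssl_certs.py does.
from subprocess import check_call

check_call(['openssl', 'pkey', '-in', 'ssl_key.pem',
            '-out', 'ssl_key.passwd.pem',
            '-aes256', '-passout', 'pass:somepass'])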
||||
diff --git a/Lib/test/ssl_key.passwd.pem b/Lib/test/ssl_key.passwd.pem
|
||||
index e4f1370..46de61a 100644
|
||||
--- a/Lib/test/ssl_key.passwd.pem
|
||||
+++ b/Lib/test/ssl_key.passwd.pem
|
||||
@@ -1,42 +1,42 @@
|
||||
------BEGIN RSA PRIVATE KEY-----
|
||||
-Proc-Type: 4,ENCRYPTED
|
||||
-DEK-Info: DES-EDE3-CBC,8064BE1494B24B13
|
||||
-
|
||||
-KJrffOMbo8M0I3PzcYxRZGMpKD1yB3Ii4+bT5XoanxjIJ+4fdx6LfZ0Rsx+riyzs
|
||||
-tymsQu/iYY9j+4rCvN9+eetsL1X6iZpiimKsLexcid9M3fb0vxED5Sgw0dvunCUA
|
||||
-xhqjLIKR92MKbODHf6KrDKCpsiPbjq4gZ7P+uCGXAMHL3MXIJSC0hW9rK7Ce6oyO
|
||||
-CjpIcgB8x+GUWZZZhAFdlzIHMZrteNP2P5HK6QcaT71P034Dz1hhqoj4Q0t+Fta2
|
||||
-4tfsM/bnTR/l6hwlhPa1e3Uj322tDTDWBScgWANn5+sEWldLmozMaWhZsn22pfk2
|
||||
-KjRMGXG024JVheV882nbdOBvG7oq+lxkZ/ZP+vvqJqnvYtf7WtM8UivzYpe5Hz5b
|
||||
-kVvWzPjBLUSZ9whM9rDLqSSqMPyPvDTuEmLkuq+xm7pYJmsLqIMP2klZLqRxLX6K
|
||||
-uqwplb8UG440qauxgnQ905PId1l2fJEnRtV+7vXprA0L0QotgXLVHBhLmTFM+3PH
|
||||
-9H3onf31dionUAPrn3nfVE36HhvVgRyvDBnBzJSIMighgq21Qx/d1dk0DRYi1hUI
|
||||
-nCHl0YJPXheVcXR7JiSF2XQCAaFuS1Mr7NCXfWZOZQC/0dkvmHnl9DUAhuqq9BNZ
|
||||
-1cKhZXcKHadg2/r0Zup/oDzmHPUEfTAXT0xbqoWlhkdwbF2veWQ96A/ncx3ISTb4
|
||||
-PkXBlX9rdia8nmtyQDQRn4NuvchbaGkj4WKFC8pF8Hn7naHqwjpHaDUimBc0CoQW
|
||||
-edNJqruKWwtSVLuwKHCC2gZFX9AXSKJXJz/QRSUlhFGOhuF/J6yKaXj6n5lxWNiQ
|
||||
-54J+OP/hz2aS95CD2+Zf1SKpxdWiLZSIQqESpmmUrXROixNJZ/Z7gI74Dd9dSJOH
|
||||
-W+3AU03vrrFZVrJVZhjcINHoH1Skh6JKscH18L6x4U868nSr4SrRLX8BhHllOQyD
|
||||
-bmU+PZAjF8ZBIaCtTGulDXD29F73MeAZeTSsgQjFu0iKLj1wPiphbx8i/SUtR4YP
|
||||
-X6PVA04g66r1NBw+3RQASVorZ3g1MSFvITHXcbKkBDeJH2z1+c6t/VVyTONnQhM5
|
||||
-lLgRSk6HCbetvT9PKxWrWutA12pdBYEHdZhMHVf2+xclky7l09w8hg2/qqcdGRGe
|
||||
-oAOZ72t0l5ObNyaruDKUS6f4AjOyWq/Xj5xuFtf1n3tQHyslSyCTPcAbQhDfTHUx
|
||||
-vixb/V9qvYPt7OCn8py7v1M69NH42QVFAvwveDIFjZdqfIKBoJK2V4qPoevJI6uj
|
||||
-Q5ByMt8OXOjSXNpHXpYQWUiWeCwOEBXJX8rzCHdMtg37jJ0zCmeErR1NTdg+EujM
|
||||
-TWYgd06jlT67tURST0aB2kg4ijKgUJefD313LW1zC6gVsTbjSZxYyRbPfSP6flQB
|
||||
-yCi1C19E2OsgleqbkBVC5GlYUzaJT7SGjCRmGx1eqtbrALu+LVH24Wceexlpjydl
|
||||
-+s2nf/DZlKun/tlPh6YioifPCJjByZMQOCEfIox6BkemZETz8uYA4TTWimG13Z03
|
||||
-gyDGC2jdpEW414J2qcQDvrdUgJ+HlhrAAHaWpMQDbXYxBGoZ+3+ORvQV4kAsCwL8
|
||||
-k3EIrVpePdik+1xgOWsyLj6QxFXlTMvL6Wc5pnArFPORsgHEolJvxSPTf9aAHNPn
|
||||
-V2WBvxiLBtYpGrujAUM40Syx/aN2RPtcXYPAusHUBw+S8/p+/8Kg8GZmnIXG3F89
|
||||
-45Eepl2quZYIrou7a1fwIpIIZ0hFiBQ1mlHVMFtxwVHS1bQb3SU2GeO+JcGjdVXc
|
||||
-04qeGuQ5M164eQ5C0T7ZQ1ULiUlFWKD30m+cjqmZzt3d7Q0mKpMKuESIuZJo/wpD
|
||||
-Nas432aLKUhcNx/pOYLkKJRpGZKOupQoD5iUj/j44o8JoFkDK33v2S57XB5QGz28
|
||||
-9Zuhx49b3W8mbM6EBanlQKLWJGCxXqc/jhYhFWn+b0MhidynFgA0oeWvf6ZDyt6H
|
||||
-Yi5Etxsar09xp0Do3NxtQXLuSUu0ji2pQzSIKuoqQWKqldm6VrpwojiqJhy4WQBQ
|
||||
-aVVyFeWBC7G3Zj76dO+yp2sfJ0itJUQ8AIB9Cg0f34rEZu+r9luPmqBoUeL95Tk7
|
||||
-YvCOU3Jl8Iqysv8aNpVXT8sa8rrSbruWCByEePZ37RIdHLMVBwVY0eVaFQjrjU7E
|
||||
-mXmM9eaoYLfXOllsQ+M2+qPFUITr/GU3Qig13DhK/+yC1R6V2a0l0WRhMltIPYKW
|
||||
-Ztvvr4hK5LcYCeS113BLiMbDIMMZZYGDZGMdC8DnnVbT2loF0Rfmp80Af31KmMQ4
|
||||
-6XvMatW9UDjBoY5a/YMpdm7SRwm+MgV2KNPpc2kST87/yi9oprGAb8qiarHiHTM0
|
||||
------END RSA PRIVATE KEY-----
|
||||
+-----BEGIN ENCRYPTED PRIVATE KEY-----
|
||||
+MIIHbTBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQI072N7W+PDDMCAggA
|
||||
+MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBA/AuaRNi4vE4KGqI4In+70BIIH
|
||||
+ENGS5Vex5NID873frmd1UZEHZ+O/Bd0wDb+NUpIqesHkRYf7kKi6Gnr+nKQ/oVVn
|
||||
+Lm3JjE7c8ECP0OkOOXmiXuWL1SkzBBWqCI4stSGUPvBiHsGwNnvJAaGjUffgMlcC
|
||||
+aJOA2+dnejLkzblq4CB2LQdm06N3Xoe9tyqtQaUHxfzJAf5Ydd8uj7vpKN2MMhY7
|
||||
+icIPJwSyh0N7S6XWVtHEokr9Kp4y2hS5a+BgCWV1/1z0aF7agnSVndmT1VR+nWmc
|
||||
+lM14k+lethmHMB+fsNSjnqeJ7XOPlOTHqhiZ9bBSTgF/xr5Bck/NiKRzHjdovBox
|
||||
+TKg+xchaBhpRh7wBPBIlNJeHmIjv+8obOKjKU98Ig/7R9+IryZaNcKAH0PuOT+Sw
|
||||
+QHXiCGQbOiYHB9UyhDTWiB7YVjd8KHefOFxfHzOQb/iBhbv1x3bTl3DgepvRN6VO
|
||||
+dIsPLoIZe42sdf9GeMsk8mGJyZUQ6AzsfhWk3grb/XscizPSvrNsJ2VL1R7YTyT3
|
||||
+3WA4ZXR1EqvXnWL7N/raemQjy62iOG6t7fcF5IdP9CMbWP+Plpsz4cQW7FtesCTq
|
||||
+a5ZXraochQz361ODFNIeBEGU+0qqXUtZDlmos/EySkZykSeU/L0bImS62VGE3afo
|
||||
+YXBmznTTT9kkFkqv7H0MerfJsrE/wF8puP3GM01DW2JRgXRpSWlvbPV/2LnMtRuD
|
||||
+II7iH4rWDtTjCN6BWKAgDOnPkc9sZ4XulqT32lcUeV6LTdMBfq8kMEc8eDij1vUT
|
||||
+maVCRpuwaq8EIT3lVgNLufHiG96ojlyYtj3orzw22IjkgC/9ee8UDik9CqbMVmFf
|
||||
+fVHhsw8LNSg8Q4bmwm5Eg2w2it2gtI68+mwr75oCxuJ/8OMjW21Prj8XDh5reie2
|
||||
+c0lDKQOFZ9UnLU1bXR/6qUM+JFKR4DMq+fOCuoQSVoyVUEOsJpvBOYnYZN9cxsZm
|
||||
+vh9dKafMEcKZ8flsbr+gOmOw7+Py2ifSlf25E/Frb1W4gtbTb0LQVHb6+drutrZj
|
||||
+8HEu4CnHYFCD4ZnOJb26XlZCb8GFBddW86yJYyUqMMV6Q1aJfAOAglsTo1LjIMOZ
|
||||
+byo0BTAmwUevU/iuOXQ4qRBXXcoidDcTCrxfUSPG9wdt9l+m5SdQpWqfQ+fx5O7m
|
||||
+SLlrHyZCiPSFMtC9DxqjIklHjf5W3wslGLgaD30YXa4VDYkRihf3CNsxGQ+tVvef
|
||||
+l0ZjoAitF7Gaua06IESmKnpHe23dkr1cjYq+u2IV+xGH8LeExdwsQ9kpuTeXPnQs
|
||||
+JOA99SsFx1ct32RrwjxnDDsiNkaViTKo9GDkV3jQTfoFgAVqfSgg9wGXpqUqhNG7
|
||||
+TiSIHCowllLny2zn4XrXCy2niD3VDt0skb3l/PaegHE2z7S5YY85nQtYwpLiwB9M
|
||||
+SQ08DYKxPBZYKtS2iZ/fsA1gjSRQDPg/SIxMhUC3M3qH8iWny1Lzl25F2Uq7VVEX
|
||||
+LdTUtaby49jRTT3CQGr5n6z7bMbUegiY7h8WmOekuThGDH+4xZp6+rDP4GFk4FeK
|
||||
+JcF70vMQYIjQZhadic6olv+9VtUP42ltGG/yP9a3eWRkzfAf2eCh6B1rYdgEWwE8
|
||||
+rlcZzwM+y6eUmeNF2FVWB8iWtTMQHy+dYNPM+Jtus1KQKxiiq/yCRs7nWvzWRFWA
|
||||
+HRyqV0J6/lqgm4FvfktFt1T0W+mDoLJOR2/zIwMy2lgL5zeHuR3SaMJnCikJbqKS
|
||||
+HB3UvrhAWUcZqdH29+FhVWeM7ybyF1Wccmf+IIC/ePLa6gjtqPV8lG/5kbpcpnB6
|
||||
+UQY8WWaKMxyr3jJ9bAX5QKshchp04cDecOLZrpFGNNQngR8RxSEkiIgAqNxWunIu
|
||||
+KrdBDrupv/XAgEOclmgToY3iywLJSV5gHAyHWDUhRH4cFCLiGPl4XIcnXOuTze3H
|
||||
+3j+EYSiS3v3DhHjp33YU2pXlJDjiYsKzAXejEh66++Y8qaQdCAad3ruWRCzW3kgk
|
||||
+Md0A1VGzntTnQsewvExQEMZH2LtYIsPv3KCYGeSAuLabX4tbGk79PswjnjLLEOr0
|
||||
+Ghf6RF6qf5/iFyJoG4vrbKT8kx6ywh0InILCdjUunuDskIBxX6tEcr9XwajoIvb2
|
||||
+kcmGdjam5kKLS7QOWQTl8/r/cuFes0dj34cX5Qpq+Gd7tRq/D+b0207926Cxvftv
|
||||
+qQ1cVn8HiLxKkZzd3tpf2xnoV1zkTL0oHrNg+qzxoxXUTUcwtIf1d/HRbYEAhi/d
|
||||
+bBBoFeftEHWNq+sJgS9bH+XNzo/yK4u04B5miOq8v4CSkJdzu+ZdF22d4cjiGmtQ
|
||||
+8BTmcn0Unzm+u5H0+QSZe54QBHJGNXXOIKMTkgnOdW27g4DbI1y7fCqJiSMbRW6L
|
||||
+oHmMfbdB3GWqGbsUkhY8i6h9op0MU6WOX7ea2Rxyt4t6
|
||||
+-----END ENCRYPTED PRIVATE KEY-----
|
||||
diff --git a/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst b/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst
|
||||
new file mode 100644
|
||||
index 0000000..8f43d32
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst
|
||||
@@ -0,0 +1,4 @@
|
||||
+The private keys for test_ssl were encrypted with 3DES in traditional
|
||||
+PKCS#5 format. 3DES and the digest algorithm of PKCS#5 are blocked by
|
||||
+some strict crypto policies. Use PKCS#8 format with AES256 encryption
|
||||
+instead.
|
||||
--
|
||||
2.21.0
|
||||
|
||||
|
||||
From d8584f9bb3fb841a1b21ed25abc2237ea8bbc206 Mon Sep 17 00:00:00 2001
From: Charalampos Stratakis <cstratak@redhat.com>
Date: Tue, 26 Nov 2019 23:57:21 +0100
Subject: [PATCH 3/5] Use PROTOCOL_TLS_CLIENT/SERVER

Replaces PROTOCOL_TLSv* and PROTOCOL_SSLv23 with PROTOCOL_TLS_CLIENT and
PROTOCOL_TLS_SERVER.

Partially backports a170fa162dc03f0a014373349e548954fff2e567
---
 Lib/ssl.py               |   7 +-
 Lib/test/test_logging.py |   2 +-
 Lib/test/test_ssl.py     | 169 +++++++++++++++++++--------------------
 3 files changed, 87 insertions(+), 91 deletions(-)

diff --git a/Lib/ssl.py b/Lib/ssl.py
index 0114387..c5c5529 100644
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -473,7 +473,7 @@ def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None,
         context.load_default_certs(purpose)
     return context

-def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=None,
+def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=CERT_NONE,
                            check_hostname=False, purpose=Purpose.SERVER_AUTH,
                            certfile=None, keyfile=None,
                            cafile=None, capath=None, cadata=None):
@@ -492,9 +492,12 @@ def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=None,
     # by default.
     context = SSLContext(protocol)

+    if not check_hostname:
+        context.check_hostname = False
     if cert_reqs is not None:
         context.verify_mode = cert_reqs
-    context.check_hostname = check_hostname
+    if check_hostname:
+        context.check_hostname = True

     if keyfile and not certfile:
         raise ValueError("certfile must be specified")
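Context for the reordering above: a context created for client use with hostname checking enabled refuses to have verification lowered while check_hostname is still set, so check_hostname must be cleared first. An illustrative snippet (standard-library behaviour, not part of the patch):

# PROTOCOL_TLS_CLIENT verifies certificates and hostnames by default.
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
assert ctx.check_hostname is True
assert ctx.verify_mode == ssl.CERT_REQUIRED

ctx.check_hostname = False          # must come first...
ctx.verify_mode = ssl.CERT_NONE     # ...otherwise this raises ValueError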
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index 763a5d1..d5c63b4 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -1830,7 +1830,7 @@ class HTTPHandlerTest(BaseTest):
             else:
                 here = os.path.dirname(__file__)
                 localhost_cert = os.path.join(here, "keycert.pem")
-                sslctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+                sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
                 sslctx.load_cert_chain(localhost_cert)

                 context = ssl.create_default_context(cafile=localhost_cert)
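Many hunks in the test_ssl.py diff that follows replace hand-built contexts with a shared testing_context() helper. A sketch of what such a helper looks like; SIGNED_CERTFILE, SIGNING_CA and the hostname are assumptions standing in for the test-suite constants, and the real helper lives in Lib/test/test_ssl.py:

# Sketch of a testing_context()-style helper; illustrative only.
import ssl

def testing_context(server_cert="signed_cert_and_key.pem",
                    signing_ca="ca_cert.pem",
                    hostname="localhost"):
    client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    client_context.load_verify_locations(signing_ca)

    server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    server_context.load_cert_chain(server_cert)

    return client_context, server_context, hostname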
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
|
||||
index 639109f..a7bf2f7 100644
|
||||
--- a/Lib/test/test_ssl.py
|
||||
+++ b/Lib/test/test_ssl.py
|
||||
@@ -155,6 +155,8 @@ def test_wrap_socket(sock, ssl_version=ssl.PROTOCOL_TLS, *,
|
||||
**kwargs):
|
||||
context = ssl.SSLContext(ssl_version)
|
||||
if cert_reqs is not None:
|
||||
+ if cert_reqs == ssl.CERT_NONE:
|
||||
+ context.check_hostname = False
|
||||
context.verify_mode = cert_reqs
|
||||
if ca_certs is not None:
|
||||
context.load_verify_locations(ca_certs)
|
||||
@@ -1377,7 +1379,7 @@ class ContextTests(unittest.TestCase):
|
||||
self._assert_context_options(ctx)
|
||||
|
||||
def test_check_hostname(self):
|
||||
- ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
self.assertFalse(ctx.check_hostname)
|
||||
|
||||
# Requires CERT_REQUIRED or CERT_OPTIONAL
|
||||
@@ -2386,17 +2388,13 @@ if _have_threads:
|
||||
server_params_test(context, context,
|
||||
chatty=True, connectionchatty=True)
|
||||
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
- client_context.load_verify_locations(SIGNING_CA)
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
- # server_context.load_verify_locations(SIGNING_CA)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE2)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
|
||||
with self.subTest(client=ssl.PROTOCOL_TLS_CLIENT, server=ssl.PROTOCOL_TLS_SERVER):
|
||||
server_params_test(client_context=client_context,
|
||||
server_context=server_context,
|
||||
chatty=True, connectionchatty=True,
|
||||
- sni_name='fakehostname')
|
||||
+ sni_name='localhost')
|
||||
|
||||
client_context.check_hostname = False
|
||||
with self.subTest(client=ssl.PROTOCOL_TLS_SERVER, server=ssl.PROTOCOL_TLS_CLIENT):
|
||||
@@ -2404,7 +2402,7 @@ if _have_threads:
|
||||
server_params_test(client_context=server_context,
|
||||
server_context=client_context,
|
||||
chatty=True, connectionchatty=True,
|
||||
- sni_name='fakehostname')
|
||||
+ sni_name='localhost')
|
||||
self.assertIn('called a function you should not call',
|
||||
str(e.exception))
|
||||
|
||||
@@ -2469,39 +2467,38 @@ if _have_threads:
|
||||
if support.verbose:
|
||||
sys.stdout.write("\n")
|
||||
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.verify_mode = ssl.CERT_REQUIRED
|
||||
- context.load_verify_locations(SIGNING_CA)
|
||||
tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0)
|
||||
- self.assertEqual(context.verify_flags, ssl.VERIFY_DEFAULT | tf)
|
||||
+ self.assertEqual(client_context.verify_flags, ssl.VERIFY_DEFAULT | tf)
|
||||
|
||||
# VERIFY_DEFAULT should pass
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket()) as s:
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
+ server_hostname=hostname) as s:
|
||||
s.connect((HOST, server.port))
|
||||
cert = s.getpeercert()
|
||||
self.assertTrue(cert, "Can't get peer certificate.")
|
||||
|
||||
# VERIFY_CRL_CHECK_LEAF without a loaded CRL file fails
|
||||
- context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF
|
||||
+ client_context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF
|
||||
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket()) as s:
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
+ server_hostname=hostname) as s:
|
||||
with self.assertRaisesRegex(ssl.SSLError,
|
||||
"certificate verify failed"):
|
||||
s.connect((HOST, server.port))
|
||||
|
||||
# now load a CRL file. The CRL file is signed by the CA.
|
||||
- context.load_verify_locations(CRLFILE)
|
||||
+ client_context.load_verify_locations(CRLFILE)
|
||||
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket()) as s:
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
+ server_hostname=hostname) as s:
|
||||
s.connect((HOST, server.port))
|
||||
cert = s.getpeercert()
|
||||
self.assertTrue(cert, "Can't get peer certificate.")
|
||||
@@ -2510,19 +2507,13 @@ if _have_threads:
|
||||
if support.verbose:
|
||||
sys.stdout.write("\n")
|
||||
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
-
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.verify_mode = ssl.CERT_REQUIRED
|
||||
- context.check_hostname = True
|
||||
- context.load_verify_locations(SIGNING_CA)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
|
||||
# correct hostname should verify
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket(),
|
||||
- server_hostname="localhost") as s:
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
+ server_hostname=hostname) as s:
|
||||
s.connect((HOST, server.port))
|
||||
cert = s.getpeercert()
|
||||
self.assertTrue(cert, "Can't get peer certificate.")
|
||||
@@ -2530,7 +2521,7 @@ if _have_threads:
|
||||
# incorrect hostname should raise an exception
|
||||
server = ThreadedEchoServer(context=server_context, chatty=True)
|
||||
with server:
|
||||
- with context.wrap_socket(socket.socket(),
|
||||
+ with client_context.wrap_socket(socket.socket(),
|
||||
server_hostname="invalid") as s:
|
||||
with self.assertRaisesRegex(ssl.CertificateError,
|
||||
"hostname 'invalid' doesn't match 'localhost'"):
|
||||
@@ -2542,7 +2533,7 @@ if _have_threads:
|
||||
with socket.socket() as s:
|
||||
with self.assertRaisesRegex(ValueError,
|
||||
"check_hostname requires server_hostname"):
|
||||
- context.wrap_socket(s)
|
||||
+ client_context.wrap_socket(s)
|
||||
|
||||
def test_wrong_cert(self):
|
||||
"""Connecting when the server rejects the client's certificate
|
||||
@@ -2767,7 +2758,6 @@ if _have_threads:
|
||||
msgs = (b"msg 1", b"MSG 2", b"STARTTLS", b"MSG 3", b"msg 4", b"ENDTLS", b"msg 5", b"msg 6")
|
||||
|
||||
server = ThreadedEchoServer(CERTFILE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
starttls_server=True,
|
||||
chatty=True,
|
||||
connectionchatty=True)
|
||||
@@ -2795,7 +2785,7 @@ if _have_threads:
|
||||
sys.stdout.write(
|
||||
" client: read %r from server, starting TLS...\n"
|
||||
% msg)
|
||||
- conn = test_wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ conn = test_wrap_socket(s)
|
||||
wrapped = True
|
||||
elif indata == b"ENDTLS" and msg.startswith(b"ok"):
|
||||
# ENDTLS ok, switch back to clear text
|
||||
@@ -2882,7 +2872,7 @@ if _have_threads:
|
||||
|
||||
server = ThreadedEchoServer(CERTFILE,
|
||||
certreqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_SERVER,
|
||||
cacerts=CERTFILE,
|
||||
chatty=True,
|
||||
connectionchatty=False)
|
||||
@@ -2892,7 +2882,7 @@ if _have_threads:
|
||||
certfile=CERTFILE,
|
||||
ca_certs=CERTFILE,
|
||||
cert_reqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_CLIENT)
|
||||
s.connect((HOST, server.port))
|
||||
# helper methods for standardising recv* method signatures
|
||||
def _recv_into():
|
||||
@@ -3034,7 +3024,7 @@ if _have_threads:
|
||||
def test_nonblocking_send(self):
|
||||
server = ThreadedEchoServer(CERTFILE,
|
||||
certreqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_SERVER,
|
||||
cacerts=CERTFILE,
|
||||
chatty=True,
|
||||
connectionchatty=False)
|
||||
@@ -3044,7 +3034,7 @@ if _have_threads:
|
||||
certfile=CERTFILE,
|
||||
ca_certs=CERTFILE,
|
||||
cert_reqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_CLIENT)
|
||||
s.connect((HOST, server.port))
|
||||
s.setblocking(False)
|
||||
|
||||
@@ -3190,9 +3180,11 @@ if _have_threads:
|
||||
Basic tests for SSLSocket.version().
|
||||
More tests are done in the test_protocol_*() methods.
|
||||
"""
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
+ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
+ context.check_hostname = False
|
||||
+ context.verify_mode = ssl.CERT_NONE
|
||||
with ThreadedEchoServer(CERTFILE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_SERVER,
|
||||
chatty=False) as server:
|
||||
with context.wrap_socket(socket.socket()) as s:
|
||||
self.assertIs(s.version(), None)
|
||||
@@ -3247,7 +3239,7 @@ if _have_threads:
|
||||
|
||||
server = ThreadedEchoServer(CERTFILE,
|
||||
certreqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1,
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_SERVER,
|
||||
cacerts=CERTFILE,
|
||||
chatty=True,
|
||||
connectionchatty=False)
|
||||
@@ -3257,7 +3249,7 @@ if _have_threads:
|
||||
certfile=CERTFILE,
|
||||
ca_certs=CERTFILE,
|
||||
cert_reqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_CLIENT)
|
||||
s.connect((HOST, server.port))
|
||||
# get the data
|
||||
cb_data = s.get_channel_binding("tls-unique")
|
||||
@@ -3282,7 +3274,7 @@ if _have_threads:
|
||||
certfile=CERTFILE,
|
||||
ca_certs=CERTFILE,
|
||||
cert_reqs=ssl.CERT_NONE,
|
||||
- ssl_version=ssl.PROTOCOL_TLSv1)
|
||||
+ ssl_version=ssl.PROTOCOL_TLS_CLIENT)
|
||||
s.connect((HOST, server.port))
|
||||
new_cb_data = s.get_channel_binding("tls-unique")
|
||||
if support.verbose:
|
||||
@@ -3299,32 +3291,35 @@ if _have_threads:
|
||||
s.close()
|
||||
|
||||
def test_compression(self):
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
if support.verbose:
|
||||
sys.stdout.write(" got compression: {!r}\n".format(stats['compression']))
|
||||
self.assertIn(stats['compression'], { None, 'ZLIB', 'RLE' })
|
||||
|
||||
+
|
||||
@unittest.skipUnless(hasattr(ssl, 'OP_NO_COMPRESSION'),
|
||||
"ssl.OP_NO_COMPRESSION needed for this test")
|
||||
def test_compression_disabled(self):
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- context.options |= ssl.OP_NO_COMPRESSION
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ client_context.options |= ssl.OP_NO_COMPRESSION
|
||||
+ server_context.options |= ssl.OP_NO_COMPRESSION
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
self.assertIs(stats['compression'], None)
|
||||
|
||||
def test_dh_params(self):
|
||||
# Check we can get a connection with ephemeral Diffie-Hellman
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- context.load_dh_params(DHFILE)
|
||||
- context.set_ciphers("kEDH")
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ server_context.load_dh_params(DHFILE)
|
||||
+ server_context.set_ciphers("kEDH")
|
||||
+ server_context.options |= ssl.OP_NO_TLSv1_3
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
cipher = stats["cipher"][0]
|
||||
parts = cipher.split("-")
|
||||
if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts:
|
||||
@@ -3332,22 +3327,20 @@ if _have_threads:
|
||||
|
||||
def test_selected_alpn_protocol(self):
|
||||
# selected_alpn_protocol() is None unless ALPN is used.
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
self.assertIs(stats['client_alpn_protocol'], None)
|
||||
|
||||
@unittest.skipUnless(ssl.HAS_ALPN, "ALPN support required")
|
||||
def test_selected_alpn_protocol_if_server_uses_alpn(self):
|
||||
# selected_alpn_protocol() is None unless ALPN is used by the client.
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- client_context.load_verify_locations(CERTFILE)
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(CERTFILE)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
server_context.set_alpn_protocols(['foo', 'bar'])
|
||||
stats = server_params_test(client_context, server_context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
self.assertIs(stats['client_alpn_protocol'], None)
|
||||
|
||||
@unittest.skipUnless(ssl.HAS_ALPN, "ALPN support needed for this test")
|
||||
@@ -3394,10 +3387,10 @@ if _have_threads:
|
||||
|
||||
def test_selected_npn_protocol(self):
|
||||
# selected_npn_protocol() is None unless NPN is used
|
||||
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- context.load_cert_chain(CERTFILE)
|
||||
- stats = server_params_test(context, context,
|
||||
- chatty=True, connectionchatty=True)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ chatty=True, connectionchatty=True,
|
||||
+ sni_name=hostname)
|
||||
self.assertIs(stats['client_npn_protocol'], None)
|
||||
|
||||
@unittest.skipUnless(ssl.HAS_NPN, "NPN support needed for this test")
|
||||
@@ -3430,12 +3423,11 @@ if _have_threads:
|
||||
self.assertEqual(server_result, expected, msg % (server_result, "server"))
|
||||
|
||||
def sni_contexts(self):
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
+ server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
- other_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
+ other_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
other_context.load_cert_chain(SIGNED_CERTFILE2)
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- client_context.verify_mode = ssl.CERT_REQUIRED
|
||||
+ client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
client_context.load_verify_locations(SIGNING_CA)
|
||||
return server_context, other_context, client_context
|
||||
|
||||
@@ -3448,6 +3440,8 @@ if _have_threads:
|
||||
calls = []
|
||||
server_context, other_context, client_context = self.sni_contexts()
|
||||
|
||||
+ client_context.check_hostname = False
|
||||
+
|
||||
def servername_cb(ssl_sock, server_name, initial_context):
|
||||
calls.append((server_name, initial_context))
|
||||
if server_name is not None:
|
||||
@@ -3533,11 +3527,7 @@ if _have_threads:
|
||||
self.assertIn("TypeError", stderr.getvalue())
|
||||
|
||||
def test_shared_ciphers(self):
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- client_context.verify_mode = ssl.CERT_REQUIRED
|
||||
- client_context.load_verify_locations(SIGNING_CA)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
if ssl.OPENSSL_VERSION_INFO >= (1, 0, 2):
|
||||
client_context.set_ciphers("AES128:AES256")
|
||||
server_context.set_ciphers("AES256")
|
||||
@@ -3555,7 +3545,8 @@ if _have_threads:
|
||||
# TLS 1.3 ciphers are always enabled
|
||||
expected_algs.extend(["TLS_CHACHA20", "TLS_AES"])
|
||||
|
||||
- stats = server_params_test(client_context, server_context)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ sni_name=hostname)
|
||||
ciphers = stats['server_shared_ciphers'][0]
|
||||
self.assertGreater(len(ciphers), 0)
|
||||
for name, tls_version, bits in ciphers:
|
||||
@@ -3595,14 +3586,13 @@ if _have_threads:
|
||||
self.assertEqual(s.recv(1024), TEST_DATA)
|
||||
|
||||
def test_session(self):
|
||||
- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- server_context.load_cert_chain(SIGNED_CERTFILE)
|
||||
- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
|
||||
- client_context.verify_mode = ssl.CERT_REQUIRED
|
||||
- client_context.load_verify_locations(SIGNING_CA)
|
||||
+ client_context, server_context, hostname = testing_context()
|
||||
+ # TODO: sessions aren't compatible with TLSv1.3 yet
|
||||
+ client_context.options |= ssl.OP_NO_TLSv1_3
|
||||
|
||||
# first connection without session
|
||||
- stats = server_params_test(client_context, server_context)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ sni_name=hostname)
|
||||
session = stats['session']
|
||||
self.assertTrue(session.id)
|
||||
self.assertGreater(session.time, 0)
|
||||
@@ -3616,7 +3606,8 @@ if _have_threads:
|
||||
self.assertEqual(sess_stat['hits'], 0)
|
||||
|
||||
# reuse session
|
||||
- stats = server_params_test(client_context, server_context, session=session)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ session=session, sni_name=hostname)
|
||||
sess_stat = server_context.session_stats()
|
||||
self.assertEqual(sess_stat['accept'], 2)
|
||||
self.assertEqual(sess_stat['hits'], 1)
|
||||
@@ -3629,7 +3620,8 @@ if _have_threads:
|
||||
self.assertGreaterEqual(session2.timeout, session.timeout)
|
||||
|
||||
# another one without session
|
||||
- stats = server_params_test(client_context, server_context)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ sni_name=hostname)
|
||||
self.assertFalse(stats['session_reused'])
|
||||
session3 = stats['session']
|
||||
self.assertNotEqual(session3.id, session.id)
|
||||
@@ -3639,7 +3631,8 @@ if _have_threads:
|
||||
self.assertEqual(sess_stat['hits'], 1)
|
||||
|
||||
# reuse session again
|
||||
- stats = server_params_test(client_context, server_context, session=session)
|
||||
+ stats = server_params_test(client_context, server_context,
|
||||
+ session=session, sni_name=hostname)
|
||||
self.assertTrue(stats['session_reused'])
|
||||
session4 = stats['session']
|
||||
self.assertEqual(session4.id, session.id)
|
||||
--
|
||||
2.21.0
|
||||
|
||||
|
||||
From 743c3e09b485092b51a982ab9859ffc79cbb7791 Mon Sep 17 00:00:00 2001
From: Charalampos Stratakis <cstratak@redhat.com>
Date: Wed, 27 Nov 2019 00:01:17 +0100
Subject: [PATCH 4/5] Adjust some tests for TLS 1.3 compatibility

Partially backports some changes from 529525fb5a8fd9b96ab4021311a598c77588b918
and 2614ed4c6e4b32eafb683f2378ed20e87d42976d
---
 Lib/test/test_ssl.py | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index a7bf2f7..43c2dbc 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -3189,7 +3189,12 @@ if _have_threads:
                 with context.wrap_socket(socket.socket()) as s:
                     self.assertIs(s.version(), None)
                     s.connect((HOST, server.port))
-                    self.assertEqual(s.version(), 'TLSv1')
+                    if IS_OPENSSL_1_1:
+                        self.assertEqual(s.version(), 'TLSv1.3')
+                    elif ssl.OPENSSL_VERSION_INFO >= (1, 0, 2):
+                        self.assertEqual(s.version(), 'TLSv1.2')
+                    else:  # 0.9.8 to 1.0.1
+                        self.assertIn(s.version(), ('TLSv1', 'TLSv1.2'))
                 self.assertIs(s.version(), None)

         @unittest.skipUnless(ssl.HAS_TLSv1_3,
@@ -3259,7 +3264,10 @@ if _have_threads:

             # check if it is sane
             self.assertIsNotNone(cb_data)
-            self.assertEqual(len(cb_data), 12)  # True for TLSv1
+            if s.version() == 'TLSv1.3':
+                self.assertEqual(len(cb_data), 48)
+            else:
+                self.assertEqual(len(cb_data), 12)  # True for TLSv1

             # and compare with the peers version
             s.write(b"CB tls-unique\n")
@@ -3283,7 +3291,10 @@ if _have_threads:
             # is it really unique
             self.assertNotEqual(cb_data, new_cb_data)
             self.assertIsNotNone(cb_data)
-            self.assertEqual(len(cb_data), 12)  # True for TLSv1
+            if s.version() == 'TLSv1.3':
+                self.assertEqual(len(cb_data), 48)
+            else:
+                self.assertEqual(len(cb_data), 12)  # True for TLSv1
             s.write(b"CB tls-unique\n")
             peer_data_repr = s.read().strip()
             self.assertEqual(peer_data_repr,
--
2.21.0
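The expectation encoded in the channel-binding hunks above, restated as a tiny standalone check (illustrative only): the "tls-unique" binding observed by the tests is 12 bytes for TLS 1.2 and earlier, while 48 bytes are accepted when TLS 1.3 is negotiated.

def expected_cb_len(tls_version):
    return 48 if tls_version == 'TLSv1.3' else 12

assert expected_cb_len('TLSv1.2') == 12
assert expected_cb_len('TLSv1.3') == 48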
|
||||
From cd250c8a782f36c7a6f5ffabc922cb75744fa9c0 Mon Sep 17 00:00:00 2001
From: Charalampos Stratakis <cstratak@redhat.com>
Date: Tue, 26 Nov 2019 23:18:10 +0100
Subject: [PATCH 5/5] Skip the ssl tests that rely on TLSv1 and TLSv1.1
 availability

---
 Lib/test/test_ssl.py | 32 +++++++++++++++++++++++---------
 1 file changed, 23 insertions(+), 9 deletions(-)

diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 43c2dbc..b35db25 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -39,6 +39,13 @@ IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL')
IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0)
PY_SSL_DEFAULT_CIPHERS = sysconfig.get_config_var('PY_SSL_DEFAULT_CIPHERS')

+# On RHEL8 openssl disables TLSv1 and TLSv1.1 on runtime.
+# Since we don't have a good way to detect runtime changes
+# on the allowed protocols, we hardcode the default config
+# with those flags.
+TLSv1_enabled = False
+TLSv1_1_enabled = False
+
def data_file(*name):
return os.path.join(os.path.dirname(__file__), *name)

@@ -2380,7 +2387,8 @@ if _have_threads:
if support.verbose:
sys.stdout.write("\n")
for protocol in PROTOCOLS:
- if protocol in {ssl.PROTOCOL_TLS_CLIENT, ssl.PROTOCOL_TLS_SERVER}:
+ if protocol in {ssl.PROTOCOL_TLS_CLIENT, ssl.PROTOCOL_TLS_SERVER,
+ ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1}:
continue
with self.subTest(protocol=ssl._PROTOCOL_NAMES[protocol]):
context = ssl.SSLContext(protocol)
@@ -2650,17 +2658,20 @@ if _have_threads:
if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True)
- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1')
+ if TLSv1_enabled:
+ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1')

if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_OPTIONAL)
- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
+ if TLSv1_enabled:
+ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)

if hasattr(ssl, 'PROTOCOL_SSLv3'):
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED)
- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
+ if TLSv1_enabled:
+ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)

# Server with specific SSL options
if hasattr(ssl, 'PROTOCOL_SSLv3'):
@@ -2698,9 +2709,10 @@ if _have_threads:
"""Connecting to a TLSv1 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1')
- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
+ if TLSv1_enabled:
+ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1')
+ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
+ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
if hasattr(ssl, 'PROTOCOL_SSLv3'):
@@ -2716,7 +2728,8 @@ if _have_threads:
Testing against older TLS versions."""
if support.verbose:
sys.stdout.write("\n")
- try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
+ if TLSv1_1_enabled:
+ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv2, False)
if hasattr(ssl, 'PROTOCOL_SSLv3'):
@@ -2724,7 +2737,8 @@ if _have_threads:
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_TLSv1_1)

- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
+ if TLSv1_1_enabled:
+ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1, False)

--
2.21.0

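Since the patch hardcodes TLSv1_enabled and TLSv1_1_enabled to False (there is no reliable runtime probe of the crypto policy), a test run that does want to exercise the legacy protocol combinations has to flip those module constants itself. A hedged sketch of an opt-in via environment variables (variable names are invented for illustration and are not part of the patch):

import os

# Hypothetical override for machines whose OpenSSL policy still permits the
# legacy protocols; the patch itself simply leaves both flags False.
TLSv1_enabled = os.environ.get("PY_TEST_ENABLE_TLSv1", "0") == "1"
TLSv1_1_enabled = os.environ.get("PY_TEST_ENABLE_TLSv1_1", "0") == "1"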
@@ -1,44 +0,0 @@
bpo-32947: test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1

Backport partially commit 529525fb5a8fd9b96ab4021311a598c77588b918:
complete the previous partial backport (commit
2a4ee8aa01d61b6a9c8e9c65c211e61bdb471826.

Reported upstream:

* https://bugs.python.org/issue32947#msg333990
* https://github.com/python/cpython/pull/11612

diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 7f8f636..05c09a6 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -2021,6 +2021,16 @@ if _have_threads:
sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n"
% (msg, ctype, msg.lower(), ctype))
self.write(msg.lower())
+ except ConnectionResetError:
+ # XXX: OpenSSL 1.1.1 sometimes raises ConnectionResetError
+ # when connection is not shut down gracefully.
+ if self.server.chatty and support.verbose:
+ sys.stdout.write(
+ " Connection reset by peer: {}\n".format(
+ self.addr)
+ )
+ self.close()
+ self.running = False
except OSError:
if self.server.chatty:
handle_error("Test server failure:\n")
@@ -2100,6 +2110,11 @@ if _have_threads:
pass
except KeyboardInterrupt:
self.stop()
+ except BaseException as e:
+ if support.verbose and self.chatty:
+ sys.stdout.write(
+ ' connection handling failed: ' + repr(e) + '\n')
+
self.sock.close()

def stop(self):
SOURCES/00326-do-not-set-PHA-verify-flag-on-client-side.patch (new file, 117 lines)
@@ -0,0 +1,117 @@
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 883201f..cf4d84d 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -3891,6 +3891,37 @@ class TestPostHandshakeAuth(unittest.TestCase):
s.write(b'PHA')
self.assertIn(b'WRONG_SSL_VERSION', s.recv(1024))

+ def test_bpo37428_pha_cert_none(self):
+ # verify that post_handshake_auth does not implicitly enable cert
+ # validation.
+ hostname = 'localhost'
+ client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ client_context.post_handshake_auth = True
+ client_context.load_cert_chain(SIGNED_CERTFILE)
+ # no cert validation and CA on client side
+ client_context.check_hostname = False
+ client_context.verify_mode = ssl.CERT_NONE
+
+ server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ server_context.load_cert_chain(SIGNED_CERTFILE)
+ server_context.load_verify_locations(SIGNING_CA)
+ server_context.post_handshake_auth = True
+ server_context.verify_mode = ssl.CERT_REQUIRED
+
+ server = ThreadedEchoServer(context=server_context, chatty=False)
+ with server:
+ with client_context.wrap_socket(socket.socket(),
+ server_hostname=hostname) as s:
+ s.connect((HOST, server.port))
+ s.write(b'HASCERT')
+ self.assertEqual(s.recv(1024), b'FALSE\n')
+ s.write(b'PHA')
+ self.assertEqual(s.recv(1024), b'OK\n')
+ s.write(b'HASCERT')
+ self.assertEqual(s.recv(1024), b'TRUE\n')
+ # server cert has not been validated
+ self.assertEqual(s.getpeercert(), {})
+

def test_main(verbose=False):
if support.verbose:
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index ec366f0..9bf1cde 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -732,6 +732,26 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock,
#endif
SSL_set_mode(self->ssl, mode);

+#ifdef TLS1_3_VERSION
+ if (sslctx->post_handshake_auth == 1) {
+ if (socket_type == PY_SSL_SERVER) {
+ /* bpo-37428: OpenSSL does not ignore SSL_VERIFY_POST_HANDSHAKE.
+ * Set SSL_VERIFY_POST_HANDSHAKE flag only for server sockets and
+ * only in combination with SSL_VERIFY_PEER flag. */
+ int mode = SSL_get_verify_mode(self->ssl);
+ if (mode & SSL_VERIFY_PEER) {
+ int (*verify_cb)(int, X509_STORE_CTX *) = NULL;
+ verify_cb = SSL_get_verify_callback(self->ssl);
+ mode |= SSL_VERIFY_POST_HANDSHAKE;
+ SSL_set_verify(self->ssl, mode, verify_cb);
+ }
+ } else {
+ /* client socket */
+ SSL_set_post_handshake_auth(self->ssl, 1);
+ }
+ }
+#endif
+
#if HAVE_SNI
if (server_hostname != NULL) {
/* Don't send SNI for IP addresses. We cannot simply use inet_aton() and
@@ -2765,10 +2785,10 @@ _set_verify_mode(PySSLContext *self, enum py_ssl_cert_requirements n)
"invalid value for verify_mode");
return -1;
}
-#ifdef TLS1_3_VERSION
- if (self->post_handshake_auth)
- mode |= SSL_VERIFY_POST_HANDSHAKE;
-#endif
+
+ /* bpo-37428: newPySSLSocket() sets SSL_VERIFY_POST_HANDSHAKE flag for
+ * server sockets and SSL_set_post_handshake_auth() for client. */
+
/* keep current verify cb */
verify_cb = SSL_CTX_get_verify_callback(self->ctx);
SSL_CTX_set_verify(self->ctx, mode, verify_cb);
@@ -3346,8 +3366,6 @@ get_post_handshake_auth(PySSLContext *self, void *c) {
#if TLS1_3_VERSION
static int
set_post_handshake_auth(PySSLContext *self, PyObject *arg, void *c) {
- int (*verify_cb)(int, X509_STORE_CTX *) = NULL;
- int mode = SSL_CTX_get_verify_mode(self->ctx);
int pha = PyObject_IsTrue(arg);

if (pha == -1) {
@@ -3355,17 +3373,8 @@ set_post_handshake_auth(PySSLContext *self, PyObject *arg, void *c) {
}
self->post_handshake_auth = pha;

- /* client-side socket setting, ignored by server-side */
- SSL_CTX_set_post_handshake_auth(self->ctx, pha);
-
- /* server-side socket setting, ignored by client-side */
- verify_cb = SSL_CTX_get_verify_callback(self->ctx);
- if (pha) {
- mode |= SSL_VERIFY_POST_HANDSHAKE;
- } else {
- mode ^= SSL_VERIFY_POST_HANDSHAKE;
- }
- SSL_CTX_set_verify(self->ctx, mode, verify_cb);
+ /* bpo-37428: newPySSLSocket() sets SSL_VERIFY_POST_HANDSHAKE flag for
+ * server sockets and SSL_set_post_handshake_auth() for client. */

return 0;
}
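A rough client-side illustration of the behaviour this patch (and the test above) pins down: with the fix, enabling post_handshake_auth on a client context no longer drags in SSL_VERIFY_POST_HANDSHAKE, so a CERT_NONE client can still complete the handshake and merely answer a later PHA request. Host, port and the certificate path below are placeholders:

import socket
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE          # client does not validate the server cert
ctx.post_handshake_auth = True           # but is willing to answer a PHA request
ctx.load_cert_chain("client-cert.pem")   # placeholder client certificate + key

with ctx.wrap_socket(socket.socket(), server_hostname="server.test") as s:
    s.connect(("server.test", 8443))     # placeholder PHA-enabled TLS 1.3 server
    # Before the fix this handshake could fail with a certificate-verify error
    # even though verify_mode is CERT_NONE; with it, the connection succeeds.
    print(s.version())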
SOURCES/00327-enable-tls-1.3-PHA-in-http.client.patch (new file, 70 lines)
@@ -0,0 +1,70 @@
diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst
index 2f59ece..d756916 100644
--- a/Doc/library/http.client.rst
+++ b/Doc/library/http.client.rst
@@ -88,6 +88,11 @@ The module provides the following classes:
:func:`ssl._create_unverified_context` can be passed to the *context*
parameter.

+ .. versionchanged:: 3.7.4
+ This class now enables TLS 1.3
+ :attr:`ssl.SSLContext.post_handshake_auth` for the default *context* or
+ when *cert_file* is passed with a custom *context*.
+
.. deprecated:: 3.6

*key_file* and *cert_file* are deprecated in favor of *context*.
diff --git a/Lib/http/client.py b/Lib/http/client.py
index 1a6bd8a..f0d2642 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -1390,6 +1390,9 @@ else:
self.cert_file = cert_file
if context is None:
context = ssl._create_default_https_context()
+ # enable PHA for TLS 1.3 connections if available
+ if context.post_handshake_auth is not None:
+ context.post_handshake_auth = True
will_verify = context.verify_mode != ssl.CERT_NONE
if check_hostname is None:
check_hostname = context.check_hostname
@@ -1398,6 +1401,10 @@ else:
"either CERT_OPTIONAL or CERT_REQUIRED")
if key_file or cert_file:
context.load_cert_chain(cert_file, key_file)
+ # cert and key file means the user wants to authenticate.
+ # enable TLS 1.3 PHA implicitly even for custom contexts.
+ if context.post_handshake_auth is not None:
+ context.post_handshake_auth = True
self._context = context
self._check_hostname = check_hostname

diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index 714d521..5795b7a 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -1709,6 +1709,24 @@ class HTTPSTest(TestCase):
self.assertEqual(h, c.host)
self.assertEqual(p, c.port)

+ def test_tls13_pha(self):
+ import ssl
+ if not ssl.HAS_TLSv1_3:
+ self.skipTest('TLS 1.3 support required')
+ # just check status of PHA flag
+ h = client.HTTPSConnection('localhost', 443)
+ self.assertTrue(h._context.post_handshake_auth)
+
+ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ self.assertFalse(context.post_handshake_auth)
+ h = client.HTTPSConnection('localhost', 443, context=context)
+ self.assertIs(h._context, context)
+ self.assertFalse(h._context.post_handshake_auth)
+
+ h = client.HTTPSConnection('localhost', 443, context=context,
+ cert_file=CERT_localhost)
+ self.assertTrue(h._context.post_handshake_auth)
+

class RequestBodyTest(TestCase):
"""Test cases where a request includes a message body."""
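Observable effect of the http.client change, mirroring test_tls13_pha above (run against a patched 3.6 build with OpenSSL 1.1.1; _context is the connection's private SSL context, accessed the same way the test does):

import http.client
import ssl

conn = http.client.HTTPSConnection("localhost", 443)   # default context
print(conn._context.post_handshake_auth)                # True on a patched build

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
conn = http.client.HTTPSConnection("localhost", 443, context=ctx)
print(conn._context.post_handshake_auth)                # False: custom context left alone
# Passing cert_file (or key_file) together with the custom context flips it to
# True, since a client certificate implies the caller wants to authenticate.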
SOURCES/00329-fips.patch (new file, 5571 lines; file diff suppressed because it is too large)
SOURCES/00330-CVE-2018-20852.patch (new file, 93 lines)
@@ -0,0 +1,93 @@
diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py
index adf956d..97599d4 100644
--- a/Lib/http/cookiejar.py
+++ b/Lib/http/cookiejar.py
@@ -1148,6 +1148,11 @@ class DefaultCookiePolicy(CookiePolicy):
req_host, erhn = eff_request_host(request)
domain = cookie.domain

+ if domain and not domain.startswith("."):
+ dotdomain = "." + domain
+ else:
+ dotdomain = domain
+
# strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't
if (cookie.version == 0 and
(self.strict_ns_domain & self.DomainStrictNonDomain) and
@@ -1160,7 +1165,7 @@ class DefaultCookiePolicy(CookiePolicy):
_debug(" effective request-host name %s does not domain-match "
"RFC 2965 cookie domain %s", erhn, domain)
return False
- if cookie.version == 0 and not ("."+erhn).endswith(domain):
+ if cookie.version == 0 and not ("."+erhn).endswith(dotdomain):
_debug(" request-host %s does not match Netscape cookie domain "
"%s", req_host, domain)
return False
@@ -1174,7 +1179,11 @@ class DefaultCookiePolicy(CookiePolicy):
req_host = "."+req_host
if not erhn.startswith("."):
erhn = "."+erhn
- if not (req_host.endswith(domain) or erhn.endswith(domain)):
+ if domain and not domain.startswith("."):
+ dotdomain = "." + domain
+ else:
+ dotdomain = domain
+ if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)):
#_debug(" request domain %s does not match cookie domain %s",
# req_host, domain)
return False
diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py
index abc625d..6e1b308 100644
--- a/Lib/test/test_http_cookiejar.py
+++ b/Lib/test/test_http_cookiejar.py
@@ -415,6 +415,7 @@ class CookieTests(unittest.TestCase):
("http://foo.bar.com/", ".foo.bar.com", True),
("http://foo.bar.com/", "foo.bar.com", True),
("http://foo.bar.com/", ".bar.com", True),
+ ("http://foo.bar.com/", "bar.com", True),
("http://foo.bar.com/", "com", True),
("http://foo.com/", "rhubarb.foo.com", False),
("http://foo.com/", ".foo.com", True),
@@ -425,6 +426,8 @@ class CookieTests(unittest.TestCase):
("http://foo/", "foo", True),
("http://foo/", "foo.local", True),
("http://foo/", ".local", True),
+ ("http://barfoo.com", ".foo.com", False),
+ ("http://barfoo.com", "foo.com", False),
]:
request = urllib.request.Request(url)
r = pol.domain_return_ok(domain, request)
@@ -959,6 +962,33 @@ class CookieTests(unittest.TestCase):
c.add_cookie_header(req)
self.assertFalse(req.has_header("Cookie"))

+ c.clear()
+
+ pol.set_blocked_domains([])
+ req = urllib.request.Request("http://acme.com/")
+ res = FakeResponse(headers, "http://acme.com/")
+ cookies = c.make_cookies(res, req)
+ c.extract_cookies(res, req)
+ self.assertEqual(len(c), 1)
+
+ req = urllib.request.Request("http://acme.com/")
+ c.add_cookie_header(req)
+ self.assertTrue(req.has_header("Cookie"))
+
+ req = urllib.request.Request("http://badacme.com/")
+ c.add_cookie_header(req)
+ self.assertFalse(pol.return_ok(cookies[0], req))
+ self.assertFalse(req.has_header("Cookie"))
+
+ p = pol.set_blocked_domains(["acme.com"])
+ req = urllib.request.Request("http://acme.com/")
+ c.add_cookie_header(req)
+ self.assertFalse(req.has_header("Cookie"))
+
+ req = urllib.request.Request("http://badacme.com/")
+ c.add_cookie_header(req)
+ self.assertFalse(req.has_header("Cookie"))
+
def test_secure(self):
for ns in True, False:
for whitespace in " ", "":
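The essence of the CVE-2018-20852 fix above is the dotdomain comparison: a request host may only tail-match a cookie domain on a dot boundary. A small standalone sketch of that check (hypothetical helper for illustration, not the actual http.cookiejar code):

def netscape_domain_match(request_host, cookie_domain):
    # Compare on a dot boundary, as the patched DefaultCookiePolicy does.
    if cookie_domain and not cookie_domain.startswith("."):
        dotdomain = "." + cookie_domain
    else:
        dotdomain = cookie_domain
    return request_host == cookie_domain or ("." + request_host).endswith(dotdomain)

print(netscape_domain_match("foo.bar.com", "bar.com"))   # True
print(netscape_domain_match("barfoo.com", "foo.com"))    # False (was True before the fix)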
SOURCES/00332-CVE-2019-16056.patch (new file, 95 lines)
@@ -0,0 +1,95 @@
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index 737951e4b1b1..bc9c9b6241d4 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -1561,6 +1561,8 @@ def get_domain(value):
token, value = get_dot_atom(value)
except errors.HeaderParseError:
token, value = get_atom(value)
+ if value and value[0] == '@':
+ raise errors.HeaderParseError('Invalid Domain')
if leader is not None:
token[:0] = [leader]
domain.append(token)
diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py
index cdfa3729adc7..41ff6f8c000d 100644
--- a/Lib/email/_parseaddr.py
+++ b/Lib/email/_parseaddr.py
@@ -379,7 +379,12 @@ def getaddrspec(self):
aslist.append('@')
self.pos += 1
self.gotonext()
- return EMPTYSTRING.join(aslist) + self.getdomain()
+ domain = self.getdomain()
+ if not domain:
+ # Invalid domain, return an empty address instead of returning a
+ # local part to denote failed parsing.
+ return EMPTYSTRING
+ return EMPTYSTRING.join(aslist) + domain

def getdomain(self):
"""Get the complete domain name from an address."""
@@ -394,6 +399,10 @@ def getdomain(self):
elif self.field[self.pos] == '.':
self.pos += 1
sdlist.append('.')
+ elif self.field[self.pos] == '@':
+ # bpo-34155: Don't parse domains with two `@` like
+ # `a@malicious.org@important.com`.
+ return EMPTYSTRING
elif self.field[self.pos] in self.atomends:
break
else:
diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py
index a2c900fa7fd2..02ef3e1006c6 100644
--- a/Lib/test/test_email/test__header_value_parser.py
+++ b/Lib/test/test_email/test__header_value_parser.py
@@ -1418,6 +1418,16 @@ def test_get_addr_spec_dot_atom(self):
self.assertEqual(addr_spec.domain, 'example.com')
self.assertEqual(addr_spec.addr_spec, 'star.a.star@example.com')

+ def test_get_addr_spec_multiple_domains(self):
+ with self.assertRaises(errors.HeaderParseError):
+ parser.get_addr_spec('star@a.star@example.com')
+
+ with self.assertRaises(errors.HeaderParseError):
+ parser.get_addr_spec('star@a@example.com')
+
+ with self.assertRaises(errors.HeaderParseError):
+ parser.get_addr_spec('star@172.17.0.1@example.com')
+
# get_obs_route

def test_get_obs_route_simple(self):
diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py
index f97ccc6711cc..68d052279987 100644
--- a/Lib/test/test_email/test_email.py
+++ b/Lib/test/test_email/test_email.py
@@ -3035,6 +3035,20 @@ def test_parseaddr_empty(self):
self.assertEqual(utils.parseaddr('<>'), ('', ''))
self.assertEqual(utils.formataddr(utils.parseaddr('<>')), '')

+ def test_parseaddr_multiple_domains(self):
+ self.assertEqual(
+ utils.parseaddr('a@b@c'),
+ ('', '')
+ )
+ self.assertEqual(
+ utils.parseaddr('a@b.c@c'),
+ ('', '')
+ )
+ self.assertEqual(
+ utils.parseaddr('a@172.17.0.1@c'),
+ ('', '')
+ )
+
def test_noquote_dump(self):
self.assertEqual(
utils.formataddr(('A Silly Person', 'person@dom.ain')),
diff --git a/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst
new file mode 100644
index 000000000000..50292e29ed1d
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst
@@ -0,0 +1 @@
+Fix parsing of invalid email addresses with more than one ``@`` (e.g. a@b@c.com.) to not return the part before 2nd ``@`` as valid email address. Patch by maxking & jpic.
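Behaviour of the patched parser in a nutshell (run against an interpreter carrying this fix): an address with more than one '@' no longer yields the part before the second '@' as a seemingly valid address.

from email.utils import parseaddr

print(parseaddr('a@b@c'))                          # ('', '')
print(parseaddr('a@malicious.org@important.com'))  # ('', '') instead of ('', 'a@malicious.org')
print(parseaddr('user@example.com'))               # ('', 'user@example.com') as before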
SOURCES/00333-reduce-pgo-tests.patch (new file, 296 lines)
@@ -0,0 +1,296 @@
diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py
index 538ff05..e7f2013 100644
--- a/Lib/test/libregrtest/cmdline.py
+++ b/Lib/test/libregrtest/cmdline.py
@@ -263,7 +263,9 @@ def _create_parser():
help='only write the name of test cases that will be run'
' , don\'t execute them')
group.add_argument('-P', '--pgo', dest='pgo', action='store_true',
- help='enable Profile Guided Optimization training')
+ help='enable Profile Guided Optimization (PGO) training')
+ group.add_argument('--pgo-extended', action='store_true',
+ help='enable extended PGO training (slower training)')
group.add_argument('--fail-env-changed', action='store_true',
help='if a test file alters the environment, mark '
'the test as failed')
@@ -339,6 +341,8 @@ def _parse_args(args, **kwargs):
parser.error("-G/--failfast needs either -v or -W")
if ns.pgo and (ns.verbose or ns.verbose2 or ns.verbose3):
parser.error("--pgo/-v don't go together!")
+ if ns.pgo_extended:
+ ns.pgo = True # pgo_extended implies pgo

if ns.nowindows:
print("Warning: the --nowindows (-n) option is deprecated. "
diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py
index b6d05f6..524dbfa 100644
--- a/Lib/test/libregrtest/main.py
+++ b/Lib/test/libregrtest/main.py
@@ -17,6 +17,7 @@ from test.libregrtest.runtest import (
INTERRUPTED, CHILD_ERROR, TEST_DID_NOT_RUN,
PROGRESS_MIN_TIME, format_test_result)
from test.libregrtest.setup import setup_tests
+from test.libregrtest.pgo import setup_pgo_tests
from test.libregrtest.utils import removepy, count, format_duration, printlist
from test import support
try:
@@ -214,6 +215,10 @@ class Regrtest:

removepy(self.tests)

+ if self.ns.pgo:
+ # add default PGO tests if no tests are specified
+ setup_pgo_tests(self.ns)
+
stdtests = STDTESTS[:]
nottests = NOTTESTS.copy()
if self.ns.exclude:
@@ -601,6 +606,7 @@ class Regrtest:
input("Press any key to continue...")

support.PGO = self.ns.pgo
+ support.PGO_EXTENDED = self.ns.pgo_extended

setup_tests(self.ns)

diff --git a/Lib/test/libregrtest/pgo.py b/Lib/test/libregrtest/pgo.py
new file mode 100644
index 0000000..379ff05
--- /dev/null
+++ b/Lib/test/libregrtest/pgo.py
@@ -0,0 +1,55 @@
+# Set of tests run by default if --pgo is specified. The tests below were
+# chosen based on the following criteria: either they exercise a commonly used
+# C extension module or type, or they run some relatively typical Python code.
+# Long running tests should be avoided because the PGO instrumented executable
+# runs slowly.
+PGO_TESTS = [
+ 'test_array',
+ 'test_base64',
+ 'test_binascii',
+ 'test_binop',
+ 'test_bisect',
+ 'test_bytes',
+ 'test_bz2',
+ 'test_cmath',
+ 'test_codecs',
+ 'test_collections',
+ 'test_complex',
+ 'test_dataclasses',
+ 'test_datetime',
+ 'test_decimal',
+ 'test_difflib',
+ 'test_embed',
+ 'test_float',
+ 'test_fstring',
+ 'test_functools',
+ 'test_generators',
+ 'test_hashlib',
+ 'test_heapq',
+ 'test_int',
+ 'test_itertools',
+ 'test_json',
+ 'test_long',
+ 'test_lzma',
+ 'test_math',
+ 'test_memoryview',
+ 'test_operator',
+ 'test_ordered_dict',
+ 'test_pickle',
+ 'test_pprint',
+ 'test_re',
+ 'test_set',
+ 'test_sqlite',
+ 'test_statistics',
+ 'test_struct',
+ 'test_tabnanny',
+ 'test_time',
+ 'test_unicode',
+ 'test_xml_etree',
+ 'test_xml_etree_c',
+]
+
+def setup_pgo_tests(ns):
+ if not ns.args and not ns.pgo_extended:
+ # run default set of tests for PGO training
+ ns.args = PGO_TESTS[:]
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index 764057a..468ee46 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -2039,6 +2039,7 @@ class AbstractPickleTests(unittest.TestCase):

FRAME_SIZE_TARGET = 64 * 1024

+ @support.skip_if_pgo_task
def check_frame_opcodes(self, pickled):
"""
Check the arguments of FRAME opcodes in a protocol 4+ pickle.
@@ -2059,6 +2060,7 @@ class AbstractPickleTests(unittest.TestCase):
frame_size = len(pickled) - last_pos - frame_opcode_size
self.assertEqual(frame_size, last_arg)

+ @support.skip_if_pgo_task
def test_framing_many_objects(self):
obj = list(range(10**5))
for proto in range(4, pickle.HIGHEST_PROTOCOL + 1):
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index 66c0fed..e80a819 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -953,6 +953,10 @@ SAVEDCWD = os.getcwd()
# useful for PGO
PGO = False

+# Set by libregrtest/main.py if we are running the extended (time consuming)
+# PGO task. If this is True, PGO is also True.
+PGO_EXTENDED = False
+
@contextlib.contextmanager
def temp_dir(path=None, quiet=False):
"""Return a context manager that creates a temporary directory.
@@ -2442,6 +2446,11 @@ def skip_unless_xattr(test):
msg = "no non-broken extended attribute support"
return test if ok else unittest.skip(msg)(test)

+def skip_if_pgo_task(test):
+ """Skip decorator for tests not run in (non-extended) PGO task"""
+ ok = not PGO or PGO_EXTENDED
+ msg = "Not run for (non-extended) PGO task"
+ return test if ok else unittest.skip(msg)(test)

def fs_is_case_insensitive(directory):
"""Detects if the file system for the specified directory is case-insensitive."""
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index f340f23..ebb151c 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -654,6 +654,7 @@ class BZ2CompressorTest(BaseTest):
data += bz2c.flush()
self.assertEqual(ext_decompress(data), self.TEXT)

+ @support.skip_if_pgo_task
@bigmemtest(size=_4G + 100, memuse=2)
def testCompress4G(self, size):
# "Test BZ2Compressor.compress()/flush() with >4GiB input"
@@ -712,6 +713,7 @@ class BZ2DecompressorTest(BaseTest):
self.assertRaises(EOFError, bz2d.decompress, b"anything")
self.assertRaises(EOFError, bz2d.decompress, b"")

+ @support.skip_if_pgo_task
@bigmemtest(size=_4G + 100, memuse=3.3)
def testDecompress4G(self, size):
# "Test BZ2Decompressor.decompress() with >4GiB input"
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
index 9317951..8c1d016 100644
--- a/Lib/test/test_itertools.py
+++ b/Lib/test/test_itertools.py
@@ -2023,6 +2023,7 @@ class RegressionTests(unittest.TestCase):
self.assertRaises(AssertionError, list, cycle(gen1()))
self.assertEqual(hist, [0,1])

+ @support.skip_if_pgo_task
def test_long_chain_of_empty_iterables(self):
# Make sure itertools.chain doesn't run into recursion limits when
# dealing with long chains of empty iterables. Even with a high
diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py
index 3dc2c1e..117de0a 100644
--- a/Lib/test/test_lzma.py
+++ b/Lib/test/test_lzma.py
@@ -333,6 +333,7 @@ class CompressorDecompressorTestCase(unittest.TestCase):

# Test with inputs larger than 4GiB.

+ @support.skip_if_pgo_task
@bigmemtest(size=_4G + 100, memuse=2)
def test_compressor_bigmem(self, size):
lzc = LZMACompressor()
@@ -344,6 +345,7 @@ class CompressorDecompressorTestCase(unittest.TestCase):
finally:
ddata = None

+ @support.skip_if_pgo_task
@bigmemtest(size=_4G + 100, memuse=3)
def test_decompressor_bigmem(self, size):
lzd = LZMADecompressor()
diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py
index 5347bb1..9d83217 100644
--- a/Lib/test/test_regrtest.py
+++ b/Lib/test/test_regrtest.py
@@ -6,6 +6,7 @@ Note: test_regrtest cannot be run twice in parallel.

import contextlib
import faulthandler
+import glob
import io
import os.path
import platform
@@ -532,6 +533,31 @@ class BaseTestCase(unittest.TestCase):
return proc.stdout


+class CheckActualTests(BaseTestCase):
+ """
+ Check that regrtest appears to find the expected set of tests.
+ """
+
+ def test_finds_expected_number_of_tests(self):
+ args = ['-Wd', '-E', '-bb', '-m', 'test.regrtest', '--list-tests']
+ output = self.run_python(args)
+ rough_number_of_tests_found = len(output.splitlines())
+ actual_testsuite_glob = os.path.join(os.path.dirname(__file__),
+ 'test*.py')
+ rough_counted_test_py_files = len(glob.glob(actual_testsuite_glob))
+ # We're not trying to duplicate test finding logic in here,
+ # just give a rough estimate of how many there should be and
+ # be near that. This is a regression test to prevent mishaps
+ # such as https://bugs.python.org/issue37667 in the future.
+ # If you need to change the values in here during some
+ # mythical future test suite reorganization, don't go
+ # overboard with logic and keep that goal in mind.
+ self.assertGreater(rough_number_of_tests_found,
+ rough_counted_test_py_files*9//10,
+ msg='Unexpectedly low number of tests found in:\n'
+ f'{", ".join(output.splitlines())}')
+
+
class ProgramsTestCase(BaseTestCase):
"""
Test various ways to run the Python test suite. Use options close
diff --git a/Makefile.pre.in b/Makefile.pre.in
index b452289..cc428ac 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -247,9 +247,10 @@ TCLTK_INCLUDES= @TCLTK_INCLUDES@
TCLTK_LIBS= @TCLTK_LIBS@

# The task to run while instrumented when building the profile-opt target.
-# We exclude unittests with -x that take a rediculious amount of time to
-# run in the instrumented training build or do not provide much value.
-PROFILE_TASK=-m test.regrtest --pgo
+# To speed up profile generation, we don't run the full unit test suite
+# by default. The default is "-m test --pgo". To run more tests, use
+# PROFILE_TASK="-m test --pgo-extended"
+PROFILE_TASK= @PROFILE_TASK@

# report files for gcov / lcov coverage report
COVERAGE_INFO= $(abs_builddir)/coverage.info
diff --git a/configure.ac b/configure.ac
index c071ec3..816fc5a 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1308,6 +1308,14 @@ else
DEF_MAKE_RULE="all"
fi

+AC_ARG_VAR(PROFILE_TASK, Python args for PGO generation task)
+AC_MSG_CHECKING(PROFILE_TASK)
+if test -z "$PROFILE_TASK"
+then
+ PROFILE_TASK='-m test --pgo'
+fi
+AC_MSG_RESULT($PROFILE_TASK)
+
# Make llvm-relatec checks work on systems where llvm tools are not installed with their
# normal names in the default $PATH (ie: Ubuntu). They exist under the
# non-suffixed name in their versioned llvm directory.
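How the pieces above fit together, as a hedged sketch (module and attribute names are taken from the patch and only exist on a patched tree; FakeNamespace stands in for regrtest's parsed arguments): --pgo fills in the default PGO_TESTS selection unless tests were listed explicitly or --pgo-extended asked for the full suite, and support.skip_if_pgo_task drops the slow cases from the plain --pgo training run.

from test import support
from test.libregrtest.pgo import PGO_TESTS, setup_pgo_tests  # patched tree only

class FakeNamespace:
    args = []            # no explicit test list on the command line
    pgo = True
    pgo_extended = False

ns = FakeNamespace()
setup_pgo_tests(ns)      # populates ns.args with the default PGO test set
assert ns.args == PGO_TESTS

support.PGO = True       # what libregrtest/main.py sets for a --pgo run
support.PGO_EXTENDED = False

@support.skip_if_pgo_task
def test_decompress_4gib():
    """Skipped during plain --pgo training, run with --pgo-extended."""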
@@ -14,7 +14,7 @@ URL: https://www.python.org/
# WARNING When rebasing to a new Python version,
# remember to update the python3-docs package as well
Version: %{pybasever}.8
Release: 11%{?dist}
Release: 23%{?dist}
License: Python


@@ -25,21 +25,16 @@ License: Python
# Note that the bcond macros are named for the CLI option they create.
# "%%bcond_without" means "ENABLE by default and create a --without option"

# Whether to use RPM build wheels from the python-{pip,setuptools}-wheel package
# Uses upstream bundled prebuilt wheels otherwise
%bcond_without rpmwheels

# Expensive optimizations (mainly, profile-guided optimizations)
%ifarch %{ix86} x86_64
%bcond_without optimizations
%else
# On some architectures, the optimized build takes tens of hours, possibly
# longer than Koji's 24-hour timeout. Disable optimizations here.
%bcond_with optimizations
%endif

# Run the test suite in %%check
%bcond_without tests

# Ability to reuse RPM-installed pip using rewheel
%bcond_without rewheel

# Extra build for debugging the interpreter or C-API extensions
# (the -debug subpackages)
%bcond_without debug_build
@@ -209,9 +204,9 @@ BuildRequires: /usr/bin/dtrace
# workaround http://bugs.python.org/issue19804 (test_uuid requires ifconfig)
BuildRequires: /usr/sbin/ifconfig

%if %{with rewheel}
BuildRequires: python3-setuptools
BuildRequires: python3-pip
%if %{with rpmwheels}
BuildRequires: python3-setuptools-wheel
BuildRequires: python3-pip-wheel

# Verify that the BuildRoot includes python36.
# Not actually needed for build.
@@ -321,10 +316,9 @@ Patch170: 00170-gc-assertions.patch
Patch178: 00178-dont-duplicate-flags-in-sysconfig.patch

# 00189 #
# Add the rewheel module, allowing to recreate wheels from already installed
# ones
# https://github.com/bkabrda/rewheel
Patch189: 00189-add-rewheel-module.patch
# Instead of bundled wheels, use our RPM packaged wheels from
# /usr/share/python3-wheels
Patch189: 00189-use-rpm-wheels.patch

# 00205 #
# LIBPL variable in makefile takes LIBPL from configure.ac
@@ -357,6 +351,13 @@ Patch274: 00274-fix-arch-names.patch
# See also: https://bugzilla.redhat.com/show_bug.cgi?id=1489816
Patch294: 00294-define-TLS-cipher-suite-on-build-time.patch

# 00316 #
# We remove the exe files from distutil's bdist_wininst
# So we mark the command as unsupported - and the tests are skipped
# Fixed upstream and backported from the 3.7 branch: https://bugs.python.org/issue10945
# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1754040
Patch316: 00316-mark-bdist_wininst-unsupported.patch

# 00317 #
# Security fix for CVE-2019-5010: Fix segfault in ssl's cert parser
# https://bugzilla.redhat.com/show_bug.cgi?id=1666789
@@ -364,11 +365,29 @@ Patch294: 00294-define-TLS-cipher-suite-on-build-time.patch
Patch317: 00317-CVE-2019-5010.patch

# 00318 #
# test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1
# Various fixes for TLS 1.3 and OpenSSL 1.1.1
# https://bugzilla.redhat.com/show_bug.cgi?id=1639531

# test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1
# https://bugs.python.org/issue32947#msg333990
# https://github.com/python/cpython/pull/11612
Patch318: 00318-test-ssl-fix-for-tls-13.patch

# Encrypt private key test files with AES256
# https://bugs.python.org/issue38271
# https://github.com/python/cpython/pull/16396

# Prefer PROTOCOL_TLS_CLIENT/SERVER (partial backport)
# https://bugs.python.org/issue31346
# https://github.com/python/cpython/pull/3058

# Enable TLS 1.3 in tests (partial backport)
# https://bugs.python.org/issue33618
# https://github.com/python/cpython/pull/7082

# OpenSSL 1.1.1-pre1 / TLS 1.3 fixes (partial backport)
# https://bugs.python.org/issue32947
# https://github.com/python/cpython/pull/5923
Patch318: 00318-fixes-for-tls-13.patch

# 00319 #
# Fix test_tarfile on ppc64
@@ -377,10 +396,10 @@ Patch318: 00318-test-ssl-fix-for-tls-13.patch
Patch319: 00319-test_tarfile_ppc64.patch

# 00320 #
# Security fix for CVE-2019-9636: Information Disclosure due to urlsplit improper NFKC normalization
# Security fix for CVE-2019-9636 and CVE-2019-10160: Information Disclosure due to urlsplit improper NFKC normalization
# Fixed upstream: https://bugs.python.org/issue36216 and https://bugs.python.org/issue36742
# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1689318
Patch320: 00320-CVE-2019-9636.patch
Patch320: 00320-CVE-2019-9636-and-CVE-2019-10160.patch

# 00324 #
# Disallow control chars in http URLs
@@ -397,6 +416,78 @@ Patch324: 00324-disallow-control-chars-in-http-urls.patch
# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1714643
Patch325: 00325-CVE-2019-9948.patch

# 00326 #
# Don't set the post-handshake authentication verify flag on client side
# on TLS 1.3, as it also implicitly enables cert chain validation and an
# SSL/TLS connection will fail when verify mode is set to CERT_NONE.
# Fixed upstream: https://bugs.python.org/issue37428
# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1725721
Patch326: 00326-do-not-set-PHA-verify-flag-on-client-side.patch

# 00327 #
# Enable TLS 1.3 post-handshake authentication in http.client for default
# context or if a cert_file is passed to HTTPSConnection
# Fixed upstream: https://bugs.python.org/issue37440
# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1671353
Patch327: 00327-enable-tls-1.3-PHA-in-http.client.patch

# 00329 #
# Support OpenSSL FIPS mode
# - Fallback implementations md5, sha1, sha256, sha512 are removed in favor of OpenSSL wrappers
# - In FIPS mode, OpenSSL wrappers are always used in hashlib
# - add a new "usedforsecurity" keyword argument to the various digest
# algorithms in hashlib so that you can whitelist a callsite with
# "usedforsecurity=False"
# The change has been implemented upstream since Python 3.9:
# https://bugs.python.org/issue9216
# - OpenSSL wrappers for the hashes blake2{b512,s256},
# sha3_{224,256,384,512}, shake_{128,256} are now exported from _hashlib
# - In FIPS mode, the blake2, sha3 and shake hashes use OpenSSL wrappers
# and do not offer extended functionality (keys, tree hashing, custom digest size)
# - In FIPS mode, hmac.HMAC can only be instantiated with an OpenSSL wrapper
# or an string with OpenSSL hash name as the "digestmod" argument.
# The argument must be specified (instead of defaulting to ‘md5’).
#
# - Also while in FIPS mode, we utilize OpenSSL's DRBG and disable the
# os.getrandom() function.
#
# Upstream changes that have also been backported with this patch
# to allow tests to pass on stricter environments:
#
# Avoid MD5 or check for MD5 availablity
# https://bugs.python.org/issue38270
# https://github.com/python/cpython/pull/16393
# https://github.com/python/cpython/pull/16437
# https://github.com/python/cpython/pull/17446
#
# add usedforsecurity to hashlib constructors (partial backport for fixing a uuid test)
# https://github.com/python/cpython/pull/16044
# Resolves: rhbz#1731424
Patch329: 00329-fips.patch
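For illustration of the usedforsecurity keyword the FIPS patch above backports (the keyword exists upstream only from Python 3.9 on; an unpatched 3.6 does not accept it, so treat this as a sketch against the patched interpreter): marking a call site as not security-relevant keeps e.g. MD5 usable under FIPS mode.

import hashlib

# Allowed even in FIPS mode because the digest is declared non-security use
# (cache keys, deduplication checksums, and similar).
cache_key = hashlib.md5(b"some payload", usedforsecurity=False).hexdigest()
print(cache_key)

# Without the flag, constructing md5 fails when FIPS mode is enforced.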
# 00330 #
# Fix CVE-2018-20852: cookie domain check returning incorrect results
# Fixed upstream: https://bugs.python.org/issue35121
# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1741553
Patch330: 00330-CVE-2018-20852.patch

# 00332 #
# Fix CVE-2019-16056: Don't parse email addresses containing
# multiple '@' characters.
# Fixed upstream: https://bugs.python.org/issue34155
# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1750776
Patch332: 00332-CVE-2019-16056.patch

# 00333 #
# Reduce the number of tests run during the profile guided optimizations build,
# as running the whole test suite during profiling increases the build time
# substantially, with negligible performance gain.
# Fixed upstream and backported from the 3.8 branch:
# https://bugs.python.org/issue36044
# https://bugs.python.org/issue37667
# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1749576
Patch333: 00333-reduce-pgo-tests.patch

# (New patches go here ^^^)
#
# When adding new patches to "python" and "python3" in Fedora, EL, etc.,
@@ -422,9 +513,25 @@ Provides: python(abi) = %{pybasever}

Requires: %{name}-libs%{?_isa} = %{version}-%{release}

%if %{with rewheel}
%if %{with rpmwheels}

# RHEL8 was forked from F28 and thus required python3-setuptools here
# for the rewheel module to work. We've since backported the use of RPM
# prepared wheels from F29+ into RHEL8, and thus this dependency isn't
# strictly needed.
# However, it is possible, that some packages in BaseOS actually depend on
# setuptools without declaring the dependency in their spec file. Thus
# we're keeping this dependency here to avoid the possibility of breaking
# them.
Requires: platform-python-setuptools
Requires: platform-python-pip
# For python3-pip the Requires has been reduced to Recommends, as there are
# generally less packages that depend on pip than packages that depend on
# setuptools at runtime, and thus there's less chance of breakage.
# (rhbz#1756217).
Recommends: platform-python-pip

Requires: python3-setuptools-wheel
Requires: python3-pip-wheel
%endif

# Runtime require alternatives
@@ -473,6 +580,14 @@ Requires: chkconfig
Requires: gdbm%{?_isa} >= 1:1.13
%endif

%if %{with rpmwheels}
Requires: python3-setuptools-wheel
Requires: python3-pip-wheel
%else
Provides: bundled(python3-pip) = 18.1
Provides: bundled(python3-setuptools) = 40.6.2
%endif

# There are files in the standard library that have python shebang.
# We've filtered the automatic requirement out so libs are installable without
# the main package. This however makes it pulled in by default.
@@ -528,6 +643,15 @@ Requires: python-rpm-macros
Requires: python3-rpm-macros
Requires: python3-rpm-generators

# This is not "API" (packages that need setuptools should still BuildRequire it)
# However some packages apparently can build both with and without setuptools
# producing egg-info as file or directory (depending on setuptools presence).
# Directory-to-file updates are problematic in RPM, so we ensure setuptools is
# installed when -devel is required.
# See https://bugzilla.redhat.com/show_bug.cgi?id=1623914
# See https://fedoraproject.org/wiki/Packaging:Directory_Replacement
Requires: platform-python-setuptools

Provides: %{name}-2to3 = %{version}-%{release}
Provides: 2to3 = %{version}-%{release}

@@ -640,11 +764,6 @@ so extensions for both versions can co-exist in the same directory.
rm -r Modules/expat
rm -r Modules/zlib

%if %{with rewheel}
%global pip_version %(pip%{pybasever} --version | cut -d' ' -f2)
sed -r -i s/'_PIP_VERSION = "[0-9.]+"'/'_PIP_VERSION = "%{pip_version}"'/ Lib/ensurepip/__init__.py
%endif

#
# Apply patches:
#
@@ -661,8 +780,9 @@ sed -r -i s/'_PIP_VERSION = "[0-9.]+"'/'_PIP_VERSION = "%{pip_version}"'/ Lib/en
%patch170 -p1
%patch178 -p1

%if %{with rewheel}
%if %{with rpmwheels}
%patch189 -p1
rm Lib/ensurepip/_bundled/*.whl
%endif

%patch205 -p1
@@ -670,12 +790,19 @@ sed -r -i s/'_PIP_VERSION = "[0-9.]+"'/'_PIP_VERSION = "%{pip_version}"'/ Lib/en
%patch262 -p1
%patch274 -p1
%patch294 -p1
%patch316 -p1
%patch317 -p1
%patch318 -p1
%patch319 -p1
%patch320 -p1
%patch324 -p1
%patch325 -p1
%patch326 -p1
%patch327 -p1
%patch329 -p1
%patch330 -p1
%patch332 -p1
%patch333 -p1


# Remove files that should be generated by the build
@@ -763,7 +890,7 @@ BuildPython() {
%{nil}

# Invoke the build
make EXTRA_CFLAGS="$CFLAGS $MoreCFlags" %{?_smp_mflags}
%make_build CFLAGS_NODIST="$CFLAGS_NODIST $MoreCFlags"

popd
echo FINISHED: BUILD OF PYTHON FOR CONFIGURATION: $ConfName
@@ -1102,17 +1229,10 @@ CheckPython() {
WITHIN_PYTHON_RPM_BUILD= \
LD_LIBRARY_PATH=$ConfDir $ConfDir/python -m test.regrtest \
-wW --slowest --findleaks \
-x test_distutils \
-x test_bdist_rpm \
%ifarch %{mips64}
-x test_ctypes \
%endif
%ifarch s390x
-x test_gdb \
%endif
%ifarch ppc64le
-x test_gdb \
%endif

echo FINISHED: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName

@@ -1179,7 +1299,9 @@ fi

%exclude %{_bindir}/pyvenv
%{_bindir}/pyvenv-%{pybasever}
%{_mandir}/*/*

%{_mandir}/man1/python3.6.1*
%{_mandir}/man1/unversioned-python.1*

%files libs
%license LICENSE
@@ -1214,13 +1336,11 @@ fi
%dir %{pylibdir}/ensurepip/__pycache__/
%{pylibdir}/ensurepip/*.py
%{pylibdir}/ensurepip/__pycache__/*%{bytecode_suffixes}
%if %{with rpmwheels}
%exclude %{pylibdir}/ensurepip/_bundled

%if %{with rewheel}
%dir %{pylibdir}/ensurepip/rewheel/
%dir %{pylibdir}/ensurepip/rewheel/__pycache__/
%{pylibdir}/ensurepip/rewheel/*.py
%{pylibdir}/ensurepip/rewheel/__pycache__/*%{bytecode_suffixes}
%else
%dir %{pylibdir}/ensurepip/_bundled
%{pylibdir}/ensurepip/_bundled/*.whl
%endif

# The majority of the test module lives in the test subpackage
@@ -1248,11 +1368,8 @@ fi
%{pylibdir}/pydoc_data

%{dynload_dir}/_blake2.%{SOABI_optimized}.so
%{dynload_dir}/_md5.%{SOABI_optimized}.so
%{dynload_dir}/_sha1.%{SOABI_optimized}.so
%{dynload_dir}/_sha256.%{SOABI_optimized}.so
%{dynload_dir}/_sha3.%{SOABI_optimized}.so
%{dynload_dir}/_sha512.%{SOABI_optimized}.so
%{dynload_dir}/_hmacopenssl.%{SOABI_optimized}.so

%{dynload_dir}/_asyncio.%{SOABI_optimized}.so
%{dynload_dir}/_bisect.%{SOABI_optimized}.so
@@ -1487,11 +1604,8 @@ fi
# ...with debug builds of the built-in "extension" modules:

%{dynload_dir}/_blake2.%{SOABI_debug}.so
%{dynload_dir}/_md5.%{SOABI_debug}.so
%{dynload_dir}/_sha1.%{SOABI_debug}.so
%{dynload_dir}/_sha256.%{SOABI_debug}.so
%{dynload_dir}/_sha3.%{SOABI_debug}.so
%{dynload_dir}/_sha512.%{SOABI_debug}.so
%{dynload_dir}/_hmacopenssl.%{SOABI_debug}.so

%{dynload_dir}/_asyncio.%{SOABI_debug}.so
%{dynload_dir}/_bisect.%{SOABI_debug}.so
@@ -1606,8 +1720,63 @@ fi
# ======================================================

%changelog
* Wed Nov 27 2019 Charalampos Stratakis <cstratak@redhat.com> - 3.6.8-23
- Modify the test suite to better handle disabled SSL/TLS versions and FIPS mode
- Use OpenSSL's DRBG and disable os.getrandom() function in FIPS mode
Resolves: rhbz#1754028, rhbz#1754027, rhbz#1754026, rhbz#1774471

* Thu Oct 24 2019 Tomas Orsava <torsava@redhat.com> - 3.6.8-22
- Changed Requires into Recommends for python3-pip to allow a lower RHEL8
footprint for containers and other minimal environments
Resolves: rhbz#1756217

* Wed Oct 16 2019 Tomas Orsava <torsava@redhat.com> - 3.6.8-21
- Patch 329 (FIPS) modified: Added workaround for mod_ssl:
Skip error checking in _Py_hashlib_fips_error
Resolves: rhbz#1760106

* Mon Oct 14 2019 Charalampos Stratakis <cstratak@redhat.com> - 3.6.8-20
- Security fix for CVE-2019-16056
Resolves: rhbz#1750776

* Wed Oct 09 2019 Charalampos Stratakis <cstratak@redhat.com> - 3.6.8-19
- Skip windows specific test_get_exe_bytes test case and enable test_distutils
Resolves: rhbz#1754040

* Mon Oct 07 2019 Charalampos Stratakis <cstratak@redhat.com> - 3.6.8-18
- Reduce the number of tests running during the profile guided optimizations build
- Enable profile guided optimizations for all the supported architectures
Resolves: rhbz#1749576

* Mon Oct 07 2019 Charalampos Stratakis <cstratak@redhat.com> - 3.6.8-17
- Security fix for CVE-2018-20852
Resolves: rhbz#1741553

* Fri Oct 04 2019 Charalampos Stratakis <cstratak@redhat.com> - 3.6.8-16
- Properly pass the -Og optimization flag to the debug build
Resolves: rhbz#1712977 and rhbz#1714733

* Thu Aug 29 2019 Tomas Orsava <torsava@redhat.com> - 3.6.8-15
- Patch 329 that adds support for OpenSSL FIPS mode has been improved and
bugfixed
Resolves: rhbz#1744670 rhbz#1745499 rhbz#1745685

* Tue Aug 06 2019 Tomas Orsava <torsava@redhat.com> - 3.6.8-14
- Adding a new patch 329 that adds support for OpenSSL FIPS mode
- Explicitly listing man pages in files section to fix an RPM warning
Resolves: rhbz#1731424

* Tue Jul 02 2019 Charalampos Stratakis <cstratak@redhat.com> - 3.6.8-13
- Do not set PHA verify flag on client side (rhbz#1725721)
- Enable TLS 1.3 post-handshake authentication in http.client (rhbz#1671353)

* Fri Jun 21 2019 Miro Hrončok <mhroncok@redhat.com> - 3.6.8-12
- Use RPM built wheels of pip and setuptools in ensurepip instead of our rewheel patch
- Require platform-python-setuptools from platform-python-devel to prevent packaging errors
Resolves: rhbz#1701286

* Fri Jun 07 2019 Charalampos Stratakis <cstratak@redhat.com> - 3.6.8-11
- Yet another fix for CVE-2019-9636
- Fix for CVE-2019-10160
Resolves: rhbz#1689318

* Wed May 29 2019 Charalampos Stratakis <cstratak@redhat.com> - 3.6.8-10