diff --git a/SOURCES/00189-add-rewheel-module.patch b/SOURCES/00189-add-rewheel-module.patch deleted file mode 100644 index 419d7e4..0000000 --- a/SOURCES/00189-add-rewheel-module.patch +++ /dev/null @@ -1,245 +0,0 @@ -diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py -index 4748ba4..986d5e9 100644 ---- a/Lib/ensurepip/__init__.py -+++ b/Lib/ensurepip/__init__.py -@@ -1,8 +1,10 @@ - import os - import os.path - import pkgutil -+import shutil - import sys - import tempfile -+from ensurepip import rewheel - - - __all__ = ["version", "bootstrap"] -@@ -24,8 +26,15 @@ def _run_pip(args, additional_paths=None): - sys.path = additional_paths + sys.path - - # Install the bundled software -- import pip._internal -- return pip._internal.main(args) -+ try: -+ # pip 10 -+ from pip._internal import main -+ except ImportError: -+ # pip 9 -+ from pip import main -+ if args[0] in ["install", "list", "wheel"]: -+ args.append('--pre') -+ return main(args) - - - def version(): -@@ -88,20 +97,39 @@ def _bootstrap(*, root=None, upgrade=False, user=False, - # omit pip and easy_install - os.environ["ENSUREPIP_OPTIONS"] = "install" - -+ whls = [] -+ rewheel_dir = None -+ # try to see if we have system-wide versions of _PROJECTS -+ dep_records = rewheel.find_system_records([p[0] for p in _PROJECTS]) -+ # TODO: check if system-wide versions are the newest ones -+ # if --upgrade is used? -+ if all(dep_records): -+ # if we have all _PROJECTS installed system-wide, we'll recreate -+ # wheels from them and install those -+ rewheel_dir = tempfile.TemporaryDirectory() -+ for dr in dep_records: -+ new_whl = rewheel.rewheel_from_record(dr, rewheel_dir.name) -+ whls.append(os.path.join(rewheel_dir.name, new_whl)) -+ else: -+ # if we don't have all the _PROJECTS installed system-wide, -+ # let's just fall back to bundled wheels -+ for project, version in _PROJECTS: -+ whl = os.path.join( -+ os.path.dirname(__file__), -+ "_bundled", -+ "{}-{}-py2.py3-none-any.whl".format(project, version) -+ ) -+ whls.append(whl) -+ - with tempfile.TemporaryDirectory() as tmpdir: - # Put our bundled wheels into a temporary directory and construct the - # additional paths that need added to sys.path - additional_paths = [] -- for project, version in _PROJECTS: -- wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) -- whl = pkgutil.get_data( -- "ensurepip", -- "_bundled/{}".format(wheel_name), -- ) -- with open(os.path.join(tmpdir, wheel_name), "wb") as fp: -- fp.write(whl) -- -- additional_paths.append(os.path.join(tmpdir, wheel_name)) -+ for whl in whls: -+ shutil.copy(whl, tmpdir) -+ additional_paths.append(os.path.join(tmpdir, os.path.basename(whl))) -+ if rewheel_dir: -+ rewheel_dir.cleanup() - - # Construct the arguments to be passed to the pip command - args = ["install", "--no-index", "--find-links", tmpdir] -diff --git a/Lib/ensurepip/rewheel/__init__.py b/Lib/ensurepip/rewheel/__init__.py -new file mode 100644 -index 0000000..753c764 ---- /dev/null -+++ b/Lib/ensurepip/rewheel/__init__.py -@@ -0,0 +1,143 @@ -+import argparse -+import codecs -+import csv -+import email.parser -+import os -+import io -+import re -+import site -+import subprocess -+import sys -+import zipfile -+ -+def run(): -+ parser = argparse.ArgumentParser(description='Recreate wheel of package with given RECORD.') -+ parser.add_argument('record_path', -+ help='Path to RECORD file') -+ parser.add_argument('-o', '--output-dir', -+ help='Dir where to place the wheel, defaults to current working dir.', -+ dest='outdir', -+ 
default=os.path.curdir) -+ -+ ns = parser.parse_args() -+ retcode = 0 -+ try: -+ print(rewheel_from_record(**vars(ns))) -+ except BaseException as e: -+ print('Failed: {}'.format(e)) -+ retcode = 1 -+ sys.exit(1) -+ -+def find_system_records(projects): -+ """Return list of paths to RECORD files for system-installed projects. -+ -+ If a project is not installed, the resulting list contains None instead -+ of a path to its RECORD -+ """ -+ records = [] -+ # get system site-packages dirs -+ sys_sitepack = site.getsitepackages([sys.base_prefix, sys.base_exec_prefix]) -+ sys_sitepack = [sp for sp in sys_sitepack if os.path.exists(sp)] -+ # try to find all projects in all system site-packages -+ for project in projects: -+ path = None -+ for sp in sys_sitepack: -+ dist_info_re = os.path.join(sp, project) + r'-[^\{0}]+\.dist-info'.format(os.sep) -+ candidates = [os.path.join(sp, p) for p in os.listdir(sp)] -+ # filter out candidate dirs based on the above regexp -+ filtered = [c for c in candidates if re.match(dist_info_re, c)] -+ # if we have 0 or 2 or more dirs, something is wrong... -+ if len(filtered) == 1: -+ path = filtered[0] -+ if path is not None: -+ records.append(os.path.join(path, 'RECORD')) -+ else: -+ records.append(None) -+ return records -+ -+def rewheel_from_record(record_path, outdir): -+ """Recreates a whee of package with given record_path and returns path -+ to the newly created wheel.""" -+ site_dir = os.path.dirname(os.path.dirname(record_path)) -+ record_relpath = record_path[len(site_dir):].strip(os.path.sep) -+ to_write, to_omit = get_records_to_pack(site_dir, record_relpath) -+ new_wheel_name = get_wheel_name(record_path) -+ new_wheel_path = os.path.join(outdir, new_wheel_name + '.whl') -+ -+ new_wheel = zipfile.ZipFile(new_wheel_path, mode='w', compression=zipfile.ZIP_DEFLATED) -+ # we need to write a new record with just the files that we will write, -+ # e.g. 
not binaries and *.pyc/*.pyo files -+ new_record = io.StringIO() -+ writer = csv.writer(new_record) -+ -+ # handle files that we can write straight away -+ for f, sha_hash, size in to_write: -+ new_wheel.write(os.path.join(site_dir, f), arcname=f) -+ writer.writerow([f, sha_hash,size]) -+ -+ # rewrite the old wheel file with a new computed one -+ writer.writerow([record_relpath, '', '']) -+ new_wheel.writestr(record_relpath, new_record.getvalue()) -+ -+ new_wheel.close() -+ -+ return new_wheel.filename -+ -+def get_wheel_name(record_path): -+ """Return proper name of the wheel, without .whl.""" -+ -+ wheel_info_path = os.path.join(os.path.dirname(record_path), 'WHEEL') -+ with codecs.open(wheel_info_path, encoding='utf-8') as wheel_info_file: -+ wheel_info = email.parser.Parser().parsestr(wheel_info_file.read()) -+ -+ metadata_path = os.path.join(os.path.dirname(record_path), 'METADATA') -+ with codecs.open(metadata_path, encoding='utf-8') as metadata_file: -+ metadata = email.parser.Parser().parsestr(metadata_file.read()) -+ -+ # construct name parts according to wheel spec -+ distribution = metadata.get('Name') -+ version = metadata.get('Version') -+ build_tag = '' # nothing for now -+ lang_tag = [] -+ for t in wheel_info.get_all('Tag'): -+ lang_tag.append(t.split('-')[0]) -+ lang_tag = '.'.join(lang_tag) -+ abi_tag, plat_tag = wheel_info.get('Tag').split('-')[1:3] -+ # leave out build tag, if it is empty -+ to_join = filter(None, [distribution, version, build_tag, lang_tag, abi_tag, plat_tag]) -+ return '-'.join(list(to_join)) -+ -+def get_records_to_pack(site_dir, record_relpath): -+ """Accepts path of sitedir and path of RECORD file relative to it. -+ Returns two lists: -+ - list of files that can be written to new RECORD straight away -+ - list of files that shouldn't be written or need some processing -+ (pyc and pyo files, scripts) -+ """ -+ record_file_path = os.path.join(site_dir, record_relpath) -+ with codecs.open(record_file_path, encoding='utf-8') as record_file: -+ record_contents = record_file.read() -+ # temporary fix for https://github.com/pypa/pip/issues/1376 -+ # we need to ignore files under ".data" directory -+ data_dir = os.path.dirname(record_relpath).strip(os.path.sep) -+ data_dir = data_dir[:-len('dist-info')] + 'data' -+ -+ to_write = [] -+ to_omit = [] -+ for l in record_contents.splitlines(): -+ spl = l.split(',') -+ if len(spl) == 3: -+ # new record will omit (or write differently): -+ # - abs paths, paths with ".." (entry points), -+ # - pyc+pyo files -+ # - the old RECORD file -+ # TODO: is there any better way to recognize an entry point? 
-+ if os.path.isabs(spl[0]) or spl[0].startswith('..') or \ -+ spl[0].endswith('.pyc') or spl[0].endswith('.pyo') or \ -+ spl[0] == record_relpath or spl[0].startswith(data_dir): -+ to_omit.append(spl) -+ else: -+ to_write.append(spl) -+ else: -+ pass # bad RECORD or empty line -+ return to_write, to_omit -diff --git a/Makefile.pre.in b/Makefile.pre.in -index 85e2ee3..4d34130 100644 ---- a/Makefile.pre.in -+++ b/Makefile.pre.in -@@ -1256,7 +1256,7 @@ LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \ - test/test_asyncio \ - collections concurrent concurrent/futures encodings \ - email email/mime test/test_email test/test_email/data \ -- ensurepip ensurepip/_bundled \ -+ ensurepip ensurepip/_bundled ensurepip/rewheel \ - html json test/test_json http dbm xmlrpc \ - sqlite3 sqlite3/test \ - logging csv wsgiref urllib \ diff --git a/SOURCES/00189-use-rpm-wheels.patch b/SOURCES/00189-use-rpm-wheels.patch new file mode 100644 index 0000000..6e12814 --- /dev/null +++ b/SOURCES/00189-use-rpm-wheels.patch @@ -0,0 +1,70 @@ +diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py +index 09c572d..167d27b 100644 +--- a/Lib/ensurepip/__init__.py ++++ b/Lib/ensurepip/__init__.py +@@ -1,16 +1,27 @@ ++import distutils.version ++import glob + import os + import os.path +-import pkgutil + import sys + import tempfile + + + __all__ = ["version", "bootstrap"] + ++_WHEEL_DIR = "/usr/share/python{}-wheels/".format(sys.version_info[0]) + +-_SETUPTOOLS_VERSION = "40.6.2" ++def _get_most_recent_wheel_version(pkg): ++ prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg)) ++ suffix = "-py2.py3-none-any.whl" ++ pattern = "{}*{}".format(prefix, suffix) ++ versions = (p[len(prefix):-len(suffix)] for p in glob.glob(pattern)) ++ return str(max(versions, key=distutils.version.LooseVersion)) ++ ++ ++_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools") ++ ++_PIP_VERSION = _get_most_recent_wheel_version("pip") + +-_PIP_VERSION = "18.1" + + _PROJECTS = [ + ("setuptools", _SETUPTOOLS_VERSION), +@@ -23,9 +34,15 @@ def _run_pip(args, additional_paths=None): + if additional_paths is not None: + sys.path = additional_paths + sys.path + +- # Install the bundled software +- import pip._internal +- return pip._internal.main(args) ++ try: ++ # pip 10 ++ from pip._internal import main ++ except ImportError: ++ # pip 9 ++ from pip import main ++ if args[0] in ["install", "list", "wheel"]: ++ args.append('--pre') ++ return main(args) + + + def version(): +@@ -94,12 +111,9 @@ def _bootstrap(*, root=None, upgrade=False, user=False, + additional_paths = [] + for project, version in _PROJECTS: + wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) +- whl = pkgutil.get_data( +- "ensurepip", +- "_bundled/{}".format(wheel_name), +- ) +- with open(os.path.join(tmpdir, wheel_name), "wb") as fp: +- fp.write(whl) ++ with open(os.path.join(_WHEEL_DIR, wheel_name), "rb") as sfp: ++ with open(os.path.join(tmpdir, wheel_name), "wb") as fp: ++ fp.write(sfp.read()) + + additional_paths.append(os.path.join(tmpdir, wheel_name)) + diff --git a/SOURCES/00316-mark-bdist_wininst-unsupported.patch b/SOURCES/00316-mark-bdist_wininst-unsupported.patch new file mode 100644 index 0000000..c091be0 --- /dev/null +++ b/SOURCES/00316-mark-bdist_wininst-unsupported.patch @@ -0,0 +1,14 @@ +diff --git a/Lib/distutils/command/bdist_wininst.py b/Lib/distutils/command/bdist_wininst.py +index fde5675..15434c3 100644 +--- a/Lib/distutils/command/bdist_wininst.py ++++ b/Lib/distutils/command/bdist_wininst.py +@@ -55,6 +55,9 @@ 
class bdist_wininst(Command): + boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', + 'skip-build'] + ++ # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows ++ _unsupported = (sys.platform != "win32") ++ + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None diff --git a/SOURCES/00318-fixes-for-tls-13.patch b/SOURCES/00318-fixes-for-tls-13.patch new file mode 100644 index 0000000..8f4a1b0 --- /dev/null +++ b/SOURCES/00318-fixes-for-tls-13.patch @@ -0,0 +1,949 @@ +From 412ccf4c6f8c417006c0a93392a8274a425074c0 Mon Sep 17 00:00:00 2001 +From: Victor Stinner +Date: Wed, 29 May 2019 04:04:54 +0200 +Subject: [PATCH 1/5] bpo-32947: test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1 + (GH-11612) + +Backport partially commit 529525fb5a8fd9b96ab4021311a598c77588b918: +complete the previous partial backport (commit +2a4ee8aa01d61b6a9c8e9c65c211e61bdb471826. + +Co-Authored-By: Christian Heimes +--- + Lib/test/test_ssl.py | 15 +++++++++++++++ + .../2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst | 1 + + 2 files changed, 16 insertions(+) + create mode 100644 Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst + +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index cb0acda..639109f 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -2043,6 +2043,16 @@ if _have_threads: + sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n" + % (msg, ctype, msg.lower(), ctype)) + self.write(msg.lower()) ++ except ConnectionResetError: ++ # XXX: OpenSSL 1.1.1 sometimes raises ConnectionResetError ++ # when connection is not shut down gracefully. ++ if self.server.chatty and support.verbose: ++ sys.stdout.write( ++ " Connection reset by peer: {}\n".format( ++ self.addr) ++ ) ++ self.close() ++ self.running = False + except OSError: + if self.server.chatty: + handle_error("Test server failure:\n") +@@ -2122,6 +2132,11 @@ if _have_threads: + pass + except KeyboardInterrupt: + self.stop() ++ except BaseException as e: ++ if support.verbose and self.chatty: ++ sys.stdout.write( ++ ' connection handling failed: ' + repr(e) + '\n') ++ + self.sock.close() + + def stop(self): +diff --git a/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst b/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst +new file mode 100644 +index 0000000..f508504 +--- /dev/null ++++ b/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst +@@ -0,0 +1 @@ ++test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1. +-- +2.21.0 + + +From 6b728ec778067849dd1f0d9b73cf1ac47dafa270 Mon Sep 17 00:00:00 2001 +From: "Miss Islington (bot)" + <31488909+miss-islington@users.noreply.github.com> +Date: Wed, 25 Sep 2019 09:12:59 -0700 +Subject: [PATCH 2/5] bpo-38271: encrypt private key test files with AES256 + (GH-16385) + +The private keys for test_ssl were encrypted with 3DES in traditional +PKCSGH-5 format. 3DES and the digest algorithm of PKCSGH-5 are blocked by +some strict crypto policies. Use PKCSGH-8 format with AES256 encryption +instead. 
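The hunks below regenerate the bundled test keys with openssl pkey -aes256, which writes PKCS#8 "ENCRYPTED PRIVATE KEY" blocks instead of the old DES-EDE3 "RSA PRIVATE KEY" format. As a minimal standalone sketch of that re-encryption step, assuming an openssl binary on PATH and reusing the input/output names and throwaway passphrase that make_ssl_certs.py already uses further down in this patch:

    import subprocess

    # Re-encrypt an existing PEM private key as PKCS#8 with AES-256,
    # mirroring the updated make_ssl_certs.py invocation in this patch.
    subprocess.check_call([
        'openssl', 'pkey',
        '-in', 'ssl_key.pem',
        '-out', 'ssl_key.passwd.pem',
        '-aes256',
        '-passout', 'pass:somepass',
    ])
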
+ +Signed-off-by: Christian Heimes + +https://bugs.python.org/issue38271 + +Automerge-Triggered-By: @tiran +(cherry picked from commit bfd0c963d88f3df69489ee250655e2b8f3d235bd) + +Co-authored-by: Christian Heimes +--- + Lib/test/keycert.passwd.pem | 85 ++++++++++--------- + Lib/test/make_ssl_certs.py | 4 +- + Lib/test/ssl_key.passwd.pem | 84 +++++++++--------- + .../2019-09-25-13-11-29.bpo-38271.iHXNIg.rst | 4 + + 4 files changed, 91 insertions(+), 86 deletions(-) + create mode 100644 Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst + +diff --git a/Lib/test/keycert.passwd.pem b/Lib/test/keycert.passwd.pem +index cbb3c3b..c330c36 100644 +--- a/Lib/test/keycert.passwd.pem ++++ b/Lib/test/keycert.passwd.pem +@@ -1,45 +1,45 @@ +------BEGIN RSA PRIVATE KEY----- +-Proc-Type: 4,ENCRYPTED +-DEK-Info: DES-EDE3-CBC,D134E931C96D9DEC +- +-nuGFEej7vIjkYWSMz5OJeVTNntDRQi6ZM4DBm3g8T7i/0odr3WFqGMMKZcIhLYQf +-rgRq7RSKtrJ1y5taVucMV+EuCjyfzDo0TsYt+ZrXv/D08eZhjRmkhoHnGVF0TqQm +-nQEXM/ERT4J2RM78dnG+homMkI76qOqxgGbRqQqJo6AiVRcAZ45y8s96bru2TAB8 +-+pWjO/v0Je7AFVdwSU52N8OOY6uoSAygW+0UY1WVxbVGJF2XfRsNpPX+YQHYl6e+ +-3xM5XBVCgr6kmdAyub5qUJ38X3TpdVGoR0i+CVS9GTr2pSRib1zURAeeHnlqiUZM +-4m0Gn9s72nJevU1wxED8pwOhR8fnHEmMKGD2HPhKoOCbzDhwwBZO27TNa1uWeM3f +-M5oixKDi2PqMn3y2cDx1NjJtP661688EcJ5a2Ih9BgO9xpnhSyzBWEKcAn0tJB0H +-/56M0FW6cdOOIzMveGGL7sHW5E+iOdI1n5e7C6KJUzew78Y9qJnhS53EdI6qTz9R +-wsIsj1i070Fk6RbPo6zpLlF6w7Zj8GlZaZA7OZZv9wo5VEV/0ST8gmiiBOBc4C6Y +-u9hyLIIu4dFEBKyQHRvBnQSLNpKx6or1OGFDVBay2In9Yh2BHh1+vOj/OIz/wq48 +-EHOIV27fRJxLu4jeK5LIGDhuPnMJ8AJYQ0bQOUP6fd7p+TxWkAQZPB/Dx/cs3hxr +-nFEdzx+eO+IAsObx/b1EGZyEJyETBslu4GwYX7/KK3HsJhDJ1bdZ//28jOCaoir6 +-ZOMT72GRwmVoQTJ0XpccfjHfKJDRLT7C1xvzo4Eibth0hpTZkA75IUYUp6qK/PuJ +-kH/qdiC7QIkRKtsrawW4vEDna3YtxIYhQqz9+KwO6u/0gzooZtv1RU4U3ifMDB5u +-5P5GAzACRqlY8QYBkM869lvWqzQPHvybC4ak9Yx6/heMO9ddjdIW9BaK8BLxvN/6 +-UCD936Y4fWltt09jHZIoxWFykouBwmd7bXooNYXmDRNmjTdVhKJuOEOQw8hDzx7e +-pWFJ9Z/V4Qm1tvXbCD7QFqMCDoY3qFvVG8DBqXpmxe1yPfz21FWrT7IuqDXAD3ns +-vxfN/2a+Cy04U9FBNVCvWqWIs5AgNpdCMJC2FlXKTy+H3/7rIjNyFyvbX0vxIXtK +-liOVNXiyVM++KZXqktqMUDlsJENmIHV9B046luqbgW018fHkyEYlL3iRZGbYegwr +-XO9VVIKVPw1BEvJ8VNdGFGuZGepd8qX2ezfYADrNR+4t85HDm8inbjTobSjWuljs +-ftUNkOeCHqAvWCFQTLCfdykvV08EJfVY79y7yFPtfRV2gxYokXFifjo3su9sVQr1 +-UiIS5ZAsIC1hBXWeXoBN7QVTkFi7Yto6E1q2k10LiT3obpUUUQ/oclhrJOCJVjrS +-oRcj2QBy8OT4T9slJr5maTWdgd7Lt6+I6cGQXPaDvjGOJl0eBYM14vhx4rRQWytJ +-k07hhHFO4+9CGCuHS8AAy2gR6acYFWt2ZiiNZ0z/iPIHNK4YEyy9aLf6uZH/KQjE +-jmHToo7XD6QvCAEC5qTHby3o3LfHIhyZi/4L+AhS4FKUHF6M0peeyYt4z3HaK2d2 +-N6mHLPdjwNjra7GOmcns4gzcrdfoF+R293KpPal4PjknvR3dZL4kKP/ougTAM5zv +-qDIvRbkHzjP8ChTpoLcJsNVXykNcNkjcSi0GHtIpYjh6QX6P2uvR/S4+Bbb9p9rn +-hIy/ovu9tWN2hiPxGPe6torF6BulAxsTYlDercC204AyzsrdA0pr6HBgJH9C6ML1 +-TchwodbFJqn9rSv91i1liusAGoOvE81AGBdrXY7LxfSNhYY1IK6yR/POJPTd53sA +-uX2/j6Rtoksd/2BHPM6AUnI/2B9slhuzWX2aCtWLeuwvXDS6rYuTigaQmLkzTRfM +-dlMI3s9KLXxgi5YVumUZleJWXwBNP7KiKajd+VTSD+7WAhyhM5FIG5wVOaxmy4G2 +-TyqZ/Ax9d2VEjTQHWvQlLPQ4Mp0EIz0aEl94K/S8CK8bJRH6+PRkar+dJi1xqlL+ +-BYb42At9mEJ8odLlFikvNi1+t7jqXk5jRi5C0xFKx3nTtzoH2zNUeuA3R6vSocVK +-45jnze9IkKmxMlJ4loR5sgszdpDCD3kXqjtCcbMTmcrGyzJek3HSOTpiEORoTFOe +-Rhg6jH5lm+QcC263oipojS0qEQcnsWJP2CylNYMYHR9O/9NQxT3o2lsRHqZTMELV +-uQa/SFH+paQNbZOj8MRwPSqqiIxJFuLswKte1R+W7LKn1yBSM7Pp39lNbzGvJD2E +-YRfnCwFpJ54voVAuQ4jXJvigCW2qeCjXlxeD6K2j4eGJEEOmIjIW1wjubyBY6OI3 +------END RSA PRIVATE KEY----- ++-----BEGIN ENCRYPTED PRIVATE KEY----- ++MIIHbTBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQIhD+rJdxqb6ECAggA ++MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBDTdyjCP3riOSUfxix4aXEvBIIH 
++ECGkbsFabrcFMZcplw5jHMaOlG7rYjUzwDJ80JM8uzbv2Jb8SvNlns2+xmnEvH/M ++mNvRmnXmplbVjH3XBMK8o2Psnr2V/a0j7/pgqpRxHykG+koOY4gzdt3MAg8JPbS2 ++hymSl+Y5EpciO3xLfz4aFL1ZNqspQbO/TD13Ij7DUIy7xIRBMp4taoZCrP0cEBAZ +++wgu9m23I4dh3E8RUBzWyFFNic2MVVHrui6JbHc4dIHfyKLtXJDhUcS0vIC9PvcV ++jhorh3UZC4lM+/jjXV5AhzQ0VrJ2tXAUX2dA144XHzkSH2QmwfnajPsci7BL2CGC ++rjyTy4NfB/lDwU+55dqJZQSKXMxAapJMrtgw7LD5CKQcN6zmfhXGssJ7HQUXKkaX ++I1YOFzuUD7oo56BVCnVswv0jX9RxrE5QYNreMlOP9cS+kIYH65N+PAhlURuQC14K ++PgDkHn5knSa2UQA5tc5f7zdHOZhGRUfcjLP+KAWA3nh+/2OKw/X3zuPx75YT/FKe ++tACPw5hjEpl62m9Xa0eWepZXwqkIOkzHMmCyNCsbC0mmRoEjmvfnslfsmnh4Dg/c ++4YsTYMOLLIeCa+WIc38aA5W2lNO9lW0LwLhX1rP+GRVPv+TVHXlfoyaI+jp0iXrJ ++t3xxT0gaiIR/VznyS7Py68QV/zB7VdqbsNzS7LdquHK1k8+7OYiWjY3gqyU40Iu2 ++d1eSnIoDvQJwyYp7XYXbOlXNLY+s1Qb7yxcW3vXm0Bg3gKT8r1XHWJ9rj+CxAn5r ++ysfkPs1JsesxzzQjwTiDNvHnBnZnwxuxfBr26ektEHmuAXSl8V6dzLN/aaPjpTj4 ++CkE7KyqX3U9bLkp+ztl4xWKEmW44nskzm0+iqrtrxMyTfvvID4QrABjZL4zmWIqc ++e3ZfA3AYk9VDIegk/YKGC5VZ8YS7ZXQ0ASK652XqJ7QlMKTxxV7zda6Fp4uW6/qN ++ezt5wgbGGhZQXj2wDQmWNQYyG/juIgYTpCUA54U5XBIjuR6pg+Ytm0UrvNjsUoAC ++wGelyqaLDq8U8jdIFYVTJy9aJjQOYXjsUJ0dZN2aGHSlju0ZGIZc49cTIVQ9BTC5 ++Yc0Vlwzpl+LuA25DzKZNSb/ci0lO/cQGJ2uXQQgaNgdsHlu8nukENGJhnIzx4fzK ++wEh3yHxhTRCzPPwDfXmx0IHXrPqJhSpAgaXBVIm8OjvmMxO+W75W4uLfNY/B7e2H ++3cjklGuvkofOf7sEOrGUYf4cb6Obg8FpvHgpKo5Twwmoh/qvEKckBFqNhZXDDl88 ++GbGlSEgyaAV1Ig8s1NJKBolWFa0juyPAwJ8vT1T4iwW7kQ7KXKt2UNn96K/HxkLu ++pikvukz8oRHMlfVHa0R48UB1fFHwZLzPmwkpu6ancIxk3uO3yfhf6iDk3bmnyMlz ++g3k/b6MrLYaOVByRxay85jH3Vvgqfgn6wa6BJ7xQ81eZ8B45gFuTH0J5JtLL7SH8 ++darRPLCYfA+Ums9/H6pU5EXfd3yfjMIbvhCXHkJrrljkZ+th3p8dyto6wmYqIY6I ++qR9sU+o6DhRaiP8tCICuhHxQpXylUM6WeJkJwduTJ8KWIvzsj4mReIKOl/oC2jSd ++gIdKhb9Q3zj9ce4N5m6v66tyvjxGZ+xf3BvUPDD+LwZeXgf7OBsNVbXzQbzto594 ++nbCzPocFi3gERE50ru4K70eQCy08TPG5NpOz+DDdO5vpAuMLYEuI7O3L+3GjW40Q ++G5bu7H5/i7o/RWR67qhG/7p9kPw3nkUtYgnvnWaPMIuTfb4c2d069kjlfgWjIbbI ++tpSKmm5DHlqTE4/ECAbIEDtSaw9dXHCdL3nh5+n428xDdGbjN4lT86tfu17EYKzl ++ydH1RJ1LX3o3TEj9UkmDPt7LnftvwybMFEcP7hM2xD4lC++wKQs7Alg6dTkBnJV4 ++5xU78WRntJkJTU7kFkpPKA0QfyCuSF1fAMoukDBkqUdOj6jE0BlJQlHk5iwgnJlt ++uEdkTjHZEjIUxWC6llPcAzaPNlmnD45AgfEW+Jn21IvutmJiQAz5lm9Z9PXaR0C8 ++hXB6owRY67C0YKQwXhoNf6xQun2xGBGYy5rPEEezX1S1tUH5GR/KW1Lh+FzFqHXI ++ZEb5avfDqHKehGAjPON+Br7akuQ125M9LLjKuSyPaQzeeCAy356Xd7XzVwbPddbm ++9S9WSPqzaPgh10chIHoNoC8HMd33dB5j9/Q6jrbU/oPlptu/GlorWblvJdcTuBGI ++IVn45RFnkG8hCz0GJSNzW7+70YdESQbfJW79vssWMaiSjFE0pMyFXrFR5lBywBTx ++PiGEUWtvrKG94X1TMlGUzDzDJOQNZ9dT94bonNe9pVmP5BP4/DzwwiWh6qrzWk6p ++j8OE4cfCSh2WvHnhJbH7/N0v+JKjtxeIeJ16jx/K2oK5 ++-----END ENCRYPTED PRIVATE KEY----- + -----BEGIN CERTIFICATE----- + MIIEWTCCAsGgAwIBAgIJAJinz4jHSjLtMA0GCSqGSIb3DQEBCwUAMF8xCzAJBgNV + BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +@@ -66,3 +66,4 @@ jMqTFlmO7kpf/jpCSmamp3/JSEE1BJKHwQ6Ql4nzRA2N1mnvWH7Zxcv043gkHeAu + 9Wc2uXpw9xF8itV4Uvcdr3dwqByvIqn7iI/gB+4l41e0u8OmH2MKOx4Nxlly5TNW + HcVKQHyOeyvnINuBAQ== + -----END CERTIFICATE----- ++ +diff --git a/Lib/test/make_ssl_certs.py b/Lib/test/make_ssl_certs.py +index 3622765..41b5f46 100644 +--- a/Lib/test/make_ssl_certs.py ++++ b/Lib/test/make_ssl_certs.py +@@ -206,8 +206,8 @@ if __name__ == '__main__': + with open('ssl_key.pem', 'w') as f: + f.write(key) + print("password protecting ssl_key.pem in ssl_key.passwd.pem") +- check_call(['openssl','rsa','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-des3','-passout','pass:somepass']) +- check_call(['openssl','rsa','-in','ssl_key.pem','-out','keycert.passwd.pem','-des3','-passout','pass:somepass']) ++ 
check_call(['openssl','pkey','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-aes256','-passout','pass:somepass']) ++ check_call(['openssl','pkey','-in','ssl_key.pem','-out','keycert.passwd.pem','-aes256','-passout','pass:somepass']) + + with open('keycert.pem', 'w') as f: + f.write(key) +diff --git a/Lib/test/ssl_key.passwd.pem b/Lib/test/ssl_key.passwd.pem +index e4f1370..46de61a 100644 +--- a/Lib/test/ssl_key.passwd.pem ++++ b/Lib/test/ssl_key.passwd.pem +@@ -1,42 +1,42 @@ +------BEGIN RSA PRIVATE KEY----- +-Proc-Type: 4,ENCRYPTED +-DEK-Info: DES-EDE3-CBC,8064BE1494B24B13 +- +-KJrffOMbo8M0I3PzcYxRZGMpKD1yB3Ii4+bT5XoanxjIJ+4fdx6LfZ0Rsx+riyzs +-tymsQu/iYY9j+4rCvN9+eetsL1X6iZpiimKsLexcid9M3fb0vxED5Sgw0dvunCUA +-xhqjLIKR92MKbODHf6KrDKCpsiPbjq4gZ7P+uCGXAMHL3MXIJSC0hW9rK7Ce6oyO +-CjpIcgB8x+GUWZZZhAFdlzIHMZrteNP2P5HK6QcaT71P034Dz1hhqoj4Q0t+Fta2 +-4tfsM/bnTR/l6hwlhPa1e3Uj322tDTDWBScgWANn5+sEWldLmozMaWhZsn22pfk2 +-KjRMGXG024JVheV882nbdOBvG7oq+lxkZ/ZP+vvqJqnvYtf7WtM8UivzYpe5Hz5b +-kVvWzPjBLUSZ9whM9rDLqSSqMPyPvDTuEmLkuq+xm7pYJmsLqIMP2klZLqRxLX6K +-uqwplb8UG440qauxgnQ905PId1l2fJEnRtV+7vXprA0L0QotgXLVHBhLmTFM+3PH +-9H3onf31dionUAPrn3nfVE36HhvVgRyvDBnBzJSIMighgq21Qx/d1dk0DRYi1hUI +-nCHl0YJPXheVcXR7JiSF2XQCAaFuS1Mr7NCXfWZOZQC/0dkvmHnl9DUAhuqq9BNZ +-1cKhZXcKHadg2/r0Zup/oDzmHPUEfTAXT0xbqoWlhkdwbF2veWQ96A/ncx3ISTb4 +-PkXBlX9rdia8nmtyQDQRn4NuvchbaGkj4WKFC8pF8Hn7naHqwjpHaDUimBc0CoQW +-edNJqruKWwtSVLuwKHCC2gZFX9AXSKJXJz/QRSUlhFGOhuF/J6yKaXj6n5lxWNiQ +-54J+OP/hz2aS95CD2+Zf1SKpxdWiLZSIQqESpmmUrXROixNJZ/Z7gI74Dd9dSJOH +-W+3AU03vrrFZVrJVZhjcINHoH1Skh6JKscH18L6x4U868nSr4SrRLX8BhHllOQyD +-bmU+PZAjF8ZBIaCtTGulDXD29F73MeAZeTSsgQjFu0iKLj1wPiphbx8i/SUtR4YP +-X6PVA04g66r1NBw+3RQASVorZ3g1MSFvITHXcbKkBDeJH2z1+c6t/VVyTONnQhM5 +-lLgRSk6HCbetvT9PKxWrWutA12pdBYEHdZhMHVf2+xclky7l09w8hg2/qqcdGRGe +-oAOZ72t0l5ObNyaruDKUS6f4AjOyWq/Xj5xuFtf1n3tQHyslSyCTPcAbQhDfTHUx +-vixb/V9qvYPt7OCn8py7v1M69NH42QVFAvwveDIFjZdqfIKBoJK2V4qPoevJI6uj +-Q5ByMt8OXOjSXNpHXpYQWUiWeCwOEBXJX8rzCHdMtg37jJ0zCmeErR1NTdg+EujM +-TWYgd06jlT67tURST0aB2kg4ijKgUJefD313LW1zC6gVsTbjSZxYyRbPfSP6flQB +-yCi1C19E2OsgleqbkBVC5GlYUzaJT7SGjCRmGx1eqtbrALu+LVH24Wceexlpjydl +-+s2nf/DZlKun/tlPh6YioifPCJjByZMQOCEfIox6BkemZETz8uYA4TTWimG13Z03 +-gyDGC2jdpEW414J2qcQDvrdUgJ+HlhrAAHaWpMQDbXYxBGoZ+3+ORvQV4kAsCwL8 +-k3EIrVpePdik+1xgOWsyLj6QxFXlTMvL6Wc5pnArFPORsgHEolJvxSPTf9aAHNPn +-V2WBvxiLBtYpGrujAUM40Syx/aN2RPtcXYPAusHUBw+S8/p+/8Kg8GZmnIXG3F89 +-45Eepl2quZYIrou7a1fwIpIIZ0hFiBQ1mlHVMFtxwVHS1bQb3SU2GeO+JcGjdVXc +-04qeGuQ5M164eQ5C0T7ZQ1ULiUlFWKD30m+cjqmZzt3d7Q0mKpMKuESIuZJo/wpD +-Nas432aLKUhcNx/pOYLkKJRpGZKOupQoD5iUj/j44o8JoFkDK33v2S57XB5QGz28 +-9Zuhx49b3W8mbM6EBanlQKLWJGCxXqc/jhYhFWn+b0MhidynFgA0oeWvf6ZDyt6H +-Yi5Etxsar09xp0Do3NxtQXLuSUu0ji2pQzSIKuoqQWKqldm6VrpwojiqJhy4WQBQ +-aVVyFeWBC7G3Zj76dO+yp2sfJ0itJUQ8AIB9Cg0f34rEZu+r9luPmqBoUeL95Tk7 +-YvCOU3Jl8Iqysv8aNpVXT8sa8rrSbruWCByEePZ37RIdHLMVBwVY0eVaFQjrjU7E +-mXmM9eaoYLfXOllsQ+M2+qPFUITr/GU3Qig13DhK/+yC1R6V2a0l0WRhMltIPYKW +-Ztvvr4hK5LcYCeS113BLiMbDIMMZZYGDZGMdC8DnnVbT2loF0Rfmp80Af31KmMQ4 +-6XvMatW9UDjBoY5a/YMpdm7SRwm+MgV2KNPpc2kST87/yi9oprGAb8qiarHiHTM0 +------END RSA PRIVATE KEY----- ++-----BEGIN ENCRYPTED PRIVATE KEY----- ++MIIHbTBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQI072N7W+PDDMCAggA ++MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBA/AuaRNi4vE4KGqI4In+70BIIH ++ENGS5Vex5NID873frmd1UZEHZ+O/Bd0wDb+NUpIqesHkRYf7kKi6Gnr+nKQ/oVVn ++Lm3JjE7c8ECP0OkOOXmiXuWL1SkzBBWqCI4stSGUPvBiHsGwNnvJAaGjUffgMlcC ++aJOA2+dnejLkzblq4CB2LQdm06N3Xoe9tyqtQaUHxfzJAf5Ydd8uj7vpKN2MMhY7 ++icIPJwSyh0N7S6XWVtHEokr9Kp4y2hS5a+BgCWV1/1z0aF7agnSVndmT1VR+nWmc 
++lM14k+lethmHMB+fsNSjnqeJ7XOPlOTHqhiZ9bBSTgF/xr5Bck/NiKRzHjdovBox ++TKg+xchaBhpRh7wBPBIlNJeHmIjv+8obOKjKU98Ig/7R9+IryZaNcKAH0PuOT+Sw ++QHXiCGQbOiYHB9UyhDTWiB7YVjd8KHefOFxfHzOQb/iBhbv1x3bTl3DgepvRN6VO ++dIsPLoIZe42sdf9GeMsk8mGJyZUQ6AzsfhWk3grb/XscizPSvrNsJ2VL1R7YTyT3 ++3WA4ZXR1EqvXnWL7N/raemQjy62iOG6t7fcF5IdP9CMbWP+Plpsz4cQW7FtesCTq ++a5ZXraochQz361ODFNIeBEGU+0qqXUtZDlmos/EySkZykSeU/L0bImS62VGE3afo ++YXBmznTTT9kkFkqv7H0MerfJsrE/wF8puP3GM01DW2JRgXRpSWlvbPV/2LnMtRuD ++II7iH4rWDtTjCN6BWKAgDOnPkc9sZ4XulqT32lcUeV6LTdMBfq8kMEc8eDij1vUT ++maVCRpuwaq8EIT3lVgNLufHiG96ojlyYtj3orzw22IjkgC/9ee8UDik9CqbMVmFf ++fVHhsw8LNSg8Q4bmwm5Eg2w2it2gtI68+mwr75oCxuJ/8OMjW21Prj8XDh5reie2 ++c0lDKQOFZ9UnLU1bXR/6qUM+JFKR4DMq+fOCuoQSVoyVUEOsJpvBOYnYZN9cxsZm ++vh9dKafMEcKZ8flsbr+gOmOw7+Py2ifSlf25E/Frb1W4gtbTb0LQVHb6+drutrZj ++8HEu4CnHYFCD4ZnOJb26XlZCb8GFBddW86yJYyUqMMV6Q1aJfAOAglsTo1LjIMOZ ++byo0BTAmwUevU/iuOXQ4qRBXXcoidDcTCrxfUSPG9wdt9l+m5SdQpWqfQ+fx5O7m ++SLlrHyZCiPSFMtC9DxqjIklHjf5W3wslGLgaD30YXa4VDYkRihf3CNsxGQ+tVvef ++l0ZjoAitF7Gaua06IESmKnpHe23dkr1cjYq+u2IV+xGH8LeExdwsQ9kpuTeXPnQs ++JOA99SsFx1ct32RrwjxnDDsiNkaViTKo9GDkV3jQTfoFgAVqfSgg9wGXpqUqhNG7 ++TiSIHCowllLny2zn4XrXCy2niD3VDt0skb3l/PaegHE2z7S5YY85nQtYwpLiwB9M ++SQ08DYKxPBZYKtS2iZ/fsA1gjSRQDPg/SIxMhUC3M3qH8iWny1Lzl25F2Uq7VVEX ++LdTUtaby49jRTT3CQGr5n6z7bMbUegiY7h8WmOekuThGDH+4xZp6+rDP4GFk4FeK ++JcF70vMQYIjQZhadic6olv+9VtUP42ltGG/yP9a3eWRkzfAf2eCh6B1rYdgEWwE8 ++rlcZzwM+y6eUmeNF2FVWB8iWtTMQHy+dYNPM+Jtus1KQKxiiq/yCRs7nWvzWRFWA ++HRyqV0J6/lqgm4FvfktFt1T0W+mDoLJOR2/zIwMy2lgL5zeHuR3SaMJnCikJbqKS ++HB3UvrhAWUcZqdH29+FhVWeM7ybyF1Wccmf+IIC/ePLa6gjtqPV8lG/5kbpcpnB6 ++UQY8WWaKMxyr3jJ9bAX5QKshchp04cDecOLZrpFGNNQngR8RxSEkiIgAqNxWunIu ++KrdBDrupv/XAgEOclmgToY3iywLJSV5gHAyHWDUhRH4cFCLiGPl4XIcnXOuTze3H ++3j+EYSiS3v3DhHjp33YU2pXlJDjiYsKzAXejEh66++Y8qaQdCAad3ruWRCzW3kgk ++Md0A1VGzntTnQsewvExQEMZH2LtYIsPv3KCYGeSAuLabX4tbGk79PswjnjLLEOr0 ++Ghf6RF6qf5/iFyJoG4vrbKT8kx6ywh0InILCdjUunuDskIBxX6tEcr9XwajoIvb2 ++kcmGdjam5kKLS7QOWQTl8/r/cuFes0dj34cX5Qpq+Gd7tRq/D+b0207926Cxvftv ++qQ1cVn8HiLxKkZzd3tpf2xnoV1zkTL0oHrNg+qzxoxXUTUcwtIf1d/HRbYEAhi/d ++bBBoFeftEHWNq+sJgS9bH+XNzo/yK4u04B5miOq8v4CSkJdzu+ZdF22d4cjiGmtQ ++8BTmcn0Unzm+u5H0+QSZe54QBHJGNXXOIKMTkgnOdW27g4DbI1y7fCqJiSMbRW6L ++oHmMfbdB3GWqGbsUkhY8i6h9op0MU6WOX7ea2Rxyt4t6 ++-----END ENCRYPTED PRIVATE KEY----- +diff --git a/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst b/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst +new file mode 100644 +index 0000000..8f43d32 +--- /dev/null ++++ b/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst +@@ -0,0 +1,4 @@ ++The private keys for test_ssl were encrypted with 3DES in traditional ++PKCS#5 format. 3DES and the digest algorithm of PKCS#5 are blocked by ++some strict crypto policies. Use PKCS#8 format with AES256 encryption ++instead. +-- +2.21.0 + + +From d8584f9bb3fb841a1b21ed25abc2237ea8bbc206 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Tue, 26 Nov 2019 23:57:21 +0100 +Subject: [PATCH 3/5] Use PROTOCOL_TLS_CLIENT/SERVER + +Replaces PROTOCOL_TLSv* and PROTOCOL_SSLv23 with PROTOCOL_TLS_CLIENT and +PROTOCOL_TLS_SERVER. 
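The switch is not purely cosmetic: PROTOCOL_TLS_CLIENT enables hostname checking and certificate verification by default, while the version-pinned constants did not, which is why several hunks below also have to disable those explicitly in CERT_NONE test cases. A small illustration of the defaults (context setup only, no connection is made):

    import ssl

    client = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    print(client.check_hostname, client.verify_mode)  # defaults: True / CERT_REQUIRED

    server = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    print(server.check_hostname, server.verify_mode)  # defaults: False / CERT_NONE

    # Relaxing a client context for a test that deliberately skips verification:
    # check_hostname must be cleared first, otherwise assigning CERT_NONE
    # raises ValueError.
    client.check_hostname = False
    client.verify_mode = ssl.CERT_NONE
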
+ +Partially backports a170fa162dc03f0a014373349e548954fff2e567 +--- + Lib/ssl.py | 7 +- + Lib/test/test_logging.py | 2 +- + Lib/test/test_ssl.py | 169 +++++++++++++++++++-------------------- + 3 files changed, 87 insertions(+), 91 deletions(-) + +diff --git a/Lib/ssl.py b/Lib/ssl.py +index 0114387..c5c5529 100644 +--- a/Lib/ssl.py ++++ b/Lib/ssl.py +@@ -473,7 +473,7 @@ def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None, + context.load_default_certs(purpose) + return context + +-def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=None, ++def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=CERT_NONE, + check_hostname=False, purpose=Purpose.SERVER_AUTH, + certfile=None, keyfile=None, + cafile=None, capath=None, cadata=None): +@@ -492,9 +492,12 @@ def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=None, + # by default. + context = SSLContext(protocol) + ++ if not check_hostname: ++ context.check_hostname = False + if cert_reqs is not None: + context.verify_mode = cert_reqs +- context.check_hostname = check_hostname ++ if check_hostname: ++ context.check_hostname = True + + if keyfile and not certfile: + raise ValueError("certfile must be specified") +diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py +index 763a5d1..d5c63b4 100644 +--- a/Lib/test/test_logging.py ++++ b/Lib/test/test_logging.py +@@ -1830,7 +1830,7 @@ class HTTPHandlerTest(BaseTest): + else: + here = os.path.dirname(__file__) + localhost_cert = os.path.join(here, "keycert.pem") +- sslctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + sslctx.load_cert_chain(localhost_cert) + + context = ssl.create_default_context(cafile=localhost_cert) +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index 639109f..a7bf2f7 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -155,6 +155,8 @@ def test_wrap_socket(sock, ssl_version=ssl.PROTOCOL_TLS, *, + **kwargs): + context = ssl.SSLContext(ssl_version) + if cert_reqs is not None: ++ if cert_reqs == ssl.CERT_NONE: ++ context.check_hostname = False + context.verify_mode = cert_reqs + if ca_certs is not None: + context.load_verify_locations(ca_certs) +@@ -1377,7 +1379,7 @@ class ContextTests(unittest.TestCase): + self._assert_context_options(ctx) + + def test_check_hostname(self): +- ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS) + self.assertFalse(ctx.check_hostname) + + # Requires CERT_REQUIRED or CERT_OPTIONAL +@@ -2386,17 +2388,13 @@ if _have_threads: + server_params_test(context, context, + chatty=True, connectionchatty=True) + +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) +- client_context.load_verify_locations(SIGNING_CA) +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) +- # server_context.load_verify_locations(SIGNING_CA) +- server_context.load_cert_chain(SIGNED_CERTFILE2) ++ client_context, server_context, hostname = testing_context() + + with self.subTest(client=ssl.PROTOCOL_TLS_CLIENT, server=ssl.PROTOCOL_TLS_SERVER): + server_params_test(client_context=client_context, + server_context=server_context, + chatty=True, connectionchatty=True, +- sni_name='fakehostname') ++ sni_name='localhost') + + client_context.check_hostname = False + with self.subTest(client=ssl.PROTOCOL_TLS_SERVER, server=ssl.PROTOCOL_TLS_CLIENT): +@@ -2404,7 +2402,7 @@ if _have_threads: + server_params_test(client_context=server_context, + server_context=client_context, + chatty=True, connectionchatty=True, +- 
sni_name='fakehostname') ++ sni_name='localhost') + self.assertIn('called a function you should not call', + str(e.exception)) + +@@ -2469,39 +2467,38 @@ if _have_threads: + if support.verbose: + sys.stdout.write("\n") + +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(SIGNED_CERTFILE) ++ client_context, server_context, hostname = testing_context() + +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.verify_mode = ssl.CERT_REQUIRED +- context.load_verify_locations(SIGNING_CA) + tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0) +- self.assertEqual(context.verify_flags, ssl.VERIFY_DEFAULT | tf) ++ self.assertEqual(client_context.verify_flags, ssl.VERIFY_DEFAULT | tf) + + # VERIFY_DEFAULT should pass + server = ThreadedEchoServer(context=server_context, chatty=True) + with server: +- with context.wrap_socket(socket.socket()) as s: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: + s.connect((HOST, server.port)) + cert = s.getpeercert() + self.assertTrue(cert, "Can't get peer certificate.") + + # VERIFY_CRL_CHECK_LEAF without a loaded CRL file fails +- context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF ++ client_context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF + + server = ThreadedEchoServer(context=server_context, chatty=True) + with server: +- with context.wrap_socket(socket.socket()) as s: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: + with self.assertRaisesRegex(ssl.SSLError, + "certificate verify failed"): + s.connect((HOST, server.port)) + + # now load a CRL file. The CRL file is signed by the CA. +- context.load_verify_locations(CRLFILE) ++ client_context.load_verify_locations(CRLFILE) + + server = ThreadedEchoServer(context=server_context, chatty=True) + with server: +- with context.wrap_socket(socket.socket()) as s: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: + s.connect((HOST, server.port)) + cert = s.getpeercert() + self.assertTrue(cert, "Can't get peer certificate.") +@@ -2510,19 +2507,13 @@ if _have_threads: + if support.verbose: + sys.stdout.write("\n") + +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(SIGNED_CERTFILE) +- +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.verify_mode = ssl.CERT_REQUIRED +- context.check_hostname = True +- context.load_verify_locations(SIGNING_CA) ++ client_context, server_context, hostname = testing_context() + + # correct hostname should verify + server = ThreadedEchoServer(context=server_context, chatty=True) + with server: +- with context.wrap_socket(socket.socket(), +- server_hostname="localhost") as s: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: + s.connect((HOST, server.port)) + cert = s.getpeercert() + self.assertTrue(cert, "Can't get peer certificate.") +@@ -2530,7 +2521,7 @@ if _have_threads: + # incorrect hostname should raise an exception + server = ThreadedEchoServer(context=server_context, chatty=True) + with server: +- with context.wrap_socket(socket.socket(), ++ with client_context.wrap_socket(socket.socket(), + server_hostname="invalid") as s: + with self.assertRaisesRegex(ssl.CertificateError, + "hostname 'invalid' doesn't match 'localhost'"): +@@ -2542,7 +2533,7 @@ if _have_threads: + with socket.socket() as s: + with self.assertRaisesRegex(ValueError, + "check_hostname requires server_hostname"): +- context.wrap_socket(s) ++ client_context.wrap_socket(s) + + def 
test_wrong_cert(self): + """Connecting when the server rejects the client's certificate +@@ -2767,7 +2758,6 @@ if _have_threads: + msgs = (b"msg 1", b"MSG 2", b"STARTTLS", b"MSG 3", b"msg 4", b"ENDTLS", b"msg 5", b"msg 6") + + server = ThreadedEchoServer(CERTFILE, +- ssl_version=ssl.PROTOCOL_TLSv1, + starttls_server=True, + chatty=True, + connectionchatty=True) +@@ -2795,7 +2785,7 @@ if _have_threads: + sys.stdout.write( + " client: read %r from server, starting TLS...\n" + % msg) +- conn = test_wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1) ++ conn = test_wrap_socket(s) + wrapped = True + elif indata == b"ENDTLS" and msg.startswith(b"ok"): + # ENDTLS ok, switch back to clear text +@@ -2882,7 +2872,7 @@ if _have_threads: + + server = ThreadedEchoServer(CERTFILE, + certreqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1, ++ ssl_version=ssl.PROTOCOL_TLS_SERVER, + cacerts=CERTFILE, + chatty=True, + connectionchatty=False) +@@ -2892,7 +2882,7 @@ if _have_threads: + certfile=CERTFILE, + ca_certs=CERTFILE, + cert_reqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1) ++ ssl_version=ssl.PROTOCOL_TLS_CLIENT) + s.connect((HOST, server.port)) + # helper methods for standardising recv* method signatures + def _recv_into(): +@@ -3034,7 +3024,7 @@ if _have_threads: + def test_nonblocking_send(self): + server = ThreadedEchoServer(CERTFILE, + certreqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1, ++ ssl_version=ssl.PROTOCOL_TLS_SERVER, + cacerts=CERTFILE, + chatty=True, + connectionchatty=False) +@@ -3044,7 +3034,7 @@ if _have_threads: + certfile=CERTFILE, + ca_certs=CERTFILE, + cert_reqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1) ++ ssl_version=ssl.PROTOCOL_TLS_CLIENT) + s.connect((HOST, server.port)) + s.setblocking(False) + +@@ -3190,9 +3180,11 @@ if _have_threads: + Basic tests for SSLSocket.version(). + More tests are done in the test_protocol_*() methods. 
+ """ +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ++ context.check_hostname = False ++ context.verify_mode = ssl.CERT_NONE + with ThreadedEchoServer(CERTFILE, +- ssl_version=ssl.PROTOCOL_TLSv1, ++ ssl_version=ssl.PROTOCOL_TLS_SERVER, + chatty=False) as server: + with context.wrap_socket(socket.socket()) as s: + self.assertIs(s.version(), None) +@@ -3247,7 +3239,7 @@ if _have_threads: + + server = ThreadedEchoServer(CERTFILE, + certreqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1, ++ ssl_version=ssl.PROTOCOL_TLS_SERVER, + cacerts=CERTFILE, + chatty=True, + connectionchatty=False) +@@ -3257,7 +3249,7 @@ if _have_threads: + certfile=CERTFILE, + ca_certs=CERTFILE, + cert_reqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1) ++ ssl_version=ssl.PROTOCOL_TLS_CLIENT) + s.connect((HOST, server.port)) + # get the data + cb_data = s.get_channel_binding("tls-unique") +@@ -3282,7 +3274,7 @@ if _have_threads: + certfile=CERTFILE, + ca_certs=CERTFILE, + cert_reqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1) ++ ssl_version=ssl.PROTOCOL_TLS_CLIENT) + s.connect((HOST, server.port)) + new_cb_data = s.get_channel_binding("tls-unique") + if support.verbose: +@@ -3299,32 +3291,35 @@ if _have_threads: + s.close() + + def test_compression(self): +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + if support.verbose: + sys.stdout.write(" got compression: {!r}\n".format(stats['compression'])) + self.assertIn(stats['compression'], { None, 'ZLIB', 'RLE' }) + ++ + @unittest.skipUnless(hasattr(ssl, 'OP_NO_COMPRESSION'), + "ssl.OP_NO_COMPRESSION needed for this test") + def test_compression_disabled(self): +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- context.options |= ssl.OP_NO_COMPRESSION +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ client_context.options |= ssl.OP_NO_COMPRESSION ++ server_context.options |= ssl.OP_NO_COMPRESSION ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + self.assertIs(stats['compression'], None) + + def test_dh_params(self): + # Check we can get a connection with ephemeral Diffie-Hellman +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- context.load_dh_params(DHFILE) +- context.set_ciphers("kEDH") +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ server_context.load_dh_params(DHFILE) ++ server_context.set_ciphers("kEDH") ++ server_context.options |= ssl.OP_NO_TLSv1_3 ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + cipher = stats["cipher"][0] + parts = cipher.split("-") + if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts: +@@ -3332,22 +3327,20 @@ if _have_threads: + + def test_selected_alpn_protocol(self): + # selected_alpn_protocol() is None unless ALPN is used. 
+- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + self.assertIs(stats['client_alpn_protocol'], None) + + @unittest.skipUnless(ssl.HAS_ALPN, "ALPN support required") + def test_selected_alpn_protocol_if_server_uses_alpn(self): + # selected_alpn_protocol() is None unless ALPN is used by the client. +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- client_context.load_verify_locations(CERTFILE) +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(CERTFILE) ++ client_context, server_context, hostname = testing_context() + server_context.set_alpn_protocols(['foo', 'bar']) + stats = server_params_test(client_context, server_context, +- chatty=True, connectionchatty=True) ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + self.assertIs(stats['client_alpn_protocol'], None) + + @unittest.skipUnless(ssl.HAS_ALPN, "ALPN support needed for this test") +@@ -3394,10 +3387,10 @@ if _have_threads: + + def test_selected_npn_protocol(self): + # selected_npn_protocol() is None unless NPN is used +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + self.assertIs(stats['client_npn_protocol'], None) + + @unittest.skipUnless(ssl.HAS_NPN, "NPN support needed for this test") +@@ -3430,12 +3423,11 @@ if _have_threads: + self.assertEqual(server_result, expected, msg % (server_result, "server")) + + def sni_contexts(self): +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + server_context.load_cert_chain(SIGNED_CERTFILE) +- other_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ other_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + other_context.load_cert_chain(SIGNED_CERTFILE2) +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- client_context.verify_mode = ssl.CERT_REQUIRED ++ client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + client_context.load_verify_locations(SIGNING_CA) + return server_context, other_context, client_context + +@@ -3448,6 +3440,8 @@ if _have_threads: + calls = [] + server_context, other_context, client_context = self.sni_contexts() + ++ client_context.check_hostname = False ++ + def servername_cb(ssl_sock, server_name, initial_context): + calls.append((server_name, initial_context)) + if server_name is not None: +@@ -3533,11 +3527,7 @@ if _have_threads: + self.assertIn("TypeError", stderr.getvalue()) + + def test_shared_ciphers(self): +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(SIGNED_CERTFILE) +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- client_context.verify_mode = ssl.CERT_REQUIRED +- client_context.load_verify_locations(SIGNING_CA) ++ client_context, server_context, hostname = testing_context() + if ssl.OPENSSL_VERSION_INFO >= (1, 0, 2): + client_context.set_ciphers("AES128:AES256") + server_context.set_ciphers("AES256") +@@ -3555,7 +3545,8 @@ if _have_threads: + # TLS 1.3 ciphers are always 
enabled + expected_algs.extend(["TLS_CHACHA20", "TLS_AES"]) + +- stats = server_params_test(client_context, server_context) ++ stats = server_params_test(client_context, server_context, ++ sni_name=hostname) + ciphers = stats['server_shared_ciphers'][0] + self.assertGreater(len(ciphers), 0) + for name, tls_version, bits in ciphers: +@@ -3595,14 +3586,13 @@ if _have_threads: + self.assertEqual(s.recv(1024), TEST_DATA) + + def test_session(self): +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(SIGNED_CERTFILE) +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- client_context.verify_mode = ssl.CERT_REQUIRED +- client_context.load_verify_locations(SIGNING_CA) ++ client_context, server_context, hostname = testing_context() ++ # TODO: sessions aren't compatible with TLSv1.3 yet ++ client_context.options |= ssl.OP_NO_TLSv1_3 + + # first connection without session +- stats = server_params_test(client_context, server_context) ++ stats = server_params_test(client_context, server_context, ++ sni_name=hostname) + session = stats['session'] + self.assertTrue(session.id) + self.assertGreater(session.time, 0) +@@ -3616,7 +3606,8 @@ if _have_threads: + self.assertEqual(sess_stat['hits'], 0) + + # reuse session +- stats = server_params_test(client_context, server_context, session=session) ++ stats = server_params_test(client_context, server_context, ++ session=session, sni_name=hostname) + sess_stat = server_context.session_stats() + self.assertEqual(sess_stat['accept'], 2) + self.assertEqual(sess_stat['hits'], 1) +@@ -3629,7 +3620,8 @@ if _have_threads: + self.assertGreaterEqual(session2.timeout, session.timeout) + + # another one without session +- stats = server_params_test(client_context, server_context) ++ stats = server_params_test(client_context, server_context, ++ sni_name=hostname) + self.assertFalse(stats['session_reused']) + session3 = stats['session'] + self.assertNotEqual(session3.id, session.id) +@@ -3639,7 +3631,8 @@ if _have_threads: + self.assertEqual(sess_stat['hits'], 1) + + # reuse session again +- stats = server_params_test(client_context, server_context, session=session) ++ stats = server_params_test(client_context, server_context, ++ session=session, sni_name=hostname) + self.assertTrue(stats['session_reused']) + session4 = stats['session'] + self.assertEqual(session4.id, session.id) +-- +2.21.0 + + +From 743c3e09b485092b51a982ab9859ffc79cbb7791 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Wed, 27 Nov 2019 00:01:17 +0100 +Subject: [PATCH 4/5] Adjust some tests for TLS 1.3 compatibility + +Partially backports some changes from 529525fb5a8fd9b96ab4021311a598c77588b918 +and 2614ed4c6e4b32eafb683f2378ed20e87d42976d +--- + Lib/test/test_ssl.py | 17 ++++++++++++++--- + 1 file changed, 14 insertions(+), 3 deletions(-) + +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index a7bf2f7..43c2dbc 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -3189,7 +3189,12 @@ if _have_threads: + with context.wrap_socket(socket.socket()) as s: + self.assertIs(s.version(), None) + s.connect((HOST, server.port)) +- self.assertEqual(s.version(), 'TLSv1') ++ if IS_OPENSSL_1_1: ++ self.assertEqual(s.version(), 'TLSv1.3') ++ elif ssl.OPENSSL_VERSION_INFO >= (1, 0, 2): ++ self.assertEqual(s.version(), 'TLSv1.2') ++ else: # 0.9.8 to 1.0.1 ++ self.assertIn(s.version(), ('TLSv1', 'TLSv1.2')) + self.assertIs(s.version(), None) + + @unittest.skipUnless(ssl.HAS_TLSv1_3, +@@ -3259,7 +3264,10 @@ if _have_threads: + + # check if it is 
sane + self.assertIsNotNone(cb_data) +- self.assertEqual(len(cb_data), 12) # True for TLSv1 ++ if s.version() == 'TLSv1.3': ++ self.assertEqual(len(cb_data), 48) ++ else: ++ self.assertEqual(len(cb_data), 12) # True for TLSv1 + + # and compare with the peers version + s.write(b"CB tls-unique\n") +@@ -3283,7 +3291,10 @@ if _have_threads: + # is it really unique + self.assertNotEqual(cb_data, new_cb_data) + self.assertIsNotNone(cb_data) +- self.assertEqual(len(cb_data), 12) # True for TLSv1 ++ if s.version() == 'TLSv1.3': ++ self.assertEqual(len(cb_data), 48) ++ else: ++ self.assertEqual(len(cb_data), 12) # True for TLSv1 + s.write(b"CB tls-unique\n") + peer_data_repr = s.read().strip() + self.assertEqual(peer_data_repr, +-- +2.21.0 + + +From cd250c8a782f36c7a6f5ffabc922cb75744fa9c0 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Tue, 26 Nov 2019 23:18:10 +0100 +Subject: [PATCH 5/5] Skip the ssl tests that rely on TLSv1 and TLSv1.1 + availability + +--- + Lib/test/test_ssl.py | 32 +++++++++++++++++++++++--------- + 1 file changed, 23 insertions(+), 9 deletions(-) + +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index 43c2dbc..b35db25 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -39,6 +39,13 @@ IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL') + IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0) + PY_SSL_DEFAULT_CIPHERS = sysconfig.get_config_var('PY_SSL_DEFAULT_CIPHERS') + ++# On RHEL8 openssl disables TLSv1 and TLSv1.1 on runtime. ++# Since we don't have a good way to detect runtime changes ++# on the allowed protocols, we hardcode the default config ++# with those flags. ++TLSv1_enabled = False ++TLSv1_1_enabled = False ++ + def data_file(*name): + return os.path.join(os.path.dirname(__file__), *name) + +@@ -2380,7 +2387,8 @@ if _have_threads: + if support.verbose: + sys.stdout.write("\n") + for protocol in PROTOCOLS: +- if protocol in {ssl.PROTOCOL_TLS_CLIENT, ssl.PROTOCOL_TLS_SERVER}: ++ if protocol in {ssl.PROTOCOL_TLS_CLIENT, ssl.PROTOCOL_TLS_SERVER, ++ ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1}: + continue + with self.subTest(protocol=ssl._PROTOCOL_NAMES[protocol]): + context = ssl.SSLContext(protocol) +@@ -2650,17 +2658,20 @@ if _have_threads: + if hasattr(ssl, 'PROTOCOL_SSLv3'): + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True) +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1') ++ if TLSv1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1') + + if hasattr(ssl, 'PROTOCOL_SSLv3'): + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_OPTIONAL) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_OPTIONAL) +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) ++ if TLSv1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) + + if hasattr(ssl, 'PROTOCOL_SSLv3'): + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_REQUIRED) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED) +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) ++ if TLSv1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) + + # Server with specific SSL options + if hasattr(ssl, 'PROTOCOL_SSLv3'): +@@ -2698,9 +2709,10 @@ if 
_have_threads: + """Connecting to a TLSv1 server with various client options""" + if support.verbose: + sys.stdout.write("\n") +- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1') +- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) +- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) ++ if TLSv1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1') ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) + if hasattr(ssl, 'PROTOCOL_SSLv2'): + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False) + if hasattr(ssl, 'PROTOCOL_SSLv3'): +@@ -2716,7 +2728,8 @@ if _have_threads: + Testing against older TLS versions.""" + if support.verbose: + sys.stdout.write("\n") +- try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') ++ if TLSv1_1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') + if hasattr(ssl, 'PROTOCOL_SSLv2'): + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv2, False) + if hasattr(ssl, 'PROTOCOL_SSLv3'): +@@ -2724,7 +2737,8 @@ if _have_threads: + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv23, False, + client_options=ssl.OP_NO_TLSv1_1) + +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') ++ if TLSv1_1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1, False) + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1, False) + +-- +2.21.0 + diff --git a/SOURCES/00318-test-ssl-fix-for-tls-13.patch b/SOURCES/00318-test-ssl-fix-for-tls-13.patch deleted file mode 100644 index 7b00d75..0000000 --- a/SOURCES/00318-test-ssl-fix-for-tls-13.patch +++ /dev/null @@ -1,44 +0,0 @@ -bpo-32947: test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1 - -Backport partially commit 529525fb5a8fd9b96ab4021311a598c77588b918: -complete the previous partial backport (commit -2a4ee8aa01d61b6a9c8e9c65c211e61bdb471826. - -Reported upstream: - -* https://bugs.python.org/issue32947#msg333990 -* https://github.com/python/cpython/pull/11612 - -diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py -index 7f8f636..05c09a6 100644 ---- a/Lib/test/test_ssl.py -+++ b/Lib/test/test_ssl.py -@@ -2021,6 +2021,16 @@ if _have_threads: - sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n" - % (msg, ctype, msg.lower(), ctype)) - self.write(msg.lower()) -+ except ConnectionResetError: -+ # XXX: OpenSSL 1.1.1 sometimes raises ConnectionResetError -+ # when connection is not shut down gracefully. 
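The superseded patch being deleted here and the larger 00318-fixes-for-tls-13.patch added earlier carry the same guard: under OpenSSL 1.1.1, a TLS 1.3 connection that is not shut down gracefully can surface as ConnectionResetError rather than a clean EOF, and the test echo server must treat that as a normal close instead of a failure. A stripped-down sketch of that read loop, assuming conn is an already connected (and wrapped) socket object supplied by the caller:

    def echo_until_closed(conn):
        # Echo lower-cased data back until the peer disconnects; an abrupt
        # reset (seen with TLS 1.3 / OpenSSL 1.1.1) counts as a normal close.
        try:
            while True:
                msg = conn.recv(1024)
                if not msg:
                    break
                conn.sendall(msg.lower())
        except ConnectionResetError:
            pass
        finally:
            conn.close()
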
-+ if self.server.chatty and support.verbose: -+ sys.stdout.write( -+ " Connection reset by peer: {}\n".format( -+ self.addr) -+ ) -+ self.close() -+ self.running = False - except OSError: - if self.server.chatty: - handle_error("Test server failure:\n") -@@ -2100,6 +2110,11 @@ if _have_threads: - pass - except KeyboardInterrupt: - self.stop() -+ except BaseException as e: -+ if support.verbose and self.chatty: -+ sys.stdout.write( -+ ' connection handling failed: ' + repr(e) + '\n') -+ - self.sock.close() - - def stop(self): diff --git a/SOURCES/00320-CVE-2019-9636.patch b/SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch similarity index 100% rename from SOURCES/00320-CVE-2019-9636.patch rename to SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch diff --git a/SOURCES/00326-do-not-set-PHA-verify-flag-on-client-side.patch b/SOURCES/00326-do-not-set-PHA-verify-flag-on-client-side.patch new file mode 100644 index 0000000..4584ce1 --- /dev/null +++ b/SOURCES/00326-do-not-set-PHA-verify-flag-on-client-side.patch @@ -0,0 +1,117 @@ +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index 883201f..cf4d84d 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -3891,6 +3891,37 @@ class TestPostHandshakeAuth(unittest.TestCase): + s.write(b'PHA') + self.assertIn(b'WRONG_SSL_VERSION', s.recv(1024)) + ++ def test_bpo37428_pha_cert_none(self): ++ # verify that post_handshake_auth does not implicitly enable cert ++ # validation. ++ hostname = 'localhost' ++ client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ++ client_context.post_handshake_auth = True ++ client_context.load_cert_chain(SIGNED_CERTFILE) ++ # no cert validation and CA on client side ++ client_context.check_hostname = False ++ client_context.verify_mode = ssl.CERT_NONE ++ ++ server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) ++ server_context.load_cert_chain(SIGNED_CERTFILE) ++ server_context.load_verify_locations(SIGNING_CA) ++ server_context.post_handshake_auth = True ++ server_context.verify_mode = ssl.CERT_REQUIRED ++ ++ server = ThreadedEchoServer(context=server_context, chatty=False) ++ with server: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: ++ s.connect((HOST, server.port)) ++ s.write(b'HASCERT') ++ self.assertEqual(s.recv(1024), b'FALSE\n') ++ s.write(b'PHA') ++ self.assertEqual(s.recv(1024), b'OK\n') ++ s.write(b'HASCERT') ++ self.assertEqual(s.recv(1024), b'TRUE\n') ++ # server cert has not been validated ++ self.assertEqual(s.getpeercert(), {}) ++ + + def test_main(verbose=False): + if support.verbose: +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +index ec366f0..9bf1cde 100644 +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -732,6 +732,26 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, + #endif + SSL_set_mode(self->ssl, mode); + ++#ifdef TLS1_3_VERSION ++ if (sslctx->post_handshake_auth == 1) { ++ if (socket_type == PY_SSL_SERVER) { ++ /* bpo-37428: OpenSSL does not ignore SSL_VERIFY_POST_HANDSHAKE. ++ * Set SSL_VERIFY_POST_HANDSHAKE flag only for server sockets and ++ * only in combination with SSL_VERIFY_PEER flag. 
*/ ++ int mode = SSL_get_verify_mode(self->ssl); ++ if (mode & SSL_VERIFY_PEER) { ++ int (*verify_cb)(int, X509_STORE_CTX *) = NULL; ++ verify_cb = SSL_get_verify_callback(self->ssl); ++ mode |= SSL_VERIFY_POST_HANDSHAKE; ++ SSL_set_verify(self->ssl, mode, verify_cb); ++ } ++ } else { ++ /* client socket */ ++ SSL_set_post_handshake_auth(self->ssl, 1); ++ } ++ } ++#endif ++ + #if HAVE_SNI + if (server_hostname != NULL) { + /* Don't send SNI for IP addresses. We cannot simply use inet_aton() and +@@ -2765,10 +2785,10 @@ _set_verify_mode(PySSLContext *self, enum py_ssl_cert_requirements n) + "invalid value for verify_mode"); + return -1; + } +-#ifdef TLS1_3_VERSION +- if (self->post_handshake_auth) +- mode |= SSL_VERIFY_POST_HANDSHAKE; +-#endif ++ ++ /* bpo-37428: newPySSLSocket() sets SSL_VERIFY_POST_HANDSHAKE flag for ++ * server sockets and SSL_set_post_handshake_auth() for client. */ ++ + /* keep current verify cb */ + verify_cb = SSL_CTX_get_verify_callback(self->ctx); + SSL_CTX_set_verify(self->ctx, mode, verify_cb); +@@ -3346,8 +3366,6 @@ get_post_handshake_auth(PySSLContext *self, void *c) { + #if TLS1_3_VERSION + static int + set_post_handshake_auth(PySSLContext *self, PyObject *arg, void *c) { +- int (*verify_cb)(int, X509_STORE_CTX *) = NULL; +- int mode = SSL_CTX_get_verify_mode(self->ctx); + int pha = PyObject_IsTrue(arg); + + if (pha == -1) { +@@ -3355,17 +3373,8 @@ set_post_handshake_auth(PySSLContext *self, PyObject *arg, void *c) { + } + self->post_handshake_auth = pha; + +- /* client-side socket setting, ignored by server-side */ +- SSL_CTX_set_post_handshake_auth(self->ctx, pha); +- +- /* server-side socket setting, ignored by client-side */ +- verify_cb = SSL_CTX_get_verify_callback(self->ctx); +- if (pha) { +- mode |= SSL_VERIFY_POST_HANDSHAKE; +- } else { +- mode ^= SSL_VERIFY_POST_HANDSHAKE; +- } +- SSL_CTX_set_verify(self->ctx, mode, verify_cb); ++ /* bpo-37428: newPySSLSocket() sets SSL_VERIFY_POST_HANDSHAKE flag for ++ * server sockets and SSL_set_post_handshake_auth() for client. */ + + return 0; + } diff --git a/SOURCES/00327-enable-tls-1.3-PHA-in-http.client.patch b/SOURCES/00327-enable-tls-1.3-PHA-in-http.client.patch new file mode 100644 index 0000000..bf92e84 --- /dev/null +++ b/SOURCES/00327-enable-tls-1.3-PHA-in-http.client.patch @@ -0,0 +1,70 @@ +diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst +index 2f59ece..d756916 100644 +--- a/Doc/library/http.client.rst ++++ b/Doc/library/http.client.rst +@@ -88,6 +88,11 @@ The module provides the following classes: + :func:`ssl._create_unverified_context` can be passed to the *context* + parameter. + ++ .. versionchanged:: 3.7.4 ++ This class now enables TLS 1.3 ++ :attr:`ssl.SSLContext.post_handshake_auth` for the default *context* or ++ when *cert_file* is passed with a custom *context*. ++ + .. deprecated:: 3.6 + + *key_file* and *cert_file* are deprecated in favor of *context*. 
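A usage sketch of what the versionchanged note above describes (hostname, CA bundle and certificate paths are placeholders): passing cert_file together with a custom context both loads the chain and switches post_handshake_auth on, so a TLS 1.3 server can request the client certificate after the handshake.

    import ssl
    import http.client

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.load_verify_locations("/path/to/ca.pem")      # placeholder CA bundle

    conn = http.client.HTTPSConnection(
        "server.example.com", 443,
        context=ctx,
        cert_file="/path/to/client.pem",              # placeholder client cert+key
    )
    conn.request("GET", "/protected")
    print(conn.getresponse().status)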
+diff --git a/Lib/http/client.py b/Lib/http/client.py +index 1a6bd8a..f0d2642 100644 +--- a/Lib/http/client.py ++++ b/Lib/http/client.py +@@ -1390,6 +1390,9 @@ else: + self.cert_file = cert_file + if context is None: + context = ssl._create_default_https_context() ++ # enable PHA for TLS 1.3 connections if available ++ if context.post_handshake_auth is not None: ++ context.post_handshake_auth = True + will_verify = context.verify_mode != ssl.CERT_NONE + if check_hostname is None: + check_hostname = context.check_hostname +@@ -1398,6 +1401,10 @@ else: + "either CERT_OPTIONAL or CERT_REQUIRED") + if key_file or cert_file: + context.load_cert_chain(cert_file, key_file) ++ # cert and key file means the user wants to authenticate. ++ # enable TLS 1.3 PHA implicitly even for custom contexts. ++ if context.post_handshake_auth is not None: ++ context.post_handshake_auth = True + self._context = context + self._check_hostname = check_hostname + +diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py +index 714d521..5795b7a 100644 +--- a/Lib/test/test_httplib.py ++++ b/Lib/test/test_httplib.py +@@ -1709,6 +1709,24 @@ class HTTPSTest(TestCase): + self.assertEqual(h, c.host) + self.assertEqual(p, c.port) + ++ def test_tls13_pha(self): ++ import ssl ++ if not ssl.HAS_TLSv1_3: ++ self.skipTest('TLS 1.3 support required') ++ # just check status of PHA flag ++ h = client.HTTPSConnection('localhost', 443) ++ self.assertTrue(h._context.post_handshake_auth) ++ ++ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ++ self.assertFalse(context.post_handshake_auth) ++ h = client.HTTPSConnection('localhost', 443, context=context) ++ self.assertIs(h._context, context) ++ self.assertFalse(h._context.post_handshake_auth) ++ ++ h = client.HTTPSConnection('localhost', 443, context=context, ++ cert_file=CERT_localhost) ++ self.assertTrue(h._context.post_handshake_auth) ++ + + class RequestBodyTest(TestCase): + """Test cases where a request includes a message body.""" diff --git a/SOURCES/00329-fips.patch b/SOURCES/00329-fips.patch new file mode 100644 index 0000000..e250caa --- /dev/null +++ b/SOURCES/00329-fips.patch @@ -0,0 +1,5571 @@ +From 53d108c5103c99079f1f231b0e731f3f47a142fc Mon Sep 17 00:00:00 2001 +From: "Miss Islington (bot)" + <31488909+miss-islington@users.noreply.github.com> +Date: Wed, 25 Sep 2019 08:50:31 -0700 +Subject: [PATCH 01/36] [3.8] bpo-38270: Check for hash digest algorithms and + avoid MD5 (GH-16382) (GH-16393) + +Make it easier to run and test Python on systems with restrict crypto policies: + +* add requires_hashdigest to test.support to check if a hash digest algorithm is available and working +* avoid MD5 in test_hmac +* replace MD5 with SHA256 in test_tarfile +* mark network tests that require MD5 for MD5-based digest auth or CRAM-MD5 + +https://bugs.python.org/issue38270 +(cherry picked from commit c64a1a61e6fc542cada40eb069a239317e1af36e) + +Co-authored-by: Christian Heimes + +https://bugs.python.org/issue38270 + +Automerge-Triggered-By: @tiran +--- + Lib/test/support/__init__.py | 22 ++++++++++ + Lib/test/test_hmac.py | 67 +++++++++++++++++++------------ + Lib/test/test_imaplib.py | 6 ++- + Lib/test/test_poplib.py | 2 + + Lib/test/test_smtplib.py | 11 ++++- + Lib/test/test_tarfile.py | 56 ++++++++++++++------------ + Lib/test/test_urllib2_localnet.py | 1 + + 7 files changed, 112 insertions(+), 53 deletions(-) + +diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py +index 66c0fed8411..a43819904e0 100644 +--- a/Lib/test/support/__init__.py ++++ 
b/Lib/test/support/__init__.py +@@ -11,6 +11,7 @@ import faulthandler + import fnmatch + import functools + import gc ++import hashlib + import importlib + import importlib.util + import io +@@ -627,6 +628,27 @@ def requires_mac_ver(*min_version): + return wrapper + return decorator + ++def requires_hashdigest(digestname): ++ """Decorator raising SkipTest if a hashing algorithm is not available ++ ++ The hashing algorithm could be missing or blocked by a strict crypto ++ policy. ++ ++ ValueError: [digital envelope routines: EVP_DigestInit_ex] disabled for FIPS ++ ValueError: unsupported hash type md4 ++ """ ++ def decorator(func): ++ @functools.wraps(func) ++ def wrapper(*args, **kwargs): ++ try: ++ hashlib.new(digestname) ++ except ValueError: ++ raise unittest.SkipTest( ++ f"hash digest '{digestname}' is not available." ++ ) ++ return func(*args, **kwargs) ++ return wrapper ++ return decorator + + # Don't use "localhost", since resolving it uses the DNS under recent + # Windows versions (see issue #18792). +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 067e13f1079..81c3485f761 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -4,6 +4,8 @@ import hashlib + import unittest + import warnings + ++from test.support import requires_hashdigest ++ + + def ignore_warning(func): + @functools.wraps(func) +@@ -17,6 +19,7 @@ def ignore_warning(func): + + class TestVectorsTestCase(unittest.TestCase): + ++ @requires_hashdigest('md5') + def test_md5_vectors(self): + # Test the HMAC module against test vectors from the RFC. + +@@ -63,6 +66,7 @@ class TestVectorsTestCase(unittest.TestCase): + b"and Larger Than One Block-Size Data"), + "6f630fad67cda0ee1fb1f562db3aa53e") + ++ @requires_hashdigest('sha1') + def test_sha_vectors(self): + def shatest(key, data, digest): + h = hmac.HMAC(key, data, digestmod=hashlib.sha1) +@@ -230,23 +234,28 @@ class TestVectorsTestCase(unittest.TestCase): + '134676fb6de0446065c97440fa8c6a58', + }) + ++ @requires_hashdigest('sha224') + def test_sha224_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha224, 'sha224', 28, 64) + ++ @requires_hashdigest('sha256') + def test_sha256_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha256, 'sha256', 32, 64) + ++ @requires_hashdigest('sha384') + def test_sha384_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha384, 'sha384', 48, 128) + ++ @requires_hashdigest('sha512') + def test_sha512_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128) + ++ @requires_hashdigest('sha256') + def test_legacy_block_size_warnings(self): + class MockCrazyHash(object): + """Ain't no block_size attribute here.""" + def __init__(self, *args): +- self._x = hashlib.sha1(*args) ++ self._x = hashlib.sha256(*args) + self.digest_size = self._x.digest_size + def update(self, v): + self._x.update(v) +@@ -273,76 +282,80 @@ class TestVectorsTestCase(unittest.TestCase): + self.assertEqual(h.hexdigest().upper(), digest) + + ++ + class ConstructorTestCase(unittest.TestCase): + ++ expected = ( ++ "6c845b47f52b3b47f6590c502db7825aad757bf4fadc8fa972f7cd2e76a5bdeb" ++ ) + @ignore_warning ++ @requires_hashdigest('sha256') + def test_normal(self): + # Standard constructor call. 
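The decorator introduced above is then applied across the test suite in the pattern sketched below; the class and method names here are illustrative, not taken from the patch:

    import hmac
    import unittest
    from test.support import requires_hashdigest

    class ExampleDigestTests(unittest.TestCase):
        @requires_hashdigest('md5')
        def test_md5_hmac(self):
            # Skipped automatically when MD5 is blocked by a strict crypto policy.
            mac = hmac.new(b"key", b"message", digestmod='md5')
            self.assertEqual(len(mac.digest()), 16)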
+- failed = 0 + try: +- h = hmac.HMAC(b"key") ++ hmac.HMAC(b"key", digestmod='sha256') + except Exception: + self.fail("Standard constructor call raised exception.") + + @ignore_warning ++ @requires_hashdigest('sha256') + def test_with_str_key(self): + # Pass a key of type str, which is an error, because it expects a key + # of type bytes + with self.assertRaises(TypeError): +- h = hmac.HMAC("key") ++ h = hmac.HMAC("key", digestmod='sha256') + +- @ignore_warning ++ @requires_hashdigest('sha256') + def test_dot_new_with_str_key(self): + # Pass a key of type str, which is an error, because it expects a key + # of type bytes + with self.assertRaises(TypeError): +- h = hmac.new("key") ++ h = hmac.HMAC("key", digestmod='sha256') + + @ignore_warning ++ @requires_hashdigest('sha256') + def test_withtext(self): + # Constructor call with text. + try: +- h = hmac.HMAC(b"key", b"hash this!") ++ h = hmac.HMAC(b"key", b"hash this!", digestmod='sha256') + except Exception: + self.fail("Constructor call with text argument raised exception.") +- self.assertEqual(h.hexdigest(), '34325b639da4cfd95735b381e28cb864') ++ self.assertEqual(h.hexdigest(), self.expected) + ++ @requires_hashdigest('sha256') + def test_with_bytearray(self): + try: + h = hmac.HMAC(bytearray(b"key"), bytearray(b"hash this!"), +- digestmod="md5") ++ digestmod="sha256") + except Exception: + self.fail("Constructor call with bytearray arguments raised exception.") +- self.assertEqual(h.hexdigest(), '34325b639da4cfd95735b381e28cb864') ++ self.assertEqual(h.hexdigest(), self.expected) + ++ @requires_hashdigest('sha256') + def test_with_memoryview_msg(self): + try: +- h = hmac.HMAC(b"key", memoryview(b"hash this!"), digestmod="md5") ++ h = hmac.HMAC(b"key", memoryview(b"hash this!"), digestmod="sha256") + except Exception: + self.fail("Constructor call with memoryview msg raised exception.") +- self.assertEqual(h.hexdigest(), '34325b639da4cfd95735b381e28cb864') ++ self.assertEqual(h.hexdigest(), self.expected) + ++ @requires_hashdigest('sha256') + def test_withmodule(self): + # Constructor call with text and digest module. + try: +- h = hmac.HMAC(b"key", b"", hashlib.sha1) ++ h = hmac.HMAC(b"key", b"", hashlib.sha256) + except Exception: +- self.fail("Constructor call with hashlib.sha1 raised exception.") ++ self.fail("Constructor call with hashlib.sha256 raised exception.") + +-class SanityTestCase(unittest.TestCase): + +- @ignore_warning +- def test_default_is_md5(self): +- # Testing if HMAC defaults to MD5 algorithm. +- # NOTE: this whitebox test depends on the hmac class internals +- h = hmac.HMAC(b"key") +- self.assertEqual(h.digest_cons, hashlib.md5) ++class SanityTestCase(unittest.TestCase): + ++ @requires_hashdigest('sha256') + def test_exercise_all_methods(self): + # Exercising all methods once. + # This must not raise any exceptions + try: +- h = hmac.HMAC(b"my secret key", digestmod="md5") ++ h = hmac.HMAC(b"my secret key", digestmod="sha256") + h.update(b"compute the hash of this text!") + dig = h.digest() + dig = h.hexdigest() +@@ -350,11 +363,13 @@ class SanityTestCase(unittest.TestCase): + except Exception: + self.fail("Exception raised during normal usage of HMAC class.") + ++ + class CopyTestCase(unittest.TestCase): + ++ @requires_hashdigest('sha256') + def test_attributes(self): + # Testing if attributes are of same type. 
+- h1 = hmac.HMAC(b"key", digestmod="md5") ++ h1 = hmac.HMAC(b"key", digestmod="sha256") + h2 = h1.copy() + self.assertTrue(h1.digest_cons == h2.digest_cons, + "digest constructors don't match.") +@@ -363,9 +378,10 @@ class CopyTestCase(unittest.TestCase): + self.assertEqual(type(h1.outer), type(h2.outer), + "Types of outer don't match.") + ++ @requires_hashdigest('sha256') + def test_realcopy(self): + # Testing if the copy method created a real copy. +- h1 = hmac.HMAC(b"key", digestmod="md5") ++ h1 = hmac.HMAC(b"key", digestmod="sha256") + h2 = h1.copy() + # Using id() in case somebody has overridden __eq__/__ne__. + self.assertTrue(id(h1) != id(h2), "No real copy of the HMAC instance.") +@@ -374,9 +390,10 @@ class CopyTestCase(unittest.TestCase): + self.assertTrue(id(h1.outer) != id(h2.outer), + "No real copy of the attribute 'outer'.") + ++ @requires_hashdigest('sha256') + def test_equality(self): + # Testing if the copy has the same digests. +- h1 = hmac.HMAC(b"key", digestmod="md5") ++ h1 = hmac.HMAC(b"key", digestmod="sha256") + h1.update(b"some random text") + h2 = h1.copy() + self.assertEqual(h1.digest(), h2.digest(), +diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py +index 0593a3756b0..086cc0ac4e0 100644 +--- a/Lib/test/test_imaplib.py ++++ b/Lib/test/test_imaplib.py +@@ -14,7 +14,8 @@ import calendar + import inspect + + from test.support import (reap_threads, verbose, transient_internet, +- run_with_tz, run_with_locale, cpython_only) ++ run_with_tz, run_with_locale, cpython_only, ++ requires_hashdigest) + import unittest + from unittest import mock + from datetime import datetime, timezone, timedelta +@@ -369,6 +370,7 @@ class NewIMAPTestsMixin(): + self.assertEqual(code, 'OK') + self.assertEqual(server.response, b'ZmFrZQ==\r\n') # b64 encoded 'fake' + ++ @requires_hashdigest('md5') + def test_login_cram_md5_bytes(self): + class AuthHandler(SimpleIMAPHandler): + capabilities = 'LOGINDISABLED AUTH=CRAM-MD5' +@@ -386,6 +388,7 @@ class NewIMAPTestsMixin(): + ret, _ = client.login_cram_md5("tim", b"tanstaaftanstaaf") + self.assertEqual(ret, "OK") + ++ @requires_hashdigest('md5') + def test_login_cram_md5_plain_text(self): + class AuthHandler(SimpleIMAPHandler): + capabilities = 'LOGINDISABLED AUTH=CRAM-MD5' +@@ -797,6 +800,7 @@ class ThreadedNetworkedTests(unittest.TestCase): + b'ZmFrZQ==\r\n') # b64 encoded 'fake' + + @reap_threads ++ @requires_hashdigest('md5') + def test_login_cram_md5(self): + + class AuthHandler(SimpleIMAPHandler): +diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py +index 234c855545c..b8146be3a8c 100644 +--- a/Lib/test/test_poplib.py ++++ b/Lib/test/test_poplib.py +@@ -304,9 +304,11 @@ class TestPOP3Class(TestCase): + def test_rpop(self): + self.assertOK(self.client.rpop('foo')) + ++ @test_support.requires_hashdigest('md5') + def test_apop_normal(self): + self.assertOK(self.client.apop('foo', 'dummypassword')) + ++ @test_support.requires_hashdigest('md5') + def test_apop_REDOS(self): + # Replace welcome with very long evil welcome. + # NB The upper bound on welcome length is currently 2048. 
+diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py +index 87047514e7a..64b3201254a 100644 +--- a/Lib/test/test_smtplib.py ++++ b/Lib/test/test_smtplib.py +@@ -4,6 +4,7 @@ import email.mime.text + from email.message import EmailMessage + from email.base64mime import body_encode as encode_base64 + import email.utils ++import hashlib + import hmac + import socket + import smtpd +@@ -18,6 +19,7 @@ import textwrap + + import unittest + from test import support, mock_socket ++from test.support import requires_hashdigest + from unittest.mock import Mock + + HOST = "localhost" +@@ -968,6 +970,7 @@ class SMTPSimTests(unittest.TestCase): + self.assertEqual(resp, (235, b'Authentication Succeeded')) + smtp.close() + ++ @requires_hashdigest('md5') + def testAUTH_CRAM_MD5(self): + self.serv.add_feature("AUTH CRAM-MD5") + smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) +@@ -984,7 +987,13 @@ class SMTPSimTests(unittest.TestCase): + smtp.close() + + def test_auth_function(self): +- supported = {'CRAM-MD5', 'PLAIN', 'LOGIN'} ++ supported = {'PLAIN', 'LOGIN'} ++ try: ++ hashlib.md5() ++ except ValueError: ++ pass ++ else: ++ supported.add('CRAM-MD5') + for mechanism in supported: + self.serv.add_feature("AUTH {}".format(mechanism)) + for mechanism in supported: +diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py +index 4cd7d5370f5..b5e855e0d7e 100644 +--- a/Lib/test/test_tarfile.py ++++ b/Lib/test/test_tarfile.py +@@ -1,7 +1,7 @@ + import sys + import os + import io +-from hashlib import md5 ++from hashlib import sha256 + from contextlib import contextmanager + from random import Random + import pathlib +@@ -11,7 +11,7 @@ import unittest.mock + import tarfile + + from test import support +-from test.support import script_helper ++from test.support import script_helper, requires_hashdigest + + # Check for our compression modules. 
+ try: +@@ -27,8 +27,8 @@ try: + except ImportError: + lzma = None + +-def md5sum(data): +- return md5(data).hexdigest() ++def sha256sum(data): ++ return sha256(data).hexdigest() + + TEMPDIR = os.path.abspath(support.TESTFN) + "-tardir" + tarextdir = TEMPDIR + '-extract-test' +@@ -39,8 +39,12 @@ xzname = os.path.join(TEMPDIR, "testtar.tar.xz") + tmpname = os.path.join(TEMPDIR, "tmp.tar") + dotlessname = os.path.join(TEMPDIR, "testtar") + +-md5_regtype = "65f477c818ad9e15f7feab0c6d37742f" +-md5_sparse = "a54fbc4ca4f4399a90e1b27164012fc6" ++sha256_regtype = ( ++ "e09e4bc8b3c9d9177e77256353b36c159f5f040531bbd4b024a8f9b9196c71ce" ++) ++sha256_sparse = ( ++ "4f05a776071146756345ceee937b33fc5644f5a96b9780d1c7d6a32cdf164d7b" ++) + + + class TarTest: +@@ -95,7 +99,7 @@ class UstarReadTest(ReadTest, unittest.TestCase): + data = fobj.read() + self.assertEqual(len(data), tarinfo.size, + "regular file extraction failed") +- self.assertEqual(md5sum(data), md5_regtype, ++ self.assertEqual(sha256sum(data), sha256_regtype, + "regular file extraction failed") + + def test_fileobj_readlines(self): +@@ -180,7 +184,7 @@ class UstarReadTest(ReadTest, unittest.TestCase): + with self.tar.extractfile("ustar/regtype") as fobj: + fobj = io.TextIOWrapper(fobj) + data = fobj.read().encode("iso8859-1") +- self.assertEqual(md5sum(data), md5_regtype) ++ self.assertEqual(sha256sum(data), sha256_regtype) + try: + fobj.seek(100) + except AttributeError: +@@ -546,13 +550,13 @@ class MiscReadTestBase(CommonReadTest): + self.addCleanup(support.unlink, os.path.join(TEMPDIR, "ustar/lnktype")) + with open(os.path.join(TEMPDIR, "ustar/lnktype"), "rb") as f: + data = f.read() +- self.assertEqual(md5sum(data), md5_regtype) ++ self.assertEqual(sha256sum(data), sha256_regtype) + + tar.extract("ustar/symtype", TEMPDIR) + self.addCleanup(support.unlink, os.path.join(TEMPDIR, "ustar/symtype")) + with open(os.path.join(TEMPDIR, "ustar/symtype"), "rb") as f: + data = f.read() +- self.assertEqual(md5sum(data), md5_regtype) ++ self.assertEqual(sha256sum(data), sha256_regtype) + + def test_extractall(self): + # Test if extractall() correctly restores directory permissions +@@ -687,7 +691,7 @@ class StreamReadTest(CommonReadTest, unittest.TestCase): + data = fobj.read() + self.assertEqual(len(data), tarinfo.size, + "regular file extraction failed") +- self.assertEqual(md5sum(data), md5_regtype, ++ self.assertEqual(sha256sum(data), sha256_regtype, + "regular file extraction failed") + + def test_provoke_stream_error(self): +@@ -799,8 +803,8 @@ class MemberReadTest(ReadTest, unittest.TestCase): + def _test_member(self, tarinfo, chksum=None, **kwargs): + if chksum is not None: + with self.tar.extractfile(tarinfo) as f: +- self.assertEqual(md5sum(f.read()), chksum, +- "wrong md5sum for %s" % tarinfo.name) ++ self.assertEqual(sha256sum(f.read()), chksum, ++ "wrong sha256sum for %s" % tarinfo.name) + + kwargs["mtime"] = 0o7606136617 + kwargs["uid"] = 1000 +@@ -815,11 +819,11 @@ class MemberReadTest(ReadTest, unittest.TestCase): + + def test_find_regtype(self): + tarinfo = self.tar.getmember("ustar/regtype") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + def test_find_conttype(self): + tarinfo = self.tar.getmember("ustar/conttype") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + def test_find_dirtype(self): + tarinfo = self.tar.getmember("ustar/dirtype") +@@ -851,28 +855,28 @@ class 
MemberReadTest(ReadTest, unittest.TestCase): + + def test_find_sparse(self): + tarinfo = self.tar.getmember("ustar/sparse") +- self._test_member(tarinfo, size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_gnusparse(self): + tarinfo = self.tar.getmember("gnu/sparse") +- self._test_member(tarinfo, size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_gnusparse_00(self): + tarinfo = self.tar.getmember("gnu/sparse-0.0") +- self._test_member(tarinfo, size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_gnusparse_01(self): + tarinfo = self.tar.getmember("gnu/sparse-0.1") +- self._test_member(tarinfo, size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_gnusparse_10(self): + tarinfo = self.tar.getmember("gnu/sparse-1.0") +- self._test_member(tarinfo, size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_umlauts(self): + tarinfo = self.tar.getmember("ustar/umlauts-" + "\xc4\xd6\xdc\xe4\xf6\xfc\xdf") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + def test_find_ustar_longname(self): + name = "ustar/" + "12345/" * 39 + "1234567/longname" +@@ -880,7 +884,7 @@ class MemberReadTest(ReadTest, unittest.TestCase): + + def test_find_regtype_oldv7(self): + tarinfo = self.tar.getmember("misc/regtype-old-v7") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + def test_find_pax_umlauts(self): + self.tar.close() +@@ -888,7 +892,7 @@ class MemberReadTest(ReadTest, unittest.TestCase): + encoding="iso8859-1") + tarinfo = self.tar.getmember("pax/umlauts-" + "\xc4\xd6\xdc\xe4\xf6\xfc\xdf") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + + class LongnameTest: +@@ -950,8 +954,8 @@ class GNUReadTest(LongnameTest, ReadTest, unittest.TestCase): + filename = os.path.join(TEMPDIR, name) + with open(filename, "rb") as fobj: + data = fobj.read() +- self.assertEqual(md5sum(data), md5_sparse, +- "wrong md5sum for %s" % name) ++ self.assertEqual(sha256sum(data), sha256_sparse, ++ "wrong sha256sum for %s" % name) + + if self._fs_supports_holes(): + s = os.stat(filename) +@@ -2431,7 +2435,7 @@ class LinkEmulationTest(ReadTest, unittest.TestCase): + self.tar.extract(name, TEMPDIR) + with open(os.path.join(TEMPDIR, name), "rb") as f: + data = f.read() +- self.assertEqual(md5sum(data), md5_regtype) ++ self.assertEqual(sha256sum(data), sha256_regtype) + + # See issues #1578269, #8879, and #17689 for some history on these skips + @unittest.skipIf(hasattr(os.path, "islink"), +diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py +index ef0091c4930..895f97cc09a 100644 +--- a/Lib/test/test_urllib2_localnet.py ++++ b/Lib/test/test_urllib2_localnet.py +@@ -325,6 +325,7 @@ class ProxyAuthTests(unittest.TestCase): + PASSWD = "test123" + REALM = "TestRealm" + ++ @support.requires_hashdigest("md5") + def setUp(self): + super(ProxyAuthTests, self).setUp() + # Ignore proxy bypass settings in the environment. 
+-- +2.21.0 + + +From 75d9613294e1cbd5aab9363cf3c435871a25f065 Mon Sep 17 00:00:00 2001 +From: Christian Heimes +Date: Mon, 30 Sep 2019 09:10:38 +0200 +Subject: [PATCH 02/36] [3.8] bpo-38270: More fixes for strict crypto policy + (GH-16418) (#16437) + +test_hmac and test_hashlib test built-in hashing implementations and +OpenSSL-based hashing implementations. Add more checks to skip OpenSSL +implementations when a strict crypto policy is active. + +Use EVP_DigestInit_ex() instead of EVP_DigestInit() to initialize the +EVP context. The EVP_DigestInit() function clears alls flags and breaks +usedforsecurity flag again. + +Signed-off-by: Christian Heimes + +https://bugs.python.org/issue38270. +(cherry picked from commit 90558158093c0ad893102158fd3c2dd9f864e82e) + +Co-authored-by: Christian Heimes +--- + Lib/test/support/__init__.py | 19 ++++++++++++++++--- + Lib/test/test_hashlib.py | 3 +++ + Lib/test/test_hmac.py | 12 ++++++------ + 3 files changed, 25 insertions(+), 9 deletions(-) + +diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py +index a43819904e0..3342218cf0a 100644 +--- a/Lib/test/support/__init__.py ++++ b/Lib/test/support/__init__.py +@@ -71,6 +71,11 @@ try: + except ImportError: + resource = None + ++try: ++ import _hashlib ++except ImportError: ++ _hashlib = None ++ + __all__ = [ + # globals + "PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast", +@@ -88,7 +93,8 @@ __all__ = [ + "create_empty_file", "can_symlink", "fs_is_case_insensitive", + # unittest + "is_resource_enabled", "requires", "requires_freebsd_version", +- "requires_linux_version", "requires_mac_ver", "check_syntax_error", ++ "requires_linux_version", "requires_mac_ver", "requires_hashdigest", ++ "check_syntax_error", + "TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset", + "transient_internet", "BasicTestRunner", "run_unittest", "run_doctest", + "skip_unless_symlink", "requires_gzip", "requires_bz2", "requires_lzma", +@@ -628,12 +634,16 @@ def requires_mac_ver(*min_version): + return wrapper + return decorator + +-def requires_hashdigest(digestname): ++def requires_hashdigest(digestname, openssl=None): + """Decorator raising SkipTest if a hashing algorithm is not available + + The hashing algorithm could be missing or blocked by a strict crypto + policy. + ++ If 'openssl' is True, then the decorator checks that OpenSSL provides ++ the algorithm. Otherwise the check falls back to built-in ++ implementations. ++ + ValueError: [digital envelope routines: EVP_DigestInit_ex] disabled for FIPS + ValueError: unsupported hash type md4 + """ +@@ -641,7 +651,10 @@ def requires_hashdigest(digestname): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: +- hashlib.new(digestname) ++ if openssl and _hashlib is not None: ++ _hashlib.new(digestname) ++ else: ++ hashlib.new(digestname) + except ValueError: + raise unittest.SkipTest( + f"hash digest '{digestname}' is not available." 
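With the extra openssl flag added here, tests that exercise the OpenSSL-backed implementations can say so explicitly; a small illustrative sketch (class and method names invented):

    import hashlib
    import unittest
    from test.support import requires_hashdigest

    class OpenSSLBackedTests(unittest.TestCase):
        # Skip based on OpenSSL's EVP layer (_hashlib), not the builtin modules.
        @requires_hashdigest('sha512', openssl=True)
        def test_sha512_digest_size(self):
            self.assertEqual(hashlib.sha512(b"abc").digest_size, 64)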
+diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 9711856853d..1b1b4d54112 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -8,6 +8,7 @@ + + import array + from binascii import unhexlify ++import functools + import hashlib + import importlib + import itertools +@@ -21,6 +22,7 @@ import unittest + import warnings + from test import support + from test.support import _4G, bigmemtest, import_fresh_module ++from test.support import requires_hashdigest + from http.client import HTTPException + + # Were we compiled --with-pydebug or with #define Py_DEBUG? +@@ -117,6 +119,7 @@ class HashLibTestCase(unittest.TestCase): + constructors.add(_test_algorithm_via_hashlib_new) + + _hashlib = self._conditional_import_module('_hashlib') ++ self._hashlib = _hashlib + if _hashlib: + # These two algorithms should always be present when this module + # is compiled. If not, something was compiled wrong. +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 81c3485f761..2a5a0d3d061 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -19,7 +19,7 @@ def ignore_warning(func): + + class TestVectorsTestCase(unittest.TestCase): + +- @requires_hashdigest('md5') ++ @requires_hashdigest('md5', openssl=True) + def test_md5_vectors(self): + # Test the HMAC module against test vectors from the RFC. + +@@ -66,7 +66,7 @@ class TestVectorsTestCase(unittest.TestCase): + b"and Larger Than One Block-Size Data"), + "6f630fad67cda0ee1fb1f562db3aa53e") + +- @requires_hashdigest('sha1') ++ @requires_hashdigest('sha1', openssl=True) + def test_sha_vectors(self): + def shatest(key, data, digest): + h = hmac.HMAC(key, data, digestmod=hashlib.sha1) +@@ -234,19 +234,19 @@ class TestVectorsTestCase(unittest.TestCase): + '134676fb6de0446065c97440fa8c6a58', + }) + +- @requires_hashdigest('sha224') ++ @requires_hashdigest('sha224', openssl=True) + def test_sha224_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha224, 'sha224', 28, 64) + +- @requires_hashdigest('sha256') ++ @requires_hashdigest('sha256', openssl=True) + def test_sha256_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha256, 'sha256', 32, 64) + +- @requires_hashdigest('sha384') ++ @requires_hashdigest('sha384', openssl=True) + def test_sha384_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha384, 'sha384', 48, 128) + +- @requires_hashdigest('sha512') ++ @requires_hashdigest('sha512', openssl=True) + def test_sha512_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128) + +-- +2.21.0 + + +From 19a40f7e820bb991c03e03c05dc832539b676983 Mon Sep 17 00:00:00 2001 +From: stratakis +Date: Tue, 3 Dec 2019 16:35:54 +0100 +Subject: [PATCH 03/36] bpo-38270: Fix indentation of test_hmac assertions + (GH-17446) + +Since https://github.com/python/cpython/commit/c64a1a61e6fc542cada40eb069a239317e1af36e two assertions were indented and thus ignored when running test_hmac. + +This PR fixes it. As the change is quite trivial I didn't add a NEWS entry. 
+ + +https://bugs.python.org/issue38270 +--- + Lib/test/test_hmac.py | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 2a5a0d3d061..338e0215a41 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -329,7 +329,7 @@ class ConstructorTestCase(unittest.TestCase): + digestmod="sha256") + except Exception: + self.fail("Constructor call with bytearray arguments raised exception.") +- self.assertEqual(h.hexdigest(), self.expected) ++ self.assertEqual(h.hexdigest(), self.expected) + + @requires_hashdigest('sha256') + def test_with_memoryview_msg(self): +@@ -337,7 +337,7 @@ class ConstructorTestCase(unittest.TestCase): + h = hmac.HMAC(b"key", memoryview(b"hash this!"), digestmod="sha256") + except Exception: + self.fail("Constructor call with memoryview msg raised exception.") +- self.assertEqual(h.hexdigest(), self.expected) ++ self.assertEqual(h.hexdigest(), self.expected) + + @requires_hashdigest('sha256') + def test_withmodule(self): +-- +2.21.0 + + +From 5fa96205b380f2cb555ac346e6b7039746cb51ca Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 25 Jul 2019 16:19:52 +0200 +Subject: [PATCH 04/36] Expose OpenSSL FIPS_mode() as hashlib.get_fips_mode() + +--- + Lib/hashlib.py | 5 +++++ + Modules/_hashopenssl.c | 36 +++++++++++++++++++++++++++++++++ + Modules/clinic/_hashopenssl.c.h | 25 ++++++++++++++++++++++- + 3 files changed, 65 insertions(+), 1 deletion(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index 98d2d7981a3..ae17c585110 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -236,6 +236,11 @@ try: + except ImportError: + pass + ++try: ++ from _hashlib import get_fips_mode ++except ImportError: ++ pass ++ + + for __func_name in __always_supported: + # try them all, some may not work due to the OpenSSL +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 84edd72c85a..4876a7f7aa7 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -25,6 +25,9 @@ + #include + #include "openssl/err.h" + ++/* Expose FIPS_mode */ ++#include ++ + #include "clinic/_hashopenssl.c.h" + /*[clinic input] + module _hashlib +@@ -987,6 +990,38 @@ GEN_CONSTRUCTOR(sha256) + GEN_CONSTRUCTOR(sha384) + GEN_CONSTRUCTOR(sha512) + ++/*[clinic input] ++_hashlib.get_fips_mode ++ ++Determine the OpenSSL FIPS mode of operation. ++ ++Effectively any non-zero return value indicates FIPS mode; ++values other than 1 may have additional significance. ++ ++See OpenSSL documentation for the FIPS_mode() function for details. ++[clinic start generated code]*/ ++ ++static PyObject * ++_hashlib_get_fips_mode_impl(PyObject *module) ++/*[clinic end generated code: output=ad8a7793310d3f98 input=f42a2135df2a5e11]*/ ++{ ++ int result = FIPS_mode(); ++ if (result == 0) { ++ // "If the library was built without support of the FIPS Object Module, ++ // then the function will return 0 with an error code of ++ // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." ++ // But 0 is also a valid result value. 
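From Python, the new wrapper is used roughly as in the sketch below; the AttributeError fallback is only needed on builds without this change:

    import hashlib

    try:
        fips = hashlib.get_fips_mode()
    except AttributeError:
        fips = 0  # build without the FIPS_mode() wrapper

    # Any non-zero value indicates FIPS mode; 1 is the usual value.
    if fips:
        print("FIPS mode enabled: non-approved digests will be rejected")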
++ ++ unsigned long errcode = ERR_peek_last_error(); ++ if (errcode) { ++ _setException(PyExc_ValueError); ++ return NULL; ++ } ++ } ++ return PyLong_FromLong(result); ++} ++ ++ + /* List of functions exported by this module */ + + static struct PyMethodDef EVP_functions[] = { +@@ -996,6 +1031,7 @@ static struct PyMethodDef EVP_functions[] = { + pbkdf2_hmac__doc__}, + #endif + _HASHLIB_SCRYPT_METHODDEF ++ _HASHLIB_GET_FIPS_MODE_METHODDEF + CONSTRUCTOR_METH_DEF(md5), + CONSTRUCTOR_METH_DEF(sha1), + CONSTRUCTOR_METH_DEF(sha224), +diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h +index 04453526040..8828e2776e9 100644 +--- a/Modules/clinic/_hashopenssl.c.h ++++ b/Modules/clinic/_hashopenssl.c.h +@@ -54,7 +54,30 @@ exit: + + #endif /* (OPENSSL_VERSION_NUMBER > 0x10100000L && !defined(OPENSSL_NO_SCRYPT) && !defined(LIBRESSL_VERSION_NUMBER)) */ + ++PyDoc_STRVAR(_hashlib_get_fips_mode__doc__, ++"get_fips_mode($module, /)\n" ++"--\n" ++"\n" ++"Determine the OpenSSL FIPS mode of operation.\n" ++"\n" ++"Effectively any non-zero return value indicates FIPS mode;\n" ++"values other than 1 may have additional significance.\n" ++"\n" ++"See OpenSSL documentation for the FIPS_mode() function for details."); ++ ++#define _HASHLIB_GET_FIPS_MODE_METHODDEF \ ++ {"get_fips_mode", (PyCFunction)_hashlib_get_fips_mode, METH_NOARGS, _hashlib_get_fips_mode__doc__}, ++ ++static PyObject * ++_hashlib_get_fips_mode_impl(PyObject *module); ++ ++static PyObject * ++_hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) ++{ ++ return _hashlib_get_fips_mode_impl(module); ++} ++ + #ifndef _HASHLIB_SCRYPT_METHODDEF + #define _HASHLIB_SCRYPT_METHODDEF + #endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */ +-/*[clinic end generated code: output=118cd7036fa0fb52 input=a9049054013a1b77]*/ ++/*[clinic end generated code: output=7d683c930bbb7c36 input=a9049054013a1b77]*/ +-- +2.21.0 + + +From e60cc93037b913ca2a410d73b52d8301ab0d76cd Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Thu, 25 Jul 2019 17:04:06 +0200 +Subject: [PATCH 05/36] Use python's fall backs for the crypto it implements + only if we are not in FIPS mode + +--- + Lib/hashlib.py | 209 +++++++++++++++------------------------ + Lib/test/test_hashlib.py | 1 + + 2 files changed, 81 insertions(+), 129 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index ae17c585110..7db1e02601f 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -67,56 +67,64 @@ algorithms_available = set(__always_supported) + __all__ = __always_supported + ('new', 'algorithms_guaranteed', + 'algorithms_available', 'pbkdf2_hmac') + +- +-__builtin_constructor_cache = {} +- +-def __get_builtin_constructor(name): +- cache = __builtin_constructor_cache +- constructor = cache.get(name) +- if constructor is not None: +- return constructor +- try: +- if name in ('SHA1', 'sha1'): +- import _sha1 +- cache['SHA1'] = cache['sha1'] = _sha1.sha1 +- elif name in ('MD5', 'md5'): +- import _md5 +- cache['MD5'] = cache['md5'] = _md5.md5 +- elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'): +- import _sha256 +- cache['SHA224'] = cache['sha224'] = _sha256.sha224 +- cache['SHA256'] = cache['sha256'] = _sha256.sha256 +- elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'): +- import _sha512 +- cache['SHA384'] = cache['sha384'] = _sha512.sha384 +- cache['SHA512'] = cache['sha512'] = _sha512.sha512 +- elif name in ('blake2b', 'blake2s'): +- import _blake2 +- cache['blake2b'] = _blake2.blake2b +- cache['blake2s'] = _blake2.blake2s +- elif name in 
{'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', +- 'shake_128', 'shake_256'}: +- import _sha3 +- cache['sha3_224'] = _sha3.sha3_224 +- cache['sha3_256'] = _sha3.sha3_256 +- cache['sha3_384'] = _sha3.sha3_384 +- cache['sha3_512'] = _sha3.sha3_512 +- cache['shake_128'] = _sha3.shake_128 +- cache['shake_256'] = _sha3.shake_256 +- except ImportError: +- pass # no extension module, this hash is unsupported. +- +- constructor = cache.get(name) +- if constructor is not None: +- return constructor +- +- raise ValueError('unsupported hash type ' + name) ++try: ++ from _hashlib import get_fips_mode ++except ImportError: ++ def get_fips_mode(): ++ return 0 ++ ++ ++if not get_fips_mode(): ++ __builtin_constructor_cache = {} ++ ++ def __get_builtin_constructor(name): ++ cache = __builtin_constructor_cache ++ constructor = cache.get(name) ++ if constructor is not None: ++ return constructor ++ try: ++ if name in ('SHA1', 'sha1'): ++ import _sha1 ++ cache['SHA1'] = cache['sha1'] = _sha1.sha1 ++ elif name in ('MD5', 'md5'): ++ import _md5 ++ cache['MD5'] = cache['md5'] = _md5.md5 ++ elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'): ++ import _sha256 ++ cache['SHA224'] = cache['sha224'] = _sha256.sha224 ++ cache['SHA256'] = cache['sha256'] = _sha256.sha256 ++ elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'): ++ import _sha512 ++ cache['SHA384'] = cache['sha384'] = _sha512.sha384 ++ cache['SHA512'] = cache['sha512'] = _sha512.sha512 ++ elif name in ('blake2b', 'blake2s'): ++ import _blake2 ++ cache['blake2b'] = _blake2.blake2b ++ cache['blake2s'] = _blake2.blake2s ++ elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', ++ 'shake_128', 'shake_256'}: ++ import _sha3 ++ cache['sha3_224'] = _sha3.sha3_224 ++ cache['sha3_256'] = _sha3.sha3_256 ++ cache['sha3_384'] = _sha3.sha3_384 ++ cache['sha3_512'] = _sha3.sha3_512 ++ cache['shake_128'] = _sha3.shake_128 ++ cache['shake_256'] = _sha3.shake_256 ++ except ImportError: ++ pass # no extension module, this hash is unsupported. ++ ++ constructor = cache.get(name) ++ if constructor is not None: ++ return constructor ++ ++ raise ValueError('unsupported hash type ' + name) + + + def __get_openssl_constructor(name): +- if name in {'blake2b', 'blake2s'}: +- # Prefer our blake2 implementation. +- return __get_builtin_constructor(name) ++ if not get_fips_mode(): ++ if name in {'blake2b', 'blake2s'}: ++ # Prefer our blake2 implementation. ++ return __get_builtin_constructor(name) + try: + f = getattr(_hashlib, 'openssl_' + name) + # Allow the C module to raise ValueError. The function will be +@@ -125,27 +133,30 @@ def __get_openssl_constructor(name): + # Use the C function directly (very fast) + return f + except (AttributeError, ValueError): ++ if get_fips_mode(): ++ raise + return __get_builtin_constructor(name) + +- +-def __py_new(name, data=b'', **kwargs): +- """new(name, data=b'', **kwargs) - Return a new hashing object using the +- named algorithm; optionally initialized with data (which must be +- a bytes-like object). +- """ +- return __get_builtin_constructor(name)(data, **kwargs) ++if not get_fips_mode(): ++ def __py_new(name, data=b'', **kwargs): ++ """new(name, data=b'', **kwargs) - Return a new hashing object using the ++ named algorithm; optionally initialized with data (which must be ++ a bytes-like object). 
++ """ ++ return __get_builtin_constructor(name)(data, **kwargs) + + + def __hash_new(name, data=b'', **kwargs): + """new(name, data=b'') - Return a new hashing object using the named algorithm; + optionally initialized with data (which must be a bytes-like object). + """ +- if name in {'blake2b', 'blake2s'}: +- # Prefer our blake2 implementation. +- # OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. +- # It does neither support keyed blake2 nor advanced features like +- # salt, personal, tree hashing or SSE. +- return __get_builtin_constructor(name)(data, **kwargs) ++ if not get_fips_mode(): ++ if name in {'blake2b', 'blake2s'}: ++ # Prefer our blake2 implementation. ++ # OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. ++ # It does neither support keyed blake2 nor advanced features like ++ # salt, personal, tree hashing or SSE. ++ return __get_builtin_constructor(name)(data, **kwargs) + try: + return _hashlib.new(name, data) + except ValueError: +@@ -153,6 +164,8 @@ def __hash_new(name, data=b'', **kwargs): + # hash, try using our builtin implementations. + # This allows for SHA224/256 and SHA384/512 support even though + # the OpenSSL library prior to 0.9.8 doesn't provide them. ++ if get_fips_mode(): ++ raise + return __get_builtin_constructor(name)(data) + + +@@ -163,72 +176,14 @@ try: + algorithms_available = algorithms_available.union( + _hashlib.openssl_md_meth_names) + except ImportError: ++ if get_fips_mode(): ++ raise + new = __py_new + __get_hash = __get_builtin_constructor + +-try: +- # OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA +- from _hashlib import pbkdf2_hmac +-except ImportError: +- _trans_5C = bytes((x ^ 0x5C) for x in range(256)) +- _trans_36 = bytes((x ^ 0x36) for x in range(256)) +- +- def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None): +- """Password based key derivation function 2 (PKCS #5 v2.0) + +- This Python implementations based on the hmac module about as fast +- as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much faster +- for long passwords. +- """ +- if not isinstance(hash_name, str): +- raise TypeError(hash_name) +- +- if not isinstance(password, (bytes, bytearray)): +- password = bytes(memoryview(password)) +- if not isinstance(salt, (bytes, bytearray)): +- salt = bytes(memoryview(salt)) +- +- # Fast inline HMAC implementation +- inner = new(hash_name) +- outer = new(hash_name) +- blocksize = getattr(inner, 'block_size', 64) +- if len(password) > blocksize: +- password = new(hash_name, password).digest() +- password = password + b'\x00' * (blocksize - len(password)) +- inner.update(password.translate(_trans_36)) +- outer.update(password.translate(_trans_5C)) +- +- def prf(msg, inner=inner, outer=outer): +- # PBKDF2_HMAC uses the password as key. We can re-use the same +- # digest objects and just update copies to skip initialization. 
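The net effect on callers, assuming a FIPS-enabled OpenSSL build carrying this patch: hashlib no longer falls back to the bundled C implementations, so a digest blocked by policy surfaces as ValueError instead of being computed anyway. A small sketch:

    import hashlib

    def describe(name):
        try:
            hashlib.new(name)
        except ValueError as exc:
            return f"{name}: rejected ({exc})"
        return f"{name}: available"

    for name in ("md5", "sha256"):
        print(describe(name))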
+- icpy = inner.copy() +- ocpy = outer.copy() +- icpy.update(msg) +- ocpy.update(icpy.digest()) +- return ocpy.digest() +- +- if iterations < 1: +- raise ValueError(iterations) +- if dklen is None: +- dklen = outer.digest_size +- if dklen < 1: +- raise ValueError(dklen) +- +- dkey = b'' +- loop = 1 +- from_bytes = int.from_bytes +- while len(dkey) < dklen: +- prev = prf(salt + loop.to_bytes(4, 'big')) +- # endianess doesn't matter here as long to / from use the same +- rkey = int.from_bytes(prev, 'big') +- for i in range(iterations - 1): +- prev = prf(prev) +- # rkey = rkey ^ prev +- rkey ^= from_bytes(prev, 'big') +- loop += 1 +- dkey += rkey.to_bytes(inner.digest_size, 'big') +- +- return dkey[:dklen] ++# OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA ++from _hashlib import pbkdf2_hmac + + try: + # OpenSSL's scrypt requires OpenSSL 1.1+ +@@ -236,12 +191,6 @@ try: + except ImportError: + pass + +-try: +- from _hashlib import get_fips_mode +-except ImportError: +- pass +- +- + for __func_name in __always_supported: + # try them all, some may not work due to the OpenSSL + # version not supporting that algorithm. +@@ -254,4 +203,6 @@ for __func_name in __always_supported: + + # Cleanup locals() + del __always_supported, __func_name, __get_hash +-del __py_new, __hash_new, __get_openssl_constructor ++del __hash_new, __get_openssl_constructor ++if not get_fips_mode(): ++ del __py_new +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 1b1b4d54112..4e6b5b607a9 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -933,6 +933,7 @@ class KDFTests(unittest.TestCase): + iterations=1, dklen=None) + self.assertEqual(out, self.pbkdf2_results['sha1'][0][0]) + ++ @unittest.skip("The python implementation of pbkdf2_hmac has been removed") + def test_pbkdf2_hmac_py(self): + self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac) + +-- +2.21.0 + + +From 4803386d980b9ad1a9bdde829e8642676acd8bf4 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 25 Jul 2019 17:19:06 +0200 +Subject: [PATCH 06/36] Disable Python's hash implementations in FIPS mode, + forcing OpenSSL + +--- + Include/_hashopenssl.h | 66 ++++++++++++++++++++++++++++++++++ + Modules/_blake2/blake2b_impl.c | 5 +++ + Modules/_blake2/blake2module.c | 3 ++ + Modules/_blake2/blake2s_impl.c | 5 +++ + Modules/_hashopenssl.c | 36 +------------------ + Modules/_sha3/sha3module.c | 5 +++ + setup.py | 31 ++++++++-------- + 7 files changed, 101 insertions(+), 50 deletions(-) + create mode 100644 Include/_hashopenssl.h + +diff --git a/Include/_hashopenssl.h b/Include/_hashopenssl.h +new file mode 100644 +index 00000000000..a726c0d3fbf +--- /dev/null ++++ b/Include/_hashopenssl.h +@@ -0,0 +1,66 @@ ++#ifndef Py_HASHOPENSSL_H ++#define Py_HASHOPENSSL_H ++ ++#include "Python.h" ++#include ++#include ++ ++/* LCOV_EXCL_START */ ++static PyObject * ++_setException(PyObject *exc) ++{ ++ unsigned long errcode; ++ const char *lib, *func, *reason; ++ ++ errcode = ERR_peek_last_error(); ++ if (!errcode) { ++ PyErr_SetString(exc, "unknown reasons"); ++ return NULL; ++ } ++ ERR_clear_error(); ++ ++ lib = ERR_lib_error_string(errcode); ++ func = ERR_func_error_string(errcode); ++ reason = ERR_reason_error_string(errcode); ++ ++ if (lib && func) { ++ PyErr_Format(exc, "[%s: %s] %s", lib, func, reason); ++ } ++ else if (lib) { ++ PyErr_Format(exc, "[%s] %s", lib, reason); ++ } ++ else { ++ PyErr_SetString(exc, reason); ++ } ++ return NULL; ++} ++/* LCOV_EXCL_STOP */ ++ ++ ++__attribute__((__unused__)) ++static int 
++_Py_hashlib_fips_error(char *name) { ++ int result = FIPS_mode(); ++ if (result == 0) { ++ // "If the library was built without support of the FIPS Object Module, ++ // then the function will return 0 with an error code of ++ // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." ++ // But 0 is also a valid result value. ++ ++ unsigned long errcode = ERR_peek_last_error(); ++ if (errcode) { ++ _setException(PyExc_ValueError); ++ return 1; ++ } ++ return 0; ++ } ++ PyErr_Format(PyExc_ValueError, "%s is not available in FIPS mode", ++ name); ++ return 1; ++} ++ ++#define FAIL_RETURN_IN_FIPS_MODE(name) do { \ ++ if (_Py_hashlib_fips_error(name)) return NULL; \ ++} while (0) ++ ++#endif // !Py_HASHOPENSSL_H +diff --git a/Modules/_blake2/blake2b_impl.c b/Modules/_blake2/blake2b_impl.c +index 418c0184006..341e67a8fcc 100644 +--- a/Modules/_blake2/blake2b_impl.c ++++ b/Modules/_blake2/blake2b_impl.c +@@ -14,6 +14,7 @@ + */ + + #include "Python.h" ++#include "_hashopenssl.h" + #include "pystrhex.h" + #ifdef WITH_THREAD + #include "pythread.h" +@@ -103,6 +104,8 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + unsigned long leaf_size = 0; + unsigned long long node_offset = 0; + ++ FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ + self = new_BLAKE2bObject(type); + if (self == NULL) { + goto error; +@@ -293,6 +296,8 @@ _blake2_blake2b_update(BLAKE2bObject *self, PyObject *data) + { + Py_buffer buf; + ++ FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + + #ifdef WITH_THREAD +diff --git a/Modules/_blake2/blake2module.c b/Modules/_blake2/blake2module.c +index e2a3d420d4e..817b7165684 100644 +--- a/Modules/_blake2/blake2module.c ++++ b/Modules/_blake2/blake2module.c +@@ -9,6 +9,7 @@ + */ + + #include "Python.h" ++#include "_hashopenssl.h" + + #include "impl/blake2.h" + +@@ -57,6 +58,8 @@ PyInit__blake2(void) + PyObject *m; + PyObject *d; + ++ FAIL_RETURN_IN_FIPS_MODE("blake2"); ++ + m = PyModule_Create(&blake2_module); + if (m == NULL) + return NULL; +diff --git a/Modules/_blake2/blake2s_impl.c b/Modules/_blake2/blake2s_impl.c +index 24e529b6596..0dfe0586b57 100644 +--- a/Modules/_blake2/blake2s_impl.c ++++ b/Modules/_blake2/blake2s_impl.c +@@ -14,6 +14,7 @@ + */ + + #include "Python.h" ++#include "_hashopenssl.h" + #include "pystrhex.h" + #ifdef WITH_THREAD + #include "pythread.h" +@@ -103,6 +104,8 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + unsigned long leaf_size = 0; + unsigned long long node_offset = 0; + ++ FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ + self = new_BLAKE2sObject(type); + if (self == NULL) { + goto error; +@@ -293,6 +296,8 @@ _blake2_blake2s_update(BLAKE2sObject *self, PyObject *data) + { + Py_buffer buf; + ++ FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + + #ifdef WITH_THREAD +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 4876a7f7aa7..02cf6087a2e 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -17,16 +17,13 @@ + #include "structmember.h" + #include "hashlib.h" + #include "pystrhex.h" ++#include "_hashopenssl.h" + + + /* EVP is the preferred interface to hashing in OpenSSL */ + #include + /* We use the object interface to discover what hashes OpenSSL supports. 
*/ + #include +-#include "openssl/err.h" +- +-/* Expose FIPS_mode */ +-#include + + #include "clinic/_hashopenssl.c.h" + /*[clinic input] +@@ -77,37 +74,6 @@ DEFINE_CONSTS_FOR_NEW(sha384) + DEFINE_CONSTS_FOR_NEW(sha512) + + +-/* LCOV_EXCL_START */ +-static PyObject * +-_setException(PyObject *exc) +-{ +- unsigned long errcode; +- const char *lib, *func, *reason; +- +- errcode = ERR_peek_last_error(); +- if (!errcode) { +- PyErr_SetString(exc, "unknown reasons"); +- return NULL; +- } +- ERR_clear_error(); +- +- lib = ERR_lib_error_string(errcode); +- func = ERR_func_error_string(errcode); +- reason = ERR_reason_error_string(errcode); +- +- if (lib && func) { +- PyErr_Format(exc, "[%s: %s] %s", lib, func, reason); +- } +- else if (lib) { +- PyErr_Format(exc, "[%s] %s", lib, reason); +- } +- else { +- PyErr_SetString(exc, reason); +- } +- return NULL; +-} +-/* LCOV_EXCL_STOP */ +- + static EVPobject * + newEVPobject(PyObject *name) + { +diff --git a/Modules/_sha3/sha3module.c b/Modules/_sha3/sha3module.c +index 2c2b2dbc5c7..624f7f247d4 100644 +--- a/Modules/_sha3/sha3module.c ++++ b/Modules/_sha3/sha3module.c +@@ -18,6 +18,7 @@ + #include "Python.h" + #include "pystrhex.h" + #include "../hashlib.h" ++#include "_hashopenssl.h" + + /* ************************************************************************** + * SHA-3 (Keccak) and SHAKE +@@ -162,6 +163,7 @@ static PyTypeObject SHAKE256type; + static SHA3object * + newSHA3object(PyTypeObject *type) + { ++ FAIL_RETURN_IN_FIPS_MODE("_sha3"); + SHA3object *newobj; + newobj = (SHA3object *)PyObject_New(SHA3object, type); + if (newobj == NULL) { +@@ -177,6 +179,7 @@ newSHA3object(PyTypeObject *type) + static PyObject * + py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + { ++ FAIL_RETURN_IN_FIPS_MODE("_sha3"); + SHA3object *self = NULL; + Py_buffer buf = {NULL, NULL}; + HashReturn res; +@@ -724,6 +727,8 @@ PyInit__sha3(void) + { + PyObject *m = NULL; + ++ FAIL_RETURN_IN_FIPS_MODE("_sha3"); ++ + if ((m = PyModule_Create(&_SHA3module)) == NULL) { + return NULL; + } +diff --git a/setup.py b/setup.py +index e2c18982532..e2e659ef795 100644 +--- a/setup.py ++++ b/setup.py +@@ -901,31 +901,30 @@ class PyBuildExt(build_ext): + have_usable_openssl = (have_any_openssl and + openssl_ver >= min_openssl_ver) + ++ if not have_usable_openssl: ++ raise ValueError('Cannot build for RHEL without OpenSSL') ++ ++ ssl_args = { ++ 'include_dirs': ssl_incs, ++ 'library_dirs': ssl_libs, ++ 'libraries': ['ssl', 'crypto'], ++ } ++ + if have_any_openssl: + if have_usable_openssl: + # The _hashlib module wraps optimized implementations + # of hash functions from the OpenSSL library. + exts.append( Extension('_hashlib', ['_hashopenssl.c'], + depends = ['hashlib.h'], +- include_dirs = ssl_incs, +- library_dirs = ssl_libs, +- libraries = ['ssl', 'crypto']) ) ++ **ssl_args)) + else: + print("warning: openssl 0x%08x is too old for _hashlib" % + openssl_ver) + missing.append('_hashlib') + +- # We always compile these even when OpenSSL is available (issue #14693). +- # It's harmless and the object code is tiny (40-50 KB per module, +- # only loaded when actually used). 
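Observable effect of the FAIL_RETURN_IN_FIPS_MODE guards added to _blake2 and _sha3, sketched from the Python side; this is only meaningful on a FIPS-enabled system with this patch applied:

    import _hashlib

    if _hashlib.get_fips_mode():
        try:
            import _blake2            # the module init itself carries the guard
            _blake2.blake2b(b"data")
        except (ImportError, ValueError) as exc:
            print("built-in blake2 blocked in FIPS mode:", exc)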
+- exts.append( Extension('_sha256', ['sha256module.c'], +- depends=['hashlib.h']) ) +- exts.append( Extension('_sha512', ['sha512module.c'], +- depends=['hashlib.h']) ) +- exts.append( Extension('_md5', ['md5module.c'], +- depends=['hashlib.h']) ) +- exts.append( Extension('_sha1', ['sha1module.c'], +- depends=['hashlib.h']) ) ++ # RHEL: Always force OpenSSL for md5, sha1, sha256, sha512; ++ # don't build Python's implementations. ++ # sha3 and blake2 have extra functionality, so do build those: + + blake2_deps = glob(os.path.join(os.getcwd(), srcdir, + 'Modules/_blake2/impl/*')) +@@ -944,6 +943,7 @@ class PyBuildExt(build_ext): + '_blake2/blake2b_impl.c', + '_blake2/blake2s_impl.c'], + define_macros=blake2_macros, ++ **ssl_args, + depends=blake2_deps) ) + + sha3_deps = glob(os.path.join(os.getcwd(), srcdir, +@@ -951,7 +951,8 @@ class PyBuildExt(build_ext): + sha3_deps.append('hashlib.h') + exts.append( Extension('_sha3', + ['_sha3/sha3module.c'], +- depends=sha3_deps)) ++ **ssl_args, ++ depends=sha3_deps + ['hashlib.h'])) + + # Modules that provide persistent dictionary-like semantics. You will + # probably want to arrange for at least one of them to be available on +-- +2.21.0 + + +From 0f35f480e2134c7d5c4ea3d68d8c0af14f56afa3 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 25 Jul 2019 17:35:27 +0200 +Subject: [PATCH 07/36] Expose all hashes available to OpenSSL, using a list + +--- + Modules/_hashopenssl.c | 44 ++++++++++++++----------------------- + Modules/_hashopenssl_list.h | 21 ++++++++++++++++++ + 2 files changed, 37 insertions(+), 28 deletions(-) + create mode 100644 Modules/_hashopenssl_list.h + +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 02cf6087a2e..7dfd70822b9 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -66,12 +66,9 @@ static PyTypeObject EVPtype; + static PyObject *CONST_ ## Name ## _name_obj = NULL; \ + static EVP_MD_CTX *CONST_new_ ## Name ## _ctx_p = NULL; + +-DEFINE_CONSTS_FOR_NEW(md5) +-DEFINE_CONSTS_FOR_NEW(sha1) +-DEFINE_CONSTS_FOR_NEW(sha224) +-DEFINE_CONSTS_FOR_NEW(sha256) +-DEFINE_CONSTS_FOR_NEW(sha384) +-DEFINE_CONSTS_FOR_NEW(sha512) ++#define _HASH(py_name, openssl_name) DEFINE_CONSTS_FOR_NEW(py_name) ++#include "_hashopenssl_list.h" ++#undef _HASH + + + static EVPobject * +@@ -896,7 +893,7 @@ generate_hash_name_list(void) + * The first call will lazy-initialize, which reports an exception + * if initialization fails. + */ +-#define GEN_CONSTRUCTOR(NAME) \ ++#define GEN_CONSTRUCTOR(NAME, SSL_NAME) \ + static PyObject * \ + EVP_new_ ## NAME (PyObject *self, PyObject *args) \ + { \ +@@ -910,8 +907,8 @@ generate_hash_name_list(void) + \ + if (CONST_new_ ## NAME ## _ctx_p == NULL) { \ + EVP_MD_CTX *ctx_p = EVP_MD_CTX_new(); \ +- if (!EVP_get_digestbyname(#NAME) || \ +- !EVP_DigestInit(ctx_p, EVP_get_digestbyname(#NAME))) { \ ++ if (!EVP_get_digestbyname(SSL_NAME) || \ ++ !EVP_DigestInit(ctx_p, EVP_get_digestbyname(SSL_NAME))) { \ + _setException(PyExc_ValueError); \ + EVP_MD_CTX_free(ctx_p); \ + return NULL; \ +@@ -939,7 +936,7 @@ generate_hash_name_list(void) + {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \ + PyDoc_STR("Returns a " #NAME \ + " hash object; optionally initialized with a string") \ +- } ++ }, + + /* used in the init function to setup a constructor: initialize OpenSSL + constructor constants if they haven't been initialized already. 
*/ +@@ -949,12 +946,9 @@ generate_hash_name_list(void) + } \ + } while (0); + +-GEN_CONSTRUCTOR(md5) +-GEN_CONSTRUCTOR(sha1) +-GEN_CONSTRUCTOR(sha224) +-GEN_CONSTRUCTOR(sha256) +-GEN_CONSTRUCTOR(sha384) +-GEN_CONSTRUCTOR(sha512) ++#define _HASH(py_name, openssl_name) GEN_CONSTRUCTOR(py_name, openssl_name) ++#include "_hashopenssl_list.h" ++#undef _HASH + + /*[clinic input] + _hashlib.get_fips_mode +@@ -998,12 +992,9 @@ static struct PyMethodDef EVP_functions[] = { + #endif + _HASHLIB_SCRYPT_METHODDEF + _HASHLIB_GET_FIPS_MODE_METHODDEF +- CONSTRUCTOR_METH_DEF(md5), +- CONSTRUCTOR_METH_DEF(sha1), +- CONSTRUCTOR_METH_DEF(sha224), +- CONSTRUCTOR_METH_DEF(sha256), +- CONSTRUCTOR_METH_DEF(sha384), +- CONSTRUCTOR_METH_DEF(sha512), ++#define _HASH(py_name, openssl_name) CONSTRUCTOR_METH_DEF(py_name) ++#include "_hashopenssl_list.h" ++#undef _HASH + {NULL, NULL} /* Sentinel */ + }; + +@@ -1061,11 +1052,8 @@ PyInit__hashlib(void) + PyModule_AddObject(m, "HASH", (PyObject *)&EVPtype); + + /* these constants are used by the convenience constructors */ +- INIT_CONSTRUCTOR_CONSTANTS(md5); +- INIT_CONSTRUCTOR_CONSTANTS(sha1); +- INIT_CONSTRUCTOR_CONSTANTS(sha224); +- INIT_CONSTRUCTOR_CONSTANTS(sha256); +- INIT_CONSTRUCTOR_CONSTANTS(sha384); +- INIT_CONSTRUCTOR_CONSTANTS(sha512); ++#define _HASH(py_name, openssl_name) INIT_CONSTRUCTOR_CONSTANTS(py_name) ++#include "_hashopenssl_list.h" ++#undef _HASH + return m; + } +diff --git a/Modules/_hashopenssl_list.h b/Modules/_hashopenssl_list.h +new file mode 100644 +index 00000000000..3c11b2eb6c6 +--- /dev/null ++++ b/Modules/_hashopenssl_list.h +@@ -0,0 +1,21 @@ ++/* Call the _HASH macro with all the hashes exported by OpenSSL, ++ * at compile time. ++ * ++ * This file is meant to be included multiple times, with different values of ++ * _HASH. ++ */ ++ ++_HASH(md5, "md5") ++_HASH(sha1, "sha1") ++_HASH(sha224, "sha224") ++_HASH(sha256, "sha256") ++_HASH(sha384, "sha384") ++_HASH(sha512, "sha512") ++_HASH(blake2b, "blake2b512") ++_HASH(blake2s, "blake2s256") ++_HASH(sha3_224, "sha3-224") ++_HASH(sha3_256, "sha3-256") ++_HASH(sha3_384, "sha3-384") ++_HASH(sha3_512, "sha3-512") ++_HASH(shake_128, "shake128") ++_HASH(shake_256, "shake256") +-- +2.21.0 + + +From 7e5b74b7268e4fb7d473d24cd023493e4e87eb76 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 25 Jul 2019 18:13:45 +0200 +Subject: [PATCH 08/36] Fix tests + +--- + Lib/hashlib.py | 5 +++- + Lib/test/test_hashlib.py | 58 +++++++++++++++++++++++++++++++--------- + 2 files changed, 49 insertions(+), 14 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index 7db1e02601f..2def0a310c6 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -122,7 +122,10 @@ if not get_fips_mode(): + + def __get_openssl_constructor(name): + if not get_fips_mode(): +- if name in {'blake2b', 'blake2s'}: ++ if name in { ++ 'blake2b', 'blake2s', 'shake_256', 'shake_128', ++ #'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', ++ }: + # Prefer our blake2 implementation. 
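Outside FIPS mode the hunk above keeps the built-in constructors for blake2 and the SHAKE digests: the generic OpenSSL-backed hash objects expose a fixed digest size and their digest()/hexdigest() take no length argument, whereas the built-in SHAKE objects do. The test changes that follow therefore truncate the OpenSSL output before comparing, which is valid because SHAKE is an extendable-output function: a shorter digest is a byte prefix of a longer one. A minimal sketch of that property, using only the stock built-in constructor (no patched _hashlib assumed):

    import hashlib

    # hexdigest(n) returns 2*n hex characters; a 16-byte SHAKE-128 output
    # is simply the prefix of the corresponding 32-byte output.
    full = hashlib.shake_128(b"abc").hexdigest(32)
    short = hashlib.shake_128(b"abc").hexdigest(16)
    assert full[:32] == short
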
+ return __get_builtin_constructor(name) + try: +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 4e6b5b607a9..e57c93b42f5 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -182,7 +182,9 @@ class HashLibTestCase(unittest.TestCase): + a = array.array("b", range(10)) + for cons in self.hash_constructors: + c = cons(a) +- if c.name in self.shakes: ++ if (c.name in self.shakes ++ and not cons.__name__.startswith('openssl_') ++ ): + c.hexdigest(16) + else: + c.hexdigest() +@@ -229,7 +231,9 @@ class HashLibTestCase(unittest.TestCase): + def test_hexdigest(self): + for cons in self.hash_constructors: + h = cons() +- if h.name in self.shakes: ++ if (h.name in self.shakes ++ and not cons.__name__.startswith('openssl_') ++ ): + self.assertIsInstance(h.digest(16), bytes) + self.assertEqual(hexstr(h.digest(16)), h.hexdigest(16)) + else: +@@ -243,6 +247,8 @@ class HashLibTestCase(unittest.TestCase): + h = cons() + if h.name not in self.shakes: + continue ++ if cons.__name__.startswith('openssl_'): ++ continue + for digest in h.digest, h.hexdigest: + with self.assertRaises((ValueError, OverflowError)): + digest(-10) +@@ -272,7 +278,9 @@ class HashLibTestCase(unittest.TestCase): + m1.update(bees) + m1.update(cees) + m1.update(dees) +- if m1.name in self.shakes: ++ if (m1.name in self.shakes ++ and not cons.__name__.startswith('openssl_') ++ ): + args = (16,) + else: + args = () +@@ -299,15 +307,36 @@ class HashLibTestCase(unittest.TestCase): + # 2 is for hashlib.name(...) and hashlib.new(name, ...) + self.assertGreaterEqual(len(constructors), 2) + for hash_object_constructor in constructors: ++ if ( ++ kwargs ++ and hash_object_constructor.__name__.startswith('openssl_') ++ ): ++ return + m = hash_object_constructor(data, **kwargs) +- computed = m.hexdigest() if not shake else m.hexdigest(length) ++ if shake: ++ if hash_object_constructor.__name__.startswith('openssl_'): ++ if length > m.digest_size: ++ # OpenSSL doesn't give long digests ++ return ++ computed = m.hexdigest()[:length*2] ++ hexdigest = hexdigest[:length*2] ++ else: ++ computed = m.hexdigest(length) ++ else: ++ computed = m.hexdigest() + self.assertEqual( + computed, hexdigest, + "Hash algorithm %s constructed using %s returned hexdigest" + " %r for %d byte input data that should have hashed to %r." 
+ % (name, hash_object_constructor, + computed, len(data), hexdigest)) +- computed = m.digest() if not shake else m.digest(length) ++ if shake: ++ if hash_object_constructor.__name__.startswith('openssl_'): ++ computed = m.digest()[:length] ++ else: ++ computed = m.digest(length) ++ else: ++ computed = m.digest() + digest = bytes.fromhex(hexdigest) + self.assertEqual(computed, digest) + if not shake: +@@ -347,12 +376,14 @@ class HashLibTestCase(unittest.TestCase): + for hash_object_constructor in constructors: + m = hash_object_constructor() + self.assertEqual(m.block_size, block_size) +- self.assertEqual(m.digest_size, digest_size) ++ if not hash_object_constructor.__name__.startswith('openssl_'): ++ self.assertEqual(m.digest_size, digest_size) + if digest_length: +- self.assertEqual(len(m.digest(digest_length)), +- digest_length) +- self.assertEqual(len(m.hexdigest(digest_length)), +- 2*digest_length) ++ if not hash_object_constructor.__name__.startswith('openssl_'): ++ self.assertEqual(len(m.digest(digest_length)), ++ digest_length) ++ self.assertEqual(len(m.hexdigest(digest_length)), ++ 2*digest_length) + else: + self.assertEqual(len(m.digest()), digest_size) + self.assertEqual(len(m.hexdigest()), 2*digest_size) +@@ -382,9 +413,10 @@ class HashLibTestCase(unittest.TestCase): + for hash_object_constructor in constructors: + m = hash_object_constructor() + self.assertEqual(capacity + rate, 1600) +- self.assertEqual(m._capacity_bits, capacity) +- self.assertEqual(m._rate_bits, rate) +- self.assertEqual(m._suffix, suffix) ++ if not hash_object_constructor.__name__.startswith('openssl_'): ++ self.assertEqual(m._capacity_bits, capacity) ++ self.assertEqual(m._rate_bits, rate) ++ self.assertEqual(m._suffix, suffix) + + @requires_sha3 + def test_extra_sha3(self): +-- +2.21.0 + + +From d6618795dd079acd8585f830b129d7f2fc9a6c52 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Fri, 26 Jul 2019 11:27:57 +0200 +Subject: [PATCH 09/36] Change FIPS exceptions from _blake2, _sha3 module init + to ImportError + +--- + Include/_hashopenssl.h | 11 +++++------ + Modules/_blake2/blake2b_impl.c | 4 ++-- + Modules/_blake2/blake2module.c | 2 +- + Modules/_blake2/blake2s_impl.c | 4 ++-- + Modules/_sha3/sha3module.c | 6 +++--- + 5 files changed, 13 insertions(+), 14 deletions(-) + +diff --git a/Include/_hashopenssl.h b/Include/_hashopenssl.h +index a726c0d3fbf..47ed0030422 100644 +--- a/Include/_hashopenssl.h ++++ b/Include/_hashopenssl.h +@@ -39,7 +39,7 @@ _setException(PyObject *exc) + + __attribute__((__unused__)) + static int +-_Py_hashlib_fips_error(char *name) { ++_Py_hashlib_fips_error(PyObject *exc, char *name) { + int result = FIPS_mode(); + if (result == 0) { + // "If the library was built without support of the FIPS Object Module, +@@ -49,18 +49,17 @@ _Py_hashlib_fips_error(char *name) { + + unsigned long errcode = ERR_peek_last_error(); + if (errcode) { +- _setException(PyExc_ValueError); ++ _setException(exc); + return 1; + } + return 0; + } +- PyErr_Format(PyExc_ValueError, "%s is not available in FIPS mode", +- name); ++ PyErr_Format(exc, "%s is not available in FIPS mode", name); + return 1; + } + +-#define FAIL_RETURN_IN_FIPS_MODE(name) do { \ +- if (_Py_hashlib_fips_error(name)) return NULL; \ ++#define FAIL_RETURN_IN_FIPS_MODE(exc, name) do { \ ++ if (_Py_hashlib_fips_error(exc, name)) return NULL; \ + } while (0) + + #endif // !Py_HASHOPENSSL_H +diff --git a/Modules/_blake2/blake2b_impl.c b/Modules/_blake2/blake2b_impl.c +index 341e67a8fcc..f6bfce823b8 100644 +--- 
a/Modules/_blake2/blake2b_impl.c ++++ b/Modules/_blake2/blake2b_impl.c +@@ -104,7 +104,7 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + unsigned long leaf_size = 0; + unsigned long long node_offset = 0; + +- FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); + + self = new_BLAKE2bObject(type); + if (self == NULL) { +@@ -296,7 +296,7 @@ _blake2_blake2b_update(BLAKE2bObject *self, PyObject *data) + { + Py_buffer buf; + +- FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); + + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + +diff --git a/Modules/_blake2/blake2module.c b/Modules/_blake2/blake2module.c +index 817b7165684..a9c7cbc7ebe 100644 +--- a/Modules/_blake2/blake2module.c ++++ b/Modules/_blake2/blake2module.c +@@ -58,7 +58,7 @@ PyInit__blake2(void) + PyObject *m; + PyObject *d; + +- FAIL_RETURN_IN_FIPS_MODE("blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ImportError, "blake2"); + + m = PyModule_Create(&blake2_module); + if (m == NULL) +diff --git a/Modules/_blake2/blake2s_impl.c b/Modules/_blake2/blake2s_impl.c +index 0dfe0586b57..28ae5b65101 100644 +--- a/Modules/_blake2/blake2s_impl.c ++++ b/Modules/_blake2/blake2s_impl.c +@@ -104,7 +104,7 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + unsigned long leaf_size = 0; + unsigned long long node_offset = 0; + +- FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); + + self = new_BLAKE2sObject(type); + if (self == NULL) { +@@ -296,7 +296,7 @@ _blake2_blake2s_update(BLAKE2sObject *self, PyObject *data) + { + Py_buffer buf; + +- FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); + + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + +diff --git a/Modules/_sha3/sha3module.c b/Modules/_sha3/sha3module.c +index 624f7f247d4..2783a75644f 100644 +--- a/Modules/_sha3/sha3module.c ++++ b/Modules/_sha3/sha3module.c +@@ -163,7 +163,7 @@ static PyTypeObject SHAKE256type; + static SHA3object * + newSHA3object(PyTypeObject *type) + { +- FAIL_RETURN_IN_FIPS_MODE("_sha3"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_sha3"); + SHA3object *newobj; + newobj = (SHA3object *)PyObject_New(SHA3object, type); + if (newobj == NULL) { +@@ -179,7 +179,7 @@ newSHA3object(PyTypeObject *type) + static PyObject * + py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + { +- FAIL_RETURN_IN_FIPS_MODE("_sha3"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_sha3"); + SHA3object *self = NULL; + Py_buffer buf = {NULL, NULL}; + HashReturn res; +@@ -727,7 +727,7 @@ PyInit__sha3(void) + { + PyObject *m = NULL; + +- FAIL_RETURN_IN_FIPS_MODE("_sha3"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ImportError, "_sha3"); + + if ((m = PyModule_Create(&_SHA3module)) == NULL) { + return NULL; +-- +2.21.0 + + +From eb78e4b4179842185f53edba2073ab7644db7ad3 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Fri, 26 Jul 2019 11:24:09 +0200 +Subject: [PATCH 10/36] Make hashlib importable under FIPS mode + +--- + Lib/hashlib.py | 8 +++++--- + 1 file changed, 5 insertions(+), 3 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index 2def0a310c6..ca1dd202251 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -132,12 +132,14 @@ def __get_openssl_constructor(name): + f = getattr(_hashlib, 'openssl_' + name) + # Allow the C module to raise ValueError. The function will be + # defined but the hash not actually available thanks to OpenSSL. 
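The hunk continuing below reworks __get_openssl_constructor() so that the probing call is skipped in FIPS mode (where there is deliberately no fallback and a possibly broken constructor is exported anyway), while outside FIPS mode a failing probe still falls back to the built-in implementation. A simplified, illustrative model of that selection logic, with dictionaries standing in for the real getattr()/_hashlib machinery:

    def _pick_constructor(name, fips_mode, openssl_constructors, builtin_constructors):
        # Illustrative model only; the patched code uses getattr(_hashlib, ...)
        # and _hashlib.get_fips_mode() instead of these parameters.
        try:
            f = openssl_constructors[name]   # e.g. _hashlib.openssl_sha256
            if not fips_mode:
                # Probe the constructor; under FIPS there is no fallback,
                # so the constructor is returned even if it would fail.
                f()
            return f
        except (KeyError, ValueError):
            return builtin_constructors[name]  # e.g. the _sha3/_blake2 builtins
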
+- f() ++ if not get_fips_mode(): ++ # N.B. In "FIPS mode", there is no fallback. ++ # If this test fails, we want to export the broken hash ++ # constructor anyway. ++ f() + # Use the C function directly (very fast) + return f + except (AttributeError, ValueError): +- if get_fips_mode(): +- raise + return __get_builtin_constructor(name) + + if not get_fips_mode(): +-- +2.21.0 + + +From 0f313f65457f1e7d0e7238a71935f5d4e0d197ee Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Fri, 26 Jul 2019 15:41:10 +0200 +Subject: [PATCH 11/36] Implement hmac.new using new built-in module, + _hmacopenssl + +--- + Lib/hmac.py | 33 ++- + Modules/_hmacopenssl.c | 396 ++++++++++++++++++++++++++++++++ + Modules/clinic/_hmacopenssl.c.h | 133 +++++++++++ + setup.py | 4 + + 4 files changed, 565 insertions(+), 1 deletion(-) + create mode 100644 Modules/_hmacopenssl.c + create mode 100644 Modules/clinic/_hmacopenssl.c.h + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index 121029aa670..ed98406bd2e 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -6,6 +6,8 @@ Implements the HMAC algorithm as described by RFC 2104. + import warnings as _warnings + from _operator import _compare_digest as compare_digest + import hashlib as _hashlib ++import _hashlib as _hashlibopenssl ++import _hmacopenssl + + trans_5C = bytes((x ^ 0x5C) for x in range(256)) + trans_36 = bytes((x ^ 0x36) for x in range(256)) +@@ -37,6 +39,11 @@ class HMAC: + + Note: key and msg must be a bytes or bytearray objects. + """ ++ if _hashlib.get_fips_mode(): ++ raise ValueError( ++ 'hmac.HMAC is not available in FIPS mode. ' ++ + 'Use hmac.new().' ++ ) + + if not isinstance(key, (bytes, bytearray)): + raise TypeError("key: expected bytes or bytearray, but got %r" % type(key).__name__) +@@ -90,6 +97,8 @@ class HMAC: + def update(self, msg): + """Update this hashing object with the string msg. + """ ++ if _hashlib.get_fips_mode(): ++ raise ValueError('hmac.HMAC is not available in FIPS mode') + self.inner.update(msg) + + def copy(self): +@@ -130,6 +139,19 @@ class HMAC: + h = self._current() + return h.hexdigest() + ++ ++def _get_openssl_name(digestmod): ++ if isinstance(digestmod, str): ++ return digestmod.lower() ++ elif callable(digestmod): ++ digestmod = digestmod(b'') ++ ++ if not isinstance(digestmod, _hashlibopenssl.HASH): ++ raise TypeError( ++ 'Only OpenSSL hashlib hashes are accepted in FIPS mode.') ++ ++ return digestmod.name.lower().replace('_', '-') ++ + def new(key, msg = None, digestmod = None): + """Create a new hashing object and return it. + +@@ -141,4 +163,13 @@ def new(key, msg = None, digestmod = None): + method, and can ask for the hash value at any time by calling its digest() + method. + """ +- return HMAC(key, msg, digestmod) ++ if _hashlib.get_fips_mode(): ++ if digestmod is None: ++ digestmod = 'md5' ++ name = _get_openssl_name(digestmod) ++ result = _hmacopenssl.new(key, digestmod=name) ++ if msg: ++ result.update(msg) ++ return result ++ else: ++ return HMAC(key, msg, digestmod) +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +new file mode 100644 +index 00000000000..ca95d725f01 +--- /dev/null ++++ b/Modules/_hmacopenssl.c +@@ -0,0 +1,396 @@ ++/* Module that wraps all OpenSSL MHAC algorithm */ ++ ++/* Copyright (C) 2019 Red Hat, Inc. Red Hat, Inc. and/or its affiliates ++ * ++ * Based on _hashopenssl.c, which is: ++ * Copyright (C) 2005-2010 Gregory P. Smith (greg@krypto.org) ++ * Licensed to PSF under a Contributor Agreement. 
++ * ++ * Derived from a skeleton of shamodule.c containing work performed by: ++ * ++ * Andrew Kuchling (amk@amk.ca) ++ * Greg Stein (gstein@lyra.org) ++ * ++ */ ++ ++#define PY_SSIZE_T_CLEAN ++ ++#include "Python.h" ++#include "structmember.h" ++#include "hashlib.h" ++#include "pystrhex.h" ++#include "_hashopenssl.h" ++ ++ ++#include ++ ++static PyTypeObject HmacType; ++ ++typedef struct { ++ PyObject_HEAD ++ PyObject *name; /* name of the hash algorithm */ ++ HMAC_CTX *ctx; /* OpenSSL hmac context */ ++ PyThread_type_lock lock; /* HMAC context lock */ ++} HmacObject; ++ ++#include "clinic/_hmacopenssl.c.h" ++/*[clinic input] ++module _hmacopenssl ++class _hmacopenssl.HMAC "HmacObject *" "&HmacType" ++[clinic start generated code]*/ ++/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c98d3f2af591c085]*/ ++ ++ ++/*[clinic input] ++_hmacopenssl.new ++ ++ key: Py_buffer ++ * ++ digestmod: str ++ ++Return a new hmac object. ++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_new_impl(PyObject *module, Py_buffer *key, ++ const char *digestmod) ++/*[clinic end generated code: output=46f1cb4e02921922 input=be8c0c2e4fad508c]*/ ++{ ++ if (digestmod == NULL) { ++ PyErr_SetString(PyExc_ValueError, "digestmod must be specified"); ++ return NULL; ++ } ++ ++ /* name mut be lowercase */ ++ for (int i=0; digestmod[i]; i++) { ++ if ( ++ ((digestmod[i] < 'a') || (digestmod[i] > 'z')) ++ && ((digestmod[i] < '0') || (digestmod[i] > '9')) ++ && digestmod[i] != '-' ++ ) { ++ PyErr_SetString(PyExc_ValueError, "digestmod must be lowercase"); ++ return NULL; ++ } ++ } ++ ++ const EVP_MD *digest = EVP_get_digestbyname(digestmod); ++ if (!digest) { ++ PyErr_SetString(PyExc_ValueError, "unknown hash function"); ++ return NULL; ++ } ++ ++ PyObject *name = NULL; ++ HMAC_CTX *ctx = NULL; ++ HmacObject *retval = NULL; ++ ++ name = PyUnicode_FromFormat("hmac-%s", digestmod); ++ if (name == NULL) { ++ goto error; ++ } ++ ++ ctx = HMAC_CTX_new(); ++ if (ctx == NULL) { ++ _setException(PyExc_ValueError); ++ goto error; ++ } ++ ++ int r = HMAC_Init_ex( ++ ctx, ++ (const char*)key->buf, ++ key->len, ++ digest, ++ NULL /*impl*/); ++ if (r == 0) { ++ _setException(PyExc_ValueError); ++ goto error; ++ } ++ ++ retval = (HmacObject *)PyObject_New(HmacObject, &HmacType); ++ if (retval == NULL) { ++ goto error; ++ } ++ ++ retval->name = name; ++ retval->ctx = ctx; ++ retval->lock = NULL; ++ ++ return (PyObject*)retval; ++ ++error: ++ if (ctx) HMAC_CTX_free(ctx); ++ if (name) Py_DECREF(name); ++ if (retval) PyObject_Del(name); ++ return NULL; ++} ++ ++/*[clinic input] ++_hmacopenssl.HMAC.copy ++ ++Return a copy (“clone”) of the HMAC object. 
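The _get_openssl_name() helper added to Lib/hmac.py above normalizes whatever was passed as digestmod into the spelling OpenSSL's EVP_get_digestbyname() expects: strings are lowercased, and a constructor is called on b'' so its .name attribute can be used, with underscores mapped to hyphens (so a sha3_256 object yields "sha3-256"). A small illustrative model of the same mapping, with the OpenSSL-object type check omitted:

    def _openssl_name_sketch(digestmod):
        # Strings pass through lowercased; callables are instantiated and
        # their hashlib-style name is rewritten to OpenSSL's spelling.
        if isinstance(digestmod, str):
            return digestmod.lower()
        return digestmod(b"").name.lower().replace("_", "-")

    assert _openssl_name_sketch("SHA256") == "sha256"
    # a hash object whose name is "sha3_256" would map to "sha3-256"
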
++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_HMAC_copy_impl(HmacObject *self) ++/*[clinic end generated code: output=fe5ee41faf30dcf0 input=f5ed20feec42d8d0]*/ ++{ ++ HmacObject *retval = (HmacObject *)PyObject_New(HmacObject, &HmacType); ++ if (retval == NULL) { ++ return NULL; ++ } ++ ++ Py_INCREF(self->name); ++ retval->name = self->name; ++ ++ int r = HMAC_CTX_copy(retval->ctx, self->ctx); ++ if (r == 0) { ++ PyObject_Del(retval); ++ return _setException(PyExc_ValueError); ++ } ++ ++ return (PyObject*)retval; ++} ++ ++static void ++_hmac_dealloc(HmacObject *self) ++{ ++ if (self->lock != NULL) { ++ PyThread_free_lock(self->lock); ++ } ++ HMAC_CTX_free(self->ctx); ++ Py_XDECREF(self->name); ++ PyObject_Del(self); ++} ++ ++static PyObject * ++_hmac_repr(HmacObject *self) ++{ ++ return PyUnicode_FromFormat("<%U HMAC object @ %p>", self->name, self); ++} ++ ++/*[clinic input] ++_hmacopenssl.HMAC.update ++ ++ msg: Py_buffer ++ ++Update the HMAC object with msg. ++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_HMAC_update_impl(HmacObject *self, Py_buffer *msg) ++/*[clinic end generated code: output=0efeee663a98cee5 input=0683d64f35808cb9]*/ ++{ ++ if (self->lock == NULL && msg->len >= HASHLIB_GIL_MINSIZE) { ++ self->lock = PyThread_allocate_lock(); ++ /* fail? lock = NULL and we fail over to non-threaded code. */ ++ } ++ ++ int r; ++ ++ if (self->lock != NULL) { ++ Py_BEGIN_ALLOW_THREADS ++ PyThread_acquire_lock(self->lock, 1); ++ r = HMAC_Update(self->ctx, (const unsigned char*)msg->buf, msg->len); ++ PyThread_release_lock(self->lock); ++ Py_END_ALLOW_THREADS ++ } else { ++ r = HMAC_Update(self->ctx, (const unsigned char*)msg->buf, msg->len); ++ } ++ ++ if (r == 0) { ++ _setException(PyExc_ValueError); ++ return NULL; ++ } ++ Py_RETURN_NONE; ++} ++ ++static unsigned int ++_digest_size(HmacObject *self) ++{ ++ const EVP_MD *md = HMAC_CTX_get_md(self->ctx); ++ if (md == NULL) { ++ _setException(PyExc_ValueError); ++ return 0; ++ } ++ return EVP_MD_size(md); ++} ++ ++static int ++_digest(HmacObject *self, unsigned char *buf, unsigned int len) ++{ ++ HMAC_CTX *temp_ctx = HMAC_CTX_new(); ++ if (temp_ctx == NULL) { ++ PyErr_NoMemory(); ++ return 0; ++ } ++ int r = HMAC_CTX_copy(temp_ctx, self->ctx); ++ if (r == 0) { ++ _setException(PyExc_ValueError); ++ return 0; ++ } ++ r = HMAC_Final(temp_ctx, buf, &len); ++ HMAC_CTX_free(temp_ctx); ++ if (r == 0) { ++ _setException(PyExc_ValueError); ++ return 0; ++ } ++ return 1; ++} ++ ++/*[clinic input] ++_hmacopenssl.HMAC.digest ++ ++Return the digest of the bytes passed to the update() method so far. ++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_HMAC_digest_impl(HmacObject *self) ++/*[clinic end generated code: output=3aa6dbfc46ec4957 input=bf769a10b1d9edd9]*/ ++{ ++ unsigned int digest_size = _digest_size(self); ++ if (digest_size == 0) { ++ return _setException(PyExc_ValueError); ++ } ++ unsigned char buf[digest_size]; /* FIXME: C99 feature */ ++ int r = _digest(self, buf, digest_size); ++ if (r == 0) { ++ return NULL; ++ } ++ return PyBytes_FromStringAndSize((const char *)buf, digest_size); ++} ++ ++/*[clinic input] ++_hmacopenssl.HMAC.hexdigest ++ ++Return hexadecimal digest of the bytes passed to the update() method so far. ++ ++This may be used to exchange the value safely in email or other non-binary ++environments. 
++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_HMAC_hexdigest_impl(HmacObject *self) ++/*[clinic end generated code: output=630f6fa89f9f1e48 input=b8e60ec8b811c4cd]*/ ++{ ++ unsigned int digest_size = _digest_size(self); ++ if (digest_size == 0) { ++ return _setException(PyExc_ValueError); ++ } ++ unsigned char buf[digest_size]; /* FIXME: C99 feature */ ++ int r = _digest(self, buf, digest_size); ++ if (r == 0) { ++ return NULL; ++ } ++ return _Py_strhex((const char *)buf, digest_size); ++} ++ ++ ++ ++static PyObject * ++_hmacopenssl_get_digest_size(HmacObject *self, void *closure) ++{ ++ unsigned int digest_size = _digest_size(self); ++ if (digest_size == 0) { ++ return _setException(PyExc_ValueError); ++ } ++ return PyLong_FromLong(digest_size); ++} ++ ++static PyObject * ++_hmacopenssl_get_block_size(HmacObject *self, void *closure) ++{ ++ const EVP_MD *md = HMAC_CTX_get_md(self->ctx); ++ if (md == NULL) { ++ return _setException(PyExc_ValueError); ++ } ++ return PyLong_FromLong(EVP_MD_size(md)); ++} ++ ++static PyMethodDef Hmac_methods[] = { ++ _HMACOPENSSL_HMAC_UPDATE_METHODDEF ++ _HMACOPENSSL_HMAC_DIGEST_METHODDEF ++ _HMACOPENSSL_HMAC_HEXDIGEST_METHODDEF ++ _HMACOPENSSL_HMAC_COPY_METHODDEF ++ {NULL, NULL} /* sentinel */ ++}; ++ ++static PyGetSetDef Hmac_getset[] = { ++ {"digest_size", (getter)_hmacopenssl_get_digest_size, NULL, NULL, NULL}, ++ {"block_size", (getter)_hmacopenssl_get_block_size, NULL, NULL, NULL}, ++ {NULL} /* Sentinel */ ++}; ++ ++static PyMemberDef Hmac_members[] = { ++ {"name", T_OBJECT, offsetof(HmacObject, name), READONLY, PyDoc_STR("HMAC name")}, ++}; ++ ++PyDoc_STRVAR(hmactype_doc, ++"The object used to calculate HMAC of a message.\n\ ++\n\ ++Methods:\n\ ++\n\ ++update() -- updates the current digest with an additional string\n\ ++digest() -- return the current digest value\n\ ++hexdigest() -- return the current digest as a string of hexadecimal digits\n\ ++copy() -- return a copy of the current hash object\n\ ++\n\ ++Attributes:\n\ ++\n\ ++name -- the name, including the hash algorithm used by this object\n\ ++digest_size -- number of bytes in digest() output\n"); ++ ++static PyTypeObject HmacType = { ++ PyVarObject_HEAD_INIT(NULL, 0) ++ "_hmacopenssl.HMAC", /*tp_name*/ ++ sizeof(HmacObject), /*tp_basicsize*/ ++ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, ++ .tp_doc = hmactype_doc, ++ .tp_repr = (reprfunc)_hmac_repr, ++ .tp_dealloc = (destructor)_hmac_dealloc, ++ .tp_methods = Hmac_methods, ++ .tp_getset = Hmac_getset, ++ .tp_members = Hmac_members, ++}; ++ ++static struct PyMethodDef hmacopenssl_functions[] = { ++ _HMACOPENSSL_NEW_METHODDEF ++ {NULL, NULL} /* Sentinel */ ++}; ++ ++ ++ ++/* Initialize this module. */ ++ ++ ++static struct PyModuleDef _hmacopenssl_module = { ++ PyModuleDef_HEAD_INIT, ++ "_hmacopenssl", ++ NULL, ++ -1, ++ hmacopenssl_functions, ++ NULL, ++ NULL, ++ NULL, ++ NULL ++}; ++ ++PyMODINIT_FUNC ++PyInit__hmacopenssl(void) ++{ ++ /* TODO build EVP_functions openssl_* entries dynamically based ++ * on what hashes are supported rather than listing many ++ * but having some be unsupported. Only init appropriate ++ * constants. 
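For reference, the module initialized here is meant to be driven from Lib/hmac.py rather than used directly, but its surface is small. A usage sketch, guarded so it only runs on an interpreter built with these patches (note that a later patch in this series replaces _hmacopenssl.new() with a subclassable _hmacopenssl.HMAC type):

    try:
        import _hmacopenssl  # only present on builds carrying these patches
    except ImportError:
        _hmacopenssl = None

    if _hmacopenssl is not None:
        h = _hmacopenssl.new(b"my key", digestmod="sha256")  # name must be lowercase
        h.update(b"some message")
        print(h.name, h.digest_size, h.hexdigest())
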
*/ ++ ++ Py_TYPE(&HmacType) = &PyType_Type; ++ if (PyType_Ready(&HmacType) < 0) ++ return NULL; ++ ++ PyObject *m = PyModule_Create(&_hmacopenssl_module); ++ if (m == NULL) ++ return NULL; ++ ++ Py_INCREF((PyObject *)&HmacType); ++ PyModule_AddObject(m, "HMAC", (PyObject *)&HmacType); ++ ++ return m; ++} +diff --git a/Modules/clinic/_hmacopenssl.c.h b/Modules/clinic/_hmacopenssl.c.h +new file mode 100644 +index 00000000000..b472a6eddd3 +--- /dev/null ++++ b/Modules/clinic/_hmacopenssl.c.h +@@ -0,0 +1,133 @@ ++/*[clinic input] ++preserve ++[clinic start generated code]*/ ++ ++PyDoc_STRVAR(_hmacopenssl_new__doc__, ++"new($module, /, key, *, digestmod)\n" ++"--\n" ++"\n" ++"Return a new hmac object."); ++ ++#define _HMACOPENSSL_NEW_METHODDEF \ ++ {"new", (PyCFunction)_hmacopenssl_new, METH_FASTCALL, _hmacopenssl_new__doc__}, ++ ++static PyObject * ++_hmacopenssl_new_impl(PyObject *module, Py_buffer *key, ++ const char *digestmod); ++ ++static PyObject * ++_hmacopenssl_new(PyObject *module, PyObject **args, Py_ssize_t nargs, PyObject *kwnames) ++{ ++ PyObject *return_value = NULL; ++ static const char * const _keywords[] = {"key", "digestmod", NULL}; ++ static _PyArg_Parser _parser = {"y*$s:new", _keywords, 0}; ++ Py_buffer key = {NULL, NULL}; ++ const char *digestmod; ++ ++ if (!_PyArg_ParseStack(args, nargs, kwnames, &_parser, ++ &key, &digestmod)) { ++ goto exit; ++ } ++ return_value = _hmacopenssl_new_impl(module, &key, digestmod); ++ ++exit: ++ /* Cleanup for key */ ++ if (key.obj) { ++ PyBuffer_Release(&key); ++ } ++ ++ return return_value; ++} ++ ++PyDoc_STRVAR(_hmacopenssl_HMAC_copy__doc__, ++"copy($self, /)\n" ++"--\n" ++"\n" ++"Return a copy (“clone”) of the HMAC object."); ++ ++#define _HMACOPENSSL_HMAC_COPY_METHODDEF \ ++ {"copy", (PyCFunction)_hmacopenssl_HMAC_copy, METH_NOARGS, _hmacopenssl_HMAC_copy__doc__}, ++ ++static PyObject * ++_hmacopenssl_HMAC_copy_impl(HmacObject *self); ++ ++static PyObject * ++_hmacopenssl_HMAC_copy(HmacObject *self, PyObject *Py_UNUSED(ignored)) ++{ ++ return _hmacopenssl_HMAC_copy_impl(self); ++} ++ ++PyDoc_STRVAR(_hmacopenssl_HMAC_update__doc__, ++"update($self, /, msg)\n" ++"--\n" ++"\n" ++"Update the HMAC object with msg."); ++ ++#define _HMACOPENSSL_HMAC_UPDATE_METHODDEF \ ++ {"update", (PyCFunction)_hmacopenssl_HMAC_update, METH_FASTCALL, _hmacopenssl_HMAC_update__doc__}, ++ ++static PyObject * ++_hmacopenssl_HMAC_update_impl(HmacObject *self, Py_buffer *msg); ++ ++static PyObject * ++_hmacopenssl_HMAC_update(HmacObject *self, PyObject **args, Py_ssize_t nargs, PyObject *kwnames) ++{ ++ PyObject *return_value = NULL; ++ static const char * const _keywords[] = {"msg", NULL}; ++ static _PyArg_Parser _parser = {"y*:update", _keywords, 0}; ++ Py_buffer msg = {NULL, NULL}; ++ ++ if (!_PyArg_ParseStack(args, nargs, kwnames, &_parser, ++ &msg)) { ++ goto exit; ++ } ++ return_value = _hmacopenssl_HMAC_update_impl(self, &msg); ++ ++exit: ++ /* Cleanup for msg */ ++ if (msg.obj) { ++ PyBuffer_Release(&msg); ++ } ++ ++ return return_value; ++} ++ ++PyDoc_STRVAR(_hmacopenssl_HMAC_digest__doc__, ++"digest($self, /)\n" ++"--\n" ++"\n" ++"Return the digest of the bytes passed to the update() method so far."); ++ ++#define _HMACOPENSSL_HMAC_DIGEST_METHODDEF \ ++ {"digest", (PyCFunction)_hmacopenssl_HMAC_digest, METH_NOARGS, _hmacopenssl_HMAC_digest__doc__}, ++ ++static PyObject * ++_hmacopenssl_HMAC_digest_impl(HmacObject *self); ++ ++static PyObject * ++_hmacopenssl_HMAC_digest(HmacObject *self, PyObject *Py_UNUSED(ignored)) ++{ ++ return 
_hmacopenssl_HMAC_digest_impl(self); ++} ++ ++PyDoc_STRVAR(_hmacopenssl_HMAC_hexdigest__doc__, ++"hexdigest($self, /)\n" ++"--\n" ++"\n" ++"Return hexadecimal digest of the bytes passed to the update() method so far.\n" ++"\n" ++"This may be used to exchange the value safely in email or other non-binary\n" ++"environments."); ++ ++#define _HMACOPENSSL_HMAC_HEXDIGEST_METHODDEF \ ++ {"hexdigest", (PyCFunction)_hmacopenssl_HMAC_hexdigest, METH_NOARGS, _hmacopenssl_HMAC_hexdigest__doc__}, ++ ++static PyObject * ++_hmacopenssl_HMAC_hexdigest_impl(HmacObject *self); ++ ++static PyObject * ++_hmacopenssl_HMAC_hexdigest(HmacObject *self, PyObject *Py_UNUSED(ignored)) ++{ ++ return _hmacopenssl_HMAC_hexdigest_impl(self); ++} ++/*[clinic end generated code: output=10b6e8cac6d7a2c9 input=a9049054013a1b77]*/ +diff --git a/setup.py b/setup.py +index e2e659ef795..282aa4178ed 100644 +--- a/setup.py ++++ b/setup.py +@@ -922,6 +922,10 @@ class PyBuildExt(build_ext): + openssl_ver) + missing.append('_hashlib') + ++ exts.append( Extension('_hmacopenssl', ['_hmacopenssl.c'], ++ depends = ['hashlib.h'], ++ **ssl_args)) ++ + # RHEL: Always force OpenSSL for md5, sha1, sha256, sha512; + # don't build Python's implementations. + # sha3 and blake2 have extra functionality, so do build those: +-- +2.21.0 + + +From 37797b4d89e7b4859dc4728da91cbebf5fdb2a52 Mon Sep 17 00:00:00 2001 +From: Marcel Plch +Date: Mon, 29 Jul 2019 12:45:11 +0200 +Subject: [PATCH 12/36] FIPS review + +* Port _hmacopenssl to multiphase init. +* Make _hmacopenssl.HMAC.copy create same type as self. +* hmac.py cosmetic nitpick +--- + Lib/hmac.py | 2 +- + Modules/_hmacopenssl.c | 112 +++++++++++++++++++++++++---------------- + 2 files changed, 70 insertions(+), 44 deletions(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index ed98406bd2e..b9bf16b84ca 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -42,7 +42,7 @@ class HMAC: + if _hashlib.get_fips_mode(): + raise ValueError( + 'hmac.HMAC is not available in FIPS mode. ' +- + 'Use hmac.new().' ++ 'Use hmac.new().' 
+ ) + + if not isinstance(key, (bytes, bytearray)): +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index ca95d725f01..216ed04f236 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -24,7 +24,10 @@ + + #include + +-static PyTypeObject HmacType; ++typedef struct hmacopenssl_state { ++ PyTypeObject *HmacType; ++} hmacopenssl_state; ++ + + typedef struct { + PyObject_HEAD +@@ -36,9 +39,9 @@ typedef struct { + #include "clinic/_hmacopenssl.c.h" + /*[clinic input] + module _hmacopenssl +-class _hmacopenssl.HMAC "HmacObject *" "&HmacType" ++class _hmacopenssl.HMAC "HmacObject *" "PyModule_GetState(module)->HmacType" + [clinic start generated code]*/ +-/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c98d3f2af591c085]*/ ++/*[clinic end generated code: output=da39a3ee5e6b4b0d input=204b7f45847f57b4]*/ + + + /*[clinic input] +@@ -56,11 +59,18 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + const char *digestmod) + /*[clinic end generated code: output=46f1cb4e02921922 input=be8c0c2e4fad508c]*/ + { ++ hmacopenssl_state *state; ++ + if (digestmod == NULL) { + PyErr_SetString(PyExc_ValueError, "digestmod must be specified"); + return NULL; + } + ++ state = PyModule_GetState(module); ++ if (state == NULL) { ++ return NULL; ++ } ++ + /* name mut be lowercase */ + for (int i=0; digestmod[i]; i++) { + if ( +@@ -105,7 +115,7 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + goto error; + } + +- retval = (HmacObject *)PyObject_New(HmacObject, &HmacType); ++ retval = (HmacObject *)PyObject_New(HmacObject, state->HmacType); + if (retval == NULL) { + goto error; + } +@@ -133,7 +143,9 @@ static PyObject * + _hmacopenssl_HMAC_copy_impl(HmacObject *self) + /*[clinic end generated code: output=fe5ee41faf30dcf0 input=f5ed20feec42d8d0]*/ + { +- HmacObject *retval = (HmacObject *)PyObject_New(HmacObject, &HmacType); ++ HmacObject *retval; ++ ++ retval = (HmacObject *)PyObject_New(HmacObject, (PyTypeObject *)PyObject_Type((PyObject *)self)); + if (retval == NULL) { + return NULL; + } +@@ -147,7 +159,7 @@ _hmacopenssl_HMAC_copy_impl(HmacObject *self) + return _setException(PyExc_ValueError); + } + +- return (PyObject*)retval; ++ return (PyObject *)retval; + } + + static void +@@ -338,19 +350,24 @@ Attributes:\n\ + name -- the name, including the hash algorithm used by this object\n\ + digest_size -- number of bytes in digest() output\n"); + +-static PyTypeObject HmacType = { +- PyVarObject_HEAD_INIT(NULL, 0) +- "_hmacopenssl.HMAC", /*tp_name*/ +- sizeof(HmacObject), /*tp_basicsize*/ +- .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, +- .tp_doc = hmactype_doc, +- .tp_repr = (reprfunc)_hmac_repr, +- .tp_dealloc = (destructor)_hmac_dealloc, +- .tp_methods = Hmac_methods, +- .tp_getset = Hmac_getset, +- .tp_members = Hmac_members, ++static PyType_Slot HmacType_slots[] = { ++ {Py_tp_doc, hmactype_doc}, ++ {Py_tp_repr, (reprfunc)_hmac_repr}, ++ {Py_tp_dealloc,(destructor)_hmac_dealloc}, ++ {Py_tp_methods, Hmac_methods}, ++ {Py_tp_getset, Hmac_getset}, ++ {Py_tp_members, Hmac_members}, ++ {0, NULL} ++}; ++ ++PyType_Spec HmacType_spec = { ++ "_hmacopenssl.HMAC", /* name */ ++ sizeof(HmacObject), /* basicsize */ ++ .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, ++ .slots = HmacType_slots, + }; + ++ + static struct PyMethodDef hmacopenssl_functions[] = { + _HMACOPENSSL_NEW_METHODDEF + {NULL, NULL} /* Sentinel */ +@@ -360,37 +377,46 @@ static struct PyMethodDef hmacopenssl_functions[] = { + + /* Initialize this module. 
*/ + +- +-static struct PyModuleDef _hmacopenssl_module = { +- PyModuleDef_HEAD_INIT, +- "_hmacopenssl", +- NULL, +- -1, +- hmacopenssl_functions, +- NULL, +- NULL, +- NULL, +- NULL +-}; +- +-PyMODINIT_FUNC +-PyInit__hmacopenssl(void) +-{ ++static int ++hmacopenssl_exec(PyObject *m) { + /* TODO build EVP_functions openssl_* entries dynamically based + * on what hashes are supported rather than listing many +- * but having some be unsupported. Only init appropriate ++ * and having some unsupported. Only init appropriate + * constants. */ ++ PyObject *temp; + +- Py_TYPE(&HmacType) = &PyType_Type; +- if (PyType_Ready(&HmacType) < 0) +- return NULL; ++ temp = PyType_FromSpec(&HmacType_spec); ++ if (temp == NULL) { ++ goto fail; ++ } + +- PyObject *m = PyModule_Create(&_hmacopenssl_module); +- if (m == NULL) +- return NULL; ++ if (PyModule_AddObject(m, "HMAC", temp) == -1) { ++ goto fail; ++ } ++ ++ return 0; + +- Py_INCREF((PyObject *)&HmacType); +- PyModule_AddObject(m, "HMAC", (PyObject *)&HmacType); ++fail: ++ Py_XDECREF(temp); ++ return -1; ++} + +- return m; ++static PyModuleDef_Slot hmacopenssl_slots[] = { ++ {Py_mod_exec, hmacopenssl_exec}, ++ {0, NULL}, ++}; ++ ++static struct PyModuleDef _hmacopenssl_def = { ++ PyModuleDef_HEAD_INIT, /* m_base */ ++ .m_name = "_hmacopenssl", ++ .m_methods = hmacopenssl_functions, ++ .m_slots = hmacopenssl_slots, ++ .m_size = sizeof(hmacopenssl_state) ++}; ++ ++ ++PyMODINIT_FUNC ++PyInit__hmacopenssl(void) ++{ ++ return PyModuleDef_Init(&_hmacopenssl_def); + } +-- +2.21.0 + + +From 9a3bf73d382cda99a665cab8438f3cd0388256b0 Mon Sep 17 00:00:00 2001 +From: Marcel Plch +Date: Mon, 29 Jul 2019 13:05:04 +0200 +Subject: [PATCH 13/36] revert cosmetic nitpick and remove trailing whitespace + +--- + Lib/hmac.py | 2 +- + Modules/_hmacopenssl.c | 4 ++-- + 2 files changed, 3 insertions(+), 3 deletions(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index b9bf16b84ca..ed98406bd2e 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -42,7 +42,7 @@ class HMAC: + if _hashlib.get_fips_mode(): + raise ValueError( + 'hmac.HMAC is not available in FIPS mode. ' +- 'Use hmac.new().' ++ + 'Use hmac.new().' 
+ ) + + if not isinstance(key, (bytes, bytearray)): +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index 216ed04f236..221714ca434 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -363,7 +363,7 @@ static PyType_Slot HmacType_slots[] = { + PyType_Spec HmacType_spec = { + "_hmacopenssl.HMAC", /* name */ + sizeof(HmacObject), /* basicsize */ +- .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, ++ .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + .slots = HmacType_slots, + }; + +@@ -407,7 +407,7 @@ static PyModuleDef_Slot hmacopenssl_slots[] = { + }; + + static struct PyModuleDef _hmacopenssl_def = { +- PyModuleDef_HEAD_INIT, /* m_base */ ++ PyModuleDef_HEAD_INIT, /* m_base */ + .m_name = "_hmacopenssl", + .m_methods = hmacopenssl_functions, + .m_slots = hmacopenssl_slots, +-- +2.21.0 + + +From b2853963d35c60595cec7e5cd40f31b1c9c59426 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Wed, 31 Jul 2019 15:43:43 +0200 +Subject: [PATCH 14/36] Add initial tests for various hashes under FIPS mode + +--- + Lib/test/test_fips.py | 64 +++++++++++++++++++++++++++++++++++++++++++ + 1 file changed, 64 insertions(+) + create mode 100644 Lib/test/test_fips.py + +diff --git a/Lib/test/test_fips.py b/Lib/test/test_fips.py +new file mode 100644 +index 00000000000..bee911ef405 +--- /dev/null ++++ b/Lib/test/test_fips.py +@@ -0,0 +1,64 @@ ++import unittest ++import hmac, _hmacopenssl ++import hashlib, _hashlib ++ ++ ++ ++class HashlibFipsTests(unittest.TestCase): ++ ++ @unittest.skipUnless(hashlib.get_fips_mode(), "Test only when FIPS is enabled") ++ def test_fips_imports(self): ++ """blake2s and blake2b should fail to import in FIPS mode ++ """ ++ with self.assertRaises(ValueError, msg='blake2s not available in FIPS'): ++ m = hashlib.blake2s() ++ with self.assertRaises(ValueError, msg='blake2b not available in FIPS'): ++ m = hashlib.blake2b() ++ ++ def compare_hashes(self, python_hash, openssl_hash): ++ """ ++ Compare between the python implementation and the openssl one that the digests ++ are the same ++ """ ++ if python_hash.name.startswith('shake_128'): ++ m = python_hash.hexdigest(16) ++ elif python_hash.name.startswith('shake_256'): ++ m = python_hash.hexdigest(32) ++ else: ++ m = python_hash.hexdigest() ++ h = openssl_hash.hexdigest() ++ ++ self.assertEqual(m, h) ++ ++ @unittest.skipIf(hashlib.get_fips_mode(), "blake2 hashes are not available under FIPS") ++ def test_blake2_hashes(self): ++ self.compare_hashes(hashlib.blake2b(b'abc'), _hashlib.openssl_blake2b(b'abc')) ++ self.compare_hashes(hashlib.blake2s(b'abc'), _hashlib.openssl_blake2s(b'abc')) ++ ++ def test_sha3_hashes(self): ++ self.compare_hashes(hashlib.sha3_224(b'abc'), _hashlib.openssl_sha3_224(b'abc')) ++ self.compare_hashes(hashlib.sha3_256(b'abc'), _hashlib.openssl_sha3_256(b'abc')) ++ self.compare_hashes(hashlib.sha3_384(b'abc'), _hashlib.openssl_sha3_384(b'abc')) ++ self.compare_hashes(hashlib.sha3_512(b'abc'), _hashlib.openssl_sha3_512(b'abc')) ++ ++ @unittest.skipIf(hashlib.get_fips_mode(), "shake hashes are not available under FIPS") ++ def test_shake_hashes(self): ++ self.compare_hashes(hashlib.shake_128(b'abc'), _hashlib.openssl_shake_128(b'abc')) ++ self.compare_hashes(hashlib.shake_256(b'abc'), _hashlib.openssl_shake_256(b'abc')) ++ ++ def test_sha(self): ++ self.compare_hashes(hashlib.sha1(b'abc'), _hashlib.openssl_sha1(b'abc')) ++ self.compare_hashes(hashlib.sha224(b'abc'), _hashlib.openssl_sha224(b'abc')) ++ self.compare_hashes(hashlib.sha256(b'abc'), 
_hashlib.openssl_sha256(b'abc')) ++ self.compare_hashes(hashlib.sha384(b'abc'), _hashlib.openssl_sha384(b'abc')) ++ self.compare_hashes(hashlib.sha512(b'abc'), _hashlib.openssl_sha512(b'abc')) ++ ++ def test_hmac_digests(self): ++ self.compare_hashes(_hmacopenssl.new(b'My hovercraft is full of eels', digestmod='sha384'), ++ hmac.new(b'My hovercraft is full of eels', digestmod='sha384')) ++ ++ ++ ++ ++if __name__ == "__main__": ++ unittest.main() +-- +2.21.0 + + +From b3a8214b5afe4809983e6a5e5d339527e8238318 Mon Sep 17 00:00:00 2001 +From: Marcel Plch +Date: Thu, 1 Aug 2019 16:39:37 +0200 +Subject: [PATCH 15/36] Initialize HMAC type. + +--- + Modules/_hmacopenssl.c | 12 ++++++++---- + 1 file changed, 8 insertions(+), 4 deletions(-) + +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index 221714ca434..239445a0831 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -22,12 +22,12 @@ + #include "_hashopenssl.h" + + +-#include + + typedef struct hmacopenssl_state { + PyTypeObject *HmacType; + } hmacopenssl_state; + ++#include + + typedef struct { + PyObject_HEAD +@@ -39,7 +39,7 @@ typedef struct { + #include "clinic/_hmacopenssl.c.h" + /*[clinic input] + module _hmacopenssl +-class _hmacopenssl.HMAC "HmacObject *" "PyModule_GetState(module)->HmacType" ++class _hmacopenssl.HMAC "HmacObject *" "((hmacopenssl_state *)PyModule_GetState(module))->HmacType" + [clinic start generated code]*/ + /*[clinic end generated code: output=da39a3ee5e6b4b0d input=204b7f45847f57b4]*/ + +@@ -71,7 +71,7 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + return NULL; + } + +- /* name mut be lowercase */ ++ /* name must be lowercase */ + for (int i=0; digestmod[i]; i++) { + if ( + ((digestmod[i] < 'a') || (digestmod[i] > 'z')) +@@ -383,7 +383,8 @@ hmacopenssl_exec(PyObject *m) { + * on what hashes are supported rather than listing many + * and having some unsupported. Only init appropriate + * constants. */ +- PyObject *temp; ++ PyObject *temp = NULL; ++ hmacopenssl_state *state; + + temp = PyType_FromSpec(&HmacType_spec); + if (temp == NULL) { +@@ -394,6 +395,9 @@ hmacopenssl_exec(PyObject *m) { + goto fail; + } + ++ state = PyModule_GetState(m); ++ state->HmacType = (PyTypeObject *)temp; ++ + return 0; + + fail: +-- +2.21.0 + + +From 5bd6eb14097fe4ff2bb639e5c50260b1379d0a69 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 1 Aug 2019 17:57:05 +0200 +Subject: [PATCH 16/36] Use a stronger hash in multiprocessing handshake + +Adapted from patch by David Malcolm, +https://bugs.python.org/issue17258 +--- + Lib/multiprocessing/connection.py | 8 ++++++-- + 1 file changed, 6 insertions(+), 2 deletions(-) + +diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py +index d3797503a75..a0b1538f88b 100644 +--- a/Lib/multiprocessing/connection.py ++++ b/Lib/multiprocessing/connection.py +@@ -42,6 +42,10 @@ BUFSIZE = 8192 + # A very generous timeout when it comes to local connections... + CONNECTION_TIMEOUT = 20. + ++# The hmac module implicitly defaults to using MD5. 
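Both ends of a multiprocessing connection prove knowledge of the shared authkey by HMAC-ing a random challenge; the hunks below simply switch the digest used for that handshake from md5 to the HMAC_DIGEST_NAME constant. A self-contained sketch of the exchange (both sides simulated in-process, challenge length chosen arbitrarily):

    import hmac
    import os

    def handshake_ok(authkey: bytes, digest_name: str = "sha256") -> bool:
        challenge = os.urandom(32)  # stands in for the random challenge message
        # "server" computes the expected response, "client" answers with its own
        expected = hmac.new(authkey, challenge, digest_name).digest()
        response = hmac.new(authkey, challenge, digest_name).digest()
        return hmac.compare_digest(expected, response)

    assert handshake_ok(b"secret authkey")
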
++# Support using a stronger algorithm for the challenge/response code: ++HMAC_DIGEST_NAME='sha256' ++ + _mmap_counter = itertools.count() + + default_family = 'AF_INET' +@@ -718,7 +722,7 @@ def deliver_challenge(connection, authkey): + assert isinstance(authkey, bytes) + message = os.urandom(MESSAGE_LENGTH) + connection.send_bytes(CHALLENGE + message) +- digest = hmac.new(authkey, message, 'md5').digest() ++ digest = hmac.new(authkey, message, HMAC_DIGEST_NAME).digest() + response = connection.recv_bytes(256) # reject large message + if response == digest: + connection.send_bytes(WELCOME) +@@ -732,7 +736,7 @@ def answer_challenge(connection, authkey): + message = connection.recv_bytes(256) # reject large message + assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message + message = message[len(CHALLENGE):] +- digest = hmac.new(authkey, message, 'md5').digest() ++ digest = hmac.new(authkey, message, HMAC_DIGEST_NAME).digest() + connection.send_bytes(digest) + response = connection.recv_bytes(256) # reject large message + if response != WELCOME: +-- +2.21.0 + + +From 1dd4b2d6c4797b828ac38d8c62f1e69fce49f1e9 Mon Sep 17 00:00:00 2001 +From: Marcel Plch +Date: Fri, 2 Aug 2019 17:36:01 +0200 +Subject: [PATCH 17/36] Fix refcounting + +--- + Modules/_hmacopenssl.c | 35 ++++++++++++++++++++++++++++++++++- + 1 file changed, 34 insertions(+), 1 deletion(-) + +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index 239445a0831..9c2882833d1 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -373,6 +373,34 @@ static struct PyMethodDef hmacopenssl_functions[] = { + {NULL, NULL} /* Sentinel */ + }; + ++static int ++hmacopenssl_traverse(PyObject *self, visitproc visit, void *arg) ++{ ++ hmacopenssl_state *state; ++ ++ state = PyModule_GetState(self); ++ ++ if (state) { ++ Py_VISIT(state->HmacType); ++ } ++ ++ return 0; ++} ++ ++static int ++hmacopenssl_clear(PyObject *self) ++{ ++ hmacopenssl_state *state; ++ ++ state = PyModule_GetState(self); ++ ++ if (state) { ++ Py_CLEAR(state->HmacType); ++ } ++ ++ return 0; ++} ++ + + + /* Initialize this module. 
*/ +@@ -396,7 +424,10 @@ hmacopenssl_exec(PyObject *m) { + } + + state = PyModule_GetState(m); ++ + state->HmacType = (PyTypeObject *)temp; ++ Py_INCREF(temp); ++ + + return 0; + +@@ -415,7 +446,9 @@ static struct PyModuleDef _hmacopenssl_def = { + .m_name = "_hmacopenssl", + .m_methods = hmacopenssl_functions, + .m_slots = hmacopenssl_slots, +- .m_size = sizeof(hmacopenssl_state) ++ .m_size = sizeof(hmacopenssl_state), ++ .m_traverse = hmacopenssl_traverse, ++ .m_clear = hmacopenssl_clear + }; + + +-- +2.21.0 + + +From 4e1351cc2d704e8da7bb8125ad0ab194e8a02ec4 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 13:37:05 +0200 +Subject: [PATCH 18/36] hmac: Don't default to md5 in FIPS mode + +--- + Lib/hmac.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index ed98406bd2e..7b8821edd58 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -165,7 +165,7 @@ def new(key, msg = None, digestmod = None): + """ + if _hashlib.get_fips_mode(): + if digestmod is None: +- digestmod = 'md5' ++ raise ValueError("'digestmod' argument is mandatory in FIPS mode") + name = _get_openssl_name(digestmod) + result = _hmacopenssl.new(key, digestmod=name) + if msg: +-- +2.21.0 + + +From ea44160c242101f14d22242a0722e730ef304b8b Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 14:20:58 +0200 +Subject: [PATCH 19/36] Make _hmacopenssl.HMAC subclassable; subclass it as + hmac.HMAC under FIPS + +This removes the _hmacopenssl.new function. +--- + Lib/hmac.py | 27 +++++++----- + Lib/test/test_fips.py | 2 +- + Modules/_hmacopenssl.c | 75 ++++++++++++++++----------------- + Modules/clinic/_hmacopenssl.c.h | 39 +---------------- + 4 files changed, 56 insertions(+), 87 deletions(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index 7b8821edd58..d479c5a4492 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -141,6 +141,8 @@ class HMAC: + + + def _get_openssl_name(digestmod): ++ if digestmod is None: ++ raise ValueError("'digestmod' argument is mandatory in FIPS mode") + if isinstance(digestmod, str): + return digestmod.lower() + elif callable(digestmod): +@@ -152,6 +154,20 @@ def _get_openssl_name(digestmod): + + return digestmod.name.lower().replace('_', '-') + ++ ++class HMAC_openssl(_hmacopenssl.HMAC): ++ def __new__(cls, key, msg = None, digestmod = None): ++ name = _get_openssl_name(digestmod) ++ result = _hmacopenssl.HMAC.__new__(cls, key, digestmod=name) ++ if msg: ++ result.update(msg) ++ return result ++ ++ ++if _hashlib.get_fips_mode(): ++ HMAC = HMAC_openssl ++ ++ + def new(key, msg = None, digestmod = None): + """Create a new hashing object and return it. + +@@ -163,13 +179,4 @@ def new(key, msg = None, digestmod = None): + method, and can ask for the hash value at any time by calling its digest() + method. 
+ """ +- if _hashlib.get_fips_mode(): +- if digestmod is None: +- raise ValueError("'digestmod' argument is mandatory in FIPS mode") +- name = _get_openssl_name(digestmod) +- result = _hmacopenssl.new(key, digestmod=name) +- if msg: +- result.update(msg) +- return result +- else: +- return HMAC(key, msg, digestmod) ++ return HMAC(key, msg, digestmod) +diff --git a/Lib/test/test_fips.py b/Lib/test/test_fips.py +index bee911ef405..34812e6098a 100644 +--- a/Lib/test/test_fips.py ++++ b/Lib/test/test_fips.py +@@ -54,7 +54,7 @@ class HashlibFipsTests(unittest.TestCase): + self.compare_hashes(hashlib.sha512(b'abc'), _hashlib.openssl_sha512(b'abc')) + + def test_hmac_digests(self): +- self.compare_hashes(_hmacopenssl.new(b'My hovercraft is full of eels', digestmod='sha384'), ++ self.compare_hashes(_hmacopenssl.HMAC(b'My hovercraft is full of eels', digestmod='sha384'), + hmac.new(b'My hovercraft is full of eels', digestmod='sha384')) + + +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index 9c2882833d1..7d3d9739f3a 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -41,33 +41,25 @@ typedef struct { + module _hmacopenssl + class _hmacopenssl.HMAC "HmacObject *" "((hmacopenssl_state *)PyModule_GetState(module))->HmacType" + [clinic start generated code]*/ +-/*[clinic end generated code: output=da39a3ee5e6b4b0d input=204b7f45847f57b4]*/ ++/*[clinic end generated code: output=da39a3ee5e6b4b0d input=9fe07a087adc2cf9]*/ + + +-/*[clinic input] +-_hmacopenssl.new +- +- key: Py_buffer +- * +- digestmod: str +- +-Return a new hmac object. +-[clinic start generated code]*/ +- + static PyObject * +-_hmacopenssl_new_impl(PyObject *module, Py_buffer *key, +- const char *digestmod) +-/*[clinic end generated code: output=46f1cb4e02921922 input=be8c0c2e4fad508c]*/ ++Hmac_new(PyTypeObject *subtype, PyObject *args, PyObject *kwds) + { +- hmacopenssl_state *state; +- +- if (digestmod == NULL) { +- PyErr_SetString(PyExc_ValueError, "digestmod must be specified"); ++ static char *kwarg_names[] = {"key", "digestmod", NULL}; ++ Py_buffer key = {NULL, NULL}; ++ char *digestmod = NULL; ++ ++ int ret = PyArg_ParseTupleAndKeywords( ++ args, kwds, "y*|$s:_hmacopenssl.HMAC", kwarg_names, ++ &key, &digestmod); ++ if (ret == 0) { + return NULL; + } + +- state = PyModule_GetState(module); +- if (state == NULL) { ++ if (digestmod == NULL) { ++ PyErr_SetString(PyExc_ValueError, "digestmod must be specified"); + return NULL; + } + +@@ -106,8 +98,8 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + + int r = HMAC_Init_ex( + ctx, +- (const char*)key->buf, +- key->len, ++ (const char*)key.buf, ++ key.len, + digest, + NULL /*impl*/); + if (r == 0) { +@@ -115,7 +107,10 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + goto error; + } + +- retval = (HmacObject *)PyObject_New(HmacObject, state->HmacType); ++ PyBuffer_Release(&key); ++ key.buf = NULL; ++ ++ retval = (HmacObject *)subtype->tp_alloc(subtype, 0); + if (retval == NULL) { + goto error; + } +@@ -130,6 +125,7 @@ error: + if (ctx) HMAC_CTX_free(ctx); + if (name) Py_DECREF(name); + if (retval) PyObject_Del(name); ++ if (key.buf) PyBuffer_Release(&key); + return NULL; + } + +@@ -145,19 +141,27 @@ _hmacopenssl_HMAC_copy_impl(HmacObject *self) + { + HmacObject *retval; + +- retval = (HmacObject *)PyObject_New(HmacObject, (PyTypeObject *)PyObject_Type((PyObject *)self)); ++ HMAC_CTX *ctx = HMAC_CTX_new(); ++ if (ctx == NULL) { ++ return _setException(PyExc_ValueError); ++ } ++ ++ int r = HMAC_CTX_copy(ctx, self->ctx); ++ if (r == 
0) { ++ HMAC_CTX_free(ctx); ++ return _setException(PyExc_ValueError); ++ } ++ ++ retval = (HmacObject *)Py_TYPE(self)->tp_alloc(Py_TYPE(self), 0); + if (retval == NULL) { ++ HMAC_CTX_free(ctx); + return NULL; + } +- ++ retval->ctx = ctx; + Py_INCREF(self->name); + retval->name = self->name; + +- int r = HMAC_CTX_copy(retval->ctx, self->ctx); +- if (r == 0) { +- PyObject_Del(retval); +- return _setException(PyExc_ValueError); +- } ++ retval->lock = NULL; + + return (PyObject *)retval; + } +@@ -169,8 +173,8 @@ _hmac_dealloc(HmacObject *self) + PyThread_free_lock(self->lock); + } + HMAC_CTX_free(self->ctx); +- Py_XDECREF(self->name); +- PyObject_Del(self); ++ Py_CLEAR(self->name); ++ Py_TYPE(self)->tp_free(self); + } + + static PyObject * +@@ -357,6 +361,7 @@ static PyType_Slot HmacType_slots[] = { + {Py_tp_methods, Hmac_methods}, + {Py_tp_getset, Hmac_getset}, + {Py_tp_members, Hmac_members}, ++ {Py_tp_new, Hmac_new}, + {0, NULL} + }; + +@@ -368,11 +373,6 @@ PyType_Spec HmacType_spec = { + }; + + +-static struct PyMethodDef hmacopenssl_functions[] = { +- _HMACOPENSSL_NEW_METHODDEF +- {NULL, NULL} /* Sentinel */ +-}; +- + static int + hmacopenssl_traverse(PyObject *self, visitproc visit, void *arg) + { +@@ -444,7 +444,6 @@ static PyModuleDef_Slot hmacopenssl_slots[] = { + static struct PyModuleDef _hmacopenssl_def = { + PyModuleDef_HEAD_INIT, /* m_base */ + .m_name = "_hmacopenssl", +- .m_methods = hmacopenssl_functions, + .m_slots = hmacopenssl_slots, + .m_size = sizeof(hmacopenssl_state), + .m_traverse = hmacopenssl_traverse, +diff --git a/Modules/clinic/_hmacopenssl.c.h b/Modules/clinic/_hmacopenssl.c.h +index b472a6eddd3..861acc11bfd 100644 +--- a/Modules/clinic/_hmacopenssl.c.h ++++ b/Modules/clinic/_hmacopenssl.c.h +@@ -2,43 +2,6 @@ + preserve + [clinic start generated code]*/ + +-PyDoc_STRVAR(_hmacopenssl_new__doc__, +-"new($module, /, key, *, digestmod)\n" +-"--\n" +-"\n" +-"Return a new hmac object."); +- +-#define _HMACOPENSSL_NEW_METHODDEF \ +- {"new", (PyCFunction)_hmacopenssl_new, METH_FASTCALL, _hmacopenssl_new__doc__}, +- +-static PyObject * +-_hmacopenssl_new_impl(PyObject *module, Py_buffer *key, +- const char *digestmod); +- +-static PyObject * +-_hmacopenssl_new(PyObject *module, PyObject **args, Py_ssize_t nargs, PyObject *kwnames) +-{ +- PyObject *return_value = NULL; +- static const char * const _keywords[] = {"key", "digestmod", NULL}; +- static _PyArg_Parser _parser = {"y*$s:new", _keywords, 0}; +- Py_buffer key = {NULL, NULL}; +- const char *digestmod; +- +- if (!_PyArg_ParseStack(args, nargs, kwnames, &_parser, +- &key, &digestmod)) { +- goto exit; +- } +- return_value = _hmacopenssl_new_impl(module, &key, digestmod); +- +-exit: +- /* Cleanup for key */ +- if (key.obj) { +- PyBuffer_Release(&key); +- } +- +- return return_value; +-} +- + PyDoc_STRVAR(_hmacopenssl_HMAC_copy__doc__, + "copy($self, /)\n" + "--\n" +@@ -130,4 +93,4 @@ _hmacopenssl_HMAC_hexdigest(HmacObject *self, PyObject *Py_UNUSED(ignored)) + { + return _hmacopenssl_HMAC_hexdigest_impl(self); + } +-/*[clinic end generated code: output=10b6e8cac6d7a2c9 input=a9049054013a1b77]*/ ++/*[clinic end generated code: output=d93ad460795d49b5 input=a9049054013a1b77]*/ +-- +2.21.0 + + +From e3d6a5adbe0032726dff0b1f6473ce3ff63fad51 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 16:10:36 +0200 +Subject: [PATCH 20/36] Fix _hmacopenssl.HMAC.block_size + +--- + Modules/_hmacopenssl.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/Modules/_hmacopenssl.c 
b/Modules/_hmacopenssl.c +index 7d3d9739f3a..a24c8ba0229 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -318,7 +318,7 @@ _hmacopenssl_get_block_size(HmacObject *self, void *closure) + if (md == NULL) { + return _setException(PyExc_ValueError); + } +- return PyLong_FromLong(EVP_MD_size(md)); ++ return PyLong_FromLong(EVP_MD_block_size(md)); + } + + static PyMethodDef Hmac_methods[] = { +-- +2.21.0 + + +From f925a1165d7b07fb4d0bba216f7dbc387b94e94d Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 15:02:08 +0200 +Subject: [PATCH 21/36] distutils upload: Skip md5 checksum in FIPS mode + +--- + Lib/distutils/command/upload.py | 11 ++++++++++- + Lib/distutils/tests/test_upload.py | 13 +++++++++++-- + 2 files changed, 21 insertions(+), 3 deletions(-) + +diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py +index 32dda359bad..0edb39efd4c 100644 +--- a/Lib/distutils/command/upload.py ++++ b/Lib/distutils/command/upload.py +@@ -102,7 +102,6 @@ class upload(PyPIRCCommand): + 'content': (os.path.basename(filename),content), + 'filetype': command, + 'pyversion': pyversion, +- 'md5_digest': hashlib.md5(content).hexdigest(), + + # additional meta-data + 'metadata_version': '1.0', +@@ -121,6 +120,16 @@ class upload(PyPIRCCommand): + 'requires': meta.get_requires(), + 'obsoletes': meta.get_obsoletes(), + } ++ try: ++ digest = hashlib.md5(content).hexdigest() ++ except ValueError as e: ++ msg = 'calculating md5 checksum failed: %s' % e ++ self.announce(msg, log.ERROR) ++ if not hashlib.get_fips_mode(): ++ # this really shouldn't fail ++ raise ++ else: ++ data['md5_digest'] = digest + comment = '' + if command == 'bdist_rpm': + dist, version, id = platform.dist() +diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py +index c17d8e7d54e..b4b64e97737 100644 +--- a/Lib/distutils/tests/test_upload.py ++++ b/Lib/distutils/tests/test_upload.py +@@ -3,6 +3,7 @@ import os + import unittest + import unittest.mock as mock + from urllib.request import HTTPError ++import hashlib + + from test.support import run_unittest + +@@ -130,7 +131,11 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + + # what did we send ? 
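The upload.py change above only attaches an md5 checksum when hashlib.md5() is actually usable; under FIPS mode it raises ValueError and the metadata is sent without the checksum (outside FIPS mode the failure is unexpected and is re-raised). The Content-length assertions that follow account for the smaller form body. A minimal model of the guarded checksum, with the FIPS re-raise decision left out since hashlib.get_fips_mode() only exists on patched builds:

    import hashlib

    def optional_md5_field(content: bytes) -> dict:
        data = {}
        try:
            digest = hashlib.md5(content).hexdigest()
        except ValueError:
            # md5 unavailable (e.g. FIPS mode): upload metadata omits the field
            pass
        else:
            data["md5_digest"] = digest
        return data
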
+ headers = dict(self.last_open.req.headers) +- self.assertEqual(headers['Content-length'], '2162') ++ if hashlib.get_fips_mode(): ++ # md5 hash is omitted ++ self.assertEqual(headers['Content-length'], '2020') ++ else: ++ self.assertEqual(headers['Content-length'], '2162') + content_type = headers['Content-type'] + self.assertTrue(content_type.startswith('multipart/form-data')) + self.assertEqual(self.last_open.req.get_method(), 'POST') +@@ -166,7 +171,11 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + cmd.run() + + headers = dict(self.last_open.req.headers) +- self.assertEqual(headers['Content-length'], '2172') ++ if hashlib.get_fips_mode(): ++ # md5 hash is omitted ++ self.assertEqual(headers['Content-length'], '2030') ++ else: ++ self.assertEqual(headers['Content-length'], '2172') + self.assertIn(b'long description\r', self.last_open.req.data) + + def test_upload_fails(self): +-- +2.21.0 + + +From 936a7835a1873952eab8f7ec3b9b67f63786c001 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 15:32:25 +0200 +Subject: [PATCH 22/36] Fix HMAC tests on FIPS mode + +--- + Lib/hmac.py | 3 +++ + Lib/test/test_hmac.py | 26 ++++++++++++++++++++++++++ + 2 files changed, 29 insertions(+) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index d479c5a4492..7af94c39ed6 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -157,6 +157,9 @@ def _get_openssl_name(digestmod): + + class HMAC_openssl(_hmacopenssl.HMAC): + def __new__(cls, key, msg = None, digestmod = None): ++ if not isinstance(key, (bytes, bytearray)): ++ raise TypeError("key: expected bytes or bytearray, but got %r" % type(key).__name__) ++ + name = _get_openssl_name(digestmod) + result = _hmacopenssl.HMAC.__new__(cls, key, digestmod=name) + if msg: +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 338e0215a41..5b090727ef6 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -250,6 +250,7 @@ class TestVectorsTestCase(unittest.TestCase): + def test_sha512_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128) + ++ @unittest.skipIf(hashlib.get_fips_mode(), 'MockCrazyHash unacceptable in FIPS mode.') + @requires_hashdigest('sha256') + def test_legacy_block_size_warnings(self): + class MockCrazyHash(object): +@@ -298,6 +299,14 @@ class ConstructorTestCase(unittest.TestCase): + self.fail("Standard constructor call raised exception.") + + @ignore_warning ++ def test_normal_digestmod(self): ++ # Standard constructor call. ++ failed = 0 ++ try: ++ h = hmac.HMAC(b"key", digestmod='sha1') ++ except Exception: ++ self.fail("Standard constructor call raised exception.") ++ + @requires_hashdigest('sha256') + def test_with_str_key(self): + # Pass a key of type str, which is an error, because it expects a key +@@ -366,6 +375,7 @@ class SanityTestCase(unittest.TestCase): + + class CopyTestCase(unittest.TestCase): + ++ @unittest.skipIf(hashlib.get_fips_mode(), "Internal attributes unavailable in FIPS mode") + @requires_hashdigest('sha256') + def test_attributes(self): + # Testing if attributes are of same type. +@@ -378,6 +388,7 @@ class CopyTestCase(unittest.TestCase): + self.assertEqual(type(h1.outer), type(h2.outer), + "Types of outer don't match.") + ++ @unittest.skipIf(hashlib.get_fips_mode(), "Internal attributes unavailable in FIPS mode") + @requires_hashdigest('sha256') + def test_realcopy(self): + # Testing if the copy method created a real copy. 
+@@ -390,6 +401,21 @@ class CopyTestCase(unittest.TestCase): + self.assertTrue(id(h1.outer) != id(h2.outer), + "No real copy of the attribute 'outer'.") + ++ def test_realcopy(self): ++ # Testing if the copy method created a real copy. ++ h1 = hmac.HMAC(b"key", digestmod="sha1") ++ h2 = h1.copy() ++ # Using id() in case somebody has overridden __eq__/__ne__. ++ self.assertTrue(id(h1) != id(h2), "No real copy of the HMAC instance.") ++ old_digest = h1.digest() ++ assert h1.digest() == h2.digest() ++ h1.update(b'hi') ++ assert h1.digest() != h2.digest() ++ assert h2.digest() == old_digest ++ new_digest = h1.digest() ++ h2.update(b'hi') ++ assert h1.digest() == h2.digest() == new_digest ++ + @requires_hashdigest('sha256') + def test_equality(self): + # Testing if the copy has the same digests. +-- +2.21.0 + + +From 264c6eed5a5d060fe7bad7e4410d920cecbb083c Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 16:37:12 +0200 +Subject: [PATCH 23/36] test_tools: Skip md5sum tests in FIPS mode + +--- + Lib/test/test_tools/test_md5sum.py | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/Lib/test/test_tools/test_md5sum.py b/Lib/test/test_tools/test_md5sum.py +index fb565b73778..7028a4dc214 100644 +--- a/Lib/test/test_tools/test_md5sum.py ++++ b/Lib/test/test_tools/test_md5sum.py +@@ -4,11 +4,15 @@ import os + import unittest + from test import support + from test.support.script_helper import assert_python_ok, assert_python_failure ++import hashlib + + from test.test_tools import scriptsdir, skip_if_missing + + skip_if_missing() + ++if hashlib.get_fips_mode(): ++ raise unittest.SkipTest("md5sum won't work at all in FIPS mode") ++ + class MD5SumTests(unittest.TestCase): + @classmethod + def setUpClass(cls): +-- +2.21.0 + + +From c5373f5e7ae4ecfe8337ee3c73b594b5f0ebb411 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 17:21:16 +0200 +Subject: [PATCH 24/36] _hashopenssl: Include hash name in the error message + +--- + Modules/_hashopenssl.c | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 7dfd70822b9..0563473c627 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -431,7 +431,7 @@ EVPnew(PyObject *name_obj, + EVPobject *self; + + if (!digest && !initial_ctx) { +- PyErr_SetString(PyExc_ValueError, "unsupported hash type"); ++ PyErr_Format(PyExc_ValueError, "unsupported hash type %U", name_obj); + return NULL; + } + +@@ -622,7 +622,7 @@ pbkdf2_hmac(PyObject *self, PyObject *args, PyObject *kwdict) + + digest = EVP_get_digestbyname(name); + if (digest == NULL) { +- PyErr_SetString(PyExc_ValueError, "unsupported hash type"); ++ PyErr_Format(PyExc_ValueError, "unsupported hash type %s", name); + goto end; + } + +-- +2.21.0 + + +From c3f9e194eb5f86ebec4db2820b8968d6896abc8b Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 18:23:57 +0200 +Subject: [PATCH 25/36] Make hashlib tests pass in FIPS mode (with some hashlib + changes) + +--- + Lib/hashlib.py | 18 +++++++++-- + Lib/test/test_hashlib.py | 67 ++++++++++++++++++++++++++++------------ + Modules/_hashopenssl.c | 14 +++++++++ + 3 files changed, 78 insertions(+), 21 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index ca1dd202251..d3344f60b2e 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -155,7 +155,18 @@ def __hash_new(name, data=b'', **kwargs): + """new(name, data=b'') - Return a new hashing object using the named algorithm; + optionally initialized with data (which must 
be a bytes-like object). + """ +- if not get_fips_mode(): ++ if get_fips_mode(): ++ # Use OpenSSL names for Python built-in hashes ++ orig_name = name ++ name = { ++ 'sha3_224': "sha3-224", ++ 'sha3_256': "sha3-256", ++ 'sha3_384': "sha3-384", ++ 'sha3_512': "sha3-512", ++ 'shake_128': "shake128", ++ 'shake_256': "shake256", ++ }.get(name, name) ++ else: + if name in {'blake2b', 'blake2s'}: + # Prefer our blake2 implementation. + # OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. +@@ -163,7 +174,10 @@ def __hash_new(name, data=b'', **kwargs): + # salt, personal, tree hashing or SSE. + return __get_builtin_constructor(name)(data, **kwargs) + try: +- return _hashlib.new(name, data) ++ retval = _hashlib.new(name, data) ++ if get_fips_mode() and name != orig_name: ++ retval._set_name(orig_name) ++ return retval + except ValueError: + # If the _hashlib module (OpenSSL) doesn't support the named + # hash, try using our builtin implementations. +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index e57c93b42f5..9745bfd6fa2 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -31,6 +31,11 @@ COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') + c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) + py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) + ++if hashlib.get_fips_mode(): ++ FIPS_DISABLED = {'md5', 'MD5', 'blake2b', 'blake2s'} ++else: ++ FIPS_DISABLED = set() ++ + try: + import _blake2 + except ImportError: +@@ -86,6 +91,11 @@ class HashLibTestCase(unittest.TestCase): + # Issue #14693: fallback modules are always compiled under POSIX + _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG + ++ if hashlib.get_fips_mode(): ++ shakes = set() ++ supported_hash_names = tuple( ++ n for n in supported_hash_names if n not in FIPS_DISABLED) ++ + def _conditional_import_module(self, module_name): + """Import a module and return a reference to it or None on failure.""" + try: +@@ -93,8 +103,20 @@ class HashLibTestCase(unittest.TestCase): + except ModuleNotFoundError as error: + if self._warn_on_extension_import: + warnings.warn('Did a C extension fail to compile? 
%s' % error) ++ except ImportError as error: ++ if not hashlib.get_fips_mode(): ++ raise + return None + ++ def _has_shake_extras(self, hasher): ++ """Return true if the hasher should have "shake" API (digest length)""" ++ if hasher.name not in self.shakes: ++ return False ++ _hashlib = self._conditional_import_module('_hashlib') ++ if _hashlib and isinstance(hasher, _hashlib.HASH): ++ return False ++ return True ++ + def __init__(self, *args, **kwargs): + algorithms = set() + for algorithm in self.supported_hash_names: +@@ -182,15 +204,13 @@ class HashLibTestCase(unittest.TestCase): + a = array.array("b", range(10)) + for cons in self.hash_constructors: + c = cons(a) +- if (c.name in self.shakes +- and not cons.__name__.startswith('openssl_') +- ): ++ if self._has_shake_extras(c): + c.hexdigest(16) + else: + c.hexdigest() + + def test_algorithms_guaranteed(self): +- self.assertEqual(hashlib.algorithms_guaranteed, ++ self.assertEqual(hashlib.algorithms_guaranteed - FIPS_DISABLED, + set(_algo for _algo in self.supported_hash_names + if _algo.islower())) + +@@ -202,6 +222,12 @@ class HashLibTestCase(unittest.TestCase): + self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam') + self.assertRaises(TypeError, hashlib.new, 1) + ++ @unittest.skipUnless(hashlib.get_fips_mode(), "Builtin constructor only unavailable in FIPS mode") ++ def test_get_builtin_constructor_fips(self): ++ with self.assertRaises(AttributeError): ++ hashlib.__get_builtin_constructor ++ ++ @unittest.skipIf(hashlib.get_fips_mode(), "No builtin constructors in FIPS mode") + def test_get_builtin_constructor(self): + get_builtin_constructor = getattr(hashlib, + '__get_builtin_constructor') +@@ -231,9 +257,7 @@ class HashLibTestCase(unittest.TestCase): + def test_hexdigest(self): + for cons in self.hash_constructors: + h = cons() +- if (h.name in self.shakes +- and not cons.__name__.startswith('openssl_') +- ): ++ if self._has_shake_extras(h): + self.assertIsInstance(h.digest(16), bytes) + self.assertEqual(hexstr(h.digest(16)), h.hexdigest(16)) + else: +@@ -245,9 +269,7 @@ class HashLibTestCase(unittest.TestCase): + large_sizes = (2**29, 2**32-10, 2**32+10, 2**61, 2**64-10, 2**64+10) + for cons in self.hash_constructors: + h = cons() +- if h.name not in self.shakes: +- continue +- if cons.__name__.startswith('openssl_'): ++ if not self._has_shake_extras(h): + continue + for digest in h.digest, h.hexdigest: + with self.assertRaises((ValueError, OverflowError)): +@@ -278,9 +300,7 @@ class HashLibTestCase(unittest.TestCase): + m1.update(bees) + m1.update(cees) + m1.update(dees) +- if (m1.name in self.shakes +- and not cons.__name__.startswith('openssl_') +- ): ++ if self._has_shake_extras(m1): + args = (16,) + else: + args = () +@@ -349,7 +369,8 @@ class HashLibTestCase(unittest.TestCase): + self.assertRaises(TypeError, hash_object_constructor, 'spam') + + def test_no_unicode(self): +- self.check_no_unicode('md5') ++ if not hashlib.get_fips_mode(): ++ self.check_no_unicode('md5') + self.check_no_unicode('sha1') + self.check_no_unicode('sha224') + self.check_no_unicode('sha256') +@@ -392,7 +413,8 @@ class HashLibTestCase(unittest.TestCase): + self.assertIn(name.split("_")[0], repr(m)) + + def test_blocksize_name(self): +- self.check_blocksize_name('md5', 64, 16) ++ if not hashlib.get_fips_mode(): ++ self.check_blocksize_name('md5', 64, 16) + self.check_blocksize_name('sha1', 64, 20) + self.check_blocksize_name('sha224', 64, 28) + self.check_blocksize_name('sha256', 64, 32) +@@ -432,22 +454,27 @@ class 
HashLibTestCase(unittest.TestCase): + self.check_blocksize_name('blake2b', 128, 64) + self.check_blocksize_name('blake2s', 64, 32) + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_0(self): + self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e') + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_1(self): + self.check('md5', b'abc', '900150983cd24fb0d6963f7d28e17f72') + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_2(self): + self.check('md5', + b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'd174ab98d277d9f5a5611c2c9f419d9f') + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + @unittest.skipIf(sys.maxsize < _4G + 5, 'test cannot run on 32-bit systems') + @bigmemtest(size=_4G + 5, memuse=1, dry_run=False) + def test_case_md5_huge(self, size): + self.check('md5', b'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d') + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + @unittest.skipIf(sys.maxsize < _4G - 1, 'test cannot run on 32-bit systems') + @bigmemtest(size=_4G - 1, memuse=1, dry_run=False) + def test_case_md5_uintmax(self, size): +@@ -829,14 +856,16 @@ class HashLibTestCase(unittest.TestCase): + m = cons(b'x' * gil_minsize) + m.update(b'1') + +- m = hashlib.md5() ++ m = hashlib.sha1() + m.update(b'1') + m.update(b'#' * gil_minsize) + m.update(b'1') +- self.assertEqual(m.hexdigest(), 'cb1e1a2cbc80be75e19935d621fb9b21') ++ self.assertEqual(m.hexdigest(), ++ 'c45f7445ca0ea087d7a1758fbea07935f267c46a') + +- m = hashlib.md5(b'x' * gil_minsize) +- self.assertEqual(m.hexdigest(), 'cfb767f225d58469c5de3632a8803958') ++ m = hashlib.sha1(b'x' * gil_minsize) ++ self.assertEqual(m.hexdigest(), ++ '63fda1efde982ba1ffe9d53035bff5c9ce4758fb') + + @unittest.skipUnless(threading, 'Threading required for this test.') + @support.reap_threads +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 0563473c627..e330423e266 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -256,11 +256,25 @@ EVP_update(EVPobject *self, PyObject *args) + Py_RETURN_NONE; + } + ++static PyObject * ++EVP_set_name(EVPobject *self, PyObject *new_name) ++{ ++ if (!PyUnicode_CheckExact(new_name)) { ++ PyErr_SetString(PyExc_TypeError, "expected string"); ++ return NULL; ++ } ++ Py_DECREF(self->name); ++ Py_INCREF(new_name); ++ self->name = new_name; ++ Py_RETURN_NONE; ++} ++ + static PyMethodDef EVP_methods[] = { + {"update", (PyCFunction)EVP_update, METH_VARARGS, EVP_update__doc__}, + {"digest", (PyCFunction)EVP_digest, METH_NOARGS, EVP_digest__doc__}, + {"hexdigest", (PyCFunction)EVP_hexdigest, METH_NOARGS, EVP_hexdigest__doc__}, + {"copy", (PyCFunction)EVP_copy, METH_NOARGS, EVP_copy__doc__}, ++ {"_set_name", (PyCFunction)EVP_set_name, METH_O, EVP_copy__doc__}, + {NULL, NULL} /* sentinel */ + }; + +-- +2.21.0 + + +From a40d1cb8672bc2c5403b6e9ff6eaa3324282de09 Mon Sep 17 00:00:00 2001 +From: Lumir Balhar +Date: Wed, 14 Aug 2019 14:43:07 +0200 +Subject: [PATCH 26/36] distutils upload: only add md5 if available, but + *always* use sha256 + +--- + Lib/distutils/command/upload.py | 3 ++- + Lib/distutils/tests/test_upload.py | 14 ++++++++------ + 2 files changed, 10 insertions(+), 7 deletions(-) + +diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py +index 0edb39efd4c..170acb3d6b6 100644 +--- a/Lib/distutils/command/upload.py ++++ 
b/Lib/distutils/command/upload.py +@@ -102,6 +102,7 @@ class upload(PyPIRCCommand): + 'content': (os.path.basename(filename),content), + 'filetype': command, + 'pyversion': pyversion, ++ 'sha256_digest': hashlib.sha256(content).hexdigest(), + + # additional meta-data + 'metadata_version': '1.0', +@@ -124,7 +125,7 @@ class upload(PyPIRCCommand): + digest = hashlib.md5(content).hexdigest() + except ValueError as e: + msg = 'calculating md5 checksum failed: %s' % e +- self.announce(msg, log.ERROR) ++ self.announce(msg, log.INFO) + if not hashlib.get_fips_mode(): + # this really shouldn't fail + raise +diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py +index b4b64e97737..f720a7905dd 100644 +--- a/Lib/distutils/tests/test_upload.py ++++ b/Lib/distutils/tests/test_upload.py +@@ -132,10 +132,11 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + # what did we send ? + headers = dict(self.last_open.req.headers) + if hashlib.get_fips_mode(): +- # md5 hash is omitted +- self.assertEqual(headers['Content-length'], '2020') ++ # only sha256 hash is used ++ self.assertEqual(headers['Content-length'], '2197') + else: +- self.assertEqual(headers['Content-length'], '2162') ++ # both sha256 and md5 hashes are used ++ self.assertEqual(headers['Content-length'], '2339') + content_type = headers['Content-type'] + self.assertTrue(content_type.startswith('multipart/form-data')) + self.assertEqual(self.last_open.req.get_method(), 'POST') +@@ -172,10 +173,11 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + + headers = dict(self.last_open.req.headers) + if hashlib.get_fips_mode(): +- # md5 hash is omitted +- self.assertEqual(headers['Content-length'], '2030') ++ # only sha256 hash is used ++ self.assertEqual(headers['Content-length'], '2207') + else: +- self.assertEqual(headers['Content-length'], '2172') ++ # both sha256 and md5 hashes are used ++ self.assertEqual(headers['Content-length'], '2349') + self.assertIn(b'long description\r', self.last_open.req.data) + + def test_upload_fails(self): +-- +2.21.0 + + +From e0d2cca338186bbc5cb9c67bb4402dec38fed5a7 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 26 Aug 2019 15:55:48 +0200 +Subject: [PATCH 27/36] Add the usedforsecurity argument back + +--- + Lib/hashlib.py | 4 ++- + Modules/_hashopenssl.c | 82 +++++++++++++++++++++++++++++++----------- + 2 files changed, 65 insertions(+), 21 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index d3344f60b2e..cd3b035b1d7 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -174,7 +174,9 @@ def __hash_new(name, data=b'', **kwargs): + # salt, personal, tree hashing or SSE. + return __get_builtin_constructor(name)(data, **kwargs) + try: +- retval = _hashlib.new(name, data) ++ usedforsecurity = kwargs.pop('usedforsecurity', True) ++ retval = _hashlib.new( ++ name, data, usedforsecurity=usedforsecurity) + if get_fips_mode() and name != orig_name: + retval._set_name(orig_name) + return retval +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index e330423e266..b621c330c32 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -318,6 +318,25 @@ EVP_repr(EVPobject *self) + return PyUnicode_FromFormat("<%U HASH object @ %p>", self->name, self); + } + ++ ++static void ++mc_ctx_init(EVP_MD_CTX *ctx, int usedforsecurity) ++{ ++ EVP_MD_CTX_init(ctx); ++ /* ++ If the user has declared that this digest is being used in a ++ non-security role (e.g. 
indexing into a data structure), set ++ the exception flag for openssl to allow it ++ */ ++ if (!usedforsecurity) { ++#ifdef EVP_MD_CTX_FLAG_NON_FIPS_ALLOW ++ EVP_MD_CTX_set_flags(ctx, ++ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW); ++#endif ++ } ++} ++ ++ + #if HASH_OBJ_CONSTRUCTOR + static int + EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds) +@@ -328,9 +347,10 @@ EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds) + Py_buffer view; + char *nameStr; + const EVP_MD *digest; ++ int usedforsecurity=1; + +- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:HASH", kwlist, +- &name_obj, &data_obj)) { ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O$d:HASH", kwlist, ++ &name_obj, &data_obj, &usedforsecurity)) { + return -1; + } + +@@ -351,7 +371,8 @@ EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds) + PyBuffer_Release(&view); + return -1; + } +- if (!EVP_DigestInit(self->ctx, digest)) { ++ mc_ctx_init(&self->ctx, usedforsecurity); ++ if (!EVP_DigestInit_ex(self->ctx, digest, NULL)) { + _setException(PyExc_ValueError); + if (data_obj) + PyBuffer_Release(&view); +@@ -440,7 +461,7 @@ static PyTypeObject EVPtype = { + static PyObject * + EVPnew(PyObject *name_obj, + const EVP_MD *digest, const EVP_MD_CTX *initial_ctx, +- const unsigned char *cp, Py_ssize_t len) ++ const unsigned char *cp, Py_ssize_t len, int usedforsecurity) + { + EVPobject *self; + +@@ -455,7 +476,8 @@ EVPnew(PyObject *name_obj, + if (initial_ctx) { + EVP_MD_CTX_copy(self->ctx, initial_ctx); + } else { +- if (!EVP_DigestInit(self->ctx, digest)) { ++ mc_ctx_init(self->ctx, usedforsecurity); ++ if (!EVP_DigestInit_ex(self->ctx, digest, NULL)) { + _setException(PyExc_ValueError); + Py_DECREF(self); + return NULL; +@@ -484,26 +506,35 @@ An optional string argument may be provided and will be\n\ + automatically hashed.\n\ + \n\ + The MD5 and SHA1 algorithms are always supported.\n"); ++static PyObject *_EVP_new_impl(PyObject *name_obj, char *name, PyObject *data_obj, int usedforsecurity); + + static PyObject * + EVP_new(PyObject *self, PyObject *args, PyObject *kwdict) + { +- static char *kwlist[] = {"name", "string", NULL}; ++ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL}; + PyObject *name_obj = NULL; + PyObject *data_obj = NULL; +- Py_buffer view = { 0 }; +- PyObject *ret_obj; +- char *name; +- const EVP_MD *digest; ++ int usedforsecurity = 1; + +- if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|O:new", kwlist, +- &name_obj, &data_obj)) { ++ if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|O$p:new", kwlist, ++ &name_obj, &data_obj, &usedforsecurity)) { + return NULL; + } ++ return _EVP_new_impl(name_obj, NULL, data_obj, usedforsecurity); ++} + +- if (!PyArg_Parse(name_obj, "s", &name)) { +- PyErr_SetString(PyExc_TypeError, "name must be a string"); +- return NULL; ++static PyObject * ++_EVP_new_impl(PyObject *name_obj, char *name, PyObject *data_obj, int usedforsecurity) ++{ ++ Py_buffer view = { 0 }; ++ PyObject *ret_obj; ++ const EVP_MD *digest; ++ ++ if (!name) { ++ if (!PyArg_Parse(name_obj, "s", &name)) { ++ PyErr_SetString(PyExc_TypeError, "name must be a string"); ++ return NULL; ++ } + } + + if (data_obj) +@@ -511,7 +542,7 @@ EVP_new(PyObject *self, PyObject *args, PyObject *kwdict) + + digest = EVP_get_digestbyname(name); + +- ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len); ++ ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len, usedforsecurity); + + if (data_obj) + PyBuffer_Release(&view); +@@ -906,18 +937,27 @@ 
generate_hash_name_list(void) + * code that wants to make hashes of a bunch of small strings. + * The first call will lazy-initialize, which reports an exception + * if initialization fails. ++ * ++ * Note that for usedforsecurity=0, we fall back to new(). + */ + #define GEN_CONSTRUCTOR(NAME, SSL_NAME) \ + static PyObject * \ +- EVP_new_ ## NAME (PyObject *self, PyObject *args) \ ++ EVP_new_ ## NAME (PyObject *self, PyObject *args, PyObject *kw) \ + { \ + PyObject *data_obj = NULL; \ + Py_buffer view = { 0 }; \ + PyObject *ret_obj; \ ++ int usedforsecurity = 1; \ ++ char *kwnames[] = {"", "usedforsecurity", NULL}; \ + \ +- if (!PyArg_ParseTuple(args, "|O:" #NAME , &data_obj)) { \ ++ if (!PyArg_ParseTupleAndKeywords(args, kw, "|O$p:" #NAME, kwnames, &data_obj, &usedforsecurity)) { \ + return NULL; \ + } \ ++ if (!usedforsecurity) { \ ++ return _EVP_new_impl( \ ++ CONST_ ## NAME ## _name_obj, SSL_NAME, \ ++ data_obj, usedforsecurity); \ ++ } \ + \ + if (CONST_new_ ## NAME ## _ctx_p == NULL) { \ + EVP_MD_CTX *ctx_p = EVP_MD_CTX_new(); \ +@@ -938,7 +978,8 @@ generate_hash_name_list(void) + NULL, \ + CONST_new_ ## NAME ## _ctx_p, \ + (unsigned char*)view.buf, \ +- view.len); \ ++ view.len, \ ++ usedforsecurity); \ + \ + if (data_obj) \ + PyBuffer_Release(&view); \ +@@ -947,7 +988,8 @@ generate_hash_name_list(void) + + /* a PyMethodDef structure for the constructor */ + #define CONSTRUCTOR_METH_DEF(NAME) \ +- {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \ ++ {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, \ ++ METH_VARARGS|METH_KEYWORDS, \ + PyDoc_STR("Returns a " #NAME \ + " hash object; optionally initialized with a string") \ + }, +-- +2.21.0 + + +From 0d169ad7d9aa48bdb3313b6d72682d097dc9dea5 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 26 Aug 2019 18:59:15 +0200 +Subject: [PATCH 28/36] Add no-op usedforsecurity argument to internal hash + implementations + +--- + Modules/_blake2/blake2b_impl.c | 6 ++++-- + Modules/_blake2/blake2s_impl.c | 6 ++++-- + Modules/_blake2/clinic/blake2b_impl.c.h | 16 +++++++++------- + Modules/_blake2/clinic/blake2s_impl.c.h | 16 +++++++++------- + Modules/_sha3/sha3module.c | 5 +++++ + 5 files changed, 31 insertions(+), 18 deletions(-) + +diff --git a/Modules/_blake2/blake2b_impl.c b/Modules/_blake2/blake2b_impl.c +index f6bfce823b8..ae9d244200a 100644 +--- a/Modules/_blake2/blake2b_impl.c ++++ b/Modules/_blake2/blake2b_impl.c +@@ -87,6 +87,8 @@ _blake2.blake2b.__new__ as py_blake2b_new + inner_size: int = 0 + last_node: bool = False + ++ usedforsecurity: bool = True ++ + Return a new BLAKE2b hash object. 
+ [clinic start generated code]*/ + +@@ -95,8 +97,8 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + Py_buffer *key, Py_buffer *salt, Py_buffer *person, + int fanout, int depth, PyObject *leaf_size_obj, + PyObject *node_offset_obj, int node_depth, +- int inner_size, int last_node) +-/*[clinic end generated code: output=7506d8d890e5f13b input=aca35b33c5612b4b]*/ ++ int inner_size, int last_node, int usedforsecurity) ++/*[clinic end generated code: output=02dcc52ee784622b input=c5dfcb847f9065ac]*/ + { + BLAKE2bObject *self = NULL; + Py_buffer buf; +diff --git a/Modules/_blake2/blake2s_impl.c b/Modules/_blake2/blake2s_impl.c +index 28ae5b65101..bf80d6b7428 100644 +--- a/Modules/_blake2/blake2s_impl.c ++++ b/Modules/_blake2/blake2s_impl.c +@@ -87,6 +87,8 @@ _blake2.blake2s.__new__ as py_blake2s_new + inner_size: int = 0 + last_node: bool = False + ++ usedforsecurity: bool = True ++ + Return a new BLAKE2s hash object. + [clinic start generated code]*/ + +@@ -95,8 +97,8 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + Py_buffer *key, Py_buffer *salt, Py_buffer *person, + int fanout, int depth, PyObject *leaf_size_obj, + PyObject *node_offset_obj, int node_depth, +- int inner_size, int last_node) +-/*[clinic end generated code: output=fe060b258a8cbfc6 input=3abfaabe7f5f62cc]*/ ++ int inner_size, int last_node, int usedforsecurity) ++/*[clinic end generated code: output=e32ea5e22d54db91 input=af5344c57efd870d]*/ + { + BLAKE2sObject *self = NULL; + Py_buffer buf; +diff --git a/Modules/_blake2/clinic/blake2b_impl.c.h b/Modules/_blake2/clinic/blake2b_impl.c.h +index 9b2965eb6b3..9688c04dda8 100644 +--- a/Modules/_blake2/clinic/blake2b_impl.c.h ++++ b/Modules/_blake2/clinic/blake2b_impl.c.h +@@ -5,7 +5,8 @@ preserve + PyDoc_STRVAR(py_blake2b_new__doc__, + "blake2b(data=b\'\', /, *, digest_size=_blake2.blake2b.MAX_DIGEST_SIZE,\n" + " key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n" +-" node_offset=0, node_depth=0, inner_size=0, last_node=False)\n" ++" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n" ++" usedforsecurity=True)\n" + "--\n" + "\n" + "Return a new BLAKE2b hash object."); +@@ -15,14 +16,14 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + Py_buffer *key, Py_buffer *salt, Py_buffer *person, + int fanout, int depth, PyObject *leaf_size_obj, + PyObject *node_offset_obj, int node_depth, +- int inner_size, int last_node); ++ int inner_size, int last_node, int usedforsecurity); + + static PyObject * + py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + { + PyObject *return_value = NULL; +- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", NULL}; +- static _PyArg_Parser _parser = {"|O$iy*y*y*iiOOiip:blake2b", _keywords, 0}; ++ static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL}; ++ static _PyArg_Parser _parser = {"|O$iy*y*y*iiOOiipp:blake2b", _keywords, 0}; + PyObject *data = NULL; + int digest_size = BLAKE2B_OUTBYTES; + Py_buffer key = {NULL, NULL}; +@@ -35,12 +36,13 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + int node_depth = 0; + int inner_size = 0; + int last_node = 0; ++ int usedforsecurity = 1; + + if (!_PyArg_ParseTupleAndKeywordsFast(args, kwargs, &_parser, 
+- &data, &digest_size, &key, &salt, &person, &fanout, &depth, &leaf_size_obj, &node_offset_obj, &node_depth, &inner_size, &last_node)) { ++ &data, &digest_size, &key, &salt, &person, &fanout, &depth, &leaf_size_obj, &node_offset_obj, &node_depth, &inner_size, &last_node, &usedforsecurity)) { + goto exit; + } +- return_value = py_blake2b_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size_obj, node_offset_obj, node_depth, inner_size, last_node); ++ return_value = py_blake2b_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size_obj, node_offset_obj, node_depth, inner_size, last_node, usedforsecurity); + + exit: + /* Cleanup for key */ +@@ -121,4 +123,4 @@ _blake2_blake2b_hexdigest(BLAKE2bObject *self, PyObject *Py_UNUSED(ignored)) + { + return _blake2_blake2b_hexdigest_impl(self); + } +-/*[clinic end generated code: output=0eb559f418fc0a21 input=a9049054013a1b77]*/ ++/*[clinic end generated code: output=d5f214b052c75135 input=a9049054013a1b77]*/ +diff --git a/Modules/_blake2/clinic/blake2s_impl.c.h b/Modules/_blake2/clinic/blake2s_impl.c.h +index 42b87b7099d..5653e93044d 100644 +--- a/Modules/_blake2/clinic/blake2s_impl.c.h ++++ b/Modules/_blake2/clinic/blake2s_impl.c.h +@@ -5,7 +5,8 @@ preserve + PyDoc_STRVAR(py_blake2s_new__doc__, + "blake2s(data=b\'\', /, *, digest_size=_blake2.blake2s.MAX_DIGEST_SIZE,\n" + " key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n" +-" node_offset=0, node_depth=0, inner_size=0, last_node=False)\n" ++" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n" ++" usedforsecurity=True)\n" + "--\n" + "\n" + "Return a new BLAKE2s hash object."); +@@ -15,14 +16,14 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + Py_buffer *key, Py_buffer *salt, Py_buffer *person, + int fanout, int depth, PyObject *leaf_size_obj, + PyObject *node_offset_obj, int node_depth, +- int inner_size, int last_node); ++ int inner_size, int last_node, int usedforsecurity); + + static PyObject * + py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + { + PyObject *return_value = NULL; +- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", NULL}; +- static _PyArg_Parser _parser = {"|O$iy*y*y*iiOOiip:blake2s", _keywords, 0}; ++ static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL}; ++ static _PyArg_Parser _parser = {"|O$iy*y*y*iiOOiipp:blake2s", _keywords, 0}; + PyObject *data = NULL; + int digest_size = BLAKE2S_OUTBYTES; + Py_buffer key = {NULL, NULL}; +@@ -35,12 +36,13 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + int node_depth = 0; + int inner_size = 0; + int last_node = 0; ++ int usedforsecurity = 1; + + if (!_PyArg_ParseTupleAndKeywordsFast(args, kwargs, &_parser, +- &data, &digest_size, &key, &salt, &person, &fanout, &depth, &leaf_size_obj, &node_offset_obj, &node_depth, &inner_size, &last_node)) { ++ &data, &digest_size, &key, &salt, &person, &fanout, &depth, &leaf_size_obj, &node_offset_obj, &node_depth, &inner_size, &last_node, &usedforsecurity)) { + goto exit; + } +- return_value = py_blake2s_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size_obj, node_offset_obj, node_depth, inner_size, last_node); ++ return_value = py_blake2s_new_impl(type, 
data, digest_size, &key, &salt, &person, fanout, depth, leaf_size_obj, node_offset_obj, node_depth, inner_size, last_node, usedforsecurity); + + exit: + /* Cleanup for key */ +@@ -121,4 +123,4 @@ _blake2_blake2s_hexdigest(BLAKE2sObject *self, PyObject *Py_UNUSED(ignored)) + { + return _blake2_blake2s_hexdigest_impl(self); + } +-/*[clinic end generated code: output=13d4b08ea9ee2d62 input=a9049054013a1b77]*/ ++/*[clinic end generated code: output=2373a3b3fa542e89 input=a9049054013a1b77]*/ +diff --git a/Modules/_sha3/sha3module.c b/Modules/_sha3/sha3module.c +index 2783a75644f..62db9cb5616 100644 +--- a/Modules/_sha3/sha3module.c ++++ b/Modules/_sha3/sha3module.c +@@ -185,6 +185,11 @@ py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + HashReturn res; + PyObject *data = NULL; + ++ // Ignore "usedforsecurity" ++ if (PyMapping_DelItemString(kwargs, "usedforsecurity")) { ++ PyErr_Clear(); ++ } ++ + if (!_PyArg_NoKeywords(type->tp_name, kwargs)) { + return NULL; + } +-- +2.21.0 + + +From c451744d82f78f42bfa947b782a24ff7c8c6ad9d Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 26 Aug 2019 19:09:39 +0200 +Subject: [PATCH 29/36] Test the usedforsecurity flag + +--- + Lib/test/test_hashlib.py | 82 ++++++++++++++++++++++++++-------------- + 1 file changed, 54 insertions(+), 28 deletions(-) + +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 9745bfd6fa2..19a2fbdf294 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -24,6 +24,7 @@ from test import support + from test.support import _4G, bigmemtest, import_fresh_module + from test.support import requires_hashdigest + from http.client import HTTPException ++from functools import partial + + # Were we compiled --with-pydebug or with #define Py_DEBUG? 
+ COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') +@@ -32,8 +33,10 @@ c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) + py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) + + if hashlib.get_fips_mode(): +- FIPS_DISABLED = {'md5', 'MD5', 'blake2b', 'blake2s'} ++ FIPS_UNAVAILABLE = {'blake2b', 'blake2s'} ++ FIPS_DISABLED = {'md5', 'MD5', *FIPS_UNAVAILABLE} + else: ++ FIPS_UNAVAILABLE = set() + FIPS_DISABLED = set() + + try: +@@ -78,6 +81,15 @@ def read_vectors(hash_name): + yield parts + + ++def _is_openssl_constructor(constructor): ++ if getattr(constructor, '__name__', '').startswith('openssl_'): ++ return True ++ if isinstance(constructor, partial): ++ if constructor.func.__name__.startswith('openssl_'): ++ return True ++ return False ++ ++ + class HashLibTestCase(unittest.TestCase): + supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1', + 'sha224', 'SHA224', 'sha256', 'SHA256', +@@ -93,8 +105,6 @@ class HashLibTestCase(unittest.TestCase): + + if hashlib.get_fips_mode(): + shakes = set() +- supported_hash_names = tuple( +- n for n in supported_hash_names if n not in FIPS_DISABLED) + + def _conditional_import_module(self, module_name): + """Import a module and return a reference to it or None on failure.""" +@@ -120,7 +130,8 @@ class HashLibTestCase(unittest.TestCase): + def __init__(self, *args, **kwargs): + algorithms = set() + for algorithm in self.supported_hash_names: +- algorithms.add(algorithm.lower()) ++ if algorithm not in FIPS_UNAVAILABLE: ++ algorithms.add(algorithm.lower()) + + _blake2 = self._conditional_import_module('_blake2') + if _blake2: +@@ -130,15 +141,21 @@ class HashLibTestCase(unittest.TestCase): + for algorithm in algorithms: + self.constructors_to_test[algorithm] = set() + ++ def _add_constructor(algorithm, constructor): ++ constructors.add(partial(constructor, usedforsecurity=False)) ++ if algorithm not in FIPS_DISABLED: ++ constructors.add(constructor) ++ constructors.add(partial(constructor, usedforsecurity=True)) ++ + # For each algorithm, test the direct constructor and the use + # of hashlib.new given the algorithm name. 
+ for algorithm, constructors in self.constructors_to_test.items(): +- constructors.add(getattr(hashlib, algorithm)) ++ _add_constructor(algorithm, getattr(hashlib, algorithm)) + def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, **kwargs): + if data is None: + return hashlib.new(_alg, **kwargs) + return hashlib.new(_alg, data, **kwargs) +- constructors.add(_test_algorithm_via_hashlib_new) ++ _add_constructor(algorithm, _test_algorithm_via_hashlib_new) + + _hashlib = self._conditional_import_module('_hashlib') + self._hashlib = _hashlib +@@ -150,7 +167,7 @@ class HashLibTestCase(unittest.TestCase): + for algorithm, constructors in self.constructors_to_test.items(): + constructor = getattr(_hashlib, 'openssl_'+algorithm, None) + if constructor: +- constructors.add(constructor) ++ _add_constructor(algorithm, constructor) + + def add_builtin_constructor(name): + constructor = getattr(hashlib, "__get_builtin_constructor")(name) +@@ -210,7 +227,7 @@ class HashLibTestCase(unittest.TestCase): + c.hexdigest() + + def test_algorithms_guaranteed(self): +- self.assertEqual(hashlib.algorithms_guaranteed - FIPS_DISABLED, ++ self.assertEqual(hashlib.algorithms_guaranteed, + set(_algo for _algo in self.supported_hash_names + if _algo.islower())) + +@@ -286,7 +303,9 @@ class HashLibTestCase(unittest.TestCase): + self.assertIn(h.name, self.supported_hash_names) + else: + self.assertNotIn(h.name, self.supported_hash_names) +- self.assertEqual(h.name, hashlib.new(h.name).name) ++ if h.name not in FIPS_DISABLED: ++ self.assertEqual(h.name, hashlib.new(h.name).name) ++ self.assertEqual(h.name, hashlib.new(h.name, usedforsecurity=False).name) + + def test_large_update(self): + aas = b'a' * 128 +@@ -328,13 +347,15 @@ class HashLibTestCase(unittest.TestCase): + self.assertGreaterEqual(len(constructors), 2) + for hash_object_constructor in constructors: + if ( +- kwargs +- and hash_object_constructor.__name__.startswith('openssl_') ++ (kwargs.keys() - {'usedforsecurity'}) ++ and _is_openssl_constructor(hash_object_constructor) + ): ++ # Don't check openssl constructors with ++ # any extra keys (except usedforsecurity) + return + m = hash_object_constructor(data, **kwargs) + if shake: +- if hash_object_constructor.__name__.startswith('openssl_'): ++ if _is_openssl_constructor(hash_object_constructor): + if length > m.digest_size: + # OpenSSL doesn't give long digests + return +@@ -351,7 +372,7 @@ class HashLibTestCase(unittest.TestCase): + % (name, hash_object_constructor, + computed, len(data), hexdigest)) + if shake: +- if hash_object_constructor.__name__.startswith('openssl_'): ++ if _is_openssl_constructor(hash_object_constructor): + computed = m.digest()[:length] + else: + computed = m.digest(length) +@@ -369,8 +390,7 @@ class HashLibTestCase(unittest.TestCase): + self.assertRaises(TypeError, hash_object_constructor, 'spam') + + def test_no_unicode(self): +- if not hashlib.get_fips_mode(): +- self.check_no_unicode('md5') ++ self.check_no_unicode('md5') + self.check_no_unicode('sha1') + self.check_no_unicode('sha224') + self.check_no_unicode('sha256') +@@ -397,10 +417,10 @@ class HashLibTestCase(unittest.TestCase): + for hash_object_constructor in constructors: + m = hash_object_constructor() + self.assertEqual(m.block_size, block_size) +- if not hash_object_constructor.__name__.startswith('openssl_'): ++ if not _is_openssl_constructor(hash_object_constructor): + self.assertEqual(m.digest_size, digest_size) + if digest_length: +- if not hash_object_constructor.__name__.startswith('openssl_'): ++ if 
not _is_openssl_constructor(hash_object_constructor): + self.assertEqual(len(m.digest(digest_length)), + digest_length) + self.assertEqual(len(m.hexdigest(digest_length)), +@@ -435,7 +455,7 @@ class HashLibTestCase(unittest.TestCase): + for hash_object_constructor in constructors: + m = hash_object_constructor() + self.assertEqual(capacity + rate, 1600) +- if not hash_object_constructor.__name__.startswith('openssl_'): ++ if not _is_openssl_constructor(hash_object_constructor): + self.assertEqual(m._capacity_bits, capacity) + self.assertEqual(m._rate_bits, rate) + self.assertEqual(m._suffix, suffix) +@@ -454,31 +474,27 @@ class HashLibTestCase(unittest.TestCase): + self.check_blocksize_name('blake2b', 128, 64) + self.check_blocksize_name('blake2s', 64, 32) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_0(self): +- self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e') ++ self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e', usedforsecurity=False) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_1(self): +- self.check('md5', b'abc', '900150983cd24fb0d6963f7d28e17f72') ++ self.check('md5', b'abc', '900150983cd24fb0d6963f7d28e17f72', usedforsecurity=False) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_2(self): + self.check('md5', + b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', +- 'd174ab98d277d9f5a5611c2c9f419d9f') ++ 'd174ab98d277d9f5a5611c2c9f419d9f', ++ usedforsecurity=False) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + @unittest.skipIf(sys.maxsize < _4G + 5, 'test cannot run on 32-bit systems') + @bigmemtest(size=_4G + 5, memuse=1, dry_run=False) + def test_case_md5_huge(self, size): +- self.check('md5', b'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d') ++ self.check('md5', b'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d', usedforsecurity=False) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + @unittest.skipIf(sys.maxsize < _4G - 1, 'test cannot run on 32-bit systems') + @bigmemtest(size=_4G - 1, memuse=1, dry_run=False) + def test_case_md5_uintmax(self, size): +- self.check('md5', b'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3') ++ self.check('md5', b'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3', usedforsecurity=False) + + # use the three examples from Federal Information Processing Standards + # Publication 180-1, Secure Hash Standard, 1995 April 17 +@@ -904,6 +920,16 @@ class HashLibTestCase(unittest.TestCase): + + self.assertEqual(expected_hash, hasher.hexdigest()) + ++ @unittest.skipUnless(hashlib.get_fips_mode(), 'Needs FIPS mode.') ++ def test_usedforsecurity_repeat(self): ++ """Make sure usedforsecurity flag isn't copied to other contexts""" ++ for i in range(3): ++ for cons in hashlib.md5, partial(hashlib.new, 'md5'): ++ self.assertRaises(ValueError, cons) ++ self.assertRaises(ValueError, partial(cons, usedforsecurity=True)) ++ self.assertEqual(cons(usedforsecurity=False).hexdigest(), ++ 'd41d8cd98f00b204e9800998ecf8427e') ++ + + class KDFTests(unittest.TestCase): + +-- +2.21.0 + + +From 327707f185572cf13da66851e0af824a81d65aa9 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 29 Aug 2019 10:25:28 +0200 +Subject: [PATCH 30/36] Skip error checking in _hashlib.get_fips_mode + +Fixes: https://bugzilla.redhat.com/show_bug.cgi?id=1745499 +--- + Modules/_hashopenssl.c | 30 ++++++++++++++++-------------- + 1 file changed, 16 
insertions(+), 14 deletions(-) + +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index b621c330c32..6bfb12c5618 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -1021,20 +1021,22 @@ static PyObject * + _hashlib_get_fips_mode_impl(PyObject *module) + /*[clinic end generated code: output=ad8a7793310d3f98 input=f42a2135df2a5e11]*/ + { +- int result = FIPS_mode(); +- if (result == 0) { +- // "If the library was built without support of the FIPS Object Module, +- // then the function will return 0 with an error code of +- // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." +- // But 0 is also a valid result value. +- +- unsigned long errcode = ERR_peek_last_error(); +- if (errcode) { +- _setException(PyExc_ValueError); +- return NULL; +- } +- } +- return PyLong_FromLong(result); ++ // XXX: This function skips error checking. ++ // This is only appropriate for RHEL. ++ ++ // From the OpenSSL docs: ++ // "If the library was built without support of the FIPS Object Module, ++ // then the function will return 0 with an error code of ++ // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." ++ // In RHEL: ++ // * we do build with FIPS, so the function always succeeds ++ // * even if it didn't, people seem used to errors being left on the ++ // OpenSSL error stack. ++ ++ // For more info, see: ++ // https://bugzilla.redhat.com/show_bug.cgi?id=1745499 ++ ++ return PyLong_FromLong(FIPS_mode()); + } + + +-- +2.21.0 + + +From 695039674b78b19d9ff26d637c11acea99c44807 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 10 Oct 2019 13:04:50 +0200 +Subject: [PATCH 31/36] Skip error checking in _Py_hashlib_fips_error + +https://bugzilla.redhat.com/show_bug.cgi?id=1760106 +--- + Include/_hashopenssl.h | 12 +++--------- + 1 file changed, 3 insertions(+), 9 deletions(-) + +diff --git a/Include/_hashopenssl.h b/Include/_hashopenssl.h +index 47ed0030422..d4cbdef984d 100644 +--- a/Include/_hashopenssl.h ++++ b/Include/_hashopenssl.h +@@ -42,16 +42,10 @@ static int + _Py_hashlib_fips_error(PyObject *exc, char *name) { + int result = FIPS_mode(); + if (result == 0) { +- // "If the library was built without support of the FIPS Object Module, +- // then the function will return 0 with an error code of +- // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." +- // But 0 is also a valid result value. ++ // XXX: This function skips error checking. ++ // This is only appropriate for RHEL. ++ // See _hashlib.get_fips_mode for details. 
+ +- unsigned long errcode = ERR_peek_last_error(); +- if (errcode) { +- _setException(exc); +- return 1; +- } + return 0; + } + PyErr_Format(exc, "%s is not available in FIPS mode", name); +-- +2.21.0 + + +From 0f9008e92b62f5f22e2bfd0a73d5325d16eedf6f Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Thu, 21 Nov 2019 00:20:09 +0100 +Subject: [PATCH 32/36] Use usedforsecurity=False in uuid + +Partially backport commit 7cad53e6b084435a220e6604010f1fa5778bd0b1 +only for uuid +--- + Lib/uuid.py | 7 +++++-- + 1 file changed, 5 insertions(+), 2 deletions(-) + +diff --git a/Lib/uuid.py b/Lib/uuid.py +index db8b2ef94ed..7e680a24d06 100644 +--- a/Lib/uuid.py ++++ b/Lib/uuid.py +@@ -615,8 +615,11 @@ def uuid1(node=None, clock_seq=None): + def uuid3(namespace, name): + """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" + from hashlib import md5 +- hash = md5(namespace.bytes + bytes(name, "utf-8")).digest() +- return UUID(bytes=hash[:16], version=3) ++ digest = md5( ++ namespace.bytes + bytes(name, "utf-8"), ++ usedforsecurity=False ++ ).digest() ++ return UUID(bytes=digest[:16], version=3) + + def uuid4(): + """Generate a random UUID.""" +-- +2.21.0 + + +From 75829df72b65fb573f3b310033978e493ff700b4 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 19:12:38 +0200 +Subject: [PATCH 33/36] Fixups + +- Adjust error message of the original hmac.HMAC class +- Don't duplicate a test name +--- + Lib/hmac.py | 2 +- + Lib/test/test_hmac.py | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index 7af94c39ed6..33b5be613d9 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -41,7 +41,7 @@ class HMAC: + """ + if _hashlib.get_fips_mode(): + raise ValueError( +- 'hmac.HMAC is not available in FIPS mode. ' ++ 'This class is not available in FIPS mode. ' + + 'Use hmac.new().' + ) + +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 5b090727ef6..449b1366314 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -401,7 +401,7 @@ class CopyTestCase(unittest.TestCase): + self.assertTrue(id(h1.outer) != id(h2.outer), + "No real copy of the attribute 'outer'.") + +- def test_realcopy(self): ++ def test_realcopy_by_digest(self): + # Testing if the copy method created a real copy. 
+ h1 = hmac.HMAC(b"key", digestmod="sha1") + h2 = h1.copy() +-- +2.21.0 + + +From 86f1f161b5aab08975379dbae6ebac08a697e3d7 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 26 Aug 2019 19:39:48 +0200 +Subject: [PATCH 34/36] Don't re-export get_fips_mode from hashlib + +Fixes: https://bugzilla.redhat.com/show_bug.cgi?id=1745685 +--- + Lib/distutils/command/upload.py | 3 ++- + Lib/distutils/tests/test_upload.py | 5 +++-- + Lib/hashlib.py | 23 ++++++++++++----------- + Lib/hmac.py | 6 +++--- + Lib/test/test_fips.py | 6 +++--- + Lib/test/test_hashlib.py | 16 +++++++++------- + Lib/test/test_hmac.py | 8 +++++--- + Lib/test/test_logging.py | 1 + + Lib/test/test_smtplib.py | 4 +++- + Lib/test/test_tools/test_md5sum.py | 4 ++-- + Lib/test/test_urllib2_localnet.py | 1 + + 11 files changed, 44 insertions(+), 33 deletions(-) + +diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py +index 170acb3d6b6..d0a4aee001e 100644 +--- a/Lib/distutils/command/upload.py ++++ b/Lib/distutils/command/upload.py +@@ -126,7 +126,8 @@ class upload(PyPIRCCommand): + except ValueError as e: + msg = 'calculating md5 checksum failed: %s' % e + self.announce(msg, log.INFO) +- if not hashlib.get_fips_mode(): ++ from _hashlib import get_fips_mode ++ if not get_fips_mode(): + # this really shouldn't fail + raise + else: +diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py +index f720a7905dd..a198b213577 100644 +--- a/Lib/distutils/tests/test_upload.py ++++ b/Lib/distutils/tests/test_upload.py +@@ -4,6 +4,7 @@ import unittest + import unittest.mock as mock + from urllib.request import HTTPError + import hashlib ++from _hashlib import get_fips_mode + + from test.support import run_unittest + +@@ -131,7 +132,7 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + + # what did we send ? + headers = dict(self.last_open.req.headers) +- if hashlib.get_fips_mode(): ++ if get_fips_mode(): + # only sha256 hash is used + self.assertEqual(headers['Content-length'], '2197') + else: +@@ -172,7 +173,7 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + cmd.run() + + headers = dict(self.last_open.req.headers) +- if hashlib.get_fips_mode(): ++ if get_fips_mode(): + # only sha256 hash is used + self.assertEqual(headers['Content-length'], '2207') + else: +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index cd3b035b1d7..3e9a4aa27a7 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -68,13 +68,13 @@ __all__ = __always_supported + ('new', 'algorithms_guaranteed', + 'algorithms_available', 'pbkdf2_hmac') + + try: +- from _hashlib import get_fips_mode ++ from _hashlib import get_fips_mode as _hashlib_get_fips_mode + except ImportError: +- def get_fips_mode(): ++ def _hashlib_get_fips_mode(): + return 0 + + +-if not get_fips_mode(): ++if not _hashlib_get_fips_mode(): + __builtin_constructor_cache = {} + + def __get_builtin_constructor(name): +@@ -121,7 +121,7 @@ if not get_fips_mode(): + + + def __get_openssl_constructor(name): +- if not get_fips_mode(): ++ if not _hashlib.get_fips_mode(): + if name in { + 'blake2b', 'blake2s', 'shake_256', 'shake_128', + #'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', +@@ -132,7 +132,7 @@ def __get_openssl_constructor(name): + f = getattr(_hashlib, 'openssl_' + name) + # Allow the C module to raise ValueError. The function will be + # defined but the hash not actually available thanks to OpenSSL. +- if not get_fips_mode(): ++ if not _hashlib.get_fips_mode(): + # N.B. In "FIPS mode", there is no fallback. 
+ # If this test fails, we want to export the broken hash + # constructor anyway. +@@ -142,7 +142,7 @@ def __get_openssl_constructor(name): + except (AttributeError, ValueError): + return __get_builtin_constructor(name) + +-if not get_fips_mode(): ++if not _hashlib_get_fips_mode(): + def __py_new(name, data=b'', **kwargs): + """new(name, data=b'', **kwargs) - Return a new hashing object using the + named algorithm; optionally initialized with data (which must be +@@ -155,7 +155,7 @@ def __hash_new(name, data=b'', **kwargs): + """new(name, data=b'') - Return a new hashing object using the named algorithm; + optionally initialized with data (which must be a bytes-like object). + """ +- if get_fips_mode(): ++ if _hashlib.get_fips_mode(): + # Use OpenSSL names for Python built-in hashes + orig_name = name + name = { +@@ -177,7 +177,7 @@ def __hash_new(name, data=b'', **kwargs): + usedforsecurity = kwargs.pop('usedforsecurity', True) + retval = _hashlib.new( + name, data, usedforsecurity=usedforsecurity) +- if get_fips_mode() and name != orig_name: ++ if _hashlib.get_fips_mode() and name != orig_name: + retval._set_name(orig_name) + return retval + except ValueError: +@@ -185,7 +185,7 @@ def __hash_new(name, data=b'', **kwargs): + # hash, try using our builtin implementations. + # This allows for SHA224/256 and SHA384/512 support even though + # the OpenSSL library prior to 0.9.8 doesn't provide them. +- if get_fips_mode(): ++ if _hashlib.get_fips_mode(): + raise + return __get_builtin_constructor(name)(data) + +@@ -197,7 +197,7 @@ try: + algorithms_available = algorithms_available.union( + _hashlib.openssl_md_meth_names) + except ImportError: +- if get_fips_mode(): ++ if _hashlib_get_fips_mode(): + raise + new = __py_new + __get_hash = __get_builtin_constructor +@@ -225,5 +225,6 @@ for __func_name in __always_supported: + # Cleanup locals() + del __always_supported, __func_name, __get_hash + del __hash_new, __get_openssl_constructor +-if not get_fips_mode(): ++if not _hashlib.get_fips_mode(): + del __py_new ++del _hashlib_get_fips_mode +diff --git a/Lib/hmac.py b/Lib/hmac.py +index 33b5be613d9..ca83d9dc0d3 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -39,7 +39,7 @@ class HMAC: + + Note: key and msg must be a bytes or bytearray objects. + """ +- if _hashlib.get_fips_mode(): ++ if _hashlibopenssl.get_fips_mode(): + raise ValueError( + 'This class is not available in FIPS mode. ' + + 'Use hmac.new().' +@@ -97,7 +97,7 @@ class HMAC: + def update(self, msg): + """Update this hashing object with the string msg. 
+ """ +- if _hashlib.get_fips_mode(): ++ if _hashlibopenssl.get_fips_mode(): + raise ValueError('hmac.HMAC is not available in FIPS mode') + self.inner.update(msg) + +@@ -167,7 +167,7 @@ class HMAC_openssl(_hmacopenssl.HMAC): + return result + + +-if _hashlib.get_fips_mode(): ++if _hashlibopenssl.get_fips_mode(): + HMAC = HMAC_openssl + + +diff --git a/Lib/test/test_fips.py b/Lib/test/test_fips.py +index 34812e6098a..86e61e29c0b 100644 +--- a/Lib/test/test_fips.py ++++ b/Lib/test/test_fips.py +@@ -6,7 +6,7 @@ import hashlib, _hashlib + + class HashlibFipsTests(unittest.TestCase): + +- @unittest.skipUnless(hashlib.get_fips_mode(), "Test only when FIPS is enabled") ++ @unittest.skipUnless(_hashlib.get_fips_mode(), "Test only when FIPS is enabled") + def test_fips_imports(self): + """blake2s and blake2b should fail to import in FIPS mode + """ +@@ -30,7 +30,7 @@ class HashlibFipsTests(unittest.TestCase): + + self.assertEqual(m, h) + +- @unittest.skipIf(hashlib.get_fips_mode(), "blake2 hashes are not available under FIPS") ++ @unittest.skipIf(_hashlib.get_fips_mode(), "blake2 hashes are not available under FIPS") + def test_blake2_hashes(self): + self.compare_hashes(hashlib.blake2b(b'abc'), _hashlib.openssl_blake2b(b'abc')) + self.compare_hashes(hashlib.blake2s(b'abc'), _hashlib.openssl_blake2s(b'abc')) +@@ -41,7 +41,7 @@ class HashlibFipsTests(unittest.TestCase): + self.compare_hashes(hashlib.sha3_384(b'abc'), _hashlib.openssl_sha3_384(b'abc')) + self.compare_hashes(hashlib.sha3_512(b'abc'), _hashlib.openssl_sha3_512(b'abc')) + +- @unittest.skipIf(hashlib.get_fips_mode(), "shake hashes are not available under FIPS") ++ @unittest.skipIf(_hashlib.get_fips_mode(), "shake hashes are not available under FIPS") + def test_shake_hashes(self): + self.compare_hashes(hashlib.shake_128(b'abc'), _hashlib.openssl_shake_128(b'abc')) + self.compare_hashes(hashlib.shake_256(b'abc'), _hashlib.openssl_shake_256(b'abc')) +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 19a2fbdf294..a6f9a353fd1 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -32,7 +32,9 @@ COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') + c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) + py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) + +-if hashlib.get_fips_mode(): ++from _hashlib import get_fips_mode as _get_fips_mode ++ ++if _get_fips_mode(): + FIPS_UNAVAILABLE = {'blake2b', 'blake2s'} + FIPS_DISABLED = {'md5', 'MD5', *FIPS_UNAVAILABLE} + else: +@@ -103,7 +105,7 @@ class HashLibTestCase(unittest.TestCase): + # Issue #14693: fallback modules are always compiled under POSIX + _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG + +- if hashlib.get_fips_mode(): ++ if _get_fips_mode(): + shakes = set() + + def _conditional_import_module(self, module_name): +@@ -114,7 +116,7 @@ class HashLibTestCase(unittest.TestCase): + if self._warn_on_extension_import: + warnings.warn('Did a C extension fail to compile? 
%s' % error) + except ImportError as error: +- if not hashlib.get_fips_mode(): ++ if not _get_fips_mode(): + raise + return None + +@@ -239,12 +241,12 @@ class HashLibTestCase(unittest.TestCase): + self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam') + self.assertRaises(TypeError, hashlib.new, 1) + +- @unittest.skipUnless(hashlib.get_fips_mode(), "Builtin constructor only unavailable in FIPS mode") ++ @unittest.skipUnless(_get_fips_mode(), "Builtin constructor only unavailable in FIPS mode") + def test_get_builtin_constructor_fips(self): + with self.assertRaises(AttributeError): + hashlib.__get_builtin_constructor + +- @unittest.skipIf(hashlib.get_fips_mode(), "No builtin constructors in FIPS mode") ++ @unittest.skipIf(_get_fips_mode(), "No builtin constructors in FIPS mode") + def test_get_builtin_constructor(self): + get_builtin_constructor = getattr(hashlib, + '__get_builtin_constructor') +@@ -433,7 +435,7 @@ class HashLibTestCase(unittest.TestCase): + self.assertIn(name.split("_")[0], repr(m)) + + def test_blocksize_name(self): +- if not hashlib.get_fips_mode(): ++ if not _get_fips_mode(): + self.check_blocksize_name('md5', 64, 16) + self.check_blocksize_name('sha1', 64, 20) + self.check_blocksize_name('sha224', 64, 28) +@@ -920,7 +922,7 @@ class HashLibTestCase(unittest.TestCase): + + self.assertEqual(expected_hash, hasher.hexdigest()) + +- @unittest.skipUnless(hashlib.get_fips_mode(), 'Needs FIPS mode.') ++ @unittest.skipUnless(_get_fips_mode(), 'Needs FIPS mode.') + def test_usedforsecurity_repeat(self): + """Make sure usedforsecurity flag isn't copied to other contexts""" + for i in range(3): +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 449b1366314..35b893509cb 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -3,6 +3,7 @@ import hmac + import hashlib + import unittest + import warnings ++from _hashlib import get_fips_mode + + from test.support import requires_hashdigest + +@@ -250,7 +251,7 @@ class TestVectorsTestCase(unittest.TestCase): + def test_sha512_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128) + +- @unittest.skipIf(hashlib.get_fips_mode(), 'MockCrazyHash unacceptable in FIPS mode.') ++ @unittest.skipIf(get_fips_mode(), 'MockCrazyHash unacceptable in FIPS mode.') + @requires_hashdigest('sha256') + def test_legacy_block_size_warnings(self): + class MockCrazyHash(object): +@@ -274,6 +275,7 @@ class TestVectorsTestCase(unittest.TestCase): + hmac.HMAC(b'a', b'b', digestmod=MockCrazyHash) + self.fail('Expected warning about small block_size') + ++ @unittest.skipIf(get_fips_mode(), 'md5 is not default in FIPS mode.') + def test_with_digestmod_warning(self): + with self.assertWarns(PendingDeprecationWarning): + key = b"\x0b" * 16 +@@ -375,7 +377,7 @@ class SanityTestCase(unittest.TestCase): + + class CopyTestCase(unittest.TestCase): + +- @unittest.skipIf(hashlib.get_fips_mode(), "Internal attributes unavailable in FIPS mode") ++ @unittest.skipIf(get_fips_mode(), "Internal attributes unavailable in FIPS mode") + @requires_hashdigest('sha256') + def test_attributes(self): + # Testing if attributes are of same type. 
+@@ -388,7 +390,7 @@ class CopyTestCase(unittest.TestCase): + self.assertEqual(type(h1.outer), type(h2.outer), + "Types of outer don't match.") + +- @unittest.skipIf(hashlib.get_fips_mode(), "Internal attributes unavailable in FIPS mode") ++ @unittest.skipIf(get_fips_mode(), "Internal attributes unavailable in FIPS mode") + @requires_hashdigest('sha256') + def test_realcopy(self): + # Testing if the copy method created a real copy. +diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py +index d5c63b483ee..45b72e3bc7e 100644 +--- a/Lib/test/test_logging.py ++++ b/Lib/test/test_logging.py +@@ -46,6 +46,7 @@ import time + import unittest + import warnings + import weakref ++ + try: + import threading + # The following imports are needed only for tests which +diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py +index 64b3201254a..704b2bc0d35 100644 +--- a/Lib/test/test_smtplib.py ++++ b/Lib/test/test_smtplib.py +@@ -16,6 +16,8 @@ import time + import select + import errno + import textwrap ++import hashlib ++from _hashlib import get_fips_mode + + import unittest + from test import support, mock_socket +@@ -980,7 +982,7 @@ class SMTPSimTests(unittest.TestCase): + + def testAUTH_multiple(self): + # Test that multiple authentication methods are tried. +- self.serv.add_feature("AUTH BOGUS PLAIN LOGIN CRAM-MD5") ++ self.serv.add_feature("AUTH BOGUS PLAIN LOGIN") + smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + resp = smtp.login(sim_auth[0], sim_auth[1]) + self.assertEqual(resp, (235, b'Authentication Succeeded')) +diff --git a/Lib/test/test_tools/test_md5sum.py b/Lib/test/test_tools/test_md5sum.py +index 7028a4dc214..3ba1ca0f146 100644 +--- a/Lib/test/test_tools/test_md5sum.py ++++ b/Lib/test/test_tools/test_md5sum.py +@@ -4,13 +4,13 @@ import os + import unittest + from test import support + from test.support.script_helper import assert_python_ok, assert_python_failure +-import hashlib ++import _hashlib + + from test.test_tools import scriptsdir, skip_if_missing + + skip_if_missing() + +-if hashlib.get_fips_mode(): ++if _hashlib.get_fips_mode(): + raise unittest.SkipTest("md5sum won't work at all in FIPS mode") + + class MD5SumTests(unittest.TestCase): +diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py +index 895f97cc09a..b4fb13ad092 100644 +--- a/Lib/test/test_urllib2_localnet.py ++++ b/Lib/test/test_urllib2_localnet.py +@@ -6,6 +6,7 @@ import urllib.request + import http.server + import unittest + import hashlib ++from _hashlib import get_fips_mode + + from test import support + +-- +2.21.0 + + +From 3538f31cc15c633f1df2d6ed18471c1f771c4a52 Mon Sep 17 00:00:00 2001 +From: Christian Heimes +Date: Wed, 20 Nov 2019 10:59:25 +0100 +Subject: [PATCH 35/36] Use FIPS compliant CSPRNG + +Kernel's getrandom() source is not yet FIPS compliant. Use OpenSSL's +DRBG in FIPS mode and disable os.getrandom() function. 
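For callers, the practical effect of this change is that os.getrandom() raises ValueError("getrandom is not FIPS compliant") once FIPS mode is active, while os.urandom() keeps working because it is routed through OpenSSL's DRBG. A minimal caller-side sketch of coping with that behaviour, assuming the patched interpreter (the helper name get_random_bytes is illustrative and not part of the patch):

import os

def get_random_bytes(n):
    # Illustrative helper, not part of the patch: prefer os.getrandom(),
    # but fall back to os.urandom() when the patched interpreter rejects
    # it in FIPS mode ("getrandom is not FIPS compliant") or the platform
    # lacks the syscall entirely.
    try:
        return os.getrandom(n)
    except (AttributeError, ValueError):
        return os.urandom(n)

print(len(get_random_bytes(16)))  # 16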
+ +Signed-off-by: Christian Heimes +--- + Lib/test/test_os.py | 5 +++ + Makefile.pre.in | 2 +- + Modules/posixmodule.c | 8 +++++ + Python/random.c | 75 +++++++++++++++++++++++++++++++++++++++++++ + 4 files changed, 89 insertions(+), 1 deletion(-) + +diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py +index 7a839c83fe4..91816afa47d 100644 +--- a/Lib/test/test_os.py ++++ b/Lib/test/test_os.py +@@ -1362,6 +1362,11 @@ class GetRandomTests(unittest.TestCase): + raise unittest.SkipTest("getrandom() syscall fails with ENOSYS") + else: + raise ++ except ValueError as exc: ++ if exc.args[0] == "getrandom is not FIPS compliant": ++ raise unittest.SkipTest("Skip in FIPS mode") ++ else: ++ raise + + def test_getrandom_type(self): + data = os.getrandom(16) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index d15d93509d1..40502fc36d6 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -112,7 +112,7 @@ CFLAGSFORSHARED=@CFLAGSFORSHARED@ + # C flags used for building the interpreter object files + PY_CORE_CFLAGS= $(PY_CFLAGS) $(PY_CFLAGS_NODIST) $(PY_CPPFLAGS) $(CFLAGSFORSHARED) -DPy_BUILD_CORE + # Linker flags used for building the interpreter object files +-PY_CORE_LDFLAGS=$(PY_LDFLAGS) $(PY_LDFLAGS_NODIST) ++PY_CORE_LDFLAGS=$(PY_LDFLAGS) $(PY_LDFLAGS_NODIST) -lcrypto + # Strict or non-strict aliasing flags used to compile dtoa.c, see above + CFLAGS_ALIASING=@CFLAGS_ALIASING@ + +diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c +index 776a3d249a8..647dbd3b4d6 100644 +--- a/Modules/posixmodule.c ++++ b/Modules/posixmodule.c +@@ -397,6 +397,9 @@ static int win32_can_symlink = 0; + #define MODNAME "posix" + #endif + ++/* for FIPS check in os.getrandom() */ ++#include ++ + #ifdef MS_WINDOWS + /* defined in fileutils.c */ + PyAPI_FUNC(void) _Py_time_t_to_FILE_TIME(time_t, int, FILETIME *); +@@ -12196,6 +12199,11 @@ os_getrandom_impl(PyObject *module, Py_ssize_t size, int flags) + return posix_error(); + } + ++ if (FIPS_mode()) { ++ PyErr_SetString(PyExc_ValueError, "getrandom is not FIPS compliant"); ++ return NULL; ++ } ++ + bytes = PyBytes_FromStringAndSize(NULL, size); + if (bytes == NULL) { + PyErr_NoMemory(); +diff --git a/Python/random.c b/Python/random.c +index d4747d61eb8..ab3cee29a39 100644 +--- a/Python/random.c ++++ b/Python/random.c +@@ -410,6 +410,77 @@ dev_urandom_close(void) + } + #endif /* !MS_WINDOWS */ + ++#include ++#include ++#include ++ ++#if (OPENSSL_VERSION_NUMBER < 0x10101000L) || defined(LIBRESSL_VERSION_NUMBER) ++# error "py_openssl_drbg_urandom requires OpenSSL 1.1.1 for fork safety" ++#endif ++ ++static void ++py_openssl_set_exception(PyObject* exc) { ++ unsigned long errcode; ++ const char *lib, *func, *reason; ++ ++ errcode = ERR_peek_last_error(); ++ if (!errcode) { ++ PyErr_SetString(exc, "unknown reasons"); ++ } ++ ERR_clear_error(); ++ ++ lib = ERR_lib_error_string(errcode); ++ func = ERR_func_error_string(errcode); ++ reason = ERR_reason_error_string(errcode); ++ ++ if (lib && func) { ++ PyErr_Format(exc, "[%s: %s] %s", lib, func, reason); ++ } ++ else if (lib) { ++ PyErr_Format(exc, "[%s] %s", lib, reason); ++ } ++ else { ++ PyErr_SetString(exc, reason); ++ } ++} ++ ++static int ++py_openssl_drbg_urandom(char *buffer, Py_ssize_t size, int raise) ++{ ++ int res; ++ static int init = 0; ++ ++ if (!init) { ++ init = 1; ++ res = OPENSSL_init_crypto(OPENSSL_INIT_ATFORK, NULL); ++ if (res == 0) { ++ if (raise) { ++ py_openssl_set_exception(PyExc_RuntimeError); ++ } ++ return 0; ++ } ++ } ++ ++ if (size > INT_MAX) { ++ if (raise) { ++ 
PyErr_Format(PyExc_OverflowError, ++ "RAND_bytes() size is limited to 2GB."); ++ } ++ return -1; ++ } ++ ++ res = RAND_bytes((unsigned char*)buffer, (int)size); ++ ++ if (res == 1) { ++ return 1; ++ } else { ++ if (raise) { ++ py_openssl_set_exception(PyExc_RuntimeError); ++ } ++ return 0; ++ } ++} ++ + + /* Fill buffer with pseudo-random bytes generated by a linear congruent + generator (LCG): +@@ -494,6 +565,10 @@ pyurandom(void *buffer, Py_ssize_t size, int blocking, int raise) + return 0; + } + ++ if (FIPS_mode()) { ++ return py_openssl_drbg_urandom(buffer, size, raise); ++ } ++ + #ifdef MS_WINDOWS + return win32_urandom((unsigned char *)buffer, size, raise); + #else +-- +2.21.0 + + +From ed4103bbd15a09bff4fb94a38d1b3f02df5e7c96 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Thu, 28 Nov 2019 17:26:02 +0100 +Subject: [PATCH 36/36] Fixups for FIPS compliant CSPRNG + +--- + Lib/test/test_os.py | 3 ++- + Python/random.c | 33 +++------------------------------ + 2 files changed, 5 insertions(+), 31 deletions(-) + +diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py +index 91816afa47d..f2572bb6c44 100644 +--- a/Lib/test/test_os.py ++++ b/Lib/test/test_os.py +@@ -27,6 +27,7 @@ import time + import unittest + import uuid + import warnings ++from _hashlib import get_fips_mode + from test import support + try: + import threading +@@ -1363,7 +1364,7 @@ class GetRandomTests(unittest.TestCase): + else: + raise + except ValueError as exc: +- if exc.args[0] == "getrandom is not FIPS compliant": ++ if get_fips_mode() and exc.args[0] == "getrandom is not FIPS compliant": + raise unittest.SkipTest("Skip in FIPS mode") + else: + raise +diff --git a/Python/random.c b/Python/random.c +index ab3cee29a39..222d651407c 100644 +--- a/Python/random.c ++++ b/Python/random.c +@@ -410,40 +410,13 @@ dev_urandom_close(void) + } + #endif /* !MS_WINDOWS */ + +-#include +-#include + #include ++#include <_hashopenssl.h> + + #if (OPENSSL_VERSION_NUMBER < 0x10101000L) || defined(LIBRESSL_VERSION_NUMBER) + # error "py_openssl_drbg_urandom requires OpenSSL 1.1.1 for fork safety" + #endif + +-static void +-py_openssl_set_exception(PyObject* exc) { +- unsigned long errcode; +- const char *lib, *func, *reason; +- +- errcode = ERR_peek_last_error(); +- if (!errcode) { +- PyErr_SetString(exc, "unknown reasons"); +- } +- ERR_clear_error(); +- +- lib = ERR_lib_error_string(errcode); +- func = ERR_func_error_string(errcode); +- reason = ERR_reason_error_string(errcode); +- +- if (lib && func) { +- PyErr_Format(exc, "[%s: %s] %s", lib, func, reason); +- } +- else if (lib) { +- PyErr_Format(exc, "[%s] %s", lib, reason); +- } +- else { +- PyErr_SetString(exc, reason); +- } +-} +- + static int + py_openssl_drbg_urandom(char *buffer, Py_ssize_t size, int raise) + { +@@ -455,7 +428,7 @@ py_openssl_drbg_urandom(char *buffer, Py_ssize_t size, int raise) + res = OPENSSL_init_crypto(OPENSSL_INIT_ATFORK, NULL); + if (res == 0) { + if (raise) { +- py_openssl_set_exception(PyExc_RuntimeError); ++ _setException(PyExc_RuntimeError); + } + return 0; + } +@@ -475,7 +448,7 @@ py_openssl_drbg_urandom(char *buffer, Py_ssize_t size, int raise) + return 1; + } else { + if (raise) { +- py_openssl_set_exception(PyExc_RuntimeError); ++ _setException(PyExc_RuntimeError); + } + return 0; + } +-- +2.21.0 + diff --git a/SOURCES/00330-CVE-2018-20852.patch b/SOURCES/00330-CVE-2018-20852.patch new file mode 100644 index 0000000..380fc33 --- /dev/null +++ b/SOURCES/00330-CVE-2018-20852.patch @@ -0,0 +1,93 @@ +diff --git a/Lib/http/cookiejar.py 
b/Lib/http/cookiejar.py +index adf956d..97599d4 100644 +--- a/Lib/http/cookiejar.py ++++ b/Lib/http/cookiejar.py +@@ -1148,6 +1148,11 @@ class DefaultCookiePolicy(CookiePolicy): + req_host, erhn = eff_request_host(request) + domain = cookie.domain + ++ if domain and not domain.startswith("."): ++ dotdomain = "." + domain ++ else: ++ dotdomain = domain ++ + # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't + if (cookie.version == 0 and + (self.strict_ns_domain & self.DomainStrictNonDomain) and +@@ -1160,7 +1165,7 @@ class DefaultCookiePolicy(CookiePolicy): + _debug(" effective request-host name %s does not domain-match " + "RFC 2965 cookie domain %s", erhn, domain) + return False +- if cookie.version == 0 and not ("."+erhn).endswith(domain): ++ if cookie.version == 0 and not ("."+erhn).endswith(dotdomain): + _debug(" request-host %s does not match Netscape cookie domain " + "%s", req_host, domain) + return False +@@ -1174,7 +1179,11 @@ class DefaultCookiePolicy(CookiePolicy): + req_host = "."+req_host + if not erhn.startswith("."): + erhn = "."+erhn +- if not (req_host.endswith(domain) or erhn.endswith(domain)): ++ if domain and not domain.startswith("."): ++ dotdomain = "." + domain ++ else: ++ dotdomain = domain ++ if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)): + #_debug(" request domain %s does not match cookie domain %s", + # req_host, domain) + return False +diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py +index abc625d..6e1b308 100644 +--- a/Lib/test/test_http_cookiejar.py ++++ b/Lib/test/test_http_cookiejar.py +@@ -415,6 +415,7 @@ class CookieTests(unittest.TestCase): + ("http://foo.bar.com/", ".foo.bar.com", True), + ("http://foo.bar.com/", "foo.bar.com", True), + ("http://foo.bar.com/", ".bar.com", True), ++ ("http://foo.bar.com/", "bar.com", True), + ("http://foo.bar.com/", "com", True), + ("http://foo.com/", "rhubarb.foo.com", False), + ("http://foo.com/", ".foo.com", True), +@@ -425,6 +426,8 @@ class CookieTests(unittest.TestCase): + ("http://foo/", "foo", True), + ("http://foo/", "foo.local", True), + ("http://foo/", ".local", True), ++ ("http://barfoo.com", ".foo.com", False), ++ ("http://barfoo.com", "foo.com", False), + ]: + request = urllib.request.Request(url) + r = pol.domain_return_ok(domain, request) +@@ -959,6 +962,33 @@ class CookieTests(unittest.TestCase): + c.add_cookie_header(req) + self.assertFalse(req.has_header("Cookie")) + ++ c.clear() ++ ++ pol.set_blocked_domains([]) ++ req = urllib.request.Request("http://acme.com/") ++ res = FakeResponse(headers, "http://acme.com/") ++ cookies = c.make_cookies(res, req) ++ c.extract_cookies(res, req) ++ self.assertEqual(len(c), 1) ++ ++ req = urllib.request.Request("http://acme.com/") ++ c.add_cookie_header(req) ++ self.assertTrue(req.has_header("Cookie")) ++ ++ req = urllib.request.Request("http://badacme.com/") ++ c.add_cookie_header(req) ++ self.assertFalse(pol.return_ok(cookies[0], req)) ++ self.assertFalse(req.has_header("Cookie")) ++ ++ p = pol.set_blocked_domains(["acme.com"]) ++ req = urllib.request.Request("http://acme.com/") ++ c.add_cookie_header(req) ++ self.assertFalse(req.has_header("Cookie")) ++ ++ req = urllib.request.Request("http://badacme.com/") ++ c.add_cookie_header(req) ++ self.assertFalse(req.has_header("Cookie")) ++ + def test_secure(self): + for ns in True, False: + for whitespace in " ", "": diff --git a/SOURCES/00332-CVE-2019-16056.patch b/SOURCES/00332-CVE-2019-16056.patch new file mode 100644 index 0000000..ca2e3d4 --- 
/dev/null +++ b/SOURCES/00332-CVE-2019-16056.patch @@ -0,0 +1,95 @@ +diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py +index 737951e4b1b1..bc9c9b6241d4 100644 +--- a/Lib/email/_header_value_parser.py ++++ b/Lib/email/_header_value_parser.py +@@ -1561,6 +1561,8 @@ def get_domain(value): + token, value = get_dot_atom(value) + except errors.HeaderParseError: + token, value = get_atom(value) ++ if value and value[0] == '@': ++ raise errors.HeaderParseError('Invalid Domain') + if leader is not None: + token[:0] = [leader] + domain.append(token) +diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py +index cdfa3729adc7..41ff6f8c000d 100644 +--- a/Lib/email/_parseaddr.py ++++ b/Lib/email/_parseaddr.py +@@ -379,7 +379,12 @@ def getaddrspec(self): + aslist.append('@') + self.pos += 1 + self.gotonext() +- return EMPTYSTRING.join(aslist) + self.getdomain() ++ domain = self.getdomain() ++ if not domain: ++ # Invalid domain, return an empty address instead of returning a ++ # local part to denote failed parsing. ++ return EMPTYSTRING ++ return EMPTYSTRING.join(aslist) + domain + + def getdomain(self): + """Get the complete domain name from an address.""" +@@ -394,6 +399,10 @@ def getdomain(self): + elif self.field[self.pos] == '.': + self.pos += 1 + sdlist.append('.') ++ elif self.field[self.pos] == '@': ++ # bpo-34155: Don't parse domains with two `@` like ++ # `a@malicious.org@important.com`. ++ return EMPTYSTRING + elif self.field[self.pos] in self.atomends: + break + else: +diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py +index a2c900fa7fd2..02ef3e1006c6 100644 +--- a/Lib/test/test_email/test__header_value_parser.py ++++ b/Lib/test/test_email/test__header_value_parser.py +@@ -1418,6 +1418,16 @@ def test_get_addr_spec_dot_atom(self): + self.assertEqual(addr_spec.domain, 'example.com') + self.assertEqual(addr_spec.addr_spec, 'star.a.star@example.com') + ++ def test_get_addr_spec_multiple_domains(self): ++ with self.assertRaises(errors.HeaderParseError): ++ parser.get_addr_spec('star@a.star@example.com') ++ ++ with self.assertRaises(errors.HeaderParseError): ++ parser.get_addr_spec('star@a@example.com') ++ ++ with self.assertRaises(errors.HeaderParseError): ++ parser.get_addr_spec('star@172.17.0.1@example.com') ++ + # get_obs_route + + def test_get_obs_route_simple(self): +diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py +index f97ccc6711cc..68d052279987 100644 +--- a/Lib/test/test_email/test_email.py ++++ b/Lib/test/test_email/test_email.py +@@ -3035,6 +3035,20 @@ def test_parseaddr_empty(self): + self.assertEqual(utils.parseaddr('<>'), ('', '')) + self.assertEqual(utils.formataddr(utils.parseaddr('<>')), '') + ++ def test_parseaddr_multiple_domains(self): ++ self.assertEqual( ++ utils.parseaddr('a@b@c'), ++ ('', '') ++ ) ++ self.assertEqual( ++ utils.parseaddr('a@b.c@c'), ++ ('', '') ++ ) ++ self.assertEqual( ++ utils.parseaddr('a@172.17.0.1@c'), ++ ('', '') ++ ) ++ + def test_noquote_dump(self): + self.assertEqual( + utils.formataddr(('A Silly Person', 'person@dom.ain')), +diff --git a/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst +new file mode 100644 +index 000000000000..50292e29ed1d +--- /dev/null ++++ b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst +@@ -0,0 +1 @@ ++Fix parsing of invalid email addresses with more than one ``@`` (e.g. a@b@c.com.) 
to not return the part before 2nd ``@`` as valid email address. Patch by maxking & jpic. diff --git a/SOURCES/00333-reduce-pgo-tests.patch b/SOURCES/00333-reduce-pgo-tests.patch new file mode 100644 index 0000000..7dcd3b9 --- /dev/null +++ b/SOURCES/00333-reduce-pgo-tests.patch @@ -0,0 +1,296 @@ +diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py +index 538ff05..e7f2013 100644 +--- a/Lib/test/libregrtest/cmdline.py ++++ b/Lib/test/libregrtest/cmdline.py +@@ -263,7 +263,9 @@ def _create_parser(): + help='only write the name of test cases that will be run' + ' , don\'t execute them') + group.add_argument('-P', '--pgo', dest='pgo', action='store_true', +- help='enable Profile Guided Optimization training') ++ help='enable Profile Guided Optimization (PGO) training') ++ group.add_argument('--pgo-extended', action='store_true', ++ help='enable extended PGO training (slower training)') + group.add_argument('--fail-env-changed', action='store_true', + help='if a test file alters the environment, mark ' + 'the test as failed') +@@ -339,6 +341,8 @@ def _parse_args(args, **kwargs): + parser.error("-G/--failfast needs either -v or -W") + if ns.pgo and (ns.verbose or ns.verbose2 or ns.verbose3): + parser.error("--pgo/-v don't go together!") ++ if ns.pgo_extended: ++ ns.pgo = True # pgo_extended implies pgo + + if ns.nowindows: + print("Warning: the --nowindows (-n) option is deprecated. " +diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py +index b6d05f6..524dbfa 100644 +--- a/Lib/test/libregrtest/main.py ++++ b/Lib/test/libregrtest/main.py +@@ -17,6 +17,7 @@ from test.libregrtest.runtest import ( + INTERRUPTED, CHILD_ERROR, TEST_DID_NOT_RUN, + PROGRESS_MIN_TIME, format_test_result) + from test.libregrtest.setup import setup_tests ++from test.libregrtest.pgo import setup_pgo_tests + from test.libregrtest.utils import removepy, count, format_duration, printlist + from test import support + try: +@@ -214,6 +215,10 @@ class Regrtest: + + removepy(self.tests) + ++ if self.ns.pgo: ++ # add default PGO tests if no tests are specified ++ setup_pgo_tests(self.ns) ++ + stdtests = STDTESTS[:] + nottests = NOTTESTS.copy() + if self.ns.exclude: +@@ -601,6 +606,7 @@ class Regrtest: + input("Press any key to continue...") + + support.PGO = self.ns.pgo ++ support.PGO_EXTENDED = self.ns.pgo_extended + + setup_tests(self.ns) + +diff --git a/Lib/test/libregrtest/pgo.py b/Lib/test/libregrtest/pgo.py +new file mode 100644 +index 0000000..379ff05 +--- /dev/null ++++ b/Lib/test/libregrtest/pgo.py +@@ -0,0 +1,55 @@ ++# Set of tests run by default if --pgo is specified. The tests below were ++# chosen based on the following criteria: either they exercise a commonly used ++# C extension module or type, or they run some relatively typical Python code. ++# Long running tests should be avoided because the PGO instrumented executable ++# runs slowly. 
++PGO_TESTS = [ ++ 'test_array', ++ 'test_base64', ++ 'test_binascii', ++ 'test_binop', ++ 'test_bisect', ++ 'test_bytes', ++ 'test_bz2', ++ 'test_cmath', ++ 'test_codecs', ++ 'test_collections', ++ 'test_complex', ++ 'test_dataclasses', ++ 'test_datetime', ++ 'test_decimal', ++ 'test_difflib', ++ 'test_embed', ++ 'test_float', ++ 'test_fstring', ++ 'test_functools', ++ 'test_generators', ++ 'test_hashlib', ++ 'test_heapq', ++ 'test_int', ++ 'test_itertools', ++ 'test_json', ++ 'test_long', ++ 'test_lzma', ++ 'test_math', ++ 'test_memoryview', ++ 'test_operator', ++ 'test_ordered_dict', ++ 'test_pickle', ++ 'test_pprint', ++ 'test_re', ++ 'test_set', ++ 'test_sqlite', ++ 'test_statistics', ++ 'test_struct', ++ 'test_tabnanny', ++ 'test_time', ++ 'test_unicode', ++ 'test_xml_etree', ++ 'test_xml_etree_c', ++] ++ ++def setup_pgo_tests(ns): ++ if not ns.args and not ns.pgo_extended: ++ # run default set of tests for PGO training ++ ns.args = PGO_TESTS[:] +diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py +index 764057a..468ee46 100644 +--- a/Lib/test/pickletester.py ++++ b/Lib/test/pickletester.py +@@ -2039,6 +2039,7 @@ class AbstractPickleTests(unittest.TestCase): + + FRAME_SIZE_TARGET = 64 * 1024 + ++ @support.skip_if_pgo_task + def check_frame_opcodes(self, pickled): + """ + Check the arguments of FRAME opcodes in a protocol 4+ pickle. +@@ -2059,6 +2060,7 @@ class AbstractPickleTests(unittest.TestCase): + frame_size = len(pickled) - last_pos - frame_opcode_size + self.assertEqual(frame_size, last_arg) + ++ @support.skip_if_pgo_task + def test_framing_many_objects(self): + obj = list(range(10**5)) + for proto in range(4, pickle.HIGHEST_PROTOCOL + 1): +diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py +index 66c0fed..e80a819 100644 +--- a/Lib/test/support/__init__.py ++++ b/Lib/test/support/__init__.py +@@ -953,6 +953,10 @@ SAVEDCWD = os.getcwd() + # useful for PGO + PGO = False + ++# Set by libregrtest/main.py if we are running the extended (time consuming) ++# PGO task. If this is True, PGO is also True. ++PGO_EXTENDED = False ++ + @contextlib.contextmanager + def temp_dir(path=None, quiet=False): + """Return a context manager that creates a temporary directory. 
+@@ -2442,6 +2446,11 @@ def skip_unless_xattr(test): + msg = "no non-broken extended attribute support" + return test if ok else unittest.skip(msg)(test) + ++def skip_if_pgo_task(test): ++ """Skip decorator for tests not run in (non-extended) PGO task""" ++ ok = not PGO or PGO_EXTENDED ++ msg = "Not run for (non-extended) PGO task" ++ return test if ok else unittest.skip(msg)(test) + + def fs_is_case_insensitive(directory): + """Detects if the file system for the specified directory is case-insensitive.""" +diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py +index f340f23..ebb151c 100644 +--- a/Lib/test/test_bz2.py ++++ b/Lib/test/test_bz2.py +@@ -654,6 +654,7 @@ class BZ2CompressorTest(BaseTest): + data += bz2c.flush() + self.assertEqual(ext_decompress(data), self.TEXT) + ++ @support.skip_if_pgo_task + @bigmemtest(size=_4G + 100, memuse=2) + def testCompress4G(self, size): + # "Test BZ2Compressor.compress()/flush() with >4GiB input" +@@ -712,6 +713,7 @@ class BZ2DecompressorTest(BaseTest): + self.assertRaises(EOFError, bz2d.decompress, b"anything") + self.assertRaises(EOFError, bz2d.decompress, b"") + ++ @support.skip_if_pgo_task + @bigmemtest(size=_4G + 100, memuse=3.3) + def testDecompress4G(self, size): + # "Test BZ2Decompressor.decompress() with >4GiB input" +diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py +index 9317951..8c1d016 100644 +--- a/Lib/test/test_itertools.py ++++ b/Lib/test/test_itertools.py +@@ -2023,6 +2023,7 @@ class RegressionTests(unittest.TestCase): + self.assertRaises(AssertionError, list, cycle(gen1())) + self.assertEqual(hist, [0,1]) + ++ @support.skip_if_pgo_task + def test_long_chain_of_empty_iterables(self): + # Make sure itertools.chain doesn't run into recursion limits when + # dealing with long chains of empty iterables. Even with a high +diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py +index 3dc2c1e..117de0a 100644 +--- a/Lib/test/test_lzma.py ++++ b/Lib/test/test_lzma.py +@@ -333,6 +333,7 @@ class CompressorDecompressorTestCase(unittest.TestCase): + + # Test with inputs larger than 4GiB. + ++ @support.skip_if_pgo_task + @bigmemtest(size=_4G + 100, memuse=2) + def test_compressor_bigmem(self, size): + lzc = LZMACompressor() +@@ -344,6 +345,7 @@ class CompressorDecompressorTestCase(unittest.TestCase): + finally: + ddata = None + ++ @support.skip_if_pgo_task + @bigmemtest(size=_4G + 100, memuse=3) + def test_decompressor_bigmem(self, size): + lzd = LZMADecompressor() +diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py +index 5347bb1..9d83217 100644 +--- a/Lib/test/test_regrtest.py ++++ b/Lib/test/test_regrtest.py +@@ -6,6 +6,7 @@ Note: test_regrtest cannot be run twice in parallel. + + import contextlib + import faulthandler ++import glob + import io + import os.path + import platform +@@ -532,6 +533,31 @@ class BaseTestCase(unittest.TestCase): + return proc.stdout + + ++class CheckActualTests(BaseTestCase): ++ """ ++ Check that regrtest appears to find the expected set of tests. ++ """ ++ ++ def test_finds_expected_number_of_tests(self): ++ args = ['-Wd', '-E', '-bb', '-m', 'test.regrtest', '--list-tests'] ++ output = self.run_python(args) ++ rough_number_of_tests_found = len(output.splitlines()) ++ actual_testsuite_glob = os.path.join(os.path.dirname(__file__), ++ 'test*.py') ++ rough_counted_test_py_files = len(glob.glob(actual_testsuite_glob)) ++ # We're not trying to duplicate test finding logic in here, ++ # just give a rough estimate of how many there should be and ++ # be near that. 
This is a regression test to prevent mishaps ++ # such as https://bugs.python.org/issue37667 in the future. ++ # If you need to change the values in here during some ++ # mythical future test suite reorganization, don't go ++ # overboard with logic and keep that goal in mind. ++ self.assertGreater(rough_number_of_tests_found, ++ rough_counted_test_py_files*9//10, ++ msg='Unexpectedly low number of tests found in:\n' ++ f'{", ".join(output.splitlines())}') ++ ++ + class ProgramsTestCase(BaseTestCase): + """ + Test various ways to run the Python test suite. Use options close +diff --git a/Makefile.pre.in b/Makefile.pre.in +index b452289..cc428ac 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -247,9 +247,10 @@ TCLTK_INCLUDES= @TCLTK_INCLUDES@ + TCLTK_LIBS= @TCLTK_LIBS@ + + # The task to run while instrumented when building the profile-opt target. +-# We exclude unittests with -x that take a rediculious amount of time to +-# run in the instrumented training build or do not provide much value. +-PROFILE_TASK=-m test.regrtest --pgo ++# To speed up profile generation, we don't run the full unit test suite ++# by default. The default is "-m test --pgo". To run more tests, use ++# PROFILE_TASK="-m test --pgo-extended" ++PROFILE_TASK= @PROFILE_TASK@ + + # report files for gcov / lcov coverage report + COVERAGE_INFO= $(abs_builddir)/coverage.info +diff --git a/configure.ac b/configure.ac +index c071ec3..816fc5a 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -1308,6 +1308,14 @@ else + DEF_MAKE_RULE="all" + fi + ++AC_ARG_VAR(PROFILE_TASK, Python args for PGO generation task) ++AC_MSG_CHECKING(PROFILE_TASK) ++if test -z "$PROFILE_TASK" ++then ++ PROFILE_TASK='-m test --pgo' ++fi ++AC_MSG_RESULT($PROFILE_TASK) ++ + # Make llvm-relatec checks work on systems where llvm tools are not installed with their + # normal names in the default $PATH (ie: Ubuntu). They exist under the + # non-suffixed name in their versioned llvm directory. diff --git a/SPECS/python3.spec b/SPECS/python3.spec index 70e1d7e..d786e5c 100644 --- a/SPECS/python3.spec +++ b/SPECS/python3.spec @@ -14,7 +14,7 @@ URL: https://www.python.org/ # WARNING When rebasing to a new Python version, # remember to update the python3-docs package as well Version: %{pybasever}.8 -Release: 11%{?dist} +Release: 23%{?dist} License: Python @@ -25,21 +25,16 @@ License: Python # Note that the bcond macros are named for the CLI option they create. # "%%bcond_without" means "ENABLE by default and create a --without option" +# Whether to use RPM build wheels from the python-{pip,setuptools}-wheel package +# Uses upstream bundled prebuilt wheels otherwise +%bcond_without rpmwheels + # Expensive optimizations (mainly, profile-guided optimizations) -%ifarch %{ix86} x86_64 %bcond_without optimizations -%else -# On some architectures, the optimized build takes tens of hours, possibly -# longer than Koji's 24-hour timeout. Disable optimizations here. 
-%bcond_with optimizations -%endif # Run the test suite in %%check %bcond_without tests -# Ability to reuse RPM-installed pip using rewheel -%bcond_without rewheel - # Extra build for debugging the interpreter or C-API extensions # (the -debug subpackages) %bcond_without debug_build @@ -209,9 +204,9 @@ BuildRequires: /usr/bin/dtrace # workaround http://bugs.python.org/issue19804 (test_uuid requires ifconfig) BuildRequires: /usr/sbin/ifconfig -%if %{with rewheel} -BuildRequires: python3-setuptools -BuildRequires: python3-pip +%if %{with rpmwheels} +BuildRequires: python3-setuptools-wheel +BuildRequires: python3-pip-wheel # Verify that the BuildRoot includes python36. # Not actually needed for build. @@ -321,10 +316,9 @@ Patch170: 00170-gc-assertions.patch Patch178: 00178-dont-duplicate-flags-in-sysconfig.patch # 00189 # -# Add the rewheel module, allowing to recreate wheels from already installed -# ones -# https://github.com/bkabrda/rewheel -Patch189: 00189-add-rewheel-module.patch +# Instead of bundled wheels, use our RPM packaged wheels from +# /usr/share/python3-wheels +Patch189: 00189-use-rpm-wheels.patch # 00205 # # LIBPL variable in makefile takes LIBPL from configure.ac @@ -357,6 +351,13 @@ Patch274: 00274-fix-arch-names.patch # See also: https://bugzilla.redhat.com/show_bug.cgi?id=1489816 Patch294: 00294-define-TLS-cipher-suite-on-build-time.patch +# 00316 # +# We remove the exe files from distutil's bdist_wininst +# So we mark the command as unsupported - and the tests are skipped +# Fixed upstream and backported from the 3.7 branch: https://bugs.python.org/issue10945 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1754040 +Patch316: 00316-mark-bdist_wininst-unsupported.patch + # 00317 # # Security fix for CVE-2019-5010: Fix segfault in ssl's cert parser # https://bugzilla.redhat.com/show_bug.cgi?id=1666789 @@ -364,11 +365,29 @@ Patch294: 00294-define-TLS-cipher-suite-on-build-time.patch Patch317: 00317-CVE-2019-5010.patch # 00318 # -# test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1 +# Various fixes for TLS 1.3 and OpenSSL 1.1.1 # https://bugzilla.redhat.com/show_bug.cgi?id=1639531 + +# test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1 # https://bugs.python.org/issue32947#msg333990 # https://github.com/python/cpython/pull/11612 -Patch318: 00318-test-ssl-fix-for-tls-13.patch + +# Encrypt private key test files with AES256 +# https://bugs.python.org/issue38271 +# https://github.com/python/cpython/pull/16396 + +# Prefer PROTOCOL_TLS_CLIENT/SERVER (partial backport) +# https://bugs.python.org/issue31346 +# https://github.com/python/cpython/pull/3058 + +# Enable TLS 1.3 in tests (partial backport) +# https://bugs.python.org/issue33618 +# https://github.com/python/cpython/pull/7082 + +# OpenSSL 1.1.1-pre1 / TLS 1.3 fixes (partial backport) +# https://bugs.python.org/issue32947 +# https://github.com/python/cpython/pull/5923 +Patch318: 00318-fixes-for-tls-13.patch # 00319 # # Fix test_tarfile on ppc64 @@ -377,10 +396,10 @@ Patch318: 00318-test-ssl-fix-for-tls-13.patch Patch319: 00319-test_tarfile_ppc64.patch # 00320 # -# Security fix for CVE-2019-9636: Information Disclosure due to urlsplit improper NFKC normalization +# Security fix for CVE-2019-9636 and CVE-2019-10160: Information Disclosure due to urlsplit improper NFKC normalization # Fixed upstream: https://bugs.python.org/issue36216 and https://bugs.python.org/issue36742 # Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1689318 -Patch320: 00320-CVE-2019-9636.patch +Patch320: 00320-CVE-2019-9636-and-CVE-2019-10160.patch # 
00324 # # Disallow control chars in http URLs @@ -397,6 +416,78 @@ Patch324: 00324-disallow-control-chars-in-http-urls.patch # Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1714643 Patch325: 00325-CVE-2019-9948.patch +# 00326 # +# Don't set the post-handshake authentication verify flag on client side +# on TLS 1.3, as it also implicitly enables cert chain validation and an +# SSL/TLS connection will fail when verify mode is set to CERT_NONE. +# Fixed upstream: https://bugs.python.org/issue37428 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1725721 +Patch326: 00326-do-not-set-PHA-verify-flag-on-client-side.patch + +# 00327 # +# Enable TLS 1.3 post-handshake authentication in http.client for default +# context or if a cert_file is passed to HTTPSConnection +# Fixed upstream: https://bugs.python.org/issue37440 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1671353 +Patch327: 00327-enable-tls-1.3-PHA-in-http.client.patch + +# 00329 # +# Support OpenSSL FIPS mode +# - Fallback implementations md5, sha1, sha256, sha512 are removed in favor of OpenSSL wrappers +# - In FIPS mode, OpenSSL wrappers are always used in hashlib +# - add a new "usedforsecurity" keyword argument to the various digest +# algorithms in hashlib so that you can whitelist a callsite with +# "usedforsecurity=False" +# The change has been implemented upstream since Python 3.9: +# https://bugs.python.org/issue9216 +# - OpenSSL wrappers for the hashes blake2{b512,s256}, +# sha3_{224,256,384,512}, shake_{128,256} are now exported from _hashlib +# - In FIPS mode, the blake2, sha3 and shake hashes use OpenSSL wrappers +# and do not offer extended functionality (keys, tree hashing, custom digest size) +# - In FIPS mode, hmac.HMAC can only be instantiated with an OpenSSL wrapper +# or an string with OpenSSL hash name as the "digestmod" argument. +# The argument must be specified (instead of defaulting to ‘md5’). +# +# - Also while in FIPS mode, we utilize OpenSSL's DRBG and disable the +# os.getrandom() function. +# +# Upstream changes that have also been backported with this patch +# to allow tests to pass on stricter environments: +# +# Avoid MD5 or check for MD5 availablity +# https://bugs.python.org/issue38270 +# https://github.com/python/cpython/pull/16393 +# https://github.com/python/cpython/pull/16437 +# https://github.com/python/cpython/pull/17446 +# +# add usedforsecurity to hashlib constructors (partial backport for fixing a uuid test) +# https://github.com/python/cpython/pull/16044 +# Resolves: rhbz#1731424 +Patch329: 00329-fips.patch + +# 00330 # +# Fix CVE-2018-20852: cookie domain check returning incorrect results +# Fixed upstream: https://bugs.python.org/issue35121 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1741553 +Patch330: 00330-CVE-2018-20852.patch + +# 00332 # +# Fix CVE-2019-16056: Don't parse email addresses containing +# multiple '@' characters. +# Fixed upstream: https://bugs.python.org/issue34155 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1750776 +Patch332: 00332-CVE-2019-16056.patch + +# 00333 # +# Reduce the number of tests run during the profile guided optimizations build, +# as running the whole test suite during profiling increases the build time +# substantially, with negligible performance gain. 
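To make the hashlib and hmac behaviour described for patch 00329 above concrete, here is a hedged caller-side sketch; it assumes the patched interpreter (the usedforsecurity keyword and the FIPS restrictions come from the patch description, everything else is illustrative):

import hashlib
import hmac

# A non-security md5 call site has to be whitelisted explicitly in FIPS
# mode via the usedforsecurity keyword added by patch 00329.
cache_key = hashlib.md5(b"cache payload", usedforsecurity=False).hexdigest()

# Under FIPS, hmac.HMAC must be given an explicit OpenSSL digest name;
# the old implicit md5 default is unavailable.
mac = hmac.new(b"secret-key", b"message", digestmod="sha256").hexdigest()

print(cache_key, mac)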
+# Fixed upstream and backported from the 3.8 branch: +# https://bugs.python.org/issue36044 +# https://bugs.python.org/issue37667 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1749576 +Patch333: 00333-reduce-pgo-tests.patch + # (New patches go here ^^^) # # When adding new patches to "python" and "python3" in Fedora, EL, etc., @@ -422,9 +513,25 @@ Provides: python(abi) = %{pybasever} Requires: %{name}-libs%{?_isa} = %{version}-%{release} -%if %{with rewheel} +%if %{with rpmwheels} + +# RHEL8 was forked from F28 and thus required python3-setuptools here +# for the rewheel module to work. We've since backported the use of RPM +# prepared wheels from F29+ into RHEL8, and thus this dependency isn't +# strictly needed. +# However, it is possible, that some packages in BaseOS actually depend on +# setuptools without declaring the dependency in their spec file. Thus +# we're keeping this dependency here to avoid the possibility of breaking +# them. Requires: platform-python-setuptools -Requires: platform-python-pip +# For python3-pip the Requires has been reduced to Recommends, as there are +# generally less packages that depend on pip than packages that depend on +# setuptools at runtime, and thus there's less chance of breakage. +# (rhbz#1756217). +Recommends: platform-python-pip + +Requires: python3-setuptools-wheel +Requires: python3-pip-wheel %endif # Runtime require alternatives @@ -473,6 +580,14 @@ Requires: chkconfig Requires: gdbm%{?_isa} >= 1:1.13 %endif +%if %{with rpmwheels} +Requires: python3-setuptools-wheel +Requires: python3-pip-wheel +%else +Provides: bundled(python3-pip) = 18.1 +Provides: bundled(python3-setuptools) = 40.6.2 +%endif + # There are files in the standard library that have python shebang. # We've filtered the automatic requirement out so libs are installable without # the main package. This however makes it pulled in by default. @@ -528,6 +643,15 @@ Requires: python-rpm-macros Requires: python3-rpm-macros Requires: python3-rpm-generators +# This is not "API" (packages that need setuptools should still BuildRequire it) +# However some packages apparently can build both with and without setuptools +# producing egg-info as file or directory (depending on setuptools presence). +# Directory-to-file updates are problematic in RPM, so we ensure setuptools is +# installed when -devel is required. +# See https://bugzilla.redhat.com/show_bug.cgi?id=1623914 +# See https://fedoraproject.org/wiki/Packaging:Directory_Replacement +Requires: platform-python-setuptools + Provides: %{name}-2to3 = %{version}-%{release} Provides: 2to3 = %{version}-%{release} @@ -640,11 +764,6 @@ so extensions for both versions can co-exist in the same directory. 
rm -r Modules/expat rm -r Modules/zlib -%if %{with rewheel} -%global pip_version %(pip%{pybasever} --version | cut -d' ' -f2) -sed -r -i s/'_PIP_VERSION = "[0-9.]+"'/'_PIP_VERSION = "%{pip_version}"'/ Lib/ensurepip/__init__.py -%endif - # # Apply patches: # @@ -661,8 +780,9 @@ sed -r -i s/'_PIP_VERSION = "[0-9.]+"'/'_PIP_VERSION = "%{pip_version}"'/ Lib/en %patch170 -p1 %patch178 -p1 -%if %{with rewheel} +%if %{with rpmwheels} %patch189 -p1 +rm Lib/ensurepip/_bundled/*.whl %endif %patch205 -p1 @@ -670,12 +790,19 @@ sed -r -i s/'_PIP_VERSION = "[0-9.]+"'/'_PIP_VERSION = "%{pip_version}"'/ Lib/en %patch262 -p1 %patch274 -p1 %patch294 -p1 +%patch316 -p1 %patch317 -p1 %patch318 -p1 %patch319 -p1 %patch320 -p1 %patch324 -p1 %patch325 -p1 +%patch326 -p1 +%patch327 -p1 +%patch329 -p1 +%patch330 -p1 +%patch332 -p1 +%patch333 -p1 # Remove files that should be generated by the build @@ -763,7 +890,7 @@ BuildPython() { %{nil} # Invoke the build - make EXTRA_CFLAGS="$CFLAGS $MoreCFlags" %{?_smp_mflags} + %make_build CFLAGS_NODIST="$CFLAGS_NODIST $MoreCFlags" popd echo FINISHED: BUILD OF PYTHON FOR CONFIGURATION: $ConfName @@ -1102,17 +1229,10 @@ CheckPython() { WITHIN_PYTHON_RPM_BUILD= \ LD_LIBRARY_PATH=$ConfDir $ConfDir/python -m test.regrtest \ -wW --slowest --findleaks \ - -x test_distutils \ -x test_bdist_rpm \ %ifarch %{mips64} -x test_ctypes \ %endif - %ifarch s390x - -x test_gdb \ - %endif - %ifarch ppc64le - -x test_gdb \ - %endif echo FINISHED: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName @@ -1179,7 +1299,9 @@ fi %exclude %{_bindir}/pyvenv %{_bindir}/pyvenv-%{pybasever} -%{_mandir}/*/* + +%{_mandir}/man1/python3.6.1* +%{_mandir}/man1/unversioned-python.1* %files libs %license LICENSE @@ -1214,13 +1336,11 @@ fi %dir %{pylibdir}/ensurepip/__pycache__/ %{pylibdir}/ensurepip/*.py %{pylibdir}/ensurepip/__pycache__/*%{bytecode_suffixes} +%if %{with rpmwheels} %exclude %{pylibdir}/ensurepip/_bundled - -%if %{with rewheel} -%dir %{pylibdir}/ensurepip/rewheel/ -%dir %{pylibdir}/ensurepip/rewheel/__pycache__/ -%{pylibdir}/ensurepip/rewheel/*.py -%{pylibdir}/ensurepip/rewheel/__pycache__/*%{bytecode_suffixes} +%else +%dir %{pylibdir}/ensurepip/_bundled +%{pylibdir}/ensurepip/_bundled/*.whl %endif # The majority of the test module lives in the test subpackage @@ -1248,11 +1368,8 @@ fi %{pylibdir}/pydoc_data %{dynload_dir}/_blake2.%{SOABI_optimized}.so -%{dynload_dir}/_md5.%{SOABI_optimized}.so -%{dynload_dir}/_sha1.%{SOABI_optimized}.so -%{dynload_dir}/_sha256.%{SOABI_optimized}.so %{dynload_dir}/_sha3.%{SOABI_optimized}.so -%{dynload_dir}/_sha512.%{SOABI_optimized}.so +%{dynload_dir}/_hmacopenssl.%{SOABI_optimized}.so %{dynload_dir}/_asyncio.%{SOABI_optimized}.so %{dynload_dir}/_bisect.%{SOABI_optimized}.so @@ -1487,11 +1604,8 @@ fi # ...with debug builds of the built-in "extension" modules: %{dynload_dir}/_blake2.%{SOABI_debug}.so -%{dynload_dir}/_md5.%{SOABI_debug}.so -%{dynload_dir}/_sha1.%{SOABI_debug}.so -%{dynload_dir}/_sha256.%{SOABI_debug}.so %{dynload_dir}/_sha3.%{SOABI_debug}.so -%{dynload_dir}/_sha512.%{SOABI_debug}.so +%{dynload_dir}/_hmacopenssl.%{SOABI_debug}.so %{dynload_dir}/_asyncio.%{SOABI_debug}.so %{dynload_dir}/_bisect.%{SOABI_debug}.so @@ -1606,8 +1720,63 @@ fi # ====================================================== %changelog +* Wed Nov 27 2019 Charalampos Stratakis - 3.6.8-23 +- Modify the test suite to better handle disabled SSL/TLS versions and FIPS mode +- Use OpenSSL's DRBG and disable os.getrandom() function in FIPS mode +Resolves: rhbz#1754028, rhbz#1754027, 
rhbz#1754026, rhbz#1774471 + +* Thu Oct 24 2019 Tomas Orsava - 3.6.8-22 +- Changed Requires into Recommends for python3-pip to allow a lower RHEL8 + footprint for containers and other minimal environments +Resolves: rhbz#1756217 + +* Wed Oct 16 2019 Tomas Orsava - 3.6.8-21 +- Patch 329 (FIPS) modified: Added workaround for mod_ssl: + Skip error checking in _Py_hashlib_fips_error +Resolves: rhbz#1760106 + +* Mon Oct 14 2019 Charalampos Stratakis - 3.6.8-20 +- Security fix for CVE-2019-16056 +Resolves: rhbz#1750776 + +* Wed Oct 09 2019 Charalampos Stratakis - 3.6.8-19 +- Skip windows specific test_get_exe_bytes test case and enable test_distutils +Resolves: rhbz#1754040 + +* Mon Oct 07 2019 Charalampos Stratakis - 3.6.8-18 +- Reduce the number of tests running during the profile guided optimizations build +- Enable profile guided optimizations for all the supported architectures +Resolves: rhbz#1749576 + +* Mon Oct 07 2019 Charalampos Stratakis - 3.6.8-17 +- Security fix for CVE-2018-20852 +Resolves: rhbz#1741553 + +* Fri Oct 04 2019 Charalampos Stratakis - 3.6.8-16 +- Properly pass the -Og optimization flag to the debug build +Resolves: rhbz#1712977 and rhbz#1714733 + +* Thu Aug 29 2019 Tomas Orsava - 3.6.8-15 +- Patch 329 that adds support for OpenSSL FIPS mode has been improved and + bugfixed +Resolves: rhbz#1744670 rhbz#1745499 rhbz#1745685 + +* Tue Aug 06 2019 Tomas Orsava - 3.6.8-14 +- Adding a new patch 329 that adds support for OpenSSL FIPS mode +- Explicitly listing man pages in files section to fix an RPM warning +Resolves: rhbz#1731424 + +* Tue Jul 02 2019 Charalampos Stratakis - 3.6.8-13 +- Do not set PHA verify flag on client side (rhbz#1725721) +- Enable TLS 1.3 post-handshake authentication in http.client (rhbz#1671353) + +* Fri Jun 21 2019 Miro Hrončok - 3.6.8-12 +- Use RPM built wheels of pip and setuptools in ensurepip instead of our rewheel patch +- Require platform-python-setuptools from platform-python-devel to prevent packaging errors +Resolves: rhbz#1701286 + * Fri Jun 07 2019 Charalampos Stratakis - 3.6.8-11 -- Yet another fix for CVE-2019-9636 +- Fix for CVE-2019-10160 Resolves: rhbz#1689318 * Wed May 29 2019 Charalampos Stratakis - 3.6.8-10