import UBI python3.9-3.9.18-1.el9_3

eabdullin 2023-11-07 11:38:27 +00:00
parent c25f457a64
commit de2527d3b2
9 changed files with 295 additions and 888 deletions

.gitignore

@@ -1 +1 @@
-SOURCES/Python-3.9.16.tar.xz
+SOURCES/Python-3.9.18.tar.xz


@@ -1 +1 @@
-19acd6a341e4f2d7ff97c10c2eada258e9898624 SOURCES/Python-3.9.16.tar.xz
+abe4a20dcc11798495b17611ef9f8f33d6975722 SOURCES/Python-3.9.18.tar.xz


@@ -31,7 +31,7 @@ index e510cc7..5bd16a6 100644
  __all__ = ["version", "bootstrap"]
 -_SETUPTOOLS_VERSION = "58.1.0"
--_PIP_VERSION = "22.0.4"
+-_PIP_VERSION = "23.0.1"
 +
 +_WHEEL_DIR = "/usr/share/python-wheels/"
 +


@@ -0,0 +1,251 @@
From 8b70605b594b3831331a9340ba764ff751871612 Mon Sep 17 00:00:00 2001
From: Petr Viktorin <encukou@gmail.com>
Date: Mon, 6 Mar 2023 17:24:24 +0100
Subject: [PATCH 2/2] CVE-2007-4559, PEP-706: Add filters for tarfile
extraction (downstream)
Add and test RHEL-specific ways of configuring the default behavior: environment
variable and config file.
---
Lib/tarfile.py | 42 +++++++++++++
Lib/test/test_shutil.py | 3 +-
Lib/test/test_tarfile.py | 128 ++++++++++++++++++++++++++++++++++++++-
3 files changed, 169 insertions(+), 4 deletions(-)
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index b6ad7dbe2a4..dc7050b2c63 100755
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -72,6 +72,13 @@ __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
"ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
"DEFAULT_FORMAT", "open"]
+# If true, use the safer (but backwards-incompatible) 'tar' extraction filter,
+# rather than 'fully_trusted', by default.
+# The emitted warning is changed to match.
+_RH_SAFER_DEFAULT = True
+
+# System-wide configuration file
+_CONFIG_FILENAME = '/etc/python/tarfile.cfg'
#---------------------------------------------------------
# tar constants
@@ -2197,6 +2204,41 @@ class TarFile(object):
if filter is None:
filter = self.extraction_filter
if filter is None:
+ name = os.environ.get('PYTHON_TARFILE_EXTRACTION_FILTER')
+ if name is None:
+ try:
+ file = bltn_open(_CONFIG_FILENAME)
+ except FileNotFoundError:
+ pass
+ else:
+ import configparser
+ conf = configparser.ConfigParser(
+ interpolation=None,
+ comment_prefixes=('#', ),
+ )
+ with file:
+ conf.read_file(file)
+ name = conf.get('tarfile',
+ 'PYTHON_TARFILE_EXTRACTION_FILTER',
+ fallback='')
+ if name:
+ try:
+ filter = _NAMED_FILTERS[name]
+ except KeyError:
+ raise ValueError(f"filter {name!r} not found") from None
+ self.extraction_filter = filter
+ return filter
+ if _RH_SAFER_DEFAULT:
+ warnings.warn(
+ 'The default behavior of tarfile extraction has been '
+ + 'changed to disallow common exploits '
+ + '(including CVE-2007-4559). '
+ + 'By default, absolute/parent paths are disallowed '
+ + 'and some mode bits are cleared. '
+ + 'See https://access.redhat.com/articles/7004769 '
+ + 'for more details.',
+ RuntimeWarning)
+ return tar_filter
return fully_trusted_filter
if isinstance(filter, str):
raise TypeError(
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
index 9041e7aa368..1eb1116cc10 100644
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -1613,7 +1613,8 @@ class TestArchives(BaseTest, unittest.TestCase):
def check_unpack_tarball(self, format):
self.check_unpack_archive(format, filter='fully_trusted')
self.check_unpack_archive(format, filter='data')
- with warnings_helper.check_no_warnings(self):
+ with warnings_helper.check_warnings(
+ ('.*CVE-2007-4559', RuntimeWarning)):
self.check_unpack_archive(format)
def test_unpack_archive_tar(self):
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index a66f7efd2d6..6fd3c384b5c 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -2,7 +2,7 @@ import sys
import os
import io
from hashlib import sha256
-from contextlib import contextmanager
+from contextlib import contextmanager, ExitStack
from random import Random
import pathlib
import shutil
@@ -2929,7 +2929,11 @@ class NoneInfoExtractTests(ReadTest):
tar = tarfile.open(tarname, mode='r', encoding="iso8859-1")
cls.control_dir = pathlib.Path(TEMPDIR) / "extractall_ctrl"
tar.errorlevel = 0
- tar.extractall(cls.control_dir, filter=cls.extraction_filter)
+ with ExitStack() as cm:
+ if cls.extraction_filter is None:
+ cm.enter_context(warnings.catch_warnings())
+ warnings.simplefilter(action="ignore", category=RuntimeWarning)
+ tar.extractall(cls.control_dir, filter=cls.extraction_filter)
tar.close()
cls.control_paths = set(
p.relative_to(cls.control_dir)
@@ -3592,7 +3596,8 @@ class TestExtractionFilters(unittest.TestCase):
"""Ensure the default filter does not warn (like in 3.12)"""
with ArchiveMaker() as arc:
arc.add('foo')
- with warnings_helper.check_no_warnings(self):
+ with warnings_helper.check_warnings(
+ ('.*CVE-2007-4559', RuntimeWarning)):
with self.check_context(arc.open(), None):
self.expect_file('foo')
@@ -3762,6 +3767,123 @@ class TestExtractionFilters(unittest.TestCase):
self.expect_exception(TypeError) # errorlevel is not int
+ @contextmanager
+ def rh_config_context(self, config_lines=None):
+ """Set up for testing various ways of overriding the default filter
+
+ return a triple with:
+ - temporary directory
+ - EnvironmentVarGuard()
+ - a test archive for use with check_* methods below
+
+ If config_lines is given, write them to the config file. Otherwise
+ the config file is missing.
+ """
+ tempdir = pathlib.Path(TEMPDIR) / 'tmp'
+ configfile = tempdir / 'tarfile.cfg'
+ with ArchiveMaker() as arc:
+ arc.add('good')
+ arc.add('ugly', symlink_to='/etc/passwd')
+ arc.add('../bad')
+ with (
+ support.temp_dir(tempdir),
+ support.swap_attr(tarfile, '_CONFIG_FILENAME', str(configfile)),
+ support.EnvironmentVarGuard() as env,
+ arc.open() as tar,
+ ):
+ if config_lines is not None:
+ with configfile.open('w') as f:
+ for line in config_lines:
+ print(line, file=f)
+ yield tempdir, env, tar
+
+ def check_rh_default_behavior(self, tar, tempdir):
+ """Check RH default: warn and refuse to extract dangerous files."""
+ with (
+ warnings_helper.check_warnings(
+ ('.*CVE-2007-4559', RuntimeWarning)),
+ self.assertRaises(tarfile.OutsideDestinationError),
+ ):
+ tar.extractall(tempdir / 'outdir')
+
+ def check_trusted_default(self, tar, tempdir):
+ """Check 'fully_trusted' is configured as the default filter."""
+ with (
+ warnings_helper.check_no_warnings(self),
+ ):
+ tar.extractall(tempdir / 'outdir')
+ self.assertTrue((tempdir / 'outdir/good').exists())
+ self.assertEqual((tempdir / 'outdir/ugly').readlink(),
+ pathlib.Path('/etc/passwd'))
+ self.assertTrue((tempdir / 'bad').exists())
+
+ def test_rh_default_no_conf(self):
+ with self.rh_config_context() as (tempdir, env, tar):
+ self.check_rh_default_behavior(tar, tempdir)
+
+ def test_rh_default_from_file(self):
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=fully_trusted']
+ with self.rh_config_context(lines) as (tempdir, env, tar):
+ self.check_trusted_default(tar, tempdir)
+
+ def test_rh_empty_config_file(self):
+ """Empty config file -> default behavior"""
+ lines = []
+ with self.rh_config_context(lines) as (tempdir, env, tar):
+ self.check_rh_default_behavior(tar, tempdir)
+
+ def test_empty_config_section(self):
+ """Empty section in config file -> default behavior"""
+ lines = ['[tarfile]']
+ with self.rh_config_context(lines) as (tempdir, env, tar):
+ self.check_rh_default_behavior(tar, tempdir)
+
+ def test_rh_default_empty_config_option(self):
+ """Empty option value in config file -> default behavior"""
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=']
+ with self.rh_config_context(lines) as (tempdir, env, tar):
+ self.check_rh_default_behavior(tar, tempdir)
+
+ def test_bad_config_option(self):
+ """Bad option value in config file -> ValueError"""
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=unknown!']
+ with self.rh_config_context(lines) as (tempdir, env, tar):
+ with self.assertRaises(ValueError):
+ tar.extractall(tempdir / 'outdir')
+
+ def test_default_from_envvar(self):
+ with self.rh_config_context() as (tempdir, env, tar):
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'fully_trusted'
+ self.check_trusted_default(tar, tempdir)
+
+ def test_empty_envvar(self):
+ """Empty env variable -> default behavior"""
+ with self.rh_config_context() as (tempdir, env, tar):
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = ''
+ self.check_rh_default_behavior(tar, tempdir)
+
+ def test_bad_envvar(self):
+ with self.rh_config_context() as (tempdir, env, tar):
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'unknown!'
+ with self.assertRaises(ValueError):
+ tar.extractall(tempdir / 'outdir')
+
+ def test_envvar_overrides_file(self):
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=data']
+ with self.rh_config_context(lines) as (tempdir, env, tar):
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'fully_trusted'
+ self.check_trusted_default(tar, tempdir)
+
+ def test_monkeypatch_overrides_envvar(self):
+ with self.rh_config_context(None) as (tempdir, env, tar):
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'data'
+ with support.swap_attr(
+ tarfile.TarFile, 'extraction_filter',
+ staticmethod(tarfile.fully_trusted_filter)
+ ):
+ self.check_trusted_default(tar, tempdir)
+
+
def setUpModule():
support.unlink(TEMPDIR)
os.makedirs(TEMPDIR)
--
2.40.1
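
For reference, a minimal usage sketch of the behavior configured by the downstream patch above. The filter argument and the filter names ('data', 'tar', 'fully_trusted') are the upstream PEP 706 API; the PYTHON_TARFILE_EXTRACTION_FILTER environment variable and /etc/python/tarfile.cfg are honored only by interpreters carrying patch 00397, and "outdir" is a placeholder path.

import io
import tarfile

# Build a tiny in-memory archive to extract.
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
    data = b"hello\n"
    info = tarfile.TarInfo(name="hello.txt")
    info.size = len(data)
    tar.addfile(info, io.BytesIO(data))
buf.seek(0)

# 1. Per-call filter: explicit, works on any Python with the PEP 706 backport,
#    and avoids the RuntimeWarning emitted by the downstream default.
with tarfile.open(fileobj=buf, mode="r") as tar:
    tar.extractall(path="outdir", filter="data")

# 2. Process-wide override (downstream only), set before starting Python:
#    PYTHON_TARFILE_EXTRACTION_FILTER=fully_trusted python3 app.py
#
# 3. System-wide override (downstream only), in /etc/python/tarfile.cfg:
#    [tarfile]
#    PYTHON_TARFILE_EXTRACTION_FILTER = data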


@@ -1,229 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
<31488909+miss-islington@users.noreply.github.com>
Date: Wed, 17 May 2023 14:41:25 -0700
Subject: [PATCH] 00399: CVE-2023-24329
* gh-102153: Start stripping C0 control and space chars in `urlsplit` (GH-102508)
`urllib.parse.urlsplit` has already been respecting the WHATWG spec a bit (GH-25595).
This adds more sanitizing to respect the "Remove any leading C0 control or space from input" [rule](https://url.spec.whatwg.org/#url-parsing:~:text=Remove%20any%20leading%20and%20trailing%20C0%20control%20or%20space%20from%20input.) in response to [CVE-2023-24329](https://nvd.nist.gov/vuln/detail/CVE-2023-24329).
---------
(cherry picked from commit 2f630e1ce18ad2e07428296532a68b11dc66ad10)
Co-authored-by: Illia Volochii <illia.volochii@gmail.com>
Co-authored-by: Gregory P. Smith [Google] <greg@krypto.org>
---
Doc/library/urllib.parse.rst | 46 +++++++++++++-
Lib/test/test_urlparse.py | 61 ++++++++++++++++++-
Lib/urllib/parse.py | 12 ++++
...-03-07-20-59-17.gh-issue-102153.14CLSZ.rst | 3 +
4 files changed, 119 insertions(+), 3 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2023-03-07-20-59-17.gh-issue-102153.14CLSZ.rst
diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst
index f0f8605128..c76b5879ea 100644
--- a/Doc/library/urllib.parse.rst
+++ b/Doc/library/urllib.parse.rst
@@ -159,6 +159,10 @@ or on combining URL components into a URL string.
ParseResult(scheme='http', netloc='www.cwi.nl:80', path='/%7Eguido/Python.html',
params='', query='', fragment='')
+ .. warning::
+
+ :func:`urlparse` does not perform validation. See :ref:`URL parsing
+ security <url-parsing-security>` for details.
.. versionchanged:: 3.2
Added IPv6 URL parsing capabilities.
@@ -323,8 +327,14 @@ or on combining URL components into a URL string.
``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is
decomposed before parsing, no error will be raised.
- Following the `WHATWG spec`_ that updates RFC 3986, ASCII newline
- ``\n``, ``\r`` and tab ``\t`` characters are stripped from the URL.
+ Following some of the `WHATWG spec`_ that updates RFC 3986, leading C0
+ control and space characters are stripped from the URL. ``\n``,
+ ``\r`` and tab ``\t`` characters are removed from the URL at any position.
+
+ .. warning::
+
+ :func:`urlsplit` does not perform validation. See :ref:`URL parsing
+ security <url-parsing-security>` for details.
.. versionchanged:: 3.6
Out-of-range port numbers now raise :exc:`ValueError`, instead of
@@ -337,6 +347,9 @@ or on combining URL components into a URL string.
.. versionchanged:: 3.9.5
ASCII newline and tab characters are stripped from the URL.
+ .. versionchanged:: 3.9.17
+ Leading WHATWG C0 control and space characters are stripped from the URL.
+
.. _WHATWG spec: https://url.spec.whatwg.org/#concept-basic-url-parser
.. function:: urlunsplit(parts)
@@ -413,6 +426,35 @@ or on combining URL components into a URL string.
or ``scheme://host/path``). If *url* is not a wrapped URL, it is returned
without changes.
+.. _url-parsing-security:
+
+URL parsing security
+--------------------
+
+The :func:`urlsplit` and :func:`urlparse` APIs do not perform **validation** of
+inputs. They may not raise errors on inputs that other applications consider
+invalid. They may also succeed on some inputs that might not be considered
+URLs elsewhere. Their purpose is for practical functionality rather than
+purity.
+
+Instead of raising an exception on unusual input, they may instead return some
+component parts as empty strings. Or components may contain more than perhaps
+they should.
+
+We recommend that users of these APIs where the values may be used anywhere
+with security implications code defensively. Do some verification within your
+code before trusting a returned component part. Does that ``scheme`` make
+sense? Is that a sensible ``path``? Is there anything strange about that
+``hostname``? etc.
+
+What constitutes a URL is not universally well defined. Different applications
+have different needs and desired constraints. For instance the living `WHATWG
+spec`_ describes what user facing web clients such as a web browser require.
+While :rfc:`3986` is more general. These functions incorporate some aspects of
+both, but cannot be claimed compliant with either. The APIs and existing user
+code with expectations on specific behaviors predate both standards leading us
+to be very cautious about making API behavior changes.
+
.. _parsing-ascii-encoded-bytes:
Parsing ASCII Encoded Bytes
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 31943f357f..574da5bd69 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -649,6 +649,65 @@ class UrlParseTestCase(unittest.TestCase):
self.assertEqual(p.scheme, "http")
self.assertEqual(p.geturl(), "http://www.python.org/javascript:alert('msg')/?query=something#fragment")
+ def test_urlsplit_strip_url(self):
+ noise = bytes(range(0, 0x20 + 1))
+ base_url = "http://User:Pass@www.python.org:080/doc/?query=yes#frag"
+
+ url = noise.decode("utf-8") + base_url
+ p = urllib.parse.urlsplit(url)
+ self.assertEqual(p.scheme, "http")
+ self.assertEqual(p.netloc, "User:Pass@www.python.org:080")
+ self.assertEqual(p.path, "/doc/")
+ self.assertEqual(p.query, "query=yes")
+ self.assertEqual(p.fragment, "frag")
+ self.assertEqual(p.username, "User")
+ self.assertEqual(p.password, "Pass")
+ self.assertEqual(p.hostname, "www.python.org")
+ self.assertEqual(p.port, 80)
+ self.assertEqual(p.geturl(), base_url)
+
+ url = noise + base_url.encode("utf-8")
+ p = urllib.parse.urlsplit(url)
+ self.assertEqual(p.scheme, b"http")
+ self.assertEqual(p.netloc, b"User:Pass@www.python.org:080")
+ self.assertEqual(p.path, b"/doc/")
+ self.assertEqual(p.query, b"query=yes")
+ self.assertEqual(p.fragment, b"frag")
+ self.assertEqual(p.username, b"User")
+ self.assertEqual(p.password, b"Pass")
+ self.assertEqual(p.hostname, b"www.python.org")
+ self.assertEqual(p.port, 80)
+ self.assertEqual(p.geturl(), base_url.encode("utf-8"))
+
+ # Test that trailing space is preserved as some applications rely on
+ # this within query strings.
+ query_spaces_url = "https://www.python.org:88/doc/?query= "
+ p = urllib.parse.urlsplit(noise.decode("utf-8") + query_spaces_url)
+ self.assertEqual(p.scheme, "https")
+ self.assertEqual(p.netloc, "www.python.org:88")
+ self.assertEqual(p.path, "/doc/")
+ self.assertEqual(p.query, "query= ")
+ self.assertEqual(p.port, 88)
+ self.assertEqual(p.geturl(), query_spaces_url)
+
+ p = urllib.parse.urlsplit("www.pypi.org ")
+ # That "hostname" gets considered a "path" due to the
+ # trailing space and our existing logic... YUCK...
+ # and re-assembles via geturl aka unurlsplit into the original.
+ # django.core.validators.URLValidator (at least through v3.2) relies on
+ # this, for better or worse, to catch it in a ValidationError via its
+ # regular expressions.
+ # Here we test the basic round trip concept of such a trailing space.
+ self.assertEqual(urllib.parse.urlunsplit(p), "www.pypi.org ")
+
+ # with scheme as cache-key
+ url = "//www.python.org/"
+ scheme = noise.decode("utf-8") + "https" + noise.decode("utf-8")
+ for _ in range(2):
+ p = urllib.parse.urlsplit(url, scheme=scheme)
+ self.assertEqual(p.scheme, "https")
+ self.assertEqual(p.geturl(), "https://www.python.org/")
+
def test_attributes_bad_port(self):
"""Check handling of invalid ports."""
for bytes in (False, True):
@@ -656,7 +715,7 @@ class UrlParseTestCase(unittest.TestCase):
for port in ("foo", "1.5", "-1", "0x10"):
with self.subTest(bytes=bytes, parse=parse, port=port):
netloc = "www.example.net:" + port
- url = "http://" + netloc
+ url = "http://" + netloc + "/"
if bytes:
netloc = netloc.encode("ascii")
url = url.encode("ascii")
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
index b7965fe3d2..5b7193f67c 100644
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -25,6 +25,10 @@ currently not entirely compliant with this RFC due to defacto
scenarios for parsing, and for backward compatibility purposes, some
parsing quirks from older RFCs are retained. The testcases in
test_urlparse.py provides a good indicator of parsing behavior.
+
+The WHATWG URL Parser spec should also be considered. We are not compliant with
+it either due to existing user code API behavior expectations (Hyrum's Law).
+It serves as a useful guide when making changes.
"""
import re
@@ -78,6 +82,10 @@ scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
'0123456789'
'+-.')
+# Leading and trailing C0 control and space to be stripped per WHATWG spec.
+# == "".join([chr(i) for i in range(0, 0x20 + 1)])
+_WHATWG_C0_CONTROL_OR_SPACE = '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f '
+
# Unsafe bytes to be removed per WHATWG spec
_UNSAFE_URL_BYTES_TO_REMOVE = ['\t', '\r', '\n']
@@ -456,6 +464,10 @@ def urlsplit(url, scheme='', allow_fragments=True):
"""
url, scheme, _coerce_result = _coerce_args(url, scheme)
+ # Only lstrip url as some applications rely on preserving trailing space.
+ # (https://url.spec.whatwg.org/#concept-basic-url-parser would strip both)
+ url = url.lstrip(_WHATWG_C0_CONTROL_OR_SPACE)
+ scheme = scheme.strip(_WHATWG_C0_CONTROL_OR_SPACE)
for b in _UNSAFE_URL_BYTES_TO_REMOVE:
url = url.replace(b, "")
diff --git a/Misc/NEWS.d/next/Security/2023-03-07-20-59-17.gh-issue-102153.14CLSZ.rst b/Misc/NEWS.d/next/Security/2023-03-07-20-59-17.gh-issue-102153.14CLSZ.rst
new file mode 100644
index 0000000000..e57ac4ed3a
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2023-03-07-20-59-17.gh-issue-102153.14CLSZ.rst
@@ -0,0 +1,3 @@
+:func:`urllib.parse.urlsplit` now strips leading C0 control and space
+characters following the specification for URLs defined by WHATWG in
+response to CVE-2023-24329. Patch by Illia Volochii.
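
A short, hedged illustration of the urlsplit() behavior this (since-upstreamed) patch describes, together with the defensive check recommended by the new "URL parsing security" section; it assumes an interpreter containing the CVE-2023-24329 fix (3.9.17 or later), and is_plausible_http_url is purely illustrative.

from urllib.parse import urlsplit

# Leading C0 control and space characters are stripped before parsing;
# tab, CR and LF were already removed from any position.
url = "\x00\x1f  http://user:pass@www.python.org:80/doc/?q=yes#frag"
parts = urlsplit(url)
print(parts.scheme)    # http
print(parts.hostname)  # www.python.org
print(parts.port)      # 80

# urlsplit() still performs no validation, so verify the pieces you rely on
# before trusting them, as the documentation section added above advises.
def is_plausible_http_url(raw):
    parts = urlsplit(raw)
    return parts.scheme in ("http", "https") and bool(parts.hostname)

print(is_plausible_http_url("https://example.com/x"))  # True
print(is_plausible_http_url("javascript:alert(1)"))    # False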


@@ -1,613 +0,0 @@
From 8173574066653b07e966f1a88b52248c85a50755 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Langa?= <lukasz@langa.pl>
Date: Tue, 22 Aug 2023 19:57:10 +0200
Subject: [PATCH 1/3] gh-108310: Fix CVE-2023-40217: Check for & avoid the ssl
pre-close flaw (#108320)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
gh-108310: Fix CVE-2023-40217: Check for & avoid the ssl pre-close flaw
Instances of `ssl.SSLSocket` were vulnerable to a bypass of the TLS handshake
and included protections (like certificate verification) and treating sent
unencrypted data as if it were post-handshake TLS encrypted data.
The vulnerability is caused when a socket is connected, data is sent by the
malicious peer and stored in a buffer, and then the malicious peer closes the
socket within a small timing window before the other peer's TLS handshake can
begin. After this sequence of events the closed socket will not immediately
attempt a TLS handshake due to not being connected but will also allow the
buffered data to be read as if a successful TLS handshake had occurred.
Co-authored-by: Gregory P. Smith [Google LLC] <greg@krypto.org>
---
Lib/ssl.py | 31 ++-
Lib/test/test_ssl.py | 215 ++++++++++++++++++
...-08-22-17-39-12.gh-issue-108310.fVM3sg.rst | 7 +
3 files changed, 252 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst
diff --git a/Lib/ssl.py b/Lib/ssl.py
index 0e3606b..ef92e76 100644
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -1003,7 +1003,7 @@ class SSLSocket(socket):
)
self = cls.__new__(cls, **kwargs)
super(SSLSocket, self).__init__(**kwargs)
- self.settimeout(sock.gettimeout())
+ sock_timeout = sock.gettimeout()
sock.detach()
self._context = context
@@ -1022,9 +1022,38 @@ class SSLSocket(socket):
if e.errno != errno.ENOTCONN:
raise
connected = False
+ blocking = self.getblocking()
+ self.setblocking(False)
+ try:
+ # We are not connected so this is not supposed to block, but
+ # testing revealed otherwise on macOS and Windows so we do
+ # the non-blocking dance regardless. Our raise when any data
+ # is found means consuming the data is harmless.
+ notconn_pre_handshake_data = self.recv(1)
+ except OSError as e:
+ # EINVAL occurs for recv(1) on non-connected on unix sockets.
+ if e.errno not in (errno.ENOTCONN, errno.EINVAL):
+ raise
+ notconn_pre_handshake_data = b''
+ self.setblocking(blocking)
+ if notconn_pre_handshake_data:
+ # This prevents pending data sent to the socket before it was
+ # closed from escaping to the caller who could otherwise
+ # presume it came through a successful TLS connection.
+ reason = "Closed before TLS handshake with data in recv buffer."
+ notconn_pre_handshake_data_error = SSLError(e.errno, reason)
+ # Add the SSLError attributes that _ssl.c always adds.
+ notconn_pre_handshake_data_error.reason = reason
+ notconn_pre_handshake_data_error.library = None
+ try:
+ self.close()
+ except OSError:
+ pass
+ raise notconn_pre_handshake_data_error
else:
connected = True
+ self.settimeout(sock_timeout) # Must come after setblocking() calls.
self._connected = connected
if connected:
# create the SSL object
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index e270f44..aeca481 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -5,11 +5,14 @@ import unittest
import unittest.mock
from test import support
from test.support import socket_helper, warnings_helper
+import re
import socket
import select
+import struct
import time
import datetime
import gc
+import http.client
import os
import errno
import pprint
@@ -4842,6 +4845,218 @@ class TestSSLDebug(unittest.TestCase):
s.connect((HOST, server.port))
+def set_socket_so_linger_on_with_zero_timeout(sock):
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack('ii', 1, 0))
+
+
+class TestPreHandshakeClose(unittest.TestCase):
+ """Verify behavior of close sockets with received data before to the handshake.
+ """
+
+ class SingleConnectionTestServerThread(threading.Thread):
+
+ def __init__(self, *, name, call_after_accept):
+ self.call_after_accept = call_after_accept
+ self.received_data = b'' # set by .run()
+ self.wrap_error = None # set by .run()
+ self.listener = None # set by .start()
+ self.port = None # set by .start()
+ super().__init__(name=name)
+
+ def __enter__(self):
+ self.start()
+ return self
+
+ def __exit__(self, *args):
+ try:
+ if self.listener:
+ self.listener.close()
+ except OSError:
+ pass
+ self.join()
+ self.wrap_error = None # avoid dangling references
+
+ def start(self):
+ self.ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
+ self.ssl_ctx.verify_mode = ssl.CERT_REQUIRED
+ self.ssl_ctx.load_verify_locations(cafile=ONLYCERT)
+ self.ssl_ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY)
+ self.listener = socket.socket()
+ self.port = socket_helper.bind_port(self.listener)
+ self.listener.settimeout(2.0)
+ self.listener.listen(1)
+ super().start()
+
+ def run(self):
+ conn, address = self.listener.accept()
+ self.listener.close()
+ with conn:
+ if self.call_after_accept(conn):
+ return
+ try:
+ tls_socket = self.ssl_ctx.wrap_socket(conn, server_side=True)
+ except OSError as err: # ssl.SSLError inherits from OSError
+ self.wrap_error = err
+ else:
+ try:
+ self.received_data = tls_socket.recv(400)
+ except OSError:
+ pass # closed, protocol error, etc.
+
+ def non_linux_skip_if_other_okay_error(self, err):
+ if sys.platform == "linux":
+ return # Expect the full test setup to always work on Linux.
+ if (isinstance(err, ConnectionResetError) or
+ (isinstance(err, OSError) and err.errno == errno.EINVAL) or
+ re.search('wrong.version.number', getattr(err, "reason", ""), re.I)):
+ # On Windows the TCP RST leads to a ConnectionResetError
+ # (ECONNRESET) which Linux doesn't appear to surface to userspace.
+ # If wrap_socket() winds up on the "if connected:" path and doing
+ # the actual wrapping... we get an SSLError from OpenSSL. Typically
+ # WRONG_VERSION_NUMBER. While appropriate, neither is the scenario
+ # we're specifically trying to test. The way this test is written
+ # is known to work on Linux. We'll skip it anywhere else that it
+ # does not present as doing so.
+ self.skipTest(f"Could not recreate conditions on {sys.platform}:"
+ f" {err=}")
+ # If maintaining this conditional winds up being a problem.
+ # just turn this into an unconditional skip anything but Linux.
+ # The important thing is that our CI has the logic covered.
+
+ def test_preauth_data_to_tls_server(self):
+ server_accept_called = threading.Event()
+ ready_for_server_wrap_socket = threading.Event()
+
+ def call_after_accept(unused):
+ server_accept_called.set()
+ if not ready_for_server_wrap_socket.wait(2.0):
+ raise RuntimeError("wrap_socket event never set, test may fail.")
+ return False # Tell the server thread to continue.
+
+ server = self.SingleConnectionTestServerThread(
+ call_after_accept=call_after_accept,
+ name="preauth_data_to_tls_server")
+ server.__enter__() # starts it
+ self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
+
+ with socket.socket() as client:
+ client.connect(server.listener.getsockname())
+ # This forces an immediate connection close via RST on .close().
+ set_socket_so_linger_on_with_zero_timeout(client)
+ client.setblocking(False)
+
+ server_accept_called.wait()
+ client.send(b"DELETE /data HTTP/1.0\r\n\r\n")
+ client.close() # RST
+
+ ready_for_server_wrap_socket.set()
+ server.join()
+ wrap_error = server.wrap_error
+ self.assertEqual(b"", server.received_data)
+ self.assertIsInstance(wrap_error, OSError) # All platforms.
+ self.non_linux_skip_if_other_okay_error(wrap_error)
+ self.assertIsInstance(wrap_error, ssl.SSLError)
+ self.assertIn("before TLS handshake with data", wrap_error.args[1])
+ self.assertIn("before TLS handshake with data", wrap_error.reason)
+ self.assertNotEqual(0, wrap_error.args[0])
+ self.assertIsNone(wrap_error.library, msg="attr must exist")
+
+ def test_preauth_data_to_tls_client(self):
+ client_can_continue_with_wrap_socket = threading.Event()
+
+ def call_after_accept(conn_to_client):
+ # This forces an immediate connection close via RST on .close().
+ set_socket_so_linger_on_with_zero_timeout(conn_to_client)
+ conn_to_client.send(
+ b"HTTP/1.0 307 Temporary Redirect\r\n"
+ b"Location: https://example.com/someone-elses-server\r\n"
+ b"\r\n")
+ conn_to_client.close() # RST
+ client_can_continue_with_wrap_socket.set()
+ return True # Tell the server to stop.
+
+ server = self.SingleConnectionTestServerThread(
+ call_after_accept=call_after_accept,
+ name="preauth_data_to_tls_client")
+ server.__enter__() # starts it
+ self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
+
+ # Redundant; call_after_accept sets SO_LINGER on the accepted conn.
+ set_socket_so_linger_on_with_zero_timeout(server.listener)
+
+ with socket.socket() as client:
+ client.connect(server.listener.getsockname())
+ if not client_can_continue_with_wrap_socket.wait(2.0):
+ self.fail("test server took too long.")
+ ssl_ctx = ssl.create_default_context()
+ try:
+ tls_client = ssl_ctx.wrap_socket(
+ client, server_hostname="localhost")
+ except OSError as err: # SSLError inherits from OSError
+ wrap_error = err
+ received_data = b""
+ else:
+ wrap_error = None
+ received_data = tls_client.recv(400)
+ tls_client.close()
+
+ server.join()
+ self.assertEqual(b"", received_data)
+ self.assertIsInstance(wrap_error, OSError) # All platforms.
+ self.non_linux_skip_if_other_okay_error(wrap_error)
+ self.assertIsInstance(wrap_error, ssl.SSLError)
+ self.assertIn("before TLS handshake with data", wrap_error.args[1])
+ self.assertIn("before TLS handshake with data", wrap_error.reason)
+ self.assertNotEqual(0, wrap_error.args[0])
+ self.assertIsNone(wrap_error.library, msg="attr must exist")
+
+ def test_https_client_non_tls_response_ignored(self):
+
+ server_responding = threading.Event()
+
+ class SynchronizedHTTPSConnection(http.client.HTTPSConnection):
+ def connect(self):
+ http.client.HTTPConnection.connect(self)
+ # Wait for our fault injection server to have done its thing.
+ if not server_responding.wait(1.0) and support.verbose:
+ sys.stdout.write("server_responding event never set.")
+ self.sock = self._context.wrap_socket(
+ self.sock, server_hostname=self.host)
+
+ def call_after_accept(conn_to_client):
+ # This forces an immediate connection close via RST on .close().
+ set_socket_so_linger_on_with_zero_timeout(conn_to_client)
+ conn_to_client.send(
+ b"HTTP/1.0 402 Payment Required\r\n"
+ b"\r\n")
+ conn_to_client.close() # RST
+ server_responding.set()
+ return True # Tell the server to stop.
+
+ server = self.SingleConnectionTestServerThread(
+ call_after_accept=call_after_accept,
+ name="non_tls_http_RST_responder")
+ server.__enter__() # starts it
+ self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
+ # Redundant; call_after_accept sets SO_LINGER on the accepted conn.
+ set_socket_so_linger_on_with_zero_timeout(server.listener)
+
+ connection = SynchronizedHTTPSConnection(
+ f"localhost",
+ port=server.port,
+ context=ssl.create_default_context(),
+ timeout=2.0,
+ )
+ # There are lots of reasons this raises as desired, long before this
+ # test was added. Sending the request requires a successful TLS wrapped
+ # socket; that fails if the connection is broken. It may seem pointless
+ # to test this. It serves as an illustration of something that we never
+ # want to happen... properly not happening.
+ with self.assertRaises(OSError) as err_ctx:
+ connection.request("HEAD", "/test", headers={"Host": "localhost"})
+ response = connection.getresponse()
+
+
def setUpModule():
if support.verbose:
plats = {
diff --git a/Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst b/Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst
new file mode 100644
index 0000000..403c77a
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2023-08-22-17-39-12.gh-issue-108310.fVM3sg.rst
@@ -0,0 +1,7 @@
+Fixed an issue where instances of :class:`ssl.SSLSocket` were vulnerable to
+a bypass of the TLS handshake and included protections (like certificate
+verification) and treating sent unencrypted data as if it were
+post-handshake TLS encrypted data. Security issue reported as
+`CVE-2023-40217
+<https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-40217>`_ by
+Aapo Oksman. Patch by Gregory P. Smith.
--
2.41.0
From c84614f7f74be2fe6b4316abe9c9a3064109083b Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
<31488909+miss-islington@users.noreply.github.com>
Date: Wed, 23 Aug 2023 03:10:49 -0700
Subject: [PATCH 2/3] gh-108342: Break ref cycle in SSLSocket._create() exc
(GH-108344) (#108351)
Explicitly break a reference cycle when SSLSocket._create() raises an
exception. Clear the variable storing the exception, since the
exception traceback contains the variables and so creates a reference
cycle.
This test leak was introduced by the test added for the fix of GH-108310.
(cherry picked from commit 64f99350351bc46e016b2286f36ba7cd669b79e3)
Co-authored-by: Victor Stinner <vstinner@python.org>
---
Lib/ssl.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/Lib/ssl.py b/Lib/ssl.py
index ef92e76..cb5ec51 100644
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -1049,7 +1049,11 @@ class SSLSocket(socket):
self.close()
except OSError:
pass
- raise notconn_pre_handshake_data_error
+ try:
+ raise notconn_pre_handshake_data_error
+ finally:
+ # Explicitly break the reference cycle.
+ notconn_pre_handshake_data_error = None
else:
connected = True
--
2.41.0
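
The pattern applied by the fix above, shown in isolation as a hedged sketch (the function and variable names are illustrative): the in-flight exception's traceback references the frame, the frame's locals reference the exception, and clearing the local in a finally block breaks that cycle so everything reachable from it can be freed without waiting for the cyclic garbage collector.

def wrap_failed():
    payload = bytearray(10 * 1024 * 1024)  # stands in for buffers we want freed promptly
    err = OSError("Closed before TLS handshake with data in recv buffer.")
    try:
        raise err
    finally:
        # Without this, err.__traceback__ -> frame -> locals -> err forms a
        # reference cycle that also keeps 'payload' alive until gc runs.
        err = None

try:
    wrap_failed()
except OSError as exc:
    print(exc)
# 'exc' is deleted automatically at the end of the except block (PEP 3110),
# so nothing here keeps the frame or its locals alive.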
From ddcf99dca28f9475007362c9945007a3e28f3e2a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Langa?= <lukasz@langa.pl>
Date: Thu, 24 Aug 2023 12:09:11 +0200
Subject: [PATCH 3/3] gh-108342: Make ssl TestPreHandshakeClose more reliable
(GH-108370) (#108407)
* In preauth tests of test_ssl, explicitly break reference cycles
involving SingleConnectionTestServerThread to make sure that the
thread is deleted. Otherwise, the test marks the environment as
altered because the threading module sees a "dangling thread"
(SingleConnectionTestServerThread). This test leak was introduced
by the test added for the fix of issue gh-108310.
* Use support.SHORT_TIMEOUT instead of hardcoded 1.0 or 2.0 seconds
timeout.
* SingleConnectionTestServerThread.run() catches TimeoutError
* Fix a race condition (missing synchronization) in
test_preauth_data_to_tls_client(): the server now waits until the
client connect() completed in call_after_accept().
* test_https_client_non_tls_response_ignored() calls server.join()
explicitly.
* Replace "localhost" with server.listener.getsockname()[0].
(cherry picked from commit 592bacb6fc0833336c0453e818e9b95016e9fd47)
Co-authored-by: Victor Stinner <vstinner@python.org>
---
Lib/test/test_ssl.py | 102 ++++++++++++++++++++++++++++++-------------
1 file changed, 71 insertions(+), 31 deletions(-)
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index aeca481..b9163ae 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -4855,12 +4855,16 @@ class TestPreHandshakeClose(unittest.TestCase):
class SingleConnectionTestServerThread(threading.Thread):
- def __init__(self, *, name, call_after_accept):
+ def __init__(self, *, name, call_after_accept, timeout=None):
self.call_after_accept = call_after_accept
self.received_data = b'' # set by .run()
self.wrap_error = None # set by .run()
self.listener = None # set by .start()
self.port = None # set by .start()
+ if timeout is None:
+ self.timeout = support.SHORT_TIMEOUT
+ else:
+ self.timeout = timeout
super().__init__(name=name)
def __enter__(self):
@@ -4883,13 +4887,19 @@ class TestPreHandshakeClose(unittest.TestCase):
self.ssl_ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY)
self.listener = socket.socket()
self.port = socket_helper.bind_port(self.listener)
- self.listener.settimeout(2.0)
+ self.listener.settimeout(self.timeout)
self.listener.listen(1)
super().start()
def run(self):
- conn, address = self.listener.accept()
- self.listener.close()
+ try:
+ conn, address = self.listener.accept()
+ except TimeoutError:
+ # on timeout, just close the listener
+ return
+ finally:
+ self.listener.close()
+
with conn:
if self.call_after_accept(conn):
return
@@ -4917,8 +4927,13 @@ class TestPreHandshakeClose(unittest.TestCase):
# we're specifically trying to test. The way this test is written
# is known to work on Linux. We'll skip it anywhere else that it
# does not present as doing so.
- self.skipTest(f"Could not recreate conditions on {sys.platform}:"
- f" {err=}")
+ try:
+ self.skipTest(f"Could not recreate conditions on {sys.platform}:"
+ f" {err=}")
+ finally:
+ # gh-108342: Explicitly break the reference cycle
+ err = None
+
# If maintaining this conditional winds up being a problem.
# just turn this into an unconditional skip anything but Linux.
# The important thing is that our CI has the logic covered.
@@ -4929,7 +4944,7 @@ class TestPreHandshakeClose(unittest.TestCase):
def call_after_accept(unused):
server_accept_called.set()
- if not ready_for_server_wrap_socket.wait(2.0):
+ if not ready_for_server_wrap_socket.wait(support.SHORT_TIMEOUT):
raise RuntimeError("wrap_socket event never set, test may fail.")
return False # Tell the server thread to continue.
@@ -4951,20 +4966,31 @@ class TestPreHandshakeClose(unittest.TestCase):
ready_for_server_wrap_socket.set()
server.join()
+
wrap_error = server.wrap_error
- self.assertEqual(b"", server.received_data)
- self.assertIsInstance(wrap_error, OSError) # All platforms.
- self.non_linux_skip_if_other_okay_error(wrap_error)
- self.assertIsInstance(wrap_error, ssl.SSLError)
- self.assertIn("before TLS handshake with data", wrap_error.args[1])
- self.assertIn("before TLS handshake with data", wrap_error.reason)
- self.assertNotEqual(0, wrap_error.args[0])
- self.assertIsNone(wrap_error.library, msg="attr must exist")
+ server.wrap_error = None
+ try:
+ self.assertEqual(b"", server.received_data)
+ self.assertIsInstance(wrap_error, OSError) # All platforms.
+ self.non_linux_skip_if_other_okay_error(wrap_error)
+ self.assertIsInstance(wrap_error, ssl.SSLError)
+ self.assertIn("before TLS handshake with data", wrap_error.args[1])
+ self.assertIn("before TLS handshake with data", wrap_error.reason)
+ self.assertNotEqual(0, wrap_error.args[0])
+ self.assertIsNone(wrap_error.library, msg="attr must exist")
+ finally:
+ # gh-108342: Explicitly break the reference cycle
+ wrap_error = None
+ server = None
def test_preauth_data_to_tls_client(self):
+ server_can_continue_with_wrap_socket = threading.Event()
client_can_continue_with_wrap_socket = threading.Event()
def call_after_accept(conn_to_client):
+ if not server_can_continue_with_wrap_socket.wait(support.SHORT_TIMEOUT):
+ print("ERROR: test client took too long")
+
# This forces an immediate connection close via RST on .close().
set_socket_so_linger_on_with_zero_timeout(conn_to_client)
conn_to_client.send(
@@ -4986,8 +5012,10 @@ class TestPreHandshakeClose(unittest.TestCase):
with socket.socket() as client:
client.connect(server.listener.getsockname())
- if not client_can_continue_with_wrap_socket.wait(2.0):
- self.fail("test server took too long.")
+ server_can_continue_with_wrap_socket.set()
+
+ if not client_can_continue_with_wrap_socket.wait(support.SHORT_TIMEOUT):
+ self.fail("test server took too long")
ssl_ctx = ssl.create_default_context()
try:
tls_client = ssl_ctx.wrap_socket(
@@ -5001,24 +5029,31 @@ class TestPreHandshakeClose(unittest.TestCase):
tls_client.close()
server.join()
- self.assertEqual(b"", received_data)
- self.assertIsInstance(wrap_error, OSError) # All platforms.
- self.non_linux_skip_if_other_okay_error(wrap_error)
- self.assertIsInstance(wrap_error, ssl.SSLError)
- self.assertIn("before TLS handshake with data", wrap_error.args[1])
- self.assertIn("before TLS handshake with data", wrap_error.reason)
- self.assertNotEqual(0, wrap_error.args[0])
- self.assertIsNone(wrap_error.library, msg="attr must exist")
+ try:
+ self.assertEqual(b"", received_data)
+ self.assertIsInstance(wrap_error, OSError) # All platforms.
+ self.non_linux_skip_if_other_okay_error(wrap_error)
+ self.assertIsInstance(wrap_error, ssl.SSLError)
+ self.assertIn("before TLS handshake with data", wrap_error.args[1])
+ self.assertIn("before TLS handshake with data", wrap_error.reason)
+ self.assertNotEqual(0, wrap_error.args[0])
+ self.assertIsNone(wrap_error.library, msg="attr must exist")
+ finally:
+ # gh-108342: Explicitly break the reference cycle
+ wrap_error = None
+ server = None
def test_https_client_non_tls_response_ignored(self):
-
server_responding = threading.Event()
class SynchronizedHTTPSConnection(http.client.HTTPSConnection):
def connect(self):
+ # Call clear text HTTP connect(), not the encrypted HTTPS (TLS)
+ # connect(): wrap_socket() is called manually below.
http.client.HTTPConnection.connect(self)
+
# Wait for our fault injection server to have done its thing.
- if not server_responding.wait(1.0) and support.verbose:
+ if not server_responding.wait(support.SHORT_TIMEOUT) and support.verbose:
sys.stdout.write("server_responding event never set.")
self.sock = self._context.wrap_socket(
self.sock, server_hostname=self.host)
@@ -5033,29 +5068,34 @@ class TestPreHandshakeClose(unittest.TestCase):
server_responding.set()
return True # Tell the server to stop.
+ timeout = 2.0
server = self.SingleConnectionTestServerThread(
call_after_accept=call_after_accept,
- name="non_tls_http_RST_responder")
+ name="non_tls_http_RST_responder",
+ timeout=timeout)
server.__enter__() # starts it
self.addCleanup(server.__exit__) # ... & unittest.TestCase stops it.
# Redundant; call_after_accept sets SO_LINGER on the accepted conn.
set_socket_so_linger_on_with_zero_timeout(server.listener)
connection = SynchronizedHTTPSConnection(
- f"localhost",
+ server.listener.getsockname()[0],
port=server.port,
context=ssl.create_default_context(),
- timeout=2.0,
+ timeout=timeout,
)
+
# There are lots of reasons this raises as desired, long before this
# test was added. Sending the request requires a successful TLS wrapped
# socket; that fails if the connection is broken. It may seem pointless
# to test this. It serves as an illustration of something that we never
# want to happen... properly not happening.
- with self.assertRaises(OSError) as err_ctx:
+ with self.assertRaises(OSError):
connection.request("HEAD", "/test", headers={"Host": "localhost"})
response = connection.getresponse()
+ server.join()
+
def setUpModule():
if support.verbose:
--
2.41.0
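
A caller-side sketch of what the CVE-2023-40217 fix means in practice, assuming an interpreter that includes it; the host/port arguments and the ConnectionError translation are illustrative, not part of the patch.

import socket
import ssl

def open_tls_connection(host, port, timeout=5.0):
    """Connect and wrap in TLS; pre-handshake plaintext surfaces as an error."""
    ctx = ssl.create_default_context()
    sock = socket.create_connection((host, port), timeout=timeout)
    try:
        return ctx.wrap_socket(sock, server_hostname=host)
    except ssl.SSLError as err:
        # With the fix, a peer that sends data and then closes before the
        # handshake raises SSLError ("Closed before TLS handshake with data
        # in recv buffer.") instead of that plaintext later being readable
        # as if it were decrypted application data.
        if "before TLS handshake with data" in str(err):
            raise ConnectionError(
                f"{host}:{port} sent data before the TLS handshake") from err
        raise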


@@ -1,16 +0,0 @@
-----BEGIN PGP SIGNATURE-----
iQIzBAABCgAdFiEE4/8oOcBIslwITevpsmmV4xAlBWgFAmOPjAQACgkQsmmV4xAl
BWjzjQ//TQ9AtAs3RwRfGfJigHl3TG5lfYYdzAZIwEtt6NUw8tVKriCBMSvsJDjD
rlFX64SPWaDlTggnatU88sj1y4AtGpf517GbYKwJ1oLQjcCSIs6WSxD7CZAfb4CL
257KMANkT/n46luovTraqhAyLXp8fVWIEoSt3+6RgNYshjv00V6+L0HoE6jkzBRV
si6KHDUCyIydOJEtAt79w5Ze/pFxJjIlGZ6WxyRVEy77cyQKh0g4dSdQ15HZAsfr
fvv8rOmd8VXwIMi4xaUaHMddQxNrydDldDpKR4L1Lay/nY3OvSLI1AMw0D7n/FVO
HxgYvxwkRqHPgbDIBLoHe7nsou0621ELS+j6M7cRoqAjsSfEOwpHOBw7k4+zOoa3
4FHvru6TmT1p2iT6GSRllp/XspAzSelJeaFWA0Rs57MQ14gtXrw5hQHyZ1NgMzZi
TMpnj0tGHufQYn2ZQqGUIySvtH3S5eIZgZGdPETJ5k09mcRVEKcdujTbkrIcOYtC
GoPCw+3Qe7feVZLzElnsela9bDZi3uWfZh2kVyhZPAvxXJ0VNVCLvPlCKpr0R7t5
JJ7jMpblsA05FT6ZanbqWNFZtCHMjlkK1259oST3BMbBSHTFgY/KGJEHQTkYU3M2
U5OSn4za47qFBTVIXQsqkLGEBU/wrxtNmerJel8YW3ZIrkoTv2E=
=dXB5
-----END PGP SIGNATURE-----


@@ -0,0 +1,16 @@
-----BEGIN PGP SIGNATURE-----
iQIzBAABCgAdFiEE4/8oOcBIslwITevpsmmV4xAlBWgFAmTnntEACgkQsmmV4xAl
BWgmQw/9EFWMXtSfWBV93AQF37r0nbUnOBvrOcubkO7ygt+GfHKzN8EPuNeO2It7
yNZDuCmwepnNGaIkO7UkgbwYyNw3YaoHQqxG8izAfJAVqK6BSk8UAET/YKWFXbLv
cZBfgxSa0tTEkwq3BAY4vDewRXnLkUq7k6JRRCKFGLNSi/ygC56SijxyAV2g4Vio
Qcwr9VhsTvz6ujoWuPrfVpUY4I81LBJxKK7n9zBreYzh5uUXRu5k4lN2W8HrE4q0
7tTdsccB9j1CJAiUacYLxTFsvwd/hBs9+g9Eu5kqGeChqEU56Gd8wR96TEu8cVIZ
Bv5UEo9MgT1KsJwk0FMfV8qVScqZrGG3QaoMtNAeAm/tUrhhZO9ANYsC9dey03ut
tU6s5GAeh6i17bqW5WfvzCdhY9ayCInndzkq7SPi9F7fYx79PgdsofqPdyCSBXUo
Ozfn1VQkYQJTmYtrwqLfdAivubaEPIf1+fLqMOXbrI85Ujuy5xzlgVrrqO2K9rbE
DYyPgGZjPtss/yZGRCUdJX6rbW8Tq0HKt/8HpbW5fCt9o0wCSawR71GhzPA1fpNs
0mkAGvvoNGdiSizTLLPvNCaecw4kSzeBNViyP6oRCv69ifNqHPErItsMZ0YIMU14
w4/d9yI9kUa2bvE3cmx6G+9OS8PYip9MsJbQgP7kJsZ8wgt9rQU=
=aw+P
-----END PGP SIGNATURE-----


@@ -13,11 +13,11 @@ URL: https://www.python.org/
 # WARNING When rebasing to a new Python version,
 # remember to update the python3-docs package as well
-%global general_version %{pybasever}.16
+%global general_version %{pybasever}.18
 #global prerel ...
 %global upstream_version %{general_version}%{?prerel}
 Version: %{general_version}%{?prerel:~%{prerel}}
-Release: 1%{?dist}.2
+Release: 1%{?dist}
 License: Python
@@ -325,7 +325,7 @@ Patch189: 00189-use-rpm-wheels.patch
 # The versions are written in Lib/ensurepip/__init__.py, this patch removes them.
 # When the bundled setuptools/pip wheel is updated, the patch no longer applies cleanly.
 # In such cases, the patch needs to be amended and the versions updated here:
-%global pip_version 22.0.4
+%global pip_version 23.0.1
 %global setuptools_version 58.1.0
 
 # 00251 # 1b1047c14ff98eae6d355b4aac4df3e388813f62
@@ -399,27 +399,15 @@ Patch329: 00329-fips.patch
 # a nightmare because it's basically a binary file.
 Patch353: 00353-architecture-names-upstream-downstream.patch
 
-# 00399 # c32eff86eb80f6a6bdcbf4b1b6535fbc627b51a2
-# CVE-2023-24329
-#
-# * gh-102153: Start stripping C0 control and space chars in `urlsplit` (GH-102508)
-#
-# `urllib.parse.urlsplit` has already been respecting the WHATWG spec a bit GH-25595.
-#
-# This adds more sanitizing to respect the "Remove any leading C0 control or space from input" [rule](https://url.spec.whatwg.org/GH-url-parsing:~:text=Remove%%20any%%20leading%%20and%%20trailing%%20C0%%20control%%20or%%20space%%20from%%20input.) in response to [CVE-2023-24329](https://nvd.nist.gov/vuln/detail/CVE-2023-24329).
-#
-# ---------
-Patch399: 00399-cve-2023-24329.patch
-
-# 00404 #
-# CVE-2023-40217
-#
-# Security fix for CVE-2023-40217: Bypass TLS handshake on closed sockets
-# Resolved upstream: https://github.com/python/cpython/issues/108310
-# Fixups added on top from:
-# https://github.com/python/cpython/issues/108342
-#
-Patch404: 00404-cve-2023-40217.patch
+# 00397 #
+# Add filters for tarfile extraction (CVE-2007-4559, PEP-706)
+# First patch fixes determination of symlink targets, which were treated
+# as relative to the root of the archive,
+# rather than the directory containing the symlink.
+# Not yet upstream as of this writing.
+# The second patch is Red Hat configuration, see KB for documentation:
+# - https://access.redhat.com/articles/7004769
+Patch397: 00397-tarfile-filter.patch
 
 # (New patches go here ^^^)
 #
@@ -1822,14 +1810,24 @@ CheckPython optimized
 # ======================================================
 
 %changelog
-* Tue Sep 12 2023 Charalampos Stratakis <cstratak@redhat.com> - 3.9.16-1.2
+* Thu Sep 07 2023 Charalampos Stratakis <cstratak@redhat.com> - 3.9.18-1
+- Update to 3.9.18
 - Security fix for CVE-2023-40217
-Resolves: RHEL-2934
+Resolves: RHEL-3043
 
-* Mon May 29 2023 Charalampos Stratakis <cstratak@redhat.com> - 3.9.16-1.1
+* Wed Aug 09 2023 Petr Viktorin <pviktori@redhat.com> - 3.9.17-2
+- Fix symlink handling in the fix for CVE-2023-24329
+Resolves: rhbz#263261
+
+* Mon Jun 26 2023 Charalampos Stratakis <cstratak@redhat.com> - 3.9.17-1
+- Update to 3.9.17
 - Security fix for CVE-2023-24329
 Resolves: rhbz#2173917
 
 * Tue Mar 07 2023 Petr Viktorin <pviktori@redhat.com> - 3.9.16-2
 - Add filters for tarfile extraction (CVE-2007-4559, PEP-706)
 Resolves: rhbz#263261
 
 * Thu Dec 08 2022 Charalampos Stratakis <cstratak@redhat.com> - 3.9.16-1
 - Update to 3.9.16
 - Security fixes for CVE-2022-42919 and CVE-2022-45061