import UBI python3.11-urllib3-1.26.12-6.el8_10

eabdullin 2026-01-26 18:19:15 +00:00
parent d9d58cdc12
commit 4699ecd6d5
4 changed files with 1657 additions and 1 deletion

SOURCES/CVE-2025-66418.patch (new file)

@@ -0,0 +1,75 @@
From 800f4b38cf841c2f2370ce9ab4d8de67f738091a Mon Sep 17 00:00:00 2001
From: Illia Volochii <illia.volochii@gmail.com>
Date: Fri, 5 Dec 2025 16:41:33 +0200
Subject: [PATCH] Security fix for CVE-2025-66418
* Add a hard-coded limit for the decompression chain
* Reuse new list
(cherry picked from commit 24d7b67eac89f94e11003424bcf0d8f7b72222a8)
---
changelog/GHSA-gm62-xv2j-4w53.security.rst | 4 ++++
src/urllib3/response.py | 12 +++++++++++-
test/test_response.py | 10 ++++++++++
3 files changed, 25 insertions(+), 1 deletion(-)
create mode 100644 changelog/GHSA-gm62-xv2j-4w53.security.rst
diff --git a/changelog/GHSA-gm62-xv2j-4w53.security.rst b/changelog/GHSA-gm62-xv2j-4w53.security.rst
new file mode 100644
index 00000000..6646eaa3
--- /dev/null
+++ b/changelog/GHSA-gm62-xv2j-4w53.security.rst
@@ -0,0 +1,4 @@
+Fixed a security issue where an attacker could compose an HTTP response with
+virtually unlimited links in the ``Content-Encoding`` header, potentially
+leading to a denial of service (DoS) attack by exhausting system resources
+during decoding. The number of allowed chained encodings is now limited to 5.
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
index 5be9d5d5..ab721d2b 100644
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -223,8 +223,18 @@ class MultiDecoder(object):
they were applied.
"""
+ # Maximum allowed number of chained HTTP encodings in the
+ # Content-Encoding header.
+ max_decode_links = 5
+
def __init__(self, modes):
- self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
+ encodings = [m.strip() for m in modes.split(",")]
+ if len(encodings) > self.max_decode_links:
+ raise DecodeError(
+ "Too many content encodings in the chain: "
+ f"{len(encodings)} > {self.max_decode_links}"
+ )
+ self._decoders = [_get_decoder(e) for e in encodings]
def flush(self):
return self._decoders[0].flush()
diff --git a/test/test_response.py b/test/test_response.py
index 0d4b8ea8..47f853a2 100644
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -534,6 +534,16 @@ class TestResponse(object):
assert r.read(9 * 37) == b"foobarbaz" * 37
assert r.read() == b""
+ def test_read_multi_decoding_too_many_links(self):
+ fp = BytesIO(b"foo")
+ with pytest.raises(
+ DecodeError, match="Too many content encodings in the chain: 6 > 5"
+ ):
+ HTTPResponse(
+ fp,
+ headers={"content-encoding": "gzip, deflate, br, zstd, gzip, deflate"},
+ )
+
def test_body_blob(self):
resp = HTTPResponse(b"foo")
assert resp.data == b"foo"
--
2.52.0
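For reference, a minimal sketch of the behavior this patch adds, assuming a urllib3 1.26.x build with the backport applied; it mirrors the new test_read_multi_decoding_too_many_links test and uses only the always-available gzip/deflate encodings:

    # Sketch only: a Content-Encoding chain longer than five links is rejected
    # with DecodeError before any decoder is built or any data is decompressed.
    from io import BytesIO

    from urllib3.exceptions import DecodeError
    from urllib3.response import HTTPResponse

    try:
        # Six chained encodings, one more than the hard-coded max_decode_links of 5.
        HTTPResponse(
            BytesIO(b"payload"),
            headers={"content-encoding": "gzip, deflate, gzip, deflate, gzip, deflate"},
        )
    except DecodeError as exc:
        print(exc)  # Too many content encodings in the chain: 6 > 5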

SOURCES/CVE-2025-66471.patch (new file, 1352 lines): file diff suppressed because it is too large.
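This suppressed file is the large CVE-2025-66471 backport, presumably the chunked-decompression rework summarized in the CHANGES.rst hunk quoted further down, which makes reading small chunks of a compressed response stop inflating the whole body at once. A rough usage sketch under that assumption, driven from a plain BytesIO body rather than a live connection:

    # Sketch only: stream a gzip body in bounded chunks; with the backport each
    # read decompresses only enough input to satisfy the requested amount.
    import gzip
    from io import BytesIO

    from urllib3.response import HTTPResponse

    body = gzip.compress(b"x" * 1_000_000)  # small on the wire, 1 MB decoded
    resp = HTTPResponse(
        BytesIO(body),
        headers={"content-encoding": "gzip"},
        preload_content=False,
    )
    total = sum(len(chunk) for chunk in resp.stream(64 * 1024, decode_content=True))
    print(total)  # 1000000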

SOURCES/CVE-2026-21441.patch (new file)

@@ -0,0 +1,219 @@
From 38a132699e14b715a2dafb2257c1030e17f2fbf7 Mon Sep 17 00:00:00 2001
From: Ousret <ahmed.tahri@cloudnursery.dev>
Date: Thu, 17 Nov 2022 01:40:19 +0100
Subject: [PATCH 1/2] Prevent issue in HTTPResponse().read() when
decoded_content is True and then False Provided it has initialized eligible
decoder(decompressor) and did decode once
(cherry picked from commit cefd1dbba6a20ea4f017e6e472f9ada3a8a743e0)
---
changelog/2800.bugfix.rst | 1 +
src/urllib3/response.py | 12 ++++++++++++
test/test_response.py | 35 +++++++++++++++++++++++++++++++++++
3 files changed, 48 insertions(+)
create mode 100644 changelog/2800.bugfix.rst
diff --git a/changelog/2800.bugfix.rst b/changelog/2800.bugfix.rst
new file mode 100644
index 00000000..9dcf1eec
--- /dev/null
+++ b/changelog/2800.bugfix.rst
@@ -0,0 +1 @@
+Prevented issue in HTTPResponse().read() when decoded_content is True and then False.
\ No newline at end of file
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
index ab721d2b..4770c3db 100644
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -411,6 +411,7 @@ class HTTPResponse(io.IOBase):
self.reason = reason
self.strict = strict
self.decode_content = decode_content
+ self._has_decoded_content = False
self.retries = retries
self.enforce_content_length = enforce_content_length
self.auto_close = auto_close
@@ -586,6 +587,11 @@ class HTTPResponse(io.IOBase):
Decode the data passed in and potentially flush the decoder.
"""
if not decode_content:
+ if self._has_decoded_content:
+ raise RuntimeError(
+ "Calling read(decode_content=False) is not supported after "
+ "read(decode_content=True) was called."
+ )
return data
if max_length is None or flush_decoder:
@@ -594,6 +600,7 @@ class HTTPResponse(io.IOBase):
try:
if self._decoder:
data = self._decoder.decompress(data, max_length=max_length)
+ self._has_decoded_content = True
except self.DECODER_ERROR_CLASSES as e:
content_encoding = self.headers.get("content-encoding", "").lower()
raise DecodeError(
@@ -821,6 +828,11 @@ class HTTPResponse(io.IOBase):
else:
# do not waste memory on buffer when not decoding
if not decode_content:
+ if self._has_decoded_content:
+ raise RuntimeError(
+ "Calling read(decode_content=False) is not supported after "
+ "read(decode_content=True) was called."
+ )
return data
decoded_data = self._decode(
diff --git a/test/test_response.py b/test/test_response.py
index 47f853a2..77510205 100644
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -701,6 +701,41 @@ class TestResponse(object):
next(reader)
assert re.match("I/O operation on closed file.?", str(ctx.value))
+ def test_read_with_illegal_mix_decode_toggle(self):
+ compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
+ data = compress.compress(b"foo")
+ data += compress.flush()
+
+ fp = BytesIO(data)
+
+ resp = HTTPResponse(
+ fp, headers={"content-encoding": "deflate"}, preload_content=False
+ )
+
+ assert resp.read(1) == b"f"
+
+ with pytest.raises(
+ RuntimeError,
+ match=(
+ r"Calling read\(decode_content=False\) is not supported after "
+ r"read\(decode_content=True\) was called"
+ ),
+ ):
+ resp.read(1, decode_content=False)
+
+ def test_read_with_mix_decode_toggle(self):
+ compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
+ data = compress.compress(b"foo")
+ data += compress.flush()
+
+ fp = BytesIO(data)
+
+ resp = HTTPResponse(
+ fp, headers={"content-encoding": "deflate"}, preload_content=False
+ )
+ resp.read(1, decode_content=False)
+ assert resp.read(1, decode_content=True) == b"o"
+
def test_streaming(self):
fp = BytesIO(b"foo")
resp = HTTPResponse(fp, preload_content=False)
--
2.52.0
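Put differently, once part of a body has been decoded, handing back the "raw" remainder would return bytes from the middle of a compressed stream, so the patch refuses the toggle. A minimal sketch of that guard, assuming this backport is installed; it mirrors test_read_with_illegal_mix_decode_toggle above:

    # Sketch only: a decoded read followed by a raw read now fails loudly.
    import zlib
    from io import BytesIO

    from urllib3.response import HTTPResponse

    compressor = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
    raw = compressor.compress(b"foo") + compressor.flush()

    resp = HTTPResponse(
        BytesIO(raw),
        headers={"content-encoding": "deflate"},
        preload_content=False,
    )
    resp.read(1)  # decoded read; sets the internal _has_decoded_content flag
    try:
        resp.read(1, decode_content=False)
    except RuntimeError as exc:
        print(exc)  # Calling read(decode_content=False) is not supported after ...

The reverse order (raw first, decoded later) is still allowed, as test_read_with_mix_decode_toggle shows.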
From cbab907beeaeb15ed0e0fd19e08a5f8c4a4c3e23 Mon Sep 17 00:00:00 2001
From: Illia Volochii <illia.volochii@gmail.com>
Date: Wed, 7 Jan 2026 18:07:30 +0200
Subject: [PATCH 2/2] Security fix for CVE-2026-21441
* Stop decoding response content during redirects needlessly
* Rename the new query parameter
* Add a changelog entry
(cherry picked from commit 8864ac407bba8607950025e0979c4c69bc7abc7b)
---
CHANGES.rst | 3 +++
dummyserver/handlers.py | 8 +++++++-
src/urllib3/response.py | 6 +++++-
test/with_dummyserver/test_connectionpool.py | 19 +++++++++++++++++++
4 files changed, 34 insertions(+), 2 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index fd6d06d8..9a4cae31 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -8,6 +8,9 @@ Backports
compressed HTTP content ("decompression bombs") leading to excessive resource
consumption even when a small amount of data was requested. Reading small
chunks of compressed data is safer and much more efficient now.
+- Fixed a high-severity security issue where decompression-bomb safeguards of
+ the streaming API were bypassed when HTTP redirects were followed.
+ (`GHSA-38jv-5279-wg99 <https://github.com/urllib3/urllib3/security/advisories/GHSA-38jv-5279-wg99>`__)
1.26.12 (2022-08-22)
--------------------
diff --git a/dummyserver/handlers.py b/dummyserver/handlers.py
index c90c2fce..d4fdaa81 100644
--- a/dummyserver/handlers.py
+++ b/dummyserver/handlers.py
@@ -186,9 +186,15 @@ class TestingApp(RequestHandler):
status = request.params.get("status", "303 See Other")
if len(status) == 3:
status = "%s Redirect" % status.decode("latin-1")
+ compressed = request.params.get("compressed") == b"true"
headers = [("Location", target)]
- return Response(status=status, headers=headers)
+ if compressed:
+ headers.append(("Content-Encoding", "gzip"))
+ data = gzip.compress(b"foo")
+ else:
+ data = b""
+ return Response(data, status=status, headers=headers)
def not_found(self, request):
return Response("Not found", status="404 Not Found")
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
index 4770c3db..5b149b18 100644
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -479,7 +479,11 @@ class HTTPResponse(io.IOBase):
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
"""
try:
- self.read()
+ self.read(
+ # Do not spend resources decoding the content unless
+ # decoding has already been initiated.
+ decode_content=self._has_decoded_content,
+ )
except (HTTPError, SocketError, BaseSSLError, HTTPException):
pass
diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
index a4efff54..c6432082 100644
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -411,6 +411,25 @@ class TestConnectionPool(HTTPDummyServerTestCase):
assert r.status == 200
assert r.data == b"Dummy server!"
+ @mock.patch("urllib3.response.GzipDecoder.decompress")
+ def test_no_decoding_with_redirect_when_preload_disabled(
+ self, gzip_decompress
+ ):
+ """
+ Test that urllib3 does not attempt to decode a gzipped redirect
+ response when `preload_content` is set to `False`.
+ """
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ # Three requests are expected: two redirects and one final / 200 OK.
+ response = pool.request(
+ "GET",
+ "/redirect",
+ fields={"target": "/redirect?compressed=true", "compressed": "true"},
+ preload_content=False,
+ )
+ assert response.status == 200
+ gzip_decompress.assert_not_called()
+
def test_bad_connect(self):
with HTTPConnectionPool("badhost.invalid", self.port) as pool:
with pytest.raises(MaxRetryError) as e:
--
2.52.0
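The drain_conn() change above is the core of the redirect fix: when preload_content=False and a redirect is followed, the unread redirect body is now drained raw instead of being fed to the decompressor. A minimal local sketch of that property, assuming this backport and mocking the decoder the same way the new test does:

    # Sketch only: draining an unread gzip response must not touch the decoder.
    import gzip
    from io import BytesIO
    from unittest import mock

    from urllib3.response import HTTPResponse

    resp = HTTPResponse(
        BytesIO(gzip.compress(b"redirect body")),
        headers={"content-encoding": "gzip"},
        preload_content=False,
    )
    with mock.patch("urllib3.response.GzipDecoder.decompress") as decompress:
        resp.drain_conn()  # what urlopen() does with the old response before following a redirect
        decompress.assert_not_called()  # the body was read raw, never decompressed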

python3.11-urllib3 spec file (modified)

@@ -8,7 +8,7 @@
Name: python%{python3_pkgversion}-%{srcname}
Version: 1.26.12
-Release: 5%{?dist}
+Release: 6%{?dist}
Summary: Python HTTP library with thread-safe connection pooling and file post
License: MIT
@@ -44,6 +44,10 @@ Patch3: fix_test_ssltransport_py311.patch
# Upstream fix: https://github.com/urllib3/urllib3/commit/4e98d57809dacab1cbe625fddeec1a290c478ea9
Patch4: CVE-2023-45803.patch
+Patch5: CVE-2025-66471.patch
+Patch6: CVE-2025-66418.patch
+Patch7: CVE-2026-21441.patch
BuildRequires: python%{python3_pkgversion}-devel
BuildRequires: python%{python3_pkgversion}-rpm-macros
BuildRequires: python%{python3_pkgversion}-setuptools
@@ -138,6 +142,12 @@ ln -s %{python3_sitelib}/__pycache__/six.cpython-%{python3_version_nodots}.pyc \
%changelog
+* Tue Dec 16 2025 Miro Hrončok <mhroncok@redhat.com> - 1.26.12-6
+- Security fix for CVE-2025-66471
+- Security fix for CVE-2025-66418
+- Security fix for CVE-2026-21441
+Resolves: RHEL-140555, RHEL-139408
+
* Tue Nov 19 2024 Charalampos Stratakis <cstratak@redhat.com> - 1.26.12-5
- Security fix for CVE-2023-45803
Resolves: RHEL-66562