import UBI python3.12-urllib3-1.26.19-2.el8_10

This commit is contained in:
eabdullin 2026-01-26 18:19:25 +00:00
parent 69d60acbf5
commit 4323d8e480
4 changed files with 1657 additions and 2 deletions

View File

@@ -0,0 +1,75 @@
From a5505e56f5bc1761beba7685309e6d7314a9588c Mon Sep 17 00:00:00 2001
From: Illia Volochii <illia.volochii@gmail.com>
Date: Fri, 5 Dec 2025 16:41:33 +0200
Subject: [PATCH] Security fix for CVE-2025-66418
* Add a hard-coded limit for the decompression chain
* Reuse new list
(cherry picked from commit 24d7b67eac89f94e11003424bcf0d8f7b72222a8)
---
changelog/GHSA-gm62-xv2j-4w53.security.rst | 4 ++++
src/urllib3/response.py | 12 +++++++++++-
test/test_response.py | 10 ++++++++++
3 files changed, 25 insertions(+), 1 deletion(-)
create mode 100644 changelog/GHSA-gm62-xv2j-4w53.security.rst
diff --git a/changelog/GHSA-gm62-xv2j-4w53.security.rst b/changelog/GHSA-gm62-xv2j-4w53.security.rst
new file mode 100644
index 00000000..6646eaa3
--- /dev/null
+++ b/changelog/GHSA-gm62-xv2j-4w53.security.rst
@@ -0,0 +1,4 @@
+Fixed a security issue where an attacker could compose an HTTP response with
+virtually unlimited links in the ``Content-Encoding`` header, potentially
+leading to a denial of service (DoS) attack by exhausting system resources
+during decoding. The number of allowed chained encodings is now limited to 5.
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
index d0665533..f945c41c 100644
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -224,8 +224,18 @@ class MultiDecoder(object):
they were applied.
"""
+ # Maximum allowed number of chained HTTP encodings in the
+ # Content-Encoding header.
+ max_decode_links = 5
+
def __init__(self, modes):
- self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
+ encodings = [m.strip() for m in modes.split(",")]
+ if len(encodings) > self.max_decode_links:
+ raise DecodeError(
+ "Too many content encodings in the chain: "
+ f"{len(encodings)} > {self.max_decode_links}"
+ )
+ self._decoders = [_get_decoder(e) for e in encodings]
def flush(self):
return self._decoders[0].flush()
diff --git a/test/test_response.py b/test/test_response.py
index f949b2b1..33f570c4 100644
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -543,6 +543,16 @@ class TestResponse(object):
assert r.read(9 * 37) == b"foobarbaz" * 37
assert r.read() == b""
+ def test_read_multi_decoding_too_many_links(self):
+ fp = BytesIO(b"foo")
+ with pytest.raises(
+ DecodeError, match="Too many content encodings in the chain: 6 > 5"
+ ):
+ HTTPResponse(
+ fp,
+ headers={"content-encoding": "gzip, deflate, br, zstd, gzip, deflate"},
+ )
+
def test_body_blob(self):
resp = HTTPResponse(b"foo")
assert resp.data == b"foo"
--
2.52.0

1352
SOURCES/CVE-2025-66471.patch Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,219 @@
From 8c0a6cd7918719c5a636640d1c93e1609c6c6ce0 Mon Sep 17 00:00:00 2001
From: Ousret <ahmed.tahri@cloudnursery.dev>
Date: Thu, 17 Nov 2022 01:40:19 +0100
Subject: [PATCH 1/2] Prevent issue in HTTPResponse().read() when
decoded_content is True and then False Provided it has initialized eligible
decoder(decompressor) and did decode once
(cherry picked from commit cefd1dbba6a20ea4f017e6e472f9ada3a8a743e0)
---
changelog/2800.bugfix.rst | 1 +
src/urllib3/response.py | 12 ++++++++++++
test/test_response.py | 35 +++++++++++++++++++++++++++++++++++
3 files changed, 48 insertions(+)
create mode 100644 changelog/2800.bugfix.rst
diff --git a/changelog/2800.bugfix.rst b/changelog/2800.bugfix.rst
new file mode 100644
index 00000000..9dcf1eec
--- /dev/null
+++ b/changelog/2800.bugfix.rst
@@ -0,0 +1 @@
+Prevented issue in HTTPResponse().read() when decoded_content is True and then False.
\ No newline at end of file
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
index f945c41c..81ef4455 100644
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -412,6 +412,7 @@ class HTTPResponse(io.IOBase):
self.reason = reason
self.strict = strict
self.decode_content = decode_content
+ self._has_decoded_content = False
self.retries = retries
self.enforce_content_length = enforce_content_length
self.auto_close = auto_close
@@ -587,6 +588,11 @@ class HTTPResponse(io.IOBase):
Decode the data passed in and potentially flush the decoder.
"""
if not decode_content:
+ if self._has_decoded_content:
+ raise RuntimeError(
+ "Calling read(decode_content=False) is not supported after "
+ "read(decode_content=True) was called."
+ )
return data
if max_length is None or flush_decoder:
@@ -595,6 +601,7 @@ class HTTPResponse(io.IOBase):
try:
if self._decoder:
data = self._decoder.decompress(data, max_length=max_length)
+ self._has_decoded_content = True
except self.DECODER_ERROR_CLASSES as e:
content_encoding = self.headers.get("content-encoding", "").lower()
raise DecodeError(
@@ -822,6 +829,11 @@ class HTTPResponse(io.IOBase):
else:
# do not waste memory on buffer when not decoding
if not decode_content:
+ if self._has_decoded_content:
+ raise RuntimeError(
+ "Calling read(decode_content=False) is not supported after "
+ "read(decode_content=True) was called."
+ )
return data
decoded_data = self._decode(
diff --git a/test/test_response.py b/test/test_response.py
index 33f570c4..2614960f 100644
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -710,6 +710,41 @@ class TestResponse(object):
next(reader)
assert re.match("I/O operation on closed file.?", str(ctx.value))
+ def test_read_with_illegal_mix_decode_toggle(self):
+ compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
+ data = compress.compress(b"foo")
+ data += compress.flush()
+
+ fp = BytesIO(data)
+
+ resp = HTTPResponse(
+ fp, headers={"content-encoding": "deflate"}, preload_content=False
+ )
+
+ assert resp.read(1) == b"f"
+
+ with pytest.raises(
+ RuntimeError,
+ match=(
+ r"Calling read\(decode_content=False\) is not supported after "
+ r"read\(decode_content=True\) was called"
+ ),
+ ):
+ resp.read(1, decode_content=False)
+
+ def test_read_with_mix_decode_toggle(self):
+ compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
+ data = compress.compress(b"foo")
+ data += compress.flush()
+
+ fp = BytesIO(data)
+
+ resp = HTTPResponse(
+ fp, headers={"content-encoding": "deflate"}, preload_content=False
+ )
+ resp.read(1, decode_content=False)
+ assert resp.read(1, decode_content=True) == b"o"
+
def test_streaming(self):
fp = BytesIO(b"foo")
resp = HTTPResponse(fp, preload_content=False)
--
2.52.0
From 868d351ffcd32b0aa30fb94db61b2dd51c6c231b Mon Sep 17 00:00:00 2001
From: Illia Volochii <illia.volochii@gmail.com>
Date: Wed, 7 Jan 2026 18:07:30 +0200
Subject: [PATCH 2/2] Security fix for CVE-2026-21441
* Stop decoding response content during redirects needlessly
* Rename the new query parameter
* Add a changelog entry
(cherry picked from commit 8864ac407bba8607950025e0979c4c69bc7abc7b)
---
CHANGES.rst | 3 +++
dummyserver/handlers.py | 8 +++++++-
src/urllib3/response.py | 6 +++++-
test/with_dummyserver/test_connectionpool.py | 19 +++++++++++++++++++
4 files changed, 34 insertions(+), 2 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index 943c7679..f712291e 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -8,6 +8,9 @@ Backports
compressed HTTP content ("decompression bombs") leading to excessive resource
consumption even when a small amount of data was requested. Reading small
chunks of compressed data is safer and much more efficient now.
+- Fixed a high-severity security issue where decompression-bomb safeguards of
+ the streaming API were bypassed when HTTP redirects were followed.
+ (`GHSA-38jv-5279-wg99 <https://github.com/urllib3/urllib3/security/advisories/GHSA-38jv-5279-wg99>`__)
1.26.19 (2024-06-17)
--------------------
diff --git a/dummyserver/handlers.py b/dummyserver/handlers.py
index acd181d2..dea01682 100644
--- a/dummyserver/handlers.py
+++ b/dummyserver/handlers.py
@@ -188,9 +188,15 @@ class TestingApp(RequestHandler):
status = "%s Redirect" % status.decode("latin-1")
elif isinstance(status, bytes):
status = status.decode("latin-1")
+ compressed = request.params.get("compressed") == b"true"
headers = [("Location", target)]
- return Response(status=status, headers=headers)
+ if compressed:
+ headers.append(("Content-Encoding", "gzip"))
+ data = gzip.compress(b"foo")
+ else:
+ data = b""
+ return Response(data, status=status, headers=headers)
def not_found(self, request):
return Response("Not found", status="404 Not Found")
diff --git a/src/urllib3/response.py b/src/urllib3/response.py
index 81ef4455..1357d65c 100644
--- a/src/urllib3/response.py
+++ b/src/urllib3/response.py
@@ -480,7 +480,11 @@ class HTTPResponse(io.IOBase):
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
"""
try:
- self.read()
+ self.read(
+ # Do not spend resources decoding the content unless
+ # decoding has already been initiated.
+ decode_content=self._has_decoded_content,
+ )
except (HTTPError, SocketError, BaseSSLError, HTTPException):
pass
diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
index cde027b9..6e74883a 100644
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -464,6 +464,25 @@ class TestConnectionPool(HTTPDummyServerTestCase):
assert r.status == 200
assert r.data == b"Dummy server!"
+ @mock.patch("urllib3.response.GzipDecoder.decompress")
+ def test_no_decoding_with_redirect_when_preload_disabled(
+ self, gzip_decompress
+ ):
+ """
+ Test that urllib3 does not attempt to decode a gzipped redirect
+ response when `preload_content` is set to `False`.
+ """
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ # Three requests are expected: two redirects and one final / 200 OK.
+ response = pool.request(
+ "GET",
+ "/redirect",
+ fields={"target": "/redirect?compressed=true", "compressed": "true"},
+ preload_content=False,
+ )
+ assert response.status == 200
+ gzip_decompress.assert_not_called()
+
def test_303_redirect_makes_request_lose_body(self):
with HTTPConnectionPool(self.host, self.port) as pool:
response = pool.request(
--
2.52.0

View File

@@ -6,13 +6,16 @@
Name: python%{python3_pkgversion}-urllib3
Version: 1.26.19
Release: 1%{?dist}
Release: 2%{?dist}
Summary: HTTP library with thread-safe connection pooling, file post, and more
# SPDX
License: MIT
URL: https://github.com/urllib3/urllib3
Source: %{url}/archive/%{version}/urllib3-%{version}.tar.gz
Patch1: CVE-2025-66471.patch
Patch2: CVE-2025-66418.patch
Patch3: CVE-2026-21441.patch
BuildArch: noarch
@@ -72,7 +75,7 @@ many critical features that are missing from the Python standard libraries:
%prep
%autosetup -n urllib3-%{version}
%autosetup -p1 -n urllib3-%{version}
# Make sure that the RECENT_DATE value doesn't get too far behind what the current date is.
# RECENT_DATE must not be older than 2 years from the build time, or else test_recent_date
# (from test/test_connection.py) would fail. However, it shouldn't be too close to the build time either,
@@ -130,6 +133,12 @@ ignore="${ignore-} --ignore=test/test_no_ssl.py"
%changelog
* Tue Dec 16 2025 Miro Hrončok <mhroncok@redhat.com> - 1.26.19-2
- Security fix for CVE-2025-66471
- Security fix for CVE-2025-66418
- Security fix for CVE-2026-21441
Resolves: RHEL-139409
* Wed Sep 25 2024 Lumír Balhar <lbalhar@redhat.com> - 1.26.19-1
- Rebase to 1.26.19 to fix CVE-2024-37891
Resolves: RHEL-59989