Compare commits

...

No commits in common. "c8" and "c8-beta" have entirely different histories.
c8 ... c8-beta

4 changed files with 1 addition and 780 deletions


@@ -1,90 +0,0 @@
From 87acab66e124912549fbc3151f27ca7fae76386c Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka <storchaka@gmail.com>
Date: Tue, 23 Apr 2024 19:54:00 +0200
Subject: [PATCH] gh-115133: Fix tests for XMLPullParser with Expat 2.6.0
Feeding the parser by too small chunks defers parsing to prevent
CVE-2023-52425. Future versions of Expat may be more reactive.
(cherry picked from commit 4a08e7b3431cd32a0daf22a33421cd3035343dc4)
---
Lib/test/test_xml_etree.py | 53 +++++++++++--------
...-02-08-14-21-28.gh-issue-115133.ycl4ko.rst | 2 +
2 files changed, 33 insertions(+), 22 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index acaa519..c01af47 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -1044,28 +1044,37 @@ class XMLPullParserTest(unittest.TestCase):
self.assertEqual([(action, elem.tag) for action, elem in events],
expected)
- def test_simple_xml(self):
- for chunk_size in (None, 1, 5):
- with self.subTest(chunk_size=chunk_size):
- parser = ET.XMLPullParser()
- self.assert_event_tags(parser, [])
- self._feed(parser, "<!-- comment -->\n", chunk_size)
- self.assert_event_tags(parser, [])
- self._feed(parser,
- "<root>\n <element key='value'>text</element",
- chunk_size)
- self.assert_event_tags(parser, [])
- self._feed(parser, ">\n", chunk_size)
- self.assert_event_tags(parser, [('end', 'element')])
- self._feed(parser, "<element>text</element>tail\n", chunk_size)
- self._feed(parser, "<empty-element/>\n", chunk_size)
- self.assert_event_tags(parser, [
- ('end', 'element'),
- ('end', 'empty-element'),
- ])
- self._feed(parser, "</root>\n", chunk_size)
- self.assert_event_tags(parser, [('end', 'root')])
- self.assertIsNone(parser.close())
+ def test_simple_xml(self, chunk_size=None):
+ parser = ET.XMLPullParser()
+ self.assert_event_tags(parser, [])
+ self._feed(parser, "<!-- comment -->\n", chunk_size)
+ self.assert_event_tags(parser, [])
+ self._feed(parser,
+ "<root>\n <element key='value'>text</element",
+ chunk_size)
+ self.assert_event_tags(parser, [])
+ self._feed(parser, ">\n", chunk_size)
+ self.assert_event_tags(parser, [('end', 'element')])
+ self._feed(parser, "<element>text</element>tail\n", chunk_size)
+ self._feed(parser, "<empty-element/>\n", chunk_size)
+ self.assert_event_tags(parser, [
+ ('end', 'element'),
+ ('end', 'empty-element'),
+ ])
+ self._feed(parser, "</root>\n", chunk_size)
+ self.assert_event_tags(parser, [('end', 'root')])
+ self.assertIsNone(parser.close())
+
+ @unittest.expectedFailure
+ def test_simple_xml_chunk_1(self):
+ self.test_simple_xml(chunk_size=1)
+
+ @unittest.expectedFailure
+ def test_simple_xml_chunk_5(self):
+ self.test_simple_xml(chunk_size=5)
+
+ def test_simple_xml_chunk_22(self):
+ self.test_simple_xml(chunk_size=22)
def test_feed_while_iterating(self):
parser = ET.XMLPullParser()
diff --git a/Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst b/Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst
new file mode 100644
index 0000000..6f10152
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst
@@ -0,0 +1,2 @@
+Fix tests for :class:`~xml.etree.ElementTree.XMLPullParser` with Expat
+2.6.0.
--
2.44.0
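
A minimal sketch (not part of the patch above) of the behaviour behind the new expectedFailure markers: with Expat 2.6.0, or an older Expat carrying the backported CVE-2023-52425 fix, XMLPullParser may buffer very small chunks and only emit their events after more input arrives or after close(), so per-chunk assertions no longer hold, while draining events after close() stays reliable:

import xml.etree.ElementTree as ET

data = b"<root><element key='value'>text</element></root>"
parser = ET.XMLPullParser(events=("start", "end"))
for i in range(len(data)):
    parser.feed(data[i:i+1])   # feed one byte at a time (the chunk_size=1 case)
parser.close()                 # flush whatever Expat has buffered
for event, elem in parser.read_events():
    print(event, elem.tag)     # start/end pairs for root and element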


@@ -1,291 +0,0 @@
From 82f1ea4b72be40f58fd0a9a37f8d8d2f7d16f9e0 Mon Sep 17 00:00:00 2001
From: Lumir Balhar <lbalhar@redhat.com>
Date: Wed, 24 Apr 2024 00:19:23 +0200
Subject: [PATCH] CVE-2023-6597
Co-authored-by: Søren Løvborg <sorenl@unity3d.com>
Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
---
Lib/tempfile.py | 44 +++++++++-
Lib/test/test_tempfile.py | 166 +++++++++++++++++++++++++++++++++++---
2 files changed, 199 insertions(+), 11 deletions(-)
diff --git a/Lib/tempfile.py b/Lib/tempfile.py
index 2cb5434..d79b70c 100644
--- a/Lib/tempfile.py
+++ b/Lib/tempfile.py
@@ -276,6 +276,23 @@ def _mkstemp_inner(dir, pre, suf, flags, output_type):
"No usable temporary file name found")
+def _dont_follow_symlinks(func, path, *args):
+ # Pass follow_symlinks=False, unless not supported on this platform.
+ if func in _os.supports_follow_symlinks:
+ func(path, *args, follow_symlinks=False)
+ elif _os.name == 'nt' or not _os.path.islink(path):
+ func(path, *args)
+
+
+def _resetperms(path):
+ try:
+ chflags = _os.chflags
+ except AttributeError:
+ pass
+ else:
+ _dont_follow_symlinks(chflags, path, 0)
+ _dont_follow_symlinks(_os.chmod, path, 0o700)
+
# User visible interfaces.
def gettempprefix():
@@ -794,9 +811,32 @@ class TemporaryDirectory(object):
self, self._cleanup, self.name,
warn_message="Implicitly cleaning up {!r}".format(self))
+ @classmethod
+ def _rmtree(cls, name):
+ def onerror(func, path, exc_info):
+ if issubclass(exc_info[0], PermissionError):
+ try:
+ if path != name:
+ _resetperms(_os.path.dirname(path))
+ _resetperms(path)
+
+ try:
+ _os.unlink(path)
+ # PermissionError is raised on FreeBSD for directories
+ except (IsADirectoryError, PermissionError):
+ cls._rmtree(path)
+ except FileNotFoundError:
+ pass
+ elif issubclass(exc_info[0], FileNotFoundError):
+ pass
+ else:
+ raise
+
+ _shutil.rmtree(name, onerror=onerror)
+
@classmethod
def _cleanup(cls, name, warn_message):
- _shutil.rmtree(name)
+ cls._rmtree(name)
_warnings.warn(warn_message, ResourceWarning)
def __repr__(self):
@@ -810,4 +850,4 @@ class TemporaryDirectory(object):
def cleanup(self):
if self._finalizer.detach():
- _shutil.rmtree(self.name)
+ self._rmtree(self.name)
diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py
index 710756b..c5560e1 100644
--- a/Lib/test/test_tempfile.py
+++ b/Lib/test/test_tempfile.py
@@ -1298,19 +1298,25 @@ class NulledModules:
class TestTemporaryDirectory(BaseTestCase):
"""Test TemporaryDirectory()."""
- def do_create(self, dir=None, pre="", suf="", recurse=1):
+ def do_create(self, dir=None, pre="", suf="", recurse=1, dirs=1, files=1):
if dir is None:
dir = tempfile.gettempdir()
tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf)
self.nameCheck(tmp.name, dir, pre, suf)
- # Create a subdirectory and some files
- if recurse:
- d1 = self.do_create(tmp.name, pre, suf, recurse-1)
- d1.name = None
- with open(os.path.join(tmp.name, "test.txt"), "wb") as f:
- f.write(b"Hello world!")
+ self.do_create2(tmp.name, recurse, dirs, files)
return tmp
+ def do_create2(self, path, recurse=1, dirs=1, files=1):
+ # Create subdirectories and some files
+ if recurse:
+ for i in range(dirs):
+ name = os.path.join(path, "dir%d" % i)
+ os.mkdir(name)
+ self.do_create2(name, recurse-1, dirs, files)
+ for i in range(files):
+ with open(os.path.join(path, "test%d.txt" % i), "wb") as f:
+ f.write(b"Hello world!")
+
def test_mkdtemp_failure(self):
# Check no additional exception if mkdtemp fails
# Previously would raise AttributeError instead
@@ -1350,11 +1356,108 @@ class TestTemporaryDirectory(BaseTestCase):
"TemporaryDirectory %s exists after cleanup" % d1.name)
self.assertTrue(os.path.exists(d2.name),
"Directory pointed to by a symlink was deleted")
- self.assertEqual(os.listdir(d2.name), ['test.txt'],
+ self.assertEqual(os.listdir(d2.name), ['test0.txt'],
"Contents of the directory pointed to by a symlink "
"were deleted")
d2.cleanup()
+ @support.skip_unless_symlink
+ def test_cleanup_with_symlink_modes(self):
+ # cleanup() should not follow symlinks when fixing mode bits (#91133)
+ with self.do_create(recurse=0) as d2:
+ file1 = os.path.join(d2, 'file1')
+ open(file1, 'wb').close()
+ dir1 = os.path.join(d2, 'dir1')
+ os.mkdir(dir1)
+ for mode in range(8):
+ mode <<= 6
+ with self.subTest(mode=format(mode, '03o')):
+ def test(target, target_is_directory):
+ d1 = self.do_create(recurse=0)
+ symlink = os.path.join(d1.name, 'symlink')
+ os.symlink(target, symlink,
+ target_is_directory=target_is_directory)
+ try:
+ os.chmod(symlink, mode, follow_symlinks=False)
+ except NotImplementedError:
+ pass
+ try:
+ os.chmod(symlink, mode)
+ except FileNotFoundError:
+ pass
+ os.chmod(d1.name, mode)
+ d1.cleanup()
+ self.assertFalse(os.path.exists(d1.name))
+
+ with self.subTest('nonexisting file'):
+ test('nonexisting', target_is_directory=False)
+ with self.subTest('nonexisting dir'):
+ test('nonexisting', target_is_directory=True)
+
+ with self.subTest('existing file'):
+ os.chmod(file1, mode)
+ old_mode = os.stat(file1).st_mode
+ test(file1, target_is_directory=False)
+ new_mode = os.stat(file1).st_mode
+ self.assertEqual(new_mode, old_mode,
+ '%03o != %03o' % (new_mode, old_mode))
+
+ with self.subTest('existing dir'):
+ os.chmod(dir1, mode)
+ old_mode = os.stat(dir1).st_mode
+ test(dir1, target_is_directory=True)
+ new_mode = os.stat(dir1).st_mode
+ self.assertEqual(new_mode, old_mode,
+ '%03o != %03o' % (new_mode, old_mode))
+
+ @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.chflags')
+ @support.skip_unless_symlink
+ def test_cleanup_with_symlink_flags(self):
+ # cleanup() should not follow symlinks when fixing flags (#91133)
+ flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
+ self.check_flags(flags)
+
+ with self.do_create(recurse=0) as d2:
+ file1 = os.path.join(d2, 'file1')
+ open(file1, 'wb').close()
+ dir1 = os.path.join(d2, 'dir1')
+ os.mkdir(dir1)
+ def test(target, target_is_directory):
+ d1 = self.do_create(recurse=0)
+ symlink = os.path.join(d1.name, 'symlink')
+ os.symlink(target, symlink,
+ target_is_directory=target_is_directory)
+ try:
+ os.chflags(symlink, flags, follow_symlinks=False)
+ except NotImplementedError:
+ pass
+ try:
+ os.chflags(symlink, flags)
+ except FileNotFoundError:
+ pass
+ os.chflags(d1.name, flags)
+ d1.cleanup()
+ self.assertFalse(os.path.exists(d1.name))
+
+ with self.subTest('nonexisting file'):
+ test('nonexisting', target_is_directory=False)
+ with self.subTest('nonexisting dir'):
+ test('nonexisting', target_is_directory=True)
+
+ with self.subTest('existing file'):
+ os.chflags(file1, flags)
+ old_flags = os.stat(file1).st_flags
+ test(file1, target_is_directory=False)
+ new_flags = os.stat(file1).st_flags
+ self.assertEqual(new_flags, old_flags)
+
+ with self.subTest('existing dir'):
+ os.chflags(dir1, flags)
+ old_flags = os.stat(dir1).st_flags
+ test(dir1, target_is_directory=True)
+ new_flags = os.stat(dir1).st_flags
+ self.assertEqual(new_flags, old_flags)
+
@support.cpython_only
def test_del_on_collection(self):
# A TemporaryDirectory is deleted when garbage collected
@@ -1385,7 +1488,7 @@ class TestTemporaryDirectory(BaseTestCase):
tmp2 = os.path.join(tmp.name, 'test_dir')
os.mkdir(tmp2)
- with open(os.path.join(tmp2, "test.txt"), "w") as f:
+ with open(os.path.join(tmp2, "test0.txt"), "w") as f:
f.write("Hello world!")
{mod}.tmp = tmp
@@ -1453,6 +1556,51 @@ class TestTemporaryDirectory(BaseTestCase):
self.assertEqual(name, d.name)
self.assertFalse(os.path.exists(name))
+ def test_modes(self):
+ for mode in range(8):
+ mode <<= 6
+ with self.subTest(mode=format(mode, '03o')):
+ d = self.do_create(recurse=3, dirs=2, files=2)
+ with d:
+ # Change files and directories mode recursively.
+ for root, dirs, files in os.walk(d.name, topdown=False):
+ for name in files:
+ os.chmod(os.path.join(root, name), mode)
+ os.chmod(root, mode)
+ d.cleanup()
+ self.assertFalse(os.path.exists(d.name))
+
+ def check_flags(self, flags):
+ # skip the test if these flags are not supported (ex: FreeBSD 13)
+ filename = support.TESTFN
+ try:
+ open(filename, "w").close()
+ try:
+ os.chflags(filename, flags)
+ except OSError as exc:
+ # "OSError: [Errno 45] Operation not supported"
+ self.skipTest(f"chflags() doesn't support flags "
+ f"{flags:#b}: {exc}")
+ else:
+ os.chflags(filename, 0)
+ finally:
+ support.unlink(filename)
+
+ @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.lchflags')
+ def test_flags(self):
+ flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
+ self.check_flags(flags)
+
+ d = self.do_create(recurse=3, dirs=2, files=2)
+ with d:
+ # Change files and directories flags recursively.
+ for root, dirs, files in os.walk(d.name, topdown=False):
+ for name in files:
+ os.chflags(os.path.join(root, name), flags)
+ os.chflags(root, flags)
+ d.cleanup()
+ self.assertFalse(os.path.exists(d.name))
+
if __name__ == "__main__":
unittest.main()
--
2.44.0
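
A minimal sketch (illustrative only, not taken from the patch) of what the _rmtree()/onerror logic above provides: TemporaryDirectory.cleanup() can remove entries whose permissions were dropped, resetting modes without following symlinks (the CVE-2023-6597 issue). POSIX-oriented; the file names are arbitrary:

import os
import tempfile

d = tempfile.TemporaryDirectory()
inner = os.path.join(d.name, "inner")
os.mkdir(inner)
with open(os.path.join(inner, "data.txt"), "wb") as f:
    f.write(b"Hello world!")
os.chmod(inner, 0)   # drop all permissions on the subdirectory
d.cleanup()          # patched cleanup resets the mode and removes the tree
assert not os.path.exists(d.name)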


@@ -1,346 +0,0 @@
From 066df4fd454d6ff9be66e80b2a65995b10af174f Mon Sep 17 00:00:00 2001
From: John Jolly <john.jolly@gmail.com>
Date: Tue, 30 Jan 2018 01:51:35 -0700
Subject: [PATCH] bpo-22908: Add seek and tell functionality to ZipExtFile
(GH-4966)
This allows for nested zip files, tar files within zip files, zip files within tar files, etc.
Contributed by: John Jolly
---
Doc/library/zipfile.rst | 6 +-
Lib/test/test_zipfile.py | 34 ++++++++
Lib/zipfile.py | 82 +++++++++++++++++++
.../2017-12-21-22-00-11.bpo-22908.cVm89I.rst | 2 +
4 files changed, 121 insertions(+), 3 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2017-12-21-22-00-11.bpo-22908.cVm89I.rst
diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst
index d58efe0b417516..7c9a8c80225491 100644
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -246,9 +246,9 @@ ZipFile Objects
With *mode* ``'r'`` the file-like object
(``ZipExtFile``) is read-only and provides the following methods:
:meth:`~io.BufferedIOBase.read`, :meth:`~io.IOBase.readline`,
- :meth:`~io.IOBase.readlines`, :meth:`__iter__`,
- :meth:`~iterator.__next__`. These objects can operate independently of
- the ZipFile.
+ :meth:`~io.IOBase.readlines`, :meth:`~io.IOBase.seek`,
+ :meth:`~io.IOBase.tell`, :meth:`__iter__`, :meth:`~iterator.__next__`.
+ These objects can operate independently of the ZipFile.
With ``mode='w'``, a writable file handle is returned, which supports the
:meth:`~io.BufferedIOBase.write` method. While a writable file handle is open,
diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py
index 94db858a1517c4..61c3e349a69ef4 100644
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -1628,6 +1628,40 @@ def test_open_conflicting_handles(self):
self.assertEqual(zipf.read('baz'), msg3)
self.assertEqual(zipf.namelist(), ['foo', 'bar', 'baz'])
+ def test_seek_tell(self):
+ # Test seek functionality
+ txt = b"Where's Bruce?"
+ bloc = txt.find(b"Bruce")
+ # Check seek on a file
+ with zipfile.ZipFile(TESTFN, "w") as zipf:
+ zipf.writestr("foo.txt", txt)
+ with zipfile.ZipFile(TESTFN, "r") as zipf:
+ with zipf.open("foo.txt", "r") as fp:
+ fp.seek(bloc, os.SEEK_SET)
+ self.assertEqual(fp.tell(), bloc)
+ fp.seek(-bloc, os.SEEK_CUR)
+ self.assertEqual(fp.tell(), 0)
+ fp.seek(bloc, os.SEEK_CUR)
+ self.assertEqual(fp.tell(), bloc)
+ self.assertEqual(fp.read(5), txt[bloc:bloc+5])
+ fp.seek(0, os.SEEK_END)
+ self.assertEqual(fp.tell(), len(txt))
+ # Check seek on memory file
+ data = io.BytesIO()
+ with zipfile.ZipFile(data, mode="w") as zipf:
+ zipf.writestr("foo.txt", txt)
+ with zipfile.ZipFile(data, mode="r") as zipf:
+ with zipf.open("foo.txt", "r") as fp:
+ fp.seek(bloc, os.SEEK_SET)
+ self.assertEqual(fp.tell(), bloc)
+ fp.seek(-bloc, os.SEEK_CUR)
+ self.assertEqual(fp.tell(), 0)
+ fp.seek(bloc, os.SEEK_CUR)
+ self.assertEqual(fp.tell(), bloc)
+ self.assertEqual(fp.read(5), txt[bloc:bloc+5])
+ fp.seek(0, os.SEEK_END)
+ self.assertEqual(fp.tell(), len(txt))
+
def tearDown(self):
unlink(TESTFN)
unlink(TESTFN2)
diff --git a/Lib/zipfile.py b/Lib/zipfile.py
index f9db45f58a2bde..5df7b1bf75b9d9 100644
--- a/Lib/zipfile.py
+++ b/Lib/zipfile.py
@@ -696,6 +696,18 @@ def __init__(self, file, pos, close, lock, writing):
self._close = close
self._lock = lock
self._writing = writing
+ self.seekable = file.seekable
+ self.tell = file.tell
+
+ def seek(self, offset, whence=0):
+ with self._lock:
+ if self._writing():
+ raise ValueError("Can't reposition in the ZIP file while "
+ "there is an open writing handle on it. "
+ "Close the writing handle before trying to read.")
+ self._file.seek(offset, whence)
+ self._pos = self._file.tell()
+ return self._pos
def read(self, n=-1):
with self._lock:
@@ -746,6 +758,9 @@ class ZipExtFile(io.BufferedIOBase):
# Read from compressed files in 4k blocks.
MIN_READ_SIZE = 4096
+ # Chunk size to read during seek
+ MAX_SEEK_READ = 1 << 24
+
def __init__(self, fileobj, mode, zipinfo, decrypter=None,
close_fileobj=False):
self._fileobj = fileobj
@@ -778,6 +793,17 @@ def __init__(self, fileobj, mode, zipinfo, decrypter=None,
else:
self._expected_crc = None
+ self._seekable = False
+ try:
+ if fileobj.seekable():
+ self._orig_compress_start = fileobj.tell()
+ self._orig_compress_size = zipinfo.compress_size
+ self._orig_file_size = zipinfo.file_size
+ self._orig_start_crc = self._running_crc
+ self._seekable = True
+ except AttributeError:
+ pass
+
def __repr__(self):
result = ['<%s.%s' % (self.__class__.__module__,
self.__class__.__qualname__)]
@@ -963,6 +989,62 @@ def close(self):
finally:
super().close()
+ def seekable(self):
+ return self._seekable
+
+ def seek(self, offset, whence=0):
+ if not self._seekable:
+ raise io.UnsupportedOperation("underlying stream is not seekable")
+ curr_pos = self.tell()
+ if whence == 0: # Seek from start of file
+ new_pos = offset
+ elif whence == 1: # Seek from current position
+ new_pos = curr_pos + offset
+ elif whence == 2: # Seek from EOF
+ new_pos = self._orig_file_size + offset
+ else:
+ raise ValueError("whence must be os.SEEK_SET (0), "
+ "os.SEEK_CUR (1), or os.SEEK_END (2)")
+
+ if new_pos > self._orig_file_size:
+ new_pos = self._orig_file_size
+
+ if new_pos < 0:
+ new_pos = 0
+
+ read_offset = new_pos - curr_pos
+ buff_offset = read_offset + self._offset
+
+ if buff_offset >= 0 and buff_offset < len(self._readbuffer):
+ # Just move the _offset index if the new position is in the _readbuffer
+ self._offset = buff_offset
+ read_offset = 0
+ elif read_offset < 0:
+ # Position is before the current position. Reset the ZipExtFile
+
+ self._fileobj.seek(self._orig_compress_start)
+ self._running_crc = self._orig_start_crc
+ self._compress_left = self._orig_compress_size
+ self._left = self._orig_file_size
+ self._readbuffer = b''
+ self._offset = 0
+ self._decompressor = zipfile._get_decompressor(self._compress_type)
+ self._eof = False
+ read_offset = new_pos
+
+ while read_offset > 0:
+ read_len = min(self.MAX_SEEK_READ, read_offset)
+ self.read(read_len)
+ read_offset -= read_len
+
+ return self.tell()
+
+ def tell(self):
+ if not self._seekable:
+ raise io.UnsupportedOperation("underlying stream is not seekable")
+ filepos = self._orig_file_size - self._left - len(self._readbuffer) + self._offset
+ return filepos
+
class _ZipWriteFile(io.BufferedIOBase):
def __init__(self, zf, zinfo, zip64):
diff --git a/Misc/NEWS.d/next/Library/2017-12-21-22-00-11.bpo-22908.cVm89I.rst b/Misc/NEWS.d/next/Library/2017-12-21-22-00-11.bpo-22908.cVm89I.rst
new file mode 100644
index 00000000000000..4f3cc0166019f1
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2017-12-21-22-00-11.bpo-22908.cVm89I.rst
@@ -0,0 +1,2 @@
+Added seek and tell to the ZipExtFile class. This only works if the file
+object used to open the zipfile is seekable.
From 55beb125db2942b5362454e05542e9661e964a65 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka <storchaka@gmail.com>
Date: Tue, 23 Apr 2024 14:29:31 +0200
Subject: [PATCH] gh-109858: Protect zipfile from "quoted-overlap" zipbomb
(GH-110016) (GH-113916)
Raise BadZipFile when try to read an entry that overlaps with other entry or
central directory.
(cherry picked from commit 66363b9a7b9fe7c99eba3a185b74c5fdbf842eba)
---
Lib/test/test_zipfile.py | 60 +++++++++++++++++++
Lib/zipfile.py | 12 ++++
...-09-28-13-15-51.gh-issue-109858.43e2dg.rst | 3 +
3 files changed, 75 insertions(+)
create mode 100644 Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst
diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py
index 7f82586..0379909 100644
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -1644,6 +1644,66 @@ class OtherTests(unittest.TestCase):
fp.seek(0, os.SEEK_END)
self.assertEqual(fp.tell(), len(txt))
+ @requires_zlib
+ def test_full_overlap(self):
+ data = (
+ b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e'
+ b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00a\xed'
+ b'\xc0\x81\x08\x00\x00\x00\xc00\xd6\xfbK\\d\x0b`P'
+ b'K\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2'
+ b'\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00aPK'
+ b'\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e'
+ b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00\x00\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00bPK\x05'
+ b'\x06\x00\x00\x00\x00\x02\x00\x02\x00^\x00\x00\x00/\x00\x00'
+ b'\x00\x00\x00'
+ )
+ with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf:
+ self.assertEqual(zipf.namelist(), ['a', 'b'])
+ zi = zipf.getinfo('a')
+ self.assertEqual(zi.header_offset, 0)
+ self.assertEqual(zi.compress_size, 16)
+ self.assertEqual(zi.file_size, 1033)
+ zi = zipf.getinfo('b')
+ self.assertEqual(zi.header_offset, 0)
+ self.assertEqual(zi.compress_size, 16)
+ self.assertEqual(zi.file_size, 1033)
+ self.assertEqual(len(zipf.read('a')), 1033)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'File name.*differ'):
+ zipf.read('b')
+
+ @requires_zlib
+ def test_quoted_overlap(self):
+ data = (
+ b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05Y\xfc'
+ b'8\x044\x00\x00\x00(\x04\x00\x00\x01\x00\x00\x00a\x00'
+ b'\x1f\x00\xe0\xffPK\x03\x04\x14\x00\x00\x00\x08\x00\xa0l'
+ b'H\x05\xe2\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00'
+ b'\x00\x00b\xed\xc0\x81\x08\x00\x00\x00\xc00\xd6\xfbK\\'
+ b'd\x0b`PK\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0'
+ b'lH\x05Y\xfc8\x044\x00\x00\x00(\x04\x00\x00\x01'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ b'\x00aPK\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0l'
+ b'H\x05\xe2\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$\x00\x00\x00'
+ b'bPK\x05\x06\x00\x00\x00\x00\x02\x00\x02\x00^\x00\x00'
+ b'\x00S\x00\x00\x00\x00\x00'
+ )
+ with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf:
+ self.assertEqual(zipf.namelist(), ['a', 'b'])
+ zi = zipf.getinfo('a')
+ self.assertEqual(zi.header_offset, 0)
+ self.assertEqual(zi.compress_size, 52)
+ self.assertEqual(zi.file_size, 1064)
+ zi = zipf.getinfo('b')
+ self.assertEqual(zi.header_offset, 36)
+ self.assertEqual(zi.compress_size, 16)
+ self.assertEqual(zi.file_size, 1033)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'Overlapped entries'):
+ zipf.read('a')
+ self.assertEqual(len(zipf.read('b')), 1033)
+
def tearDown(self):
unlink(TESTFN)
unlink(TESTFN2)
diff --git a/Lib/zipfile.py b/Lib/zipfile.py
index 0ab9fac..e6d7676 100644
--- a/Lib/zipfile.py
+++ b/Lib/zipfile.py
@@ -338,6 +338,7 @@ class ZipInfo (object):
'compress_size',
'file_size',
'_raw_time',
+ '_end_offset',
)
def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
@@ -376,6 +377,7 @@ class ZipInfo (object):
self.volume = 0 # Volume number of file header
self.internal_attr = 0 # Internal attributes
self.external_attr = 0 # External file attributes
+ self._end_offset = None # Start of the next local header or central directory
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
@@ -1346,6 +1348,12 @@ class ZipFile:
if self.debug > 2:
print("total", total)
+ end_offset = self.start_dir
+ for zinfo in sorted(self.filelist,
+ key=lambda zinfo: zinfo.header_offset,
+ reverse=True):
+ zinfo._end_offset = end_offset
+ end_offset = zinfo.header_offset
def namelist(self):
"""Return a list of file names in the archive."""
@@ -1500,6 +1508,10 @@ class ZipFile:
'File name in directory %r and header %r differ.'
% (zinfo.orig_filename, fname))
+ if (zinfo._end_offset is not None and
+ zef_file.tell() + zinfo.compress_size > zinfo._end_offset):
+ raise BadZipFile(f"Overlapped entries: {zinfo.orig_filename!r} (possible zip bomb)")
+
# check for encrypted flag & handle password
is_encrypted = zinfo.flag_bits & 0x1
zd = None
diff --git a/Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst b/Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst
new file mode 100644
index 0000000..be279ca
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst
@@ -0,0 +1,3 @@
+Protect :mod:`zipfile` from "quoted-overlap" zipbomb. It now raises
+BadZipFile when try to read an entry that overlaps with other entry or
+central directory.
--
2.44.0
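
A minimal sketch (illustrative only) of the two changes above: the first patch gives ZipExtFile a working seek()/tell() whenever the underlying file object is seekable, and the second makes ZipFile.read() raise zipfile.BadZipFile ("Overlapped entries ...") for archives whose members overlap each other or the central directory (CVE-2024-0450). Assuming an interpreter that carries the backport (or Python 3.7+ for seek/tell):

import io
import os
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("member.txt", b"Where's Bruce?")

with zipfile.ZipFile(buf, "r") as zf:
    with zf.open("member.txt") as fp:   # ZipExtFile over a seekable stream
        fp.seek(8, os.SEEK_SET)         # jump to the start of b'Bruce?'
        print(fp.tell(), fp.read())     # -> 8 b'Bruce?'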


@@ -14,7 +14,7 @@ URL: https://www.python.org/
# WARNING When rebasing to a new Python version,
# remember to update the python3-docs package as well
Version: %{pybasever}.8
-Release: 62%{?dist}
+Release: 59%{?dist}
License: Python
@@ -837,43 +837,6 @@ Patch414: 00414-skip_test_zlib_s390x.patch
# config file or environment variable.
Patch415: 00415-cve-2023-27043-gh-102988-reject-malformed-addresses-in-email-parseaddr-111116.patch
-# 00422 #
-# gh-115133: Fix tests for XMLPullParser with Expat 2.6.0
-#
-# Feeding the parser by too small chunks defers parsing to prevent
-# CVE-2023-52425. Future versions of Expat may be more reactive.
-#
-# Patch rebased because the CVE fix is backported to older expat in RHEL.
-Patch422: 00422-gh-115133-fix-tests-for-xmlpullparser-with-expat-2-6-0.patch
-# 426 #
-# CVE-2023-6597
-#
-# Path traversal on tempfile.TemporaryDirectory
-#
-# Upstream: https://github.com/python/cpython/issues/91133
-# Tracking bug: https://bugzilla.redhat.com/show_bug.cgi?id=CVE-2023-6597
-#
-# To backport the fix cleanly the patch contains also this rebased commit:
-# Fix permission errors in TemporaryDirectory cleanup
-# https://github.com/python/cpython/commit/e9b51c0ad81da1da11ae65840ac8b50a8521373c
-Patch426: 00426-CVE-2023-6597.patch
-# 427 #
-# CVE-2024-0450
-#
-# The zipfile module is vulnerable to zip-bombs leading to denial of service.
-#
-# Upstream: https://github.com/python/cpython/issues/109858
-# Tracking bug: https://bugzilla.redhat.com/show_bug.cgi?id=CVE-2024-0450
-#
-# To backport the fix cleanly also this change is backported:
-# Add seek and tell functionality to ZipExtFile
-# https://github.com/python/cpython/commit/066df4fd454d6ff9be66e80b2a65995b10af174f
-#
-# Patch rebased from 3.8.
-Patch427: 00427-CVE-2024-0450.patch
# (New patches go here ^^^)
#
# When adding new patches to "python" and "python3" in Fedora, EL, etc.,
@@ -1227,9 +1190,6 @@ git apply %{PATCH351}
%patch413 -p1
%patch414 -p1
%patch415 -p1
-%patch422 -p1
-%patch426 -p1
-%patch427 -p1
# Remove files that should be generated by the build
# (This is after patching, so that we can use patches directly from upstream)
@@ -2161,18 +2121,6 @@ fi
# ======================================================
%changelog
-* Wed Apr 24 2024 Lumír Balhar <lbalhar@redhat.com> - 3.6.8-62
-- Security fix for CVE-2024-0450
-Resolves: RHEL-33683
-* Wed Apr 24 2024 Lumír Balhar <lbalhar@redhat.com> - 3.6.8-61
-- Security fix for CVE-2023-6597
-Resolves: RHEL-33671
-* Wed Apr 24 2024 Lumír Balhar <lbalhar@redhat.com> - 3.6.8-60
-- Fix build with expat with fixed CVE-2023-52425
-Related: RHEL-33671
* Thu Jan 04 2024 Lumír Balhar <lbalhar@redhat.com> - 3.6.8-59
- Security fix for CVE-2023-27043
Resolves: RHEL-20610