Compare commits
4 Commits
Author | SHA1 | Date
--- | --- | ---
 | fb9cb95ae7 | 
 | b3973cea86 | 
 | 70de29d384 | 
 | b9dae68211 | 
@ -1,7 +1,7 @@
From 5cedde59cde3f05af798a7cb5bc722cb0deb4835 Mon Sep 17 00:00:00 2001
From 43ce74d971fad62db6ccd723fe6b01da9c7ff407 Mon Sep 17 00:00:00 2001
From: Charalampos Stratakis <cstratak@redhat.com>
Date: Thu, 12 Dec 2019 16:58:31 +0100
Subject: [PATCH 1/6] Expose blake2b and blake2s hashes from OpenSSL
Subject: [PATCH 1/5] Expose blake2b and blake2s hashes from OpenSSL

These aren't as powerful as Python's own implementation, but they can be
used under FIPS.
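Not part of the patch, but a small sketch of the trade-off described above: unkeyed digests are identical whichever implementation ends up backing the constructor, while keyed/parameterized hashing is a feature of Python's builtin BLAKE2 only, so it is expected to be unavailable when only the OpenSSL digests can be used.

```python
import hashlib

# Plain digests agree no matter which implementation backs the constructor.
via_new = hashlib.new("blake2b", b"data")
direct = hashlib.blake2b(b"data")
assert via_new.hexdigest() == direct.hexdigest()

# Keyed hashing (key=, salt=, person=) is only provided by Python's builtin
# implementation; on a FIPS system where only the OpenSSL digests remain,
# this call is expected to fail.
try:
    print(hashlib.blake2b(b"data", key=b"secret").hexdigest())
except (TypeError, ValueError) as exc:
    print("keyed blake2b unavailable:", exc)
```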
@ -251,13 +251,13 @@ index fb61a44..1e42b87 100644
|
||||
-/*[clinic end generated code: output=b339e255db698147 input=a9049054013a1b77]*/
|
||||
+/*[clinic end generated code: output=1d988d457a8beebe input=a9049054013a1b77]*/
|
||||
--
|
||||
2.43.0
|
||||
2.45.0
|
||||
|
||||
|
||||
From 2a12baa9e201f54560ec99ad5ee1fa5b0006aa39 Mon Sep 17 00:00:00 2001
From 6872b634078a2c69644235781ebffb07f8edcb83 Mon Sep 17 00:00:00 2001
From: Petr Viktorin <pviktori@redhat.com>
Date: Thu, 25 Jul 2019 17:19:06 +0200
Subject: [PATCH 2/6] Disable Python's hash implementations in FIPS mode,
Subject: [PATCH 2/5] Disable Python's hash implementations in FIPS mode,
 forcing OpenSSL

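For illustration only (this snippet is not in the patch): with the Python fallbacks disabled, a digest that OpenSSL refuses in FIPS mode surfaces as a ValueError, while `usedforsecurity=False` keeps non-security uses working. On a non-FIPS machine the same code simply succeeds.

```python
import hashlib

def checksum_md5(data: bytes) -> str:
    # Non-security use (e.g. a content checksum) stays allowed in FIPS mode.
    return hashlib.md5(data, usedforsecurity=False).hexdigest()

try:
    hashlib.md5(b"data")            # defaults to usedforsecurity=True
except ValueError as exc:           # raised on a FIPS-enabled system
    print("md5 blocked by FIPS policy:", exc)
else:
    print("not running in FIPS mode")

print(checksum_md5(b"data"))
```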
---
|
||||
@ -445,10 +445,10 @@ index a8bad9d..1b1d937 100644
|
||||
+ if (_Py_hashlib_fips_error(exc, name)) return NULL; \
|
||||
+} while (0)
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 1876f77..1875d1e 100644
|
||||
index 65ad1c2..b5f9ab5 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -7439,7 +7439,8 @@ PY_STDLIB_MOD([_sha2],
|
||||
@@ -7463,7 +7463,8 @@ PY_STDLIB_MOD([_sha2],
|
||||
PY_STDLIB_MOD([_sha3], [test "$with_builtin_sha3" = yes])
|
||||
PY_STDLIB_MOD([_blake2],
|
||||
[test "$with_builtin_blake2" = yes], [],
|
||||
@ -459,13 +459,13 @@ index 1876f77..1875d1e 100644
|
||||
PY_STDLIB_MOD([_crypt],
|
||||
[], [test "$ac_cv_crypt_crypt" = yes],
|
||||
--
|
||||
2.43.0
|
||||
2.45.0
|
||||
|
||||
|
||||
From bca05b7fdb8dcab21ef80db1d59dd5daa835d84b Mon Sep 17 00:00:00 2001
From f904abdd7a607282c2cdfd18288045cedfa28414 Mon Sep 17 00:00:00 2001
From: Charalampos Stratakis <cstratak@redhat.com>
Date: Fri, 29 Jan 2021 14:16:21 +0100
Subject: [PATCH 3/6] Use python's fall back crypto implementations only if we
Subject: [PATCH 3/5] Use python's fall back crypto implementations only if we
 are not in FIPS mode

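A short sketch of the constructor selection this patch adjusts (not taken from the patch; it assumes the private helper keeps its current name and runs on a non-FIPS interpreter, where both constructors are available and agree):

```python
import hashlib

# Public path: hashlib.new() prefers the OpenSSL-backed implementation.
h = hashlib.new("sha256", b"abc")

# Private helper used by the patched tests: returns the builtin (non-OpenSSL)
# constructor; with this patch the fallback to it is skipped in FIPS mode.
get_builtin = getattr(hashlib, "__get_builtin_constructor")
builtin_sha256 = get_builtin("sha256")

assert h.hexdigest() == builtin_sha256(b"abc").hexdigest()
```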
---
|
||||
@ -552,13 +552,13 @@ index dd61a9a..6031b02 100644
|
||||
get_builtin_constructor = getattr(hashlib,
|
||||
'__get_builtin_constructor')
|
||||
--
|
||||
2.43.0
|
||||
2.45.0
|
||||
|
||||
|
||||
From c9a79f0aafd28677e3e0b8a1f6410105a71ff071 Mon Sep 17 00:00:00 2001
From 9bf0a53b7831409613c44fd7feecb56476f5e5e7 Mon Sep 17 00:00:00 2001
From: Charalampos Stratakis <cstratak@redhat.com>
Date: Wed, 31 Jul 2019 15:43:43 +0200
Subject: [PATCH 4/6] Test equivalence of hashes for the various digests with
Subject: [PATCH 4/5] Test equivalence of hashes for the various digests with
 usedforsecurity=True/False

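A condensed version of the property being tested (not the actual test code): the digest value must not depend on `usedforsecurity`; only the availability of an algorithm under FIPS does.

```python
import hashlib

for name in ("sha256", "sha3_256", "blake2b"):
    default = hashlib.new(name, b"payload")                       # usedforsecurity=True
    relaxed = hashlib.new(name, b"payload", usedforsecurity=False)
    assert default.hexdigest() == relaxed.hexdigest()

print("digests match regardless of usedforsecurity")
```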
---
|
||||
@ -712,13 +712,13 @@ index 6031b02..5bd5297 100644
|
||||
class KDFTests(unittest.TestCase):
|
||||
|
||||
--
|
||||
2.43.0
|
||||
2.45.0
|
||||
|
||||
|
||||
From e972a838729ea84a0f2e0ca8e88ae1bfc129e7d8 Mon Sep 17 00:00:00 2001
From 8a76571515a64a57b4ea0586ae8376cf2ef0ac60 Mon Sep 17 00:00:00 2001
From: Petr Viktorin <pviktori@redhat.com>
Date: Mon, 26 Aug 2019 19:39:48 +0200
Subject: [PATCH 5/6] Guard against Python HMAC in FIPS mode
Subject: [PATCH 5/5] Guard against Python HMAC in FIPS mode

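For context, a minimal HMAC usage sketch (not from the patch): passing the digest explicitly keeps the construction unambiguous whether the MAC is computed by OpenSSL or by the pure-Python fallback that this patch guards against in FIPS mode.

```python
import hashlib
import hmac

mac = hmac.new(b"key", b"hash this!", digestmod=hashlib.sha256)
print(mac.hexdigest())

# hmac.digest() is the one-shot variant; same result.
assert hmac.digest(b"key", b"hash this!", "sha256") == mac.digest()
```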
---
|
||||
Lib/hmac.py | 13 +++++++++----
|
||||
@ -726,7 +726,7 @@ Subject: [PATCH 5/6] Guard against Python HMAC in FIPS mode
|
||||
2 files changed, 19 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/Lib/hmac.py b/Lib/hmac.py
|
||||
index 8b4f920..20ef96c 100644
|
||||
index 8b4eb2f..e8e4864 100644
|
||||
--- a/Lib/hmac.py
|
||||
+++ b/Lib/hmac.py
|
||||
@@ -16,8 +16,9 @@ else:
|
||||
@ -773,7 +773,7 @@ index 8b4f920..20ef96c 100644
|
||||
digest_cons = digestmod
|
||||
elif isinstance(digestmod, str):
|
||||
diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py
|
||||
index a39a2c4..b7b24ab 100644
|
||||
index 1502fba..7997073 100644
|
||||
--- a/Lib/test/test_hmac.py
|
||||
+++ b/Lib/test/test_hmac.py
|
||||
@@ -5,6 +5,7 @@ import hashlib
|
||||
@ -812,7 +812,7 @@ index a39a2c4..b7b24ab 100644
|
||||
@unittest.skipUnless(sha256_module is not None, 'need _sha256')
|
||||
def test_with_sha256_module(self):
|
||||
h = hmac.HMAC(b"key", b"hash this!", digestmod=sha256_module.sha256)
|
||||
@@ -481,6 +489,7 @@ class SanityTestCase(unittest.TestCase):
|
||||
@@ -489,6 +497,7 @@ class UpdateTestCase(unittest.TestCase):
|
||||
|
||||
class CopyTestCase(unittest.TestCase):
|
||||
|
||||
@ -820,7 +820,7 @@ index a39a2c4..b7b24ab 100644
|
||||
@hashlib_helper.requires_hashdigest('sha256')
|
||||
def test_attributes_old(self):
|
||||
# Testing if attributes are of same type.
|
||||
@@ -492,6 +501,7 @@ class CopyTestCase(unittest.TestCase):
|
||||
@@ -500,6 +509,7 @@ class CopyTestCase(unittest.TestCase):
|
||||
self.assertEqual(type(h1._outer), type(h2._outer),
|
||||
"Types of outer don't match.")
|
||||
|
||||
@ -829,270 +829,5 @@ index a39a2c4..b7b24ab 100644
|
||||
def test_realcopy_old(self):
|
||||
# Testing if the copy method created a real copy.
|
||||
--
|
||||
2.43.0
|
||||
|
||||
|
||||
From b12202196a78b877dcd32cfea273051b60038a41 Mon Sep 17 00:00:00 2001
From: Petr Viktorin <encukou@gmail.com>
Date: Wed, 25 Aug 2021 16:44:43 +0200
Subject: [PATCH 6/6] Disable hash-based PYCs in FIPS mode

If FIPS mode is on, we can't use siphash-based HMAC
(_Py_KeyedHash), so:

- Unchecked hash PYCs can be imported, but not created
- Checked hash PYCs can not be imported nor created
- The default mode is timestamp-based PYCs, even if
  SOURCE_DATE_EPOCH is set.

If FIPS mode is off, there are no changes in behavior.

Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1835169
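A hedged sketch of the resulting behavior (the file name is made up for the demo; on a non-FIPS interpreter both calls simply succeed):

```python
import pathlib
import py_compile

source = pathlib.Path("example_module.py")      # scratch file for the demo
source.write_text("print('hello')\n")

# Timestamp-based pycs keep working everywhere and stay the default in FIPS mode.
py_compile.compile(str(source),
                   invalidation_mode=py_compile.PycInvalidationMode.TIMESTAMP)

# The hash-based modes need _imp.source_hash() (SipHash), which this patch
# refuses in FIPS mode, so the call below raises ImportError there.
try:
    py_compile.compile(str(source),
                       invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH)
except ImportError as exc:
    print("hash-based pyc invalidation unavailable:", exc)
```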
---
|
||||
Lib/py_compile.py | 4 +++-
|
||||
Lib/test/support/__init__.py | 14 +++++++++++++
|
||||
Lib/test/test_cmd_line_script.py | 2 ++
|
||||
Lib/test/test_compileall.py | 11 +++++++++-
|
||||
.../test_importlib/source/test_file_loader.py | 6 ++++++
|
||||
Lib/test/test_py_compile.py | 11 ++++++++--
|
||||
Lib/test/test_zipimport.py | 2 ++
|
||||
Python/import.c | 20 +++++++++++++++++++
|
||||
8 files changed, 66 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/Lib/py_compile.py b/Lib/py_compile.py
|
||||
index 388614e..fd9a139 100644
|
||||
--- a/Lib/py_compile.py
|
||||
+++ b/Lib/py_compile.py
|
||||
@@ -70,7 +70,9 @@ class PycInvalidationMode(enum.Enum):
|
||||
|
||||
|
||||
def _get_default_invalidation_mode():
|
||||
- if os.environ.get('SOURCE_DATE_EPOCH'):
|
||||
+ import _hashlib
|
||||
+ if (os.environ.get('SOURCE_DATE_EPOCH') and not
|
||||
+ _hashlib.get_fips_mode()):
|
||||
return PycInvalidationMode.CHECKED_HASH
|
||||
else:
|
||||
return PycInvalidationMode.TIMESTAMP
|
||||
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
|
||||
index fd9265c..fcd1ea7 100644
|
||||
--- a/Lib/test/support/__init__.py
|
||||
+++ b/Lib/test/support/__init__.py
|
||||
@@ -2346,6 +2346,20 @@ def sleeping_retry(timeout, err_msg=None, /,
|
||||
delay = min(delay * 2, max_delay)
|
||||
|
||||
|
||||
+def fails_in_fips_mode(expected_error):
|
||||
+ import _hashlib
|
||||
+ if _hashlib.get_fips_mode():
|
||||
+ def _decorator(func):
|
||||
+ def _wrapper(self, *args, **kwargs):
|
||||
+ with self.assertRaises(expected_error):
|
||||
+ func(self, *args, **kwargs)
|
||||
+ return _wrapper
|
||||
+ else:
|
||||
+ def _decorator(func):
|
||||
+ return func
|
||||
+ return _decorator
|
||||
+
|
||||
+
|
||||
@contextlib.contextmanager
|
||||
def adjust_int_max_str_digits(max_digits):
|
||||
"""Temporarily change the integer string conversion length limit."""
|
||||
diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py
|
||||
index 1b58882..d6caff1 100644
|
||||
--- a/Lib/test/test_cmd_line_script.py
|
||||
+++ b/Lib/test/test_cmd_line_script.py
|
||||
@@ -286,6 +286,7 @@ class CmdLineTest(unittest.TestCase):
|
||||
self._check_script(zip_name, run_name, zip_name, zip_name, '',
|
||||
zipimport.zipimporter)
|
||||
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
def test_zipfile_compiled_checked_hash(self):
|
||||
with os_helper.temp_dir() as script_dir:
|
||||
script_name = _make_test_script(script_dir, '__main__')
|
||||
@@ -296,6 +297,7 @@ class CmdLineTest(unittest.TestCase):
|
||||
self._check_script(zip_name, run_name, zip_name, zip_name, '',
|
||||
zipimport.zipimporter)
|
||||
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
def test_zipfile_compiled_unchecked_hash(self):
|
||||
with os_helper.temp_dir() as script_dir:
|
||||
script_name = _make_test_script(script_dir, '__main__')
|
||||
diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py
|
||||
index 9cd92ad..4ec29a1 100644
|
||||
--- a/Lib/test/test_compileall.py
|
||||
+++ b/Lib/test/test_compileall.py
|
||||
@@ -806,14 +806,23 @@ class CommandLineTestsBase:
|
||||
out = self.assertRunOK('badfilename')
|
||||
self.assertRegex(out, b"Can't list 'badfilename'")
|
||||
|
||||
- def test_pyc_invalidation_mode(self):
|
||||
+ @support.fails_in_fips_mode(AssertionError)
|
||||
+ def test_pyc_invalidation_mode_checked(self):
|
||||
script_helper.make_script(self.pkgdir, 'f1', '')
|
||||
pyc = importlib.util.cache_from_source(
|
||||
os.path.join(self.pkgdir, 'f1.py'))
|
||||
+
|
||||
self.assertRunOK('--invalidation-mode=checked-hash', self.pkgdir)
|
||||
with open(pyc, 'rb') as fp:
|
||||
data = fp.read()
|
||||
self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11)
|
||||
+
|
||||
+ @support.fails_in_fips_mode(AssertionError)
|
||||
+ def test_pyc_invalidation_mode_unchecked(self):
|
||||
+ script_helper.make_script(self.pkgdir, 'f1', '')
|
||||
+ pyc = importlib.util.cache_from_source(
|
||||
+ os.path.join(self.pkgdir, 'f1.py'))
|
||||
+
|
||||
self.assertRunOK('--invalidation-mode=unchecked-hash', self.pkgdir)
|
||||
with open(pyc, 'rb') as fp:
|
||||
data = fp.read()
|
||||
diff --git a/Lib/test/test_importlib/source/test_file_loader.py b/Lib/test/test_importlib/source/test_file_loader.py
|
||||
index f35adec..62087c6 100644
|
||||
--- a/Lib/test/test_importlib/source/test_file_loader.py
|
||||
+++ b/Lib/test/test_importlib/source/test_file_loader.py
|
||||
@@ -16,6 +16,7 @@ import types
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
+from test import support
|
||||
from test.support.import_helper import make_legacy_pyc, unload
|
||||
|
||||
from test.test_py_compile import without_source_date_epoch
|
||||
@@ -237,6 +238,7 @@ class SimpleTest(abc.LoaderTests):
|
||||
loader.load_module('bad name')
|
||||
|
||||
@util.writes_bytecode_files
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
def test_checked_hash_based_pyc(self):
|
||||
with util.create_modules('_temp') as mapping:
|
||||
source = mapping['_temp']
|
||||
@@ -268,6 +270,7 @@ class SimpleTest(abc.LoaderTests):
|
||||
)
|
||||
|
||||
@util.writes_bytecode_files
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
def test_overridden_checked_hash_based_pyc(self):
|
||||
with util.create_modules('_temp') as mapping, \
|
||||
unittest.mock.patch('_imp.check_hash_based_pycs', 'never'):
|
||||
@@ -293,6 +296,7 @@ class SimpleTest(abc.LoaderTests):
|
||||
self.assertEqual(mod.state, 'old')
|
||||
|
||||
@util.writes_bytecode_files
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
def test_unchecked_hash_based_pyc(self):
|
||||
with util.create_modules('_temp') as mapping:
|
||||
source = mapping['_temp']
|
||||
@@ -323,6 +327,7 @@ class SimpleTest(abc.LoaderTests):
|
||||
)
|
||||
|
||||
@util.writes_bytecode_files
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
def test_overridden_unchecked_hash_based_pyc(self):
|
||||
with util.create_modules('_temp') as mapping, \
|
||||
unittest.mock.patch('_imp.check_hash_based_pycs', 'always'):
|
||||
@@ -432,6 +437,7 @@ class BadBytecodeTest:
|
||||
del_source=del_source)
|
||||
test('_temp', mapping, bc_path)
|
||||
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
def _test_partial_hash(self, test, *, del_source=False):
|
||||
with util.create_modules('_temp') as mapping:
|
||||
bc_path = self.manipulate_bytecode(
|
||||
diff --git a/Lib/test/test_py_compile.py b/Lib/test/test_py_compile.py
|
||||
index c4e6551..81fd962 100644
|
||||
--- a/Lib/test/test_py_compile.py
|
||||
+++ b/Lib/test/test_py_compile.py
|
||||
@@ -141,13 +141,16 @@ class PyCompileTestsBase:
|
||||
importlib.util.cache_from_source(bad_coding)))
|
||||
|
||||
def test_source_date_epoch(self):
|
||||
+ import _hashlib
|
||||
py_compile.compile(self.source_path, self.pyc_path)
|
||||
self.assertTrue(os.path.exists(self.pyc_path))
|
||||
self.assertFalse(os.path.exists(self.cache_path))
|
||||
with open(self.pyc_path, 'rb') as fp:
|
||||
flags = importlib._bootstrap_external._classify_pyc(
|
||||
fp.read(), 'test', {})
|
||||
- if os.environ.get('SOURCE_DATE_EPOCH'):
|
||||
+ if _hashlib.get_fips_mode():
|
||||
+ expected_flags = 0b00
|
||||
+ elif os.environ.get('SOURCE_DATE_EPOCH'):
|
||||
expected_flags = 0b11
|
||||
else:
|
||||
expected_flags = 0b00
|
||||
@@ -178,7 +181,8 @@ class PyCompileTestsBase:
|
||||
# Specifying optimized bytecode should lead to a path reflecting that.
|
||||
self.assertIn('opt-2', py_compile.compile(self.source_path, optimize=2))
|
||||
|
||||
- def test_invalidation_mode(self):
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
+ def test_invalidation_mode_checked(self):
|
||||
py_compile.compile(
|
||||
self.source_path,
|
||||
invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH,
|
||||
@@ -187,6 +191,9 @@ class PyCompileTestsBase:
|
||||
flags = importlib._bootstrap_external._classify_pyc(
|
||||
fp.read(), 'test', {})
|
||||
self.assertEqual(flags, 0b11)
|
||||
+
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
+ def test_invalidation_mode_unchecked(self):
|
||||
py_compile.compile(
|
||||
self.source_path,
|
||||
invalidation_mode=py_compile.PycInvalidationMode.UNCHECKED_HASH,
|
||||
diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py
|
||||
index 14c1971..bcd1466 100644
|
||||
--- a/Lib/test/test_zipimport.py
|
||||
+++ b/Lib/test/test_zipimport.py
|
||||
@@ -190,6 +190,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
|
||||
TESTMOD + pyc_ext: (NOW, test_pyc)}
|
||||
self.doTest(pyc_ext, files, TESTMOD)
|
||||
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
def testUncheckedHashBasedPyc(self):
|
||||
source = b"state = 'old'"
|
||||
source_hash = importlib.util.source_hash(source)
|
||||
@@ -204,6 +205,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
|
||||
self.assertEqual(mod.state, 'old')
|
||||
self.doTest(None, files, TESTMOD, call=check)
|
||||
|
||||
+ @support.fails_in_fips_mode(ImportError)
|
||||
@unittest.mock.patch('_imp.check_hash_based_pycs', 'always')
|
||||
def test_checked_hash_based_change_pyc(self):
|
||||
source = b"state = 'old'"
|
||||
diff --git a/Python/import.c b/Python/import.c
|
||||
index 54232a1..236786b 100644
|
||||
--- a/Python/import.c
|
||||
+++ b/Python/import.c
|
||||
@@ -3829,6 +3829,26 @@ static PyObject *
|
||||
_imp_source_hash_impl(PyObject *module, long key, Py_buffer *source)
|
||||
/*[clinic end generated code: output=edb292448cf399ea input=9aaad1e590089789]*/
|
||||
{
|
||||
+ PyObject *_hashlib = PyImport_ImportModule("_hashlib");
|
||||
+ if (_hashlib == NULL) {
|
||||
+ return NULL;
|
||||
+ }
|
||||
+ PyObject *fips_mode_obj = PyObject_CallMethod(_hashlib, "get_fips_mode", NULL);
|
||||
+ Py_DECREF(_hashlib);
|
||||
+ if (fips_mode_obj == NULL) {
|
||||
+ return NULL;
|
||||
+ }
|
||||
+ int fips_mode = PyObject_IsTrue(fips_mode_obj);
|
||||
+ Py_DECREF(fips_mode_obj);
|
||||
+ if (fips_mode < 0) {
|
||||
+ return NULL;
|
||||
+ }
|
||||
+ if (fips_mode) {
|
||||
+ PyErr_SetString(
|
||||
+ PyExc_ImportError,
|
||||
+ "hash-based PYC validation (siphash24) not available in FIPS mode");
|
||||
+ return NULL;
|
||||
+ };
|
||||
union {
|
||||
uint64_t x;
|
||||
char data[sizeof(uint64_t)];
|
||||
--
|
||||
2.43.0
|
||||
2.45.0
|
||||
|
||||
|
@ -0,0 +1,88 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka <storchaka@gmail.com>
Date: Sun, 11 Feb 2024 12:08:39 +0200
Subject: [PATCH] 00422: gh-115133: Fix tests for XMLPullParser with Expat
 2.6.0

Feeding the parser by too small chunks defers parsing to prevent
CVE-2023-52425. Future versions of Expat may be more reactive.

(cherry picked from commit 4a08e7b3431cd32a0daf22a33421cd3035343dc4)
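To make the deferral concrete, an illustrative (non-test) snippet is shown below; it stays deterministic with any Expat version because it only reads events after close():

```python
import xml.etree.ElementTree as ET

def feed_in_chunks(data, chunk_size):
    parser = ET.XMLPullParser()
    for i in range(0, len(data), chunk_size):
        parser.feed(data[i:i + chunk_size])
    # With Expat >= 2.6.0, very small chunks (the 1- and 5-byte cases the
    # test now marks as expected failures) may be buffered rather than
    # parsed immediately, so events can lag behind feed() calls.
    parser.close()
    return [(event, elem.tag) for event, elem in parser.read_events()]

doc = "<root><element key='value'>text</element></root>"
print(feed_in_chunks(doc, 5))   # [('end', 'element'), ('end', 'root')]
```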
---
|
||||
Lib/test/test_xml_etree.py | 58 ++++++++++++-------
|
||||
...-02-08-14-21-28.gh-issue-115133.ycl4ko.rst | 2 +
|
||||
2 files changed, 38 insertions(+), 22 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst
|
||||
|
||||
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
|
||||
index b50898f1d1..6fb888cb21 100644
|
||||
--- a/Lib/test/test_xml_etree.py
|
||||
+++ b/Lib/test/test_xml_etree.py
|
||||
@@ -1400,28 +1400,37 @@ class XMLPullParserTest(unittest.TestCase):
|
||||
self.assertEqual([(action, elem.tag) for action, elem in events],
|
||||
expected)
|
||||
|
||||
- def test_simple_xml(self):
|
||||
- for chunk_size in (None, 1, 5):
|
||||
- with self.subTest(chunk_size=chunk_size):
|
||||
- parser = ET.XMLPullParser()
|
||||
- self.assert_event_tags(parser, [])
|
||||
- self._feed(parser, "<!-- comment -->\n", chunk_size)
|
||||
- self.assert_event_tags(parser, [])
|
||||
- self._feed(parser,
|
||||
- "<root>\n <element key='value'>text</element",
|
||||
- chunk_size)
|
||||
- self.assert_event_tags(parser, [])
|
||||
- self._feed(parser, ">\n", chunk_size)
|
||||
- self.assert_event_tags(parser, [('end', 'element')])
|
||||
- self._feed(parser, "<element>text</element>tail\n", chunk_size)
|
||||
- self._feed(parser, "<empty-element/>\n", chunk_size)
|
||||
- self.assert_event_tags(parser, [
|
||||
- ('end', 'element'),
|
||||
- ('end', 'empty-element'),
|
||||
- ])
|
||||
- self._feed(parser, "</root>\n", chunk_size)
|
||||
- self.assert_event_tags(parser, [('end', 'root')])
|
||||
- self.assertIsNone(parser.close())
|
||||
+ def test_simple_xml(self, chunk_size=None):
|
||||
+ parser = ET.XMLPullParser()
|
||||
+ self.assert_event_tags(parser, [])
|
||||
+ self._feed(parser, "<!-- comment -->\n", chunk_size)
|
||||
+ self.assert_event_tags(parser, [])
|
||||
+ self._feed(parser,
|
||||
+ "<root>\n <element key='value'>text</element",
|
||||
+ chunk_size)
|
||||
+ self.assert_event_tags(parser, [])
|
||||
+ self._feed(parser, ">\n", chunk_size)
|
||||
+ self.assert_event_tags(parser, [('end', 'element')])
|
||||
+ self._feed(parser, "<element>text</element>tail\n", chunk_size)
|
||||
+ self._feed(parser, "<empty-element/>\n", chunk_size)
|
||||
+ self.assert_event_tags(parser, [
|
||||
+ ('end', 'element'),
|
||||
+ ('end', 'empty-element'),
|
||||
+ ])
|
||||
+ self._feed(parser, "</root>\n", chunk_size)
|
||||
+ self.assert_event_tags(parser, [('end', 'root')])
|
||||
+ self.assertIsNone(parser.close())
|
||||
+
|
||||
+ @unittest.expectedFailure
|
||||
+ def test_simple_xml_chunk_1(self):
|
||||
+ self.test_simple_xml(chunk_size=1)
|
||||
+
|
||||
+ @unittest.expectedFailure
|
||||
+ def test_simple_xml_chunk_5(self):
|
||||
+ self.test_simple_xml(chunk_size=5)
|
||||
+
|
||||
+ def test_simple_xml_chunk_22(self):
|
||||
+ self.test_simple_xml(chunk_size=22)
|
||||
|
||||
def test_feed_while_iterating(self):
|
||||
parser = ET.XMLPullParser()
|
||||
diff --git a/Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst b/Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst
|
||||
new file mode 100644
|
||||
index 0000000000..6f1015235c
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2024-02-08-14-21-28.gh-issue-115133.ycl4ko.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Fix tests for :class:`~xml.etree.ElementTree.XMLPullParser` with Expat
|
||||
+2.6.0.
|
345
SOURCES/00435-CVE-2024-6923.patch
Normal file
@ -0,0 +1,345 @@
|
||||
From 7a25b2f511054dd2011308275bb24e914e1977af Mon Sep 17 00:00:00 2001
From: Petr Viktorin <encukou@gmail.com>
Date: Tue, 6 Aug 2024 19:07:19 +0200
Subject: [PATCH] gh-121650: Encode newlines in headers, and verify headers are
 sound (GH-122233) (#122599)

* gh-121650: Encode newlines in headers, and verify headers are sound (GH-122233)

- Encode header parts that contain newlines

  Per RFC 2047:

  > [...] these encoding schemes allow the
  > encoding of arbitrary octet values, mail readers that implement this
  > decoding should also ensure that display of the decoded data on the
  > recipient's terminal will not cause unwanted side-effects

  It seems that the "quoted-word" scheme is a valid way to include
  a newline character in a header value, just like we already allow
  undecodable bytes or control characters.
  They do need to be properly quoted when serialized to text, though.

- Verify that email headers are well-formed

  This should fail for custom fold() implementations that aren't careful
  about newlines.

Co-authored-by: Bas Bloemsaat <bas@bloemsaat.org>
Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
(cherry picked from commit 097633981879b3c9de9a1dd120d3aa585ecc2384)

* Document changes as made in 3.12.5
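A small demonstration of the first point, mirroring the new test_keep_encoded_newlines test rather than quoting it: the Subject below decodes to a value containing a newline followed by a forged "Bcc:" header.

```python
import email
import email.policy

raw = (
    "To: nobody@example.com\n"
    "Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com\n"
    "\n"
    "Body\n"
)
msg = email.message_from_string(raw, policy=email.policy.default)

# After this change the encoded word is preserved on output instead of being
# unfolded into a real newline, so no extra "Bcc:" header line appears.
print(msg.as_string())
```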
---
|
||||
Doc/library/email.errors.rst | 7 +++
|
||||
Doc/library/email.policy.rst | 18 ++++++
|
||||
Lib/email/_header_value_parser.py | 12 +++-
|
||||
Lib/email/_policybase.py | 8 +++
|
||||
Lib/email/errors.py | 4 ++
|
||||
Lib/email/generator.py | 13 +++-
|
||||
Lib/test/test_email/test_generator.py | 62 +++++++++++++++++++
|
||||
Lib/test/test_email/test_policy.py | 26 ++++++++
|
||||
...-07-27-16-10-41.gh-issue-121650.nf6oc9.rst | 5 ++
|
||||
9 files changed, 151 insertions(+), 4 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst
|
||||
|
||||
diff --git a/Doc/library/email.errors.rst b/Doc/library/email.errors.rst
|
||||
index 56aea65..27b0481 100644
|
||||
--- a/Doc/library/email.errors.rst
|
||||
+++ b/Doc/library/email.errors.rst
|
||||
@@ -58,6 +58,13 @@ The following exception classes are defined in the :mod:`email.errors` module:
|
||||
:class:`~email.mime.nonmultipart.MIMENonMultipart` (e.g.
|
||||
:class:`~email.mime.image.MIMEImage`).
|
||||
|
||||
+
|
||||
+.. exception:: HeaderWriteError()
|
||||
+
|
||||
+ Raised when an error occurs when the :mod:`~email.generator` outputs
|
||||
+ headers.
|
||||
+
|
||||
+
|
||||
.. exception:: MessageDefect()
|
||||
|
||||
This is the base class for all defects found when parsing email messages.
|
||||
diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst
|
||||
index fd47dd0..6ec6e4d 100644
|
||||
--- a/Doc/library/email.policy.rst
|
||||
+++ b/Doc/library/email.policy.rst
|
||||
@@ -230,6 +230,24 @@ added matters. To illustrate::
|
||||
|
||||
.. versionadded:: 3.6
|
||||
|
||||
+
|
||||
+ .. attribute:: verify_generated_headers
|
||||
+
|
||||
+ If ``True`` (the default), the generator will raise
|
||||
+ :exc:`~email.errors.HeaderWriteError` instead of writing a header
|
||||
+ that is improperly folded or delimited, such that it would
|
||||
+ be parsed as multiple headers or joined with adjacent data.
|
||||
+ Such headers can be generated by custom header classes or bugs
|
||||
+ in the ``email`` module.
|
||||
+
|
||||
+ As it's a security feature, this defaults to ``True`` even in the
|
||||
+ :class:`~email.policy.Compat32` policy.
|
||||
+ For backwards compatible, but unsafe, behavior, it must be set to
|
||||
+ ``False`` explicitly.
|
||||
+
|
||||
+ .. versionadded:: 3.12.5
|
||||
+
|
||||
+
|
||||
The following :class:`Policy` method is intended to be called by code using
|
||||
the email library to create policy instances with custom settings:
|
||||
|
||||
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
|
||||
index 0d6bd81..362edc5 100644
|
||||
--- a/Lib/email/_header_value_parser.py
|
||||
+++ b/Lib/email/_header_value_parser.py
|
||||
@@ -92,6 +92,8 @@ TOKEN_ENDS = TSPECIALS | WSP
|
||||
ASPECIALS = TSPECIALS | set("*'%")
|
||||
ATTRIBUTE_ENDS = ASPECIALS | WSP
|
||||
EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%')
|
||||
+NLSET = {'\n', '\r'}
|
||||
+SPECIALSNL = SPECIALS | NLSET
|
||||
|
||||
def quote_string(value):
|
||||
return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"'
|
||||
@@ -2776,9 +2778,13 @@ def _refold_parse_tree(parse_tree, *, policy):
|
||||
wrap_as_ew_blocked -= 1
|
||||
continue
|
||||
tstr = str(part)
|
||||
- if part.token_type == 'ptext' and set(tstr) & SPECIALS:
|
||||
- # Encode if tstr contains special characters.
|
||||
- want_encoding = True
|
||||
+ if not want_encoding:
|
||||
+ if part.token_type == 'ptext':
|
||||
+ # Encode if tstr contains special characters.
|
||||
+ want_encoding = not SPECIALSNL.isdisjoint(tstr)
|
||||
+ else:
|
||||
+ # Encode if tstr contains newlines.
|
||||
+ want_encoding = not NLSET.isdisjoint(tstr)
|
||||
try:
|
||||
tstr.encode(encoding)
|
||||
charset = encoding
|
||||
diff --git a/Lib/email/_policybase.py b/Lib/email/_policybase.py
|
||||
index c9cbadd..d1f4821 100644
|
||||
--- a/Lib/email/_policybase.py
|
||||
+++ b/Lib/email/_policybase.py
|
||||
@@ -157,6 +157,13 @@ class Policy(_PolicyBase, metaclass=abc.ABCMeta):
|
||||
message_factory -- the class to use to create new message objects.
|
||||
If the value is None, the default is Message.
|
||||
|
||||
+ verify_generated_headers
|
||||
+ -- if true, the generator verifies that each header
|
||||
+ they are properly folded, so that a parser won't
|
||||
+ treat it as multiple headers, start-of-body, or
|
||||
+ part of another header.
|
||||
+ This is a check against custom Header & fold()
|
||||
+ implementations.
|
||||
"""
|
||||
|
||||
raise_on_defect = False
|
||||
@@ -165,6 +172,7 @@ class Policy(_PolicyBase, metaclass=abc.ABCMeta):
|
||||
max_line_length = 78
|
||||
mangle_from_ = False
|
||||
message_factory = None
|
||||
+ verify_generated_headers = True
|
||||
|
||||
def handle_defect(self, obj, defect):
|
||||
"""Based on policy, either raise defect or call register_defect.
|
||||
diff --git a/Lib/email/errors.py b/Lib/email/errors.py
|
||||
index 3ad0056..02aa5ec 100644
|
||||
--- a/Lib/email/errors.py
|
||||
+++ b/Lib/email/errors.py
|
||||
@@ -29,6 +29,10 @@ class CharsetError(MessageError):
|
||||
"""An illegal charset was given."""
|
||||
|
||||
|
||||
+class HeaderWriteError(MessageError):
|
||||
+ """Error while writing headers."""
|
||||
+
|
||||
+
|
||||
# These are parsing defects which the parser was able to work around.
|
||||
class MessageDefect(ValueError):
|
||||
"""Base class for a message defect."""
|
||||
diff --git a/Lib/email/generator.py b/Lib/email/generator.py
|
||||
index 7ccbe10..ea87ad2 100644
|
||||
--- a/Lib/email/generator.py
|
||||
+++ b/Lib/email/generator.py
|
||||
@@ -14,12 +14,14 @@ import random
|
||||
from copy import deepcopy
|
||||
from io import StringIO, BytesIO
|
||||
from email.utils import _has_surrogates
|
||||
+from email.errors import HeaderWriteError
|
||||
|
||||
UNDERSCORE = '_'
|
||||
NL = '\n' # XXX: no longer used by the code below.
|
||||
|
||||
NLCRE = re.compile(r'\r\n|\r|\n')
|
||||
fcre = re.compile(r'^From ', re.MULTILINE)
|
||||
+NEWLINE_WITHOUT_FWSP = re.compile(r'\r\n[^ \t]|\r[^ \n\t]|\n[^ \t]')
|
||||
|
||||
|
||||
class Generator:
|
||||
@@ -222,7 +224,16 @@ class Generator:
|
||||
|
||||
def _write_headers(self, msg):
|
||||
for h, v in msg.raw_items():
|
||||
- self.write(self.policy.fold(h, v))
|
||||
+ folded = self.policy.fold(h, v)
|
||||
+ if self.policy.verify_generated_headers:
|
||||
+ linesep = self.policy.linesep
|
||||
+ if not folded.endswith(self.policy.linesep):
|
||||
+ raise HeaderWriteError(
|
||||
+ f'folded header does not end with {linesep!r}: {folded!r}')
|
||||
+ if NEWLINE_WITHOUT_FWSP.search(folded.removesuffix(linesep)):
|
||||
+ raise HeaderWriteError(
|
||||
+ f'folded header contains newline: {folded!r}')
|
||||
+ self.write(folded)
|
||||
# A blank line always separates headers from body
|
||||
self.write(self._NL)
|
||||
|
||||
diff --git a/Lib/test/test_email/test_generator.py b/Lib/test/test_email/test_generator.py
|
||||
index 89e7ede..d29400f 100644
|
||||
--- a/Lib/test/test_email/test_generator.py
|
||||
+++ b/Lib/test/test_email/test_generator.py
|
||||
@@ -6,6 +6,7 @@ from email.message import EmailMessage
|
||||
from email.generator import Generator, BytesGenerator
|
||||
from email.headerregistry import Address
|
||||
from email import policy
|
||||
+import email.errors
|
||||
from test.test_email import TestEmailBase, parameterize
|
||||
|
||||
|
||||
@@ -216,6 +217,44 @@ class TestGeneratorBase:
|
||||
g.flatten(msg)
|
||||
self.assertEqual(s.getvalue(), self.typ(expected))
|
||||
|
||||
+ def test_keep_encoded_newlines(self):
|
||||
+ msg = self.msgmaker(self.typ(textwrap.dedent("""\
|
||||
+ To: nobody
|
||||
+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com
|
||||
+
|
||||
+ None
|
||||
+ """)))
|
||||
+ expected = textwrap.dedent("""\
|
||||
+ To: nobody
|
||||
+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com
|
||||
+
|
||||
+ None
|
||||
+ """)
|
||||
+ s = self.ioclass()
|
||||
+ g = self.genclass(s, policy=self.policy.clone(max_line_length=80))
|
||||
+ g.flatten(msg)
|
||||
+ self.assertEqual(s.getvalue(), self.typ(expected))
|
||||
+
|
||||
+ def test_keep_long_encoded_newlines(self):
|
||||
+ msg = self.msgmaker(self.typ(textwrap.dedent("""\
|
||||
+ To: nobody
|
||||
+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com
|
||||
+
|
||||
+ None
|
||||
+ """)))
|
||||
+ expected = textwrap.dedent("""\
|
||||
+ To: nobody
|
||||
+ Subject: Bad subject
|
||||
+ =?utf-8?q?=0A?=Bcc:
|
||||
+ injection@example.com
|
||||
+
|
||||
+ None
|
||||
+ """)
|
||||
+ s = self.ioclass()
|
||||
+ g = self.genclass(s, policy=self.policy.clone(max_line_length=30))
|
||||
+ g.flatten(msg)
|
||||
+ self.assertEqual(s.getvalue(), self.typ(expected))
|
||||
+
|
||||
|
||||
class TestGenerator(TestGeneratorBase, TestEmailBase):
|
||||
|
||||
@@ -224,6 +263,29 @@ class TestGenerator(TestGeneratorBase, TestEmailBase):
|
||||
ioclass = io.StringIO
|
||||
typ = str
|
||||
|
||||
+ def test_verify_generated_headers(self):
|
||||
+ """gh-121650: by default the generator prevents header injection"""
|
||||
+ class LiteralHeader(str):
|
||||
+ name = 'Header'
|
||||
+ def fold(self, **kwargs):
|
||||
+ return self
|
||||
+
|
||||
+ for text in (
|
||||
+ 'Value\r\nBad Injection\r\n',
|
||||
+ 'NoNewLine'
|
||||
+ ):
|
||||
+ with self.subTest(text=text):
|
||||
+ message = message_from_string(
|
||||
+ "Header: Value\r\n\r\nBody",
|
||||
+ policy=self.policy,
|
||||
+ )
|
||||
+
|
||||
+ del message['Header']
|
||||
+ message['Header'] = LiteralHeader(text)
|
||||
+
|
||||
+ with self.assertRaises(email.errors.HeaderWriteError):
|
||||
+ message.as_string()
|
||||
+
|
||||
|
||||
class TestBytesGenerator(TestGeneratorBase, TestEmailBase):
|
||||
|
||||
diff --git a/Lib/test/test_email/test_policy.py b/Lib/test/test_email/test_policy.py
|
||||
index e87c275..ff1ddf7 100644
|
||||
--- a/Lib/test/test_email/test_policy.py
|
||||
+++ b/Lib/test/test_email/test_policy.py
|
||||
@@ -26,6 +26,7 @@ class PolicyAPITests(unittest.TestCase):
|
||||
'raise_on_defect': False,
|
||||
'mangle_from_': True,
|
||||
'message_factory': None,
|
||||
+ 'verify_generated_headers': True,
|
||||
}
|
||||
# These default values are the ones set on email.policy.default.
|
||||
# If any of these defaults change, the docs must be updated.
|
||||
@@ -277,6 +278,31 @@ class PolicyAPITests(unittest.TestCase):
|
||||
with self.assertRaises(email.errors.HeaderParseError):
|
||||
policy.fold("Subject", subject)
|
||||
|
||||
+ def test_verify_generated_headers(self):
|
||||
+ """Turning protection off allows header injection"""
|
||||
+ policy = email.policy.default.clone(verify_generated_headers=False)
|
||||
+ for text in (
|
||||
+ 'Header: Value\r\nBad: Injection\r\n',
|
||||
+ 'Header: NoNewLine'
|
||||
+ ):
|
||||
+ with self.subTest(text=text):
|
||||
+ message = email.message_from_string(
|
||||
+ "Header: Value\r\n\r\nBody",
|
||||
+ policy=policy,
|
||||
+ )
|
||||
+ class LiteralHeader(str):
|
||||
+ name = 'Header'
|
||||
+ def fold(self, **kwargs):
|
||||
+ return self
|
||||
+
|
||||
+ del message['Header']
|
||||
+ message['Header'] = LiteralHeader(text)
|
||||
+
|
||||
+ self.assertEqual(
|
||||
+ message.as_string(),
|
||||
+ f"{text}\nBody",
|
||||
+ )
|
||||
+
|
||||
# XXX: Need subclassing tests.
|
||||
# For adding subclassed objects, make sure the usual rules apply (subclass
|
||||
# wins), but that the order still works (right overrides left).
|
||||
diff --git a/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst b/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst
|
||||
new file mode 100644
|
||||
index 0000000..83dd28d
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst
|
||||
@@ -0,0 +1,5 @@
|
||||
+:mod:`email` headers with embedded newlines are now quoted on output. The
|
||||
+:mod:`~email.generator` will now refuse to serialize (write) headers that
|
||||
+are unsafely folded or delimited; see
|
||||
+:attr:`~email.policy.Policy.verify_generated_headers`. (Contributed by Bas
|
||||
+Bloemsaat and Petr Viktorin in :gh:`121650`.)
|
||||
--
|
||||
2.45.2
|
||||
|
@ -0,0 +1,121 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
 <31488909+miss-islington@users.noreply.github.com>
Date: Mon, 12 Aug 2024 02:35:17 +0200
Subject: [PATCH] 00436: [CVE-2024-8088] gh-122905: Sanitize names in
 zipfile.Path.

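For context (this snippet is not part of the patch), the user-visible effect can be sketched like this: archive entries with absolute or parent-relative names are exposed through zipfile.Path under sanitized names once the patch is applied.

```python
import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("/absolute.txt", b"content")
    zf.writestr("../escape.txt", b"content")

root = zipfile.Path(zipfile.ZipFile(buf))
# With the patch applied: ['absolute.txt', 'escape.txt']
print([entry.name for entry in root.iterdir()])
```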
---
|
||||
Lib/test/test_zipfile/_path/test_path.py | 17 +++++
|
||||
Lib/zipfile/_path/__init__.py | 64 ++++++++++++++++++-
|
||||
...-08-11-14-08-04.gh-issue-122905.7tDsxA.rst | 1 +
|
||||
3 files changed, 81 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst
|
||||
|
||||
diff --git a/Lib/test/test_zipfile/_path/test_path.py b/Lib/test/test_zipfile/_path/test_path.py
|
||||
index 06d5aab69b..90885dbbe3 100644
|
||||
--- a/Lib/test/test_zipfile/_path/test_path.py
|
||||
+++ b/Lib/test/test_zipfile/_path/test_path.py
|
||||
@@ -577,3 +577,20 @@ def test_getinfo_missing(self, alpharep):
|
||||
zipfile.Path(alpharep)
|
||||
with self.assertRaises(KeyError):
|
||||
alpharep.getinfo('does-not-exist')
|
||||
+
|
||||
+ def test_malformed_paths(self):
|
||||
+ """
|
||||
+ Path should handle malformed paths.
|
||||
+ """
|
||||
+ data = io.BytesIO()
|
||||
+ zf = zipfile.ZipFile(data, "w")
|
||||
+ zf.writestr("/one-slash.txt", b"content")
|
||||
+ zf.writestr("//two-slash.txt", b"content")
|
||||
+ zf.writestr("../parent.txt", b"content")
|
||||
+ zf.filename = ''
|
||||
+ root = zipfile.Path(zf)
|
||||
+ assert list(map(str, root.iterdir())) == [
|
||||
+ 'one-slash.txt',
|
||||
+ 'two-slash.txt',
|
||||
+ 'parent.txt',
|
||||
+ ]
|
||||
diff --git a/Lib/zipfile/_path/__init__.py b/Lib/zipfile/_path/__init__.py
|
||||
index 78c413563b..42f9fded21 100644
|
||||
--- a/Lib/zipfile/_path/__init__.py
|
||||
+++ b/Lib/zipfile/_path/__init__.py
|
||||
@@ -83,7 +83,69 @@ def __setstate__(self, state):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
-class CompleteDirs(InitializedState, zipfile.ZipFile):
|
||||
+class SanitizedNames:
|
||||
+ """
|
||||
+ ZipFile mix-in to ensure names are sanitized.
|
||||
+ """
|
||||
+
|
||||
+ def namelist(self):
|
||||
+ return list(map(self._sanitize, super().namelist()))
|
||||
+
|
||||
+ @staticmethod
|
||||
+ def _sanitize(name):
|
||||
+ r"""
|
||||
+ Ensure a relative path with posix separators and no dot names.
|
||||
+
|
||||
+ Modeled after
|
||||
+ https://github.com/python/cpython/blob/bcc1be39cb1d04ad9fc0bd1b9193d3972835a57c/Lib/zipfile/__init__.py#L1799-L1813
|
||||
+ but provides consistent cross-platform behavior.
|
||||
+
|
||||
+ >>> san = SanitizedNames._sanitize
|
||||
+ >>> san('/foo/bar')
|
||||
+ 'foo/bar'
|
||||
+ >>> san('//foo.txt')
|
||||
+ 'foo.txt'
|
||||
+ >>> san('foo/.././bar.txt')
|
||||
+ 'foo/bar.txt'
|
||||
+ >>> san('foo../.bar.txt')
|
||||
+ 'foo../.bar.txt'
|
||||
+ >>> san('\\foo\\bar.txt')
|
||||
+ 'foo/bar.txt'
|
||||
+ >>> san('D:\\foo.txt')
|
||||
+ 'D/foo.txt'
|
||||
+ >>> san('\\\\server\\share\\file.txt')
|
||||
+ 'server/share/file.txt'
|
||||
+ >>> san('\\\\?\\GLOBALROOT\\Volume3')
|
||||
+ '?/GLOBALROOT/Volume3'
|
||||
+ >>> san('\\\\.\\PhysicalDrive1\\root')
|
||||
+ 'PhysicalDrive1/root'
|
||||
+
|
||||
+ Retain any trailing slash.
|
||||
+ >>> san('abc/')
|
||||
+ 'abc/'
|
||||
+
|
||||
+ Raises a ValueError if the result is empty.
|
||||
+ >>> san('../..')
|
||||
+ Traceback (most recent call last):
|
||||
+ ...
|
||||
+ ValueError: Empty filename
|
||||
+ """
|
||||
+
|
||||
+ def allowed(part):
|
||||
+ return part and part not in {'..', '.'}
|
||||
+
|
||||
+ # Remove the drive letter.
|
||||
+ # Don't use ntpath.splitdrive, because that also strips UNC paths
|
||||
+ bare = re.sub('^([A-Z]):', r'\1', name, flags=re.IGNORECASE)
|
||||
+ clean = bare.replace('\\', '/')
|
||||
+ parts = clean.split('/')
|
||||
+ joined = '/'.join(filter(allowed, parts))
|
||||
+ if not joined:
|
||||
+ raise ValueError("Empty filename")
|
||||
+ return joined + '/' * name.endswith('/')
|
||||
+
|
||||
+
|
||||
+class CompleteDirs(InitializedState, SanitizedNames, zipfile.ZipFile):
|
||||
"""
|
||||
A ZipFile subclass that ensures that implied directories
|
||||
are always included in the namelist.
|
||||
diff --git a/Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst b/Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst
|
||||
new file mode 100644
|
||||
index 0000000000..1be44c906c
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2024-08-11-14-08-04.gh-issue-122905.7tDsxA.rst
|
||||
@@ -0,0 +1 @@
|
||||
+:class:`zipfile.Path` objects now sanitize names from the zipfile.
|
245
SOURCES/00437-CVE-2024-6232.patch
Normal file
@ -0,0 +1,245 @@
|
||||
From 4eaf4891c12589e3c7bdad5f5b076e4c8392dd06 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
 <31488909+miss-islington@users.noreply.github.com>
Date: Sun, 1 Sep 2024 00:35:24 +0200
Subject: [PATCH] [3.12] gh-121285: Remove backtracking when parsing tarfile
 headers (GH-121286) (GH-123543)

gh-121285: Remove backtracking when parsing tarfile headers (GH-121286)

* Remove backtracking when parsing tarfile headers
* Rewrite PAX header parsing to be stricter
* Optimize parsing of GNU extended sparse headers v0.0

(cherry picked from commit 34ddb64d088dd7ccc321f6103d23153256caa5d4)

Co-authored-by: Seth Michael Larson <seth@python.org>
Co-authored-by: Kirill Podoprigora <kirill.bast9@mail.ru>
Co-authored-by: Gregory P. Smith <greg@krypto.org>
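The core of the stricter parsing can be illustrated in a few lines; the regex is the one added by the patch, while the rest is a simplified walk through a single PAX record, not the patch's code itself.

```python
import re

# Bounded "digits + space" length prefix: no backtracking, unlike the old
# r"(\d+) ([^=]+)=" scan over the whole buffer.
_header_length_prefix_re = re.compile(br"([0-9]{1,20}) ")

buf = b"11 foo=bar\n"               # one PAX record: length, keyword, value
match = _header_length_prefix_re.match(buf, 0)
length = int(match.group(1))        # 11, the size of the whole record
end = match.start(1) + length - 1   # index of the record's trailing b"\n"
keyword, _, value = buf[match.end(1) + 1:end].partition(b"=")
assert (keyword, value, buf[end]) == (b"foo", b"bar", 0x0A)
print(keyword, value)
```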
---
|
||||
Lib/tarfile.py | 103 ++++++++++++------
|
||||
Lib/test/test_tarfile.py | 42 +++++++
|
||||
...-07-02-13-39-20.gh-issue-121285.hrl-yI.rst | 2 +
|
||||
3 files changed, 112 insertions(+), 35 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst
|
||||
|
||||
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
|
||||
index e1487e3864d44b..0a0f31eca06c04 100755
|
||||
--- a/Lib/tarfile.py
|
||||
+++ b/Lib/tarfile.py
|
||||
@@ -843,6 +843,9 @@ def data_filter(member, dest_path):
|
||||
# Sentinel for replace() defaults, meaning "don't change the attribute"
|
||||
_KEEP = object()
|
||||
|
||||
+# Header length is digits followed by a space.
|
||||
+_header_length_prefix_re = re.compile(br"([0-9]{1,20}) ")
|
||||
+
|
||||
class TarInfo(object):
|
||||
"""Informational class which holds the details about an
|
||||
archive member given by a tar header block.
|
||||
@@ -1412,37 +1415,59 @@ def _proc_pax(self, tarfile):
|
||||
else:
|
||||
pax_headers = tarfile.pax_headers.copy()
|
||||
|
||||
- # Check if the pax header contains a hdrcharset field. This tells us
|
||||
- # the encoding of the path, linkpath, uname and gname fields. Normally,
|
||||
- # these fields are UTF-8 encoded but since POSIX.1-2008 tar
|
||||
- # implementations are allowed to store them as raw binary strings if
|
||||
- # the translation to UTF-8 fails.
|
||||
- match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
|
||||
- if match is not None:
|
||||
- pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
|
||||
-
|
||||
- # For the time being, we don't care about anything other than "BINARY".
|
||||
- # The only other value that is currently allowed by the standard is
|
||||
- # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
|
||||
- hdrcharset = pax_headers.get("hdrcharset")
|
||||
- if hdrcharset == "BINARY":
|
||||
- encoding = tarfile.encoding
|
||||
- else:
|
||||
- encoding = "utf-8"
|
||||
-
|
||||
# Parse pax header information. A record looks like that:
|
||||
# "%d %s=%s\n" % (length, keyword, value). length is the size
|
||||
# of the complete record including the length field itself and
|
||||
- # the newline. keyword and value are both UTF-8 encoded strings.
|
||||
- regex = re.compile(br"(\d+) ([^=]+)=")
|
||||
+ # the newline.
|
||||
pos = 0
|
||||
- while match := regex.match(buf, pos):
|
||||
- length, keyword = match.groups()
|
||||
- length = int(length)
|
||||
- if length == 0:
|
||||
+ encoding = None
|
||||
+ raw_headers = []
|
||||
+ while len(buf) > pos and buf[pos] != 0x00:
|
||||
+ if not (match := _header_length_prefix_re.match(buf, pos)):
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+ try:
|
||||
+ length = int(match.group(1))
|
||||
+ except ValueError:
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+ # Headers must be at least 5 bytes, shortest being '5 x=\n'.
|
||||
+ # Value is allowed to be empty.
|
||||
+ if length < 5:
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+ if pos + length > len(buf):
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+
|
||||
+ header_value_end_offset = match.start(1) + length - 1 # Last byte of the header
|
||||
+ keyword_and_value = buf[match.end(1) + 1:header_value_end_offset]
|
||||
+ raw_keyword, equals, raw_value = keyword_and_value.partition(b"=")
|
||||
+
|
||||
+ # Check the framing of the header. The last character must be '\n' (0x0A)
|
||||
+ if not raw_keyword or equals != b"=" or buf[header_value_end_offset] != 0x0A:
|
||||
raise InvalidHeaderError("invalid header")
|
||||
- value = buf[match.end(2) + 1:match.start(1) + length - 1]
|
||||
+ raw_headers.append((length, raw_keyword, raw_value))
|
||||
+
|
||||
+ # Check if the pax header contains a hdrcharset field. This tells us
|
||||
+ # the encoding of the path, linkpath, uname and gname fields. Normally,
|
||||
+ # these fields are UTF-8 encoded but since POSIX.1-2008 tar
|
||||
+ # implementations are allowed to store them as raw binary strings if
|
||||
+ # the translation to UTF-8 fails. For the time being, we don't care about
|
||||
+ # anything other than "BINARY". The only other value that is currently
|
||||
+ # allowed by the standard is "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
|
||||
+ # Note that we only follow the initial 'hdrcharset' setting to preserve
|
||||
+ # the initial behavior of the 'tarfile' module.
|
||||
+ if raw_keyword == b"hdrcharset" and encoding is None:
|
||||
+ if raw_value == b"BINARY":
|
||||
+ encoding = tarfile.encoding
|
||||
+ else: # This branch ensures only the first 'hdrcharset' header is used.
|
||||
+ encoding = "utf-8"
|
||||
|
||||
+ pos += length
|
||||
+
|
||||
+ # If no explicit hdrcharset is set, we use UTF-8 as a default.
|
||||
+ if encoding is None:
|
||||
+ encoding = "utf-8"
|
||||
+
|
||||
+ # After parsing the raw headers we can decode them to text.
|
||||
+ for length, raw_keyword, raw_value in raw_headers:
|
||||
# Normally, we could just use "utf-8" as the encoding and "strict"
|
||||
# as the error handler, but we better not take the risk. For
|
||||
# example, GNU tar <= 1.23 is known to store filenames it cannot
|
||||
@@ -1450,17 +1475,16 @@ def _proc_pax(self, tarfile):
|
||||
# hdrcharset=BINARY header).
|
||||
# We first try the strict standard encoding, and if that fails we
|
||||
# fall back on the user's encoding and error handler.
|
||||
- keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
|
||||
+ keyword = self._decode_pax_field(raw_keyword, "utf-8", "utf-8",
|
||||
tarfile.errors)
|
||||
if keyword in PAX_NAME_FIELDS:
|
||||
- value = self._decode_pax_field(value, encoding, tarfile.encoding,
|
||||
+ value = self._decode_pax_field(raw_value, encoding, tarfile.encoding,
|
||||
tarfile.errors)
|
||||
else:
|
||||
- value = self._decode_pax_field(value, "utf-8", "utf-8",
|
||||
+ value = self._decode_pax_field(raw_value, "utf-8", "utf-8",
|
||||
tarfile.errors)
|
||||
|
||||
pax_headers[keyword] = value
|
||||
- pos += length
|
||||
|
||||
# Fetch the next header.
|
||||
try:
|
||||
@@ -1475,7 +1499,7 @@ def _proc_pax(self, tarfile):
|
||||
|
||||
elif "GNU.sparse.size" in pax_headers:
|
||||
# GNU extended sparse format version 0.0.
|
||||
- self._proc_gnusparse_00(next, pax_headers, buf)
|
||||
+ self._proc_gnusparse_00(next, raw_headers)
|
||||
|
||||
elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
|
||||
# GNU extended sparse format version 1.0.
|
||||
@@ -1497,15 +1521,24 @@ def _proc_pax(self, tarfile):
|
||||
|
||||
return next
|
||||
|
||||
- def _proc_gnusparse_00(self, next, pax_headers, buf):
|
||||
+ def _proc_gnusparse_00(self, next, raw_headers):
|
||||
"""Process a GNU tar extended sparse header, version 0.0.
|
||||
"""
|
||||
offsets = []
|
||||
- for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
|
||||
- offsets.append(int(match.group(1)))
|
||||
numbytes = []
|
||||
- for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
|
||||
- numbytes.append(int(match.group(1)))
|
||||
+ for _, keyword, value in raw_headers:
|
||||
+ if keyword == b"GNU.sparse.offset":
|
||||
+ try:
|
||||
+ offsets.append(int(value.decode()))
|
||||
+ except ValueError:
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+
|
||||
+ elif keyword == b"GNU.sparse.numbytes":
|
||||
+ try:
|
||||
+ numbytes.append(int(value.decode()))
|
||||
+ except ValueError:
|
||||
+ raise InvalidHeaderError("invalid header")
|
||||
+
|
||||
next.sparse = list(zip(offsets, numbytes))
|
||||
|
||||
def _proc_gnusparse_01(self, next, pax_headers):
|
||||
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
|
||||
index 3fbd25e742b181..e28d0311826e2b 100644
|
||||
--- a/Lib/test/test_tarfile.py
|
||||
+++ b/Lib/test/test_tarfile.py
|
||||
@@ -1237,6 +1237,48 @@ def test_pax_number_fields(self):
|
||||
finally:
|
||||
tar.close()
|
||||
|
||||
+ def test_pax_header_bad_formats(self):
|
||||
+ # The fields from the pax header have priority over the
|
||||
+ # TarInfo.
|
||||
+ pax_header_replacements = (
|
||||
+ b" foo=bar\n",
|
||||
+ b"0 \n",
|
||||
+ b"1 \n",
|
||||
+ b"2 \n",
|
||||
+ b"3 =\n",
|
||||
+ b"4 =a\n",
|
||||
+ b"1000000 foo=bar\n",
|
||||
+ b"0 foo=bar\n",
|
||||
+ b"-12 foo=bar\n",
|
||||
+ b"000000000000000000000000036 foo=bar\n",
|
||||
+ )
|
||||
+ pax_headers = {"foo": "bar"}
|
||||
+
|
||||
+ for replacement in pax_header_replacements:
|
||||
+ with self.subTest(header=replacement):
|
||||
+ tar = tarfile.open(tmpname, "w", format=tarfile.PAX_FORMAT,
|
||||
+ encoding="iso8859-1")
|
||||
+ try:
|
||||
+ t = tarfile.TarInfo()
|
||||
+ t.name = "pax" # non-ASCII
|
||||
+ t.uid = 1
|
||||
+ t.pax_headers = pax_headers
|
||||
+ tar.addfile(t)
|
||||
+ finally:
|
||||
+ tar.close()
|
||||
+
|
||||
+ with open(tmpname, "rb") as f:
|
||||
+ data = f.read()
|
||||
+ self.assertIn(b"11 foo=bar\n", data)
|
||||
+ data = data.replace(b"11 foo=bar\n", replacement)
|
||||
+
|
||||
+ with open(tmpname, "wb") as f:
|
||||
+ f.truncate()
|
||||
+ f.write(data)
|
||||
+
|
||||
+ with self.assertRaisesRegex(tarfile.ReadError, r"method tar: ReadError\('invalid header'\)"):
|
||||
+ tarfile.open(tmpname, encoding="iso8859-1")
|
||||
+
|
||||
|
||||
class WriteTestBase(TarTest):
|
||||
# Put all write tests in here that are supposed to be tested
|
||||
diff --git a/Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst b/Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst
|
||||
new file mode 100644
|
||||
index 00000000000000..81f918bfe2b255
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Remove backtracking from tarfile header parsing for ``hdrcharset``, PAX, and
|
||||
+GNU sparse headers.
|
@ -16,7 +16,6 @@ LEVELS = (None, 1, 2)
# list of globs of test and other files that we expect not to have bytecode
not_compiled = [
    '/usr/bin/*',
    '/usr/lib/rpm/redhat/*',
    '*/test/badsyntax_*.py',
    '*/tokenizedata/bad_coding.py',
    '*/tokenizedata/bad_coding2.py',
@ -1,171 +0,0 @@
|
||||
'''Script to perform import of each module given to %%py_check_import
|
||||
'''
|
||||
import argparse
|
||||
import importlib
|
||||
import fnmatch
|
||||
import os
|
||||
import re
|
||||
import site
|
||||
import sys
|
||||
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def read_modules_files(file_paths):
|
||||
'''Read module names from the files (modules must be newline separated).
|
||||
|
||||
Return the module names list or, if no files were provided, an empty list.
|
||||
'''
|
||||
|
||||
if not file_paths:
|
||||
return []
|
||||
|
||||
modules = []
|
||||
for file in file_paths:
|
||||
file_contents = file.read_text()
|
||||
modules.extend(file_contents.split())
|
||||
return modules
|
||||
|
||||
|
||||
def read_modules_from_cli(argv):
|
||||
'''Read module names from command-line arguments (space or comma separated).
|
||||
|
||||
Return the module names list.
|
||||
'''
|
||||
|
||||
if not argv:
|
||||
return []
|
||||
|
||||
# %%py3_check_import allows to separate module list with comma or whitespace,
|
||||
# we need to unify the output to a list of particular elements
|
||||
modules_as_str = ' '.join(argv)
|
||||
modules = re.split(r'[\s,]+', modules_as_str)
|
||||
# Because of shell expansion in some less typical cases it may happen
|
||||
# that a trailing space will occur at the end of the list.
|
||||
# Remove the empty items from the list before passing it further
|
||||
modules = [m for m in modules if m]
|
||||
return modules
|
||||
|
||||
|
||||
def filter_top_level_modules_only(modules):
|
||||
'''Filter out entries with nested modules (containing dot) ie. 'foo.bar'.
|
||||
|
||||
Return the list of top-level modules.
|
||||
'''
|
||||
|
||||
return [module for module in modules if '.' not in module]
|
||||
|
||||
|
||||
def any_match(text, globs):
|
||||
'''Return True if any of given globs fnmatchcase's the given text.'''
|
||||
|
||||
return any(fnmatch.fnmatchcase(text, g) for g in globs)
|
||||
|
||||
|
||||
def exclude_unwanted_module_globs(globs, modules):
|
||||
'''Filter out entries which match the either of the globs given as argv.
|
||||
|
||||
Return the list of filtered modules.
|
||||
'''
|
||||
|
||||
return [m for m in modules if not any_match(m, globs)]
|
||||
|
||||
|
||||
def read_modules_from_all_args(args):
|
||||
'''Return a joined list of modules from all given command-line arguments.
|
||||
'''
|
||||
|
||||
modules = read_modules_files(args.filename)
|
||||
modules.extend(read_modules_from_cli(args.modules))
|
||||
if args.exclude:
|
||||
modules = exclude_unwanted_module_globs(args.exclude, modules)
|
||||
|
||||
if args.top_level:
|
||||
modules = filter_top_level_modules_only(modules)
|
||||
|
||||
# Error when someone accidentally managed to filter out everything
|
||||
if len(modules) == 0:
|
||||
raise ValueError('No modules to check were left')
|
||||
|
||||
return modules
|
||||
|
||||
|
||||
def import_modules(modules):
|
||||
'''Procedure to perform import check for each module name from the given list of modules.
|
||||
'''
|
||||
|
||||
for module in modules:
|
||||
print('Check import:', module, file=sys.stderr)
|
||||
importlib.import_module(module)
|
||||
|
||||
|
||||
def argparser():
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Generate list of all importable modules for import check.'
|
||||
)
|
||||
parser.add_argument(
|
||||
'modules', nargs='*',
|
||||
help=('Add modules to check the import (space or comma separated).'),
|
||||
)
|
||||
parser.add_argument(
|
||||
'-f', '--filename', action='append', type=Path,
|
||||
help='Add importable module names list from file.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-t', '--top-level', action='store_true',
|
||||
help='Check only top-level modules.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-e', '--exclude', action='append',
|
||||
help='Provide modules globs to be excluded from the check.',
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
@contextmanager
|
||||
def remove_unwanteds_from_sys_path():
|
||||
'''Remove cwd and this script's parent from sys.path for the import test.
|
||||
Bring the original contents back after import is done (or failed)
|
||||
'''
|
||||
|
||||
cwd_absolute = Path.cwd().absolute()
|
||||
this_file_parent = Path(__file__).parent.absolute()
|
||||
old_sys_path = list(sys.path)
|
||||
for path in old_sys_path:
|
||||
if Path(path).absolute() in (cwd_absolute, this_file_parent):
|
||||
sys.path.remove(path)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
sys.path = old_sys_path
|
||||
|
||||
|
||||
def addsitedirs_from_environ():
|
||||
'''Load directories from the _PYTHONSITE environment variable (separated by :)
|
||||
and load the ones already present in sys.path via site.addsitedir()
|
||||
to handle .pth files in them.
|
||||
|
||||
This is needed to properly import old-style namespace packages with nspkg.pth files.
|
||||
See https://bugzilla.redhat.com/2018551 for a more detailed rationale.'''
|
||||
for path in os.getenv('_PYTHONSITE', '').split(':'):
|
||||
if path in sys.path:
|
||||
site.addsitedir(path)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
|
||||
cli_args = argparser().parse_args(argv)
|
||||
|
||||
if not cli_args.modules and not cli_args.filename:
|
||||
raise ValueError('No modules to check were provided')
|
||||
|
||||
modules = read_modules_from_all_args(cli_args)
|
||||
|
||||
with remove_unwanteds_from_sys_path():
|
||||
addsitedirs_from_environ()
|
||||
import_modules(modules)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@@ -1,91 +0,0 @@
%__python3 /usr/bin/python3.12
%python3_pkgversion 3.12

# The following are macros from macros.python3 in Fedora that are newer/different than those in the python3-rpm-macros package in RHEL 8.
# These macros overwrite/supercede some of the macros in the python3-rpm-macros package in RHEL.

# nb: $RPM_BUILD_ROOT is not set when the macros are expanded (at spec parse time)
# so we set it manually (to empty string), making our Python prefer the correct install scheme location
# platbase/base is explicitly set to %%{_prefix} to support custom values, such as /app for flatpaks
%python3_sitelib %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_path('purelib', vars={'platbase': '%{_prefix}', 'base': '%{_prefix}'}))")
%python3_sitearch %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_path('platlib', vars={'platbase': '%{_prefix}', 'base': '%{_prefix}'}))")
%python3_version %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; sys.stdout.write('{0.major}.{0.minor}'.format(sys.version_info))")
%python3_version_nodots %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; sys.stdout.write('{0.major}{0.minor}'.format(sys.version_info))")
%python3_platform %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_platform())")
%python3_platform_triplet %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_config_var('MULTIARCH'))")
%python3_ext_suffix %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_config_var('EXT_SUFFIX'))")
%python3_cache_tag %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; print(sys.implementation.cache_tag)")
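For orientation only, these macros can be inspected with rpm --eval once the package is installed; on a typical x86_64 system the expansions would be expected to look roughly like the comments below (indicative values, not taken from this diff):

    rpm --eval '%{python3_sitelib}'    # e.g. /usr/lib/python3.12/site-packages
    rpm --eval '%{python3_sitearch}'   # e.g. /usr/lib64/python3.12/site-packages
    rpm --eval '%{python3_version}'    # e.g. 3.12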

%_py3_shebang_s s
%_py3_shebang_P %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; print('P' if hasattr(sys.flags, 'safe_path') else '')")
%py3_shbang_opts -%{?_py3_shebang_s}%{?_py3_shebang_P}

%py3_shebang_fix %{expand:\\\
  if [ -z "%{?py3_shebang_flags}" ]; then
    shebang_flags="-k"
  else
    shebang_flags="-ka%{py3_shebang_flags}"
  fi
  %{__python3} -B %{_rpmconfigdir}/redhat/pathfix_py3_12.py -pni %{__python3} $shebang_flags}

%py3_install() %{expand:\\\
  CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
  %{__python3} %{py_setup} %{?py_setup_args} install -O1 --skip-build --root %{buildroot} --prefix %{_prefix} %{?*}
  rm -rfv %{buildroot}%{_bindir}/__pycache__
}

%py3_install_egg() %{expand:\\\
  mkdir -p %{buildroot}%{python3_sitelib}
  %{__python3} -m easy_install -m --prefix %{buildroot}%{_prefix} -Z dist/*-py%{python3_version}.egg %{?*}
  rm -rfv %{buildroot}%{_bindir}/__pycache__
}

%py3_install_wheel() %{expand:\\\
  %{__python3} -m pip install -I dist/%{1} --root %{buildroot} --prefix %{_prefix} --no-deps --no-index --no-warn-script-location
  rm -rfv %{buildroot}%{_bindir}/__pycache__
  for distinfo in %{buildroot}%{python3_sitelib}/*.dist-info %{buildroot}%{python3_sitearch}/*.dist-info; do
    if [ -f ${distinfo}/direct_url.json ]; then
      rm -fv ${distinfo}/direct_url.json
      sed -i '/direct_url.json/d' ${distinfo}/RECORD
    fi
  done
}

# With $PATH and $PYTHONPATH set to the %%buildroot,
# try to import the Python 3 module(s) given as command-line args or read from file (-f).
# Respect the custom values of %%py3_shebang_flags or set nothing if it's undefined.
# Filter and check import on only top-level modules using -t flag.
# Exclude unwanted modules by passing their globs to -e option.
# Useful as a smoke test in %%check when running tests is not feasible.
# Use spaces or commas as separators if providing list directly.
# Use newlines as separators if providing list in a file.
%py3_check_import(e:tf:) %{expand:\\\
  PATH="%{buildroot}%{_bindir}:$PATH"\\\
  PYTHONPATH="${PYTHONPATH:-%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}}"\\\
  _PYTHONSITE="%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}"\\\
  PYTHONDONTWRITEBYTECODE=1\\\
  %{lua:
  local command = "%{__python3} "
  if rpm.expand("%{?py3_shebang_flags}") ~= "" then
    command = command .. "-%{py3_shebang_flags}"
  end
  command = command .. " %{_rpmconfigdir}/redhat/import_all_modules_py3_12.py "
  -- handle multiline arguments correctly, see https://bugzilla.redhat.com/2018809
  local args=rpm.expand('%{?**}'):gsub("[%s\\\\]*%s+", " ")
  print(command .. args)
  }
}
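A hedged sketch of how a dependent package's spec could use this macro in its %check section (examplemod and the exclusion glob are placeholder names, not taken from this diff):

    %check
    %py3_check_import -t -e '*.tests' examplemod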

# Environment variables used by %%pytest, %%tox or standalone, e.g.:
# %%{py3_test_envvars} %%{python3} -m unittest
%py3_test_envvars %{expand:\\\
  CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
  PATH="%{buildroot}%{_bindir}:$PATH"\\\
  PYTHONPATH="${PYTHONPATH:-%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}}"\\\
  PYTHONDONTWRITEBYTECODE=1\\\
  %{?__pytest_addopts:PYTEST_ADDOPTS="${PYTEST_ADDOPTS:-} %{__pytest_addopts}"}\\\
  PYTEST_XDIST_AUTO_NUM_WORKERS=%{_smp_build_ncpus}}

# This is intended for Python 3 only, hence also no Python version in the name.
%__pytest /usr/bin/pytest-%{python3_version}
%pytest %py3_test_envvars %__pytest
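Likewise, a spec that runs its test suite with pytest could call the wrapper directly, for instance (illustrative only, assuming the package ships pytest-based tests):

    %check
    %pytest -v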
@@ -1,199 +0,0 @@
#!/usr/bin/env python3

import sys
import os
from stat import *
import getopt

err = sys.stderr.write
dbg = err
rep = sys.stdout.write

new_interpreter = None
preserve_timestamps = False
create_backup = True
keep_flags = False
add_flags = b''


def main():
    global new_interpreter
    global preserve_timestamps
    global create_backup
    global keep_flags
    global add_flags

    usage = ('usage: %s -i /interpreter -p -n -k -a file-or-directory ...\n' %
             sys.argv[0])
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'i:a:kpn')
    except getopt.error as msg:
        err(str(msg) + '\n')
        err(usage)
        sys.exit(2)
    for o, a in opts:
        if o == '-i':
            new_interpreter = a.encode()
        if o == '-p':
            preserve_timestamps = True
        if o == '-n':
            create_backup = False
        if o == '-k':
            keep_flags = True
        if o == '-a':
            add_flags = a.encode()
            if b' ' in add_flags:
                err("-a option doesn't support whitespaces")
                sys.exit(2)
    if not new_interpreter or not new_interpreter.startswith(b'/') or \
            not args:
        err('-i option or file-or-directory missing\n')
        err(usage)
        sys.exit(2)
    bad = 0
    for arg in args:
        if os.path.isdir(arg):
            if recursedown(arg): bad = 1
        elif os.path.islink(arg):
            err(arg + ': will not process symbolic links\n')
            bad = 1
        else:
            if fix(arg): bad = 1
    sys.exit(bad)


def ispython(name):
    return name.endswith('.py')


def recursedown(dirname):
    dbg('recursedown(%r)\n' % (dirname,))
    bad = 0
    try:
        names = os.listdir(dirname)
    except OSError as msg:
        err('%s: cannot list directory: %r\n' % (dirname, msg))
        return 1
    names.sort()
    subdirs = []
    for name in names:
        if name in (os.curdir, os.pardir): continue
        fullname = os.path.join(dirname, name)
        if os.path.islink(fullname): pass
        elif os.path.isdir(fullname):
            subdirs.append(fullname)
        elif ispython(name):
            if fix(fullname): bad = 1
    for fullname in subdirs:
        if recursedown(fullname): bad = 1
    return bad


def fix(filename):
    ## dbg('fix(%r)\n' % (filename,))
    try:
        f = open(filename, 'rb')
    except IOError as msg:
        err('%s: cannot open: %r\n' % (filename, msg))
        return 1
    with f:
        line = f.readline()
        fixed = fixline(line)
        if line == fixed:
            rep(filename+': no change\n')
            return
        head, tail = os.path.split(filename)
        tempname = os.path.join(head, '@' + tail)
        try:
            g = open(tempname, 'wb')
        except IOError as msg:
            err('%s: cannot create: %r\n' % (tempname, msg))
            return 1
        with g:
            rep(filename + ': updating\n')
            g.write(fixed)
            BUFSIZE = 8*1024
            while 1:
                buf = f.read(BUFSIZE)
                if not buf: break
                g.write(buf)

    # Finishing touch -- move files

    mtime = None
    atime = None
    # First copy the file's mode to the temp file
    try:
        statbuf = os.stat(filename)
        mtime = statbuf.st_mtime
        atime = statbuf.st_atime
        os.chmod(tempname, statbuf[ST_MODE] & 0o7777)
    except OSError as msg:
        err('%s: warning: chmod failed (%r)\n' % (tempname, msg))
    # Then make a backup of the original file as filename~
    if create_backup:
        try:
            os.rename(filename, filename + '~')
        except OSError as msg:
            err('%s: warning: backup failed (%r)\n' % (filename, msg))
    else:
        try:
            os.remove(filename)
        except OSError as msg:
            err('%s: warning: removing failed (%r)\n' % (filename, msg))
    # Now move the temp file to the original file
    try:
        os.rename(tempname, filename)
    except OSError as msg:
        err('%s: rename failed (%r)\n' % (filename, msg))
        return 1
    if preserve_timestamps:
        if atime and mtime:
            try:
                os.utime(filename, (atime, mtime))
            except OSError as msg:
                err('%s: reset of timestamp failed (%r)\n' % (filename, msg))
                return 1
    # Return success
    return 0


def parse_shebang(shebangline):
    shebangline = shebangline.rstrip(b'\n')
    start = shebangline.find(b' -')
    if start == -1:
        return b''
    return shebangline[start:]


def populate_flags(shebangline):
    old_flags = b''
    if keep_flags:
        old_flags = parse_shebang(shebangline)
        if old_flags:
            old_flags = old_flags[2:]
    if not (old_flags or add_flags):
        return b''
    # On Linux, the entire string following the interpreter name
    # is passed as a single argument to the interpreter.
    # e.g. "#! /usr/bin/python3 -W Error -s" runs "/usr/bin/python3 "-W Error -s"
    # so shebang should have single '-' where flags are given and
    # flag might need argument for that reasons adding new flags is
    # between '-' and original flags
    # e.g. #! /usr/bin/python3 -sW Error
    return b' -' + add_flags + old_flags


def fixline(line):
    if not line.startswith(b'#!'):
        return line

    if b"python" not in line:
        return line

    flags = populate_flags(line)
    return b'#! ' + new_interpreter + flags + b'\n'


if __name__ == '__main__':
    main()
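For reference, when %py3_shebang_flags is unset, the %py3_shebang_fix macro above boils down to an invocation of this script along these lines (the ./bin/ target is a placeholder path):

    %{__python3} -B %{_rpmconfigdir}/redhat/pathfix_py3_12.py -pni %{__python3} -k ./bin/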
@@ -20,7 +20,7 @@ URL: https://www.python.org/
#global prerel ...
%global upstream_version %{general_version}%{?prerel}
Version: %{general_version}%{?prerel:~%{prerel}}
Release: 4%{?dist}
Release: 4%{?dist}.4
License: Python

@@ -62,6 +62,7 @@ License: Python
# Whether to use RPM build wheels from the python-{pip,setuptools,wheel}-wheel packages
# Uses upstream bundled prebuilt wheels otherwise
%bcond_without rpmwheels

# If the rpmwheels condition is disabled, we use the bundled wheel packages
# from Python with the versions below.
# This needs to be manually updated when we update Python.
@@ -313,11 +314,6 @@ Source1: %{url}ftp/python/%{general_version}/Python-%{upstream_version}.tar.xz.a
# The release manager for Python 3.12 is Thomas Wouters
Source2: https://github.com/Yhg1s.gpg

# Sources for the python3.12-rpm-macros
Source3: macros.python3.12
Source4: import_all_modules_py3_12.py
Source5: pathfix_py3_12.py

# A simple script to check timestamps of bytecode files
# Run in check section with Python that is currently being built
# Originally written by bkabrda
@@ -389,6 +385,46 @@ Patch397: 00397-tarfile-filter.patch
# Thomas Dwyer.
Patch415: 00415-cve-2023-27043-gh-102988-reject-malformed-addresses-in-email-parseaddr-111116.patch

# 00422 # a353cebef737c41420dc7ae2469dd657371b8881
# gh-115133: Fix tests for XMLPullParser with Expat 2.6.0
#
# Feeding the parser by too small chunks defers parsing to prevent
# CVE-2023-52425. Future versions of Expat may be more reactive.
Patch422: 00422-gh-115133-fix-tests-for-xmlpullparser-with-expat-2-6-0.patch

# 00435 #
# Security fix for CVE-2024-6923
# gh-121650: Encode newlines in headers, and verify headers are sound
#
# Encode header parts that contain newlines
#
# Per RFC 2047:
#
# > [...] these encoding schemes allow the
# > encoding of arbitrary octet values, mail readers that implement this
# > decoding should also ensure that display of the decoded data on the
# > recipient's terminal will not cause unwanted side-effects
#
# It seems that the "quoted-word" scheme is a valid way to include
# a newline character in a header value, just like we already allow
# undecodable bytes or control characters.
# They do need to be properly quoted when serialized to text, though.
#
# Verify that email headers are well-formed
#
# This should fail for custom fold() implementations that aren't careful about newlines.
# Tracking bugzilla: https://bugzilla.redhat.com/show_bug.cgi?id=2302255
# Resolved upstream: https://github.com/python/cpython/issues/121650
Patch435: 00435-CVE-2024-6923.patch

# 00436 # c76cc2aa3a2c30375ade4859b732ada851cc89ed
# [CVE-2024-8088] gh-122905: Sanitize names in zipfile.Path.
Patch436: 00436-cve-2024-8088-gh-122905-sanitize-names-in-zipfile-path.patch

# CVE-2024-6232: Remove backtracking when parsing tarfile headers
# Resolved upstream: https://github.com/python/cpython/issues/121285
Patch437: 00437-CVE-2024-6232.patch

# (New patches go here ^^^)
#
# When adding new patches to "python" and "python3" in Fedora, EL, etc.,
@@ -407,14 +443,6 @@ Patch415: 00415-cve-2023-27043-gh-102988-reject-malformed-addresses-in-email-par
# Descriptions, and metadata for subpackages
# ==========================================

# Require alternatives version that implements the --keep-foreign flag and fixes rhbz#2203820
Requires: alternatives >= 1.19.2-1
Requires(post): alternatives >= 1.19.2-1
Requires(postun): alternatives >= 1.19.2-1

# When the user tries to `yum install python`, yum will list this package among
# the possible alternatives
Provides: alternative-for(python)

%if %{with main_python}
# Description for the python3X SRPM only:
@@ -489,7 +517,6 @@ Documentation for Python is provided in the %{pkgname}-docs package.
Packages containing additional libraries for Python are generally named with
the "%{pkgname}-" prefix.

For the unversioned "python" executable, see manual page "unversioned-python".

%if %{with main_python}
# https://fedoraproject.org/wiki/Changes/Move_usr_bin_python_into_separate_package
@@ -570,12 +597,9 @@ Requires: (python3-rpm-macros if rpm-build)
# On Fedora, we keep this to avoid one additional round of %%generate_buildrequires.
%{!?rhel:Requires: (pyproject-rpm-macros if rpm-build)}

# Require alternatives version that implements the --keep-foreign flag and fixes rhbz#2203820
Requires(postun): alternatives >= 1.19.2-1

# python3.12 installs the alternatives master symlink to which we attach a slave
Requires(post): %{pkgname}
Requires(postun): %{pkgname}
# We provide the python3.12-rpm-macros here to make it possible to
# BuildRequire them in the same manner as RHEL8.
Provides: %{pkgname}-rpm-macros = %{version}-%{release}

%unversioned_obsoletes_of_python3_X_if_main devel

@@ -624,13 +648,6 @@ Provides: idle = %{version}-%{release}
Provides: %{pkgname}-tools = %{version}-%{release}
Provides: %{pkgname}-tools%{?_isa} = %{version}-%{release}

# Require alternatives version that implements the --keep-foreign flag and fixes rhbz#2203820
Requires(postun): alternatives >= 1.19.2-1

# python3.12 installs the alternatives master symlink to which we attach a slave
Requires(post): %{pkgname}
Requires(postun): %{pkgname}

%description -n %{pkgname}-idle
IDLE is Python’s Integrated Development and Learning Environment.

@@ -702,13 +719,6 @@ Requires: %{pkgname}-idle%{?_isa} = %{version}-%{release}

%unversioned_obsoletes_of_python3_X_if_main debug

# Require alternatives version that implements the --keep-foreign flag and fixes rhbz#2203820
Requires(postun): alternatives >= 1.19.2-1

# python3.12 installs the alternatives master symlink to which we attach a slave
Requires(post): %{pkgname}
Requires(postun): %{pkgname}

%description -n %{pkgname}-debug
python3-debug provides a version of the Python runtime with numerous debugging
features enabled, aimed at advanced Python users such as developers of Python
@@ -727,24 +737,6 @@ The debug runtime additionally supports debug builds of C-API extensions
%endif # with debug_build


# We package the python3.12-rpm-macros in RHEL8 as to properly set the
# %%__python3 and %%python3_pkgversion macros as well as provide modern
# versions the current base macros.
%package -n %{pkgname}-rpm-macros
Summary: RPM macros for building RPMs with Python %{pybasever}
License: MIT
Provides: python-modular-rpm-macros == %{pybasever}
Conflicts: python-modular-rpm-macros > %{pybasever}
Requires: python3-rpm-macros
BuildArch: noarch

%description -n %{pkgname}-rpm-macros
RPM macros for building RPMs with Python %{pybasever} from the python%{pyshortver} module.
If you want to build an RPM against the python%{pyshortver} module, you need to add:

BuildRequire: %{pkgname}-rpm-macros.


# ======================================================
# The prep phase of the build:
# ======================================================
@@ -753,6 +745,14 @@ If you want to build an RPM against the python%{pyshortver} module, you need to
%gpgverify -k2 -s1 -d0
%autosetup -S git_am -n Python-%{upstream_version}

# Verify the second level of bundled provides is up to date
# Arguably this should be done in %%check, but %%prep has a faster feedback loop
# setuptools.whl does not contain the vendored.txt files
if [ -f %{_rpmconfigdir}/pythonbundles.py ]; then
  %{_rpmconfigdir}/pythonbundles.py <(unzip -p Lib/ensurepip/_bundled/pip-*.whl pip/_vendor/vendor.txt) --compare-with '%pip_bundled_provides'
  %{_rpmconfigdir}/pythonbundles.py <(unzip -p Lib/test/wheel-*.whl wheel/vendored/vendor.txt) --compare-with '%wheel_bundled_provides'
fi

%if %{with rpmwheels}
rm Lib/ensurepip/_bundled/pip-%{pip_version}-py3-none-any.whl
rm Lib/test/setuptools-%{setuptools_version}-py3-none-any.whl
@@ -1059,11 +1059,8 @@ done
# Switch all shebangs to refer to the specific Python version.
# This currently only covers files matching ^[a-zA-Z0-9_]+\.py$,
# so handle files named using other naming scheme separately.
# - RHEL 8 note: we use %%{SOURCE5} instead of pathfix.py, because in RHEL 8 we
#   ship our own versioned pathfix_py3_12.py in this package, but during
#   bootstrap it's not yet installed.
LD_LIBRARY_PATH=./build/optimized ./build/optimized/python \
  %{SOURCE5} \
  %{_rpmconfigdir}/redhat/pathfix.py \
  -i "%{_bindir}/python%{pybasever}" -pn \
  %{buildroot} \
  %{buildroot}%{_bindir}/*%{pybasever}.py \
@@ -1165,29 +1162,6 @@ for file in %{buildroot}%{pylibdir}/pydoc_data/topics.py $(grep --include='*.py'
    rm ${directory}/{__pycache__/${module}.cpython-%{pyshortver}.opt-?.pyc,${module}.py}
done

# Python RPM macros for python3.12-rpm-macros
mkdir -p %{buildroot}%{rpmmacrodir}/
install -m 644 %{SOURCE3} \
    %{buildroot}/%{rpmmacrodir}/

# Add scripts that are being used by python3.12-rpm-macros
mkdir -p %{buildroot}%{_rpmconfigdir}/redhat
install -m 644 %{SOURCE4} %{buildroot}%{_rpmconfigdir}/redhat/
install -m 644 %{SOURCE5} %{buildroot}%{_rpmconfigdir}/redhat/

# All ghost files controlled by alternatives need to exist for the files
# section check to succeed
# - Don't list /usr/bin/python as a ghost file so `yum install /usr/bin/python`
#   doesn't install this package
touch %{buildroot}%{_bindir}/unversioned-python
touch %{buildroot}%{_mandir}/man1/python.1.gz
touch %{buildroot}%{_bindir}/python3
touch %{buildroot}%{_mandir}/man1/python3.1.gz
touch %{buildroot}%{_bindir}/pydoc3
touch %{buildroot}%{_bindir}/pydoc-3
touch %{buildroot}%{_bindir}/idle3
touch %{buildroot}%{_bindir}/python3-config

# ======================================================
# Checks for packaging issues
# ======================================================
@@ -1272,119 +1246,10 @@ CheckPython optimized

%endif # with tests

# ======================================================
# Scriptlets for alternatives on rhel8
# ======================================================
%post
# Alternative for /usr/bin/python -> /usr/bin/python3 + man page
alternatives --install %{_bindir}/unversioned-python \
        python \
        %{_bindir}/python3 \
        300 \
        --slave %{_bindir}/python \
        unversioned-python \
        %{_bindir}/python3 \
        --slave %{_mandir}/man1/python.1.gz \
        unversioned-python-man \
        %{_mandir}/man1/python3.1.gz

# Alternative for /usr/bin/python -> /usr/bin/python3.12 + man page
alternatives --install %{_bindir}/unversioned-python \
        python \
        %{_bindir}/python3.12 \
        211 \
        --slave %{_bindir}/python \
        unversioned-python \
        %{_bindir}/python3.12 \
        --slave %{_mandir}/man1/python.1.gz \
        unversioned-python-man \
        %{_mandir}/man1/python3.12.1.gz

# Alternative for /usr/bin/python3 -> /usr/bin/python3.12 + related files
# Create only if it doesn't exist already
EXISTS=`alternatives --display python3 | \
        grep -c "^/usr/bin/python3.12 - priority [0-9]*"`

if [ $EXISTS -eq 0 ]; then
    alternatives --install %{_bindir}/python3 \
        python3 \
        %{_bindir}/python3.12 \
        31200 \
        --slave %{_mandir}/man1/python3.1.gz \
        python3-man \
        %{_mandir}/man1/python3.12.1.gz \
        --slave %{_bindir}/pydoc3 \
        pydoc3 \
        %{_bindir}/pydoc3.12 \
        --slave %{_bindir}/pydoc-3 \
        pydoc-3 \
        %{_bindir}/pydoc3.12
fi

%postun
# Do this only during uninstall process (not during update)
if [ $1 -eq 0 ]; then
    alternatives --keep-foreign --remove python \
        %{_bindir}/python3.12

    alternatives --keep-foreign --remove python3 \
        %{_bindir}/python3.12

    # Remove link python → python3 if no other python3.* exists
    if ! alternatives --display python3 > /dev/null; then
        alternatives --keep-foreign --remove python \
            %{_bindir}/python3
    fi
fi


%post devel
alternatives --add-slave python3 %{_bindir}/python3.12 \
    %{_bindir}/python3-config \
    python3-config \
    %{_bindir}/python3.12-config

%postun devel
# Do this only during uninstall process (not during update)
if [ $1 -eq 0 ]; then
    alternatives --keep-foreign --remove-slave python3 %{_bindir}/python3.12 \
        python3-config
fi

%post idle
alternatives --add-slave python3 %{_bindir}/python3.12 \
    %{_bindir}/idle3 \
    idle3 \
    %{_bindir}/idle3.12

%postun idle
# Do this only during uninstall process (not during update)
if [ $1 -eq 0 ]; then
    alternatives --keep-foreign --remove-slave python3 %{_bindir}/python3.12 \
        idle3
fi

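Once the package is installed, the wiring created by these scriptlets can be inspected or overridden with the same alternatives tool; for example (illustrative commands, not part of the spec):

    alternatives --display python3
    alternatives --set python /usr/bin/python3.12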
# ======================================================
# Files for each RPM (sub)package
# ======================================================

%files -n %{pkgname}-rpm-macros
%{rpmmacrodir}/macros.python%{pybasever}
%{_rpmconfigdir}/redhat/import_all_modules_py3_12.py
%{_rpmconfigdir}/redhat/pathfix_py3_12.py


%files -n %{pkgname}
%doc README.rst

# Alternatives
%ghost %{_bindir}/unversioned-python
%ghost %{_mandir}/man1/python.1.gz
%ghost %{_bindir}/python3
%ghost %{_mandir}/man1/python3.1.gz
%ghost %{_bindir}/pydoc3
%ghost %{_bindir}/pydoc-3

%if %{with main_python}
%{_bindir}/pydoc*
%{_bindir}/python3
@@ -1676,9 +1541,6 @@ fi
%{_bindir}/python%{pybasever}-config
%{_bindir}/python%{LDVERSION_optimized}-config
%{_bindir}/python%{LDVERSION_optimized}-*-config
# Alternatives
%ghost %{_bindir}/python3-config

%{_libdir}/libpython%{LDVERSION_optimized}.so
%{_libdir}/pkgconfig/python-%{LDVERSION_optimized}.pc
%{_libdir}/pkgconfig/python-%{LDVERSION_optimized}-embed.pc
@@ -1691,8 +1553,6 @@ fi
%{_bindir}/idle*
%else
%{_bindir}/idle%{pybasever}
# Alternatives
%ghost %{_bindir}/idle3
%endif

%{pylibdir}/idlelib
@@ -1876,6 +1736,23 @@ fi
# ======================================================

%changelog
* Mon Oct 07 2024 Arti Agrawal <artagraw@redhat.com> - 3.12.1-4.4
- Security fix for CVE-2024-6232
Resolves: RHEL-57416

* Fri Aug 23 2024 Charalampos Stratakis <cstratak@redhat.com> - 3.12.1-4.3
- Security fix for CVE-2024-8088
Resolves: RHEL-55964

* Mon Aug 12 2024 Charalampos Stratakis <cstratak@redhat.com> - 3.12.1-4.2
- Security fix for CVE-2024-6923
Resolves: RHEL-53087

* Fri May 03 2024 Lumír Balhar <lbalhar@redhat.com> - 3.12.1-4.1
- Fix tests for XMLPullParser with Expat with fixed CVE
- Enable importing of hash-based .pyc files under FIPS mode
Resolves: RHEL-40773

* Mon Feb 19 2024 Charalampos Stratakis <cstratak@redhat.com> - 3.12.1-4
- Add Red Hat configuration for CVE-2007-4559
@@ -1932,4 +1809,3 @@ fi
Ville Skyttä <ville.skytta@iki.fi>
Yaakov Selkowitz <yselkowi@redhat.com>
Zbigniew Jędrzejewski-Szmek <zbyszek@in.waw.pl>