import python38-3.8.8-4.module+el8.5.0+12205+a865257a

parent 3b3eb22c65
commit 09494c8ac4

.gitignore (vendored): 2 lines changed
@@ -1 +1 @@
-SOURCES/Python-3.8.6-noexe.tar.xz
+SOURCES/Python-3.8.8-noexe.tar.xz

@@ -1 +1 @@
-e77d08894869ecf483e9f945663f75316ad68bf1 SOURCES/Python-3.8.6-noexe.tar.xz
+e3e4bc64d5e353b8db5882570d6eaec8e4d42f71 SOURCES/Python-3.8.8-noexe.tar.xz
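The 40-character digest above looks like a SHA-1 checksum of the source tarball. A minimal verification sketch, assuming the tarball has been downloaded into a local SOURCES/ directory (the helper name is illustrative, not part of this commit):

```python
import hashlib

def verify_sha1(path: str, expected: str) -> bool:
    # Stream the file so large tarballs do not need to fit in memory.
    digest = hashlib.sha1()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected

if __name__ == "__main__":
    ok = verify_sha1(
        "SOURCES/Python-3.8.8-noexe.tar.xz",
        "e3e4bc64d5e353b8db5882570d6eaec8e4d42f71",
    )
    print("checksum OK" if ok else "checksum MISMATCH")
```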
@@ -12,7 +12,7 @@ We might eventually pursuit upstream support, but it's low prio
  1 file changed, 22 insertions(+), 10 deletions(-)

 diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
-index 9415fd73b8..f58dab1800 100644
+index 38bb42104b..413c1b300e 100644
 --- a/Lib/ensurepip/__init__.py
 +++ b/Lib/ensurepip/__init__.py
 @@ -1,6 +1,7 @@
@@ -24,7 +24,7 @@ index 9415fd73b8..f58dab1800 100644
  import sys
  import runpy
  import tempfile
-@@ -8,10 +9,24 @@ import tempfile
+@@ -9,10 +10,24 @@ import subprocess

  __all__ = ["version", "bootstrap"]

@@ -33,7 +33,7 @@ index 9415fd73b8..f58dab1800 100644
 -_SETUPTOOLS_VERSION = "49.2.1"
 +_wheels = {}

--_PIP_VERSION = "20.2.1"
+-_PIP_VERSION = "20.2.3"
 +def _get_most_recent_wheel_version(pkg):
 +    prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg))
 +    _wheels[pkg] = {}
@@ -51,7 +51,7 @@ index 9415fd73b8..f58dab1800 100644

  _PROJECTS = [
      ("setuptools", _SETUPTOOLS_VERSION, "py3"),
-@@ -105,13 +120,10 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
+@@ -102,13 +117,10 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
  # additional paths that need added to sys.path
  additional_paths = []
  for project, version, py_tag in _PROJECTS:
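The rediffed hunk above keeps replacing the hard-coded `_PIP_VERSION`/`_SETUPTOOLS_VERSION` constants with a lookup of the RPM-provided wheels. The snippet below is a rough, hypothetical re-creation of that discovery logic rather than the exact downstream code; it assumes wheels named `<project>-<version>-<tag>-none-any.whl` under some `_WHEEL_DIR` directory:

```python
import glob
import os

# Assumed location of the RPM-bundled wheels; the real patch wires this up
# to the build rather than hard-coding a path.
_WHEEL_DIR = "/usr/share/python38-wheels"

def _get_most_recent_wheel_version(pkg):
    """Return the version string of the newest bundled wheel for *pkg*.

    Hypothetical sketch of the helper added by the patch: scan _WHEEL_DIR
    for files starting with '<pkg>-' and keep the highest version,
    comparing dotted components numerically where possible.
    """
    prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg))
    versions = []
    for path in glob.glob("{}*.whl".format(prefix)):
        # wheel filename layout: <pkg>-<version>-<pytag>-<abi>-<platform>.whl
        version = os.path.basename(path)[len(pkg) + 1:].split("-")[0]
        versions.append(version)
    if not versions:
        raise RuntimeError("no bundled wheel found for {!r}".format(pkg))
    return max(versions, key=lambda v: [int(p) if p.isdigit() else 0
                                        for p in v.split(".")])

_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools")
_PIP_VERSION = _get_most_recent_wheel_version("pip")
```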
@@ -1,186 +0,0 @@
From ece5dfd403dac211f8d3c72701fe7ba7b7aa5b5f Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
 <31488909+miss-islington@users.noreply.github.com>
Date: Mon, 18 Jan 2021 13:28:52 -0800
Subject: [PATCH] closes bpo-42938: Replace snprintf with Python unicode
 formatting in ctypes param reprs. (GH-24248)

(cherry picked from commit 916610ef90a0d0761f08747f7b0905541f0977c7)

Co-authored-by: Benjamin Peterson <benjamin@python.org>

Co-authored-by: Benjamin Peterson <benjamin@python.org>
---
 Lib/ctypes/test/test_parameters.py            | 43 ++++++++++++++++
 .../2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst  |  2 +
 Modules/_ctypes/callproc.c                    | 51 +++++++------------
 3 files changed, 64 insertions(+), 32 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst

diff --git a/Lib/ctypes/test/test_parameters.py b/Lib/ctypes/test/test_parameters.py
|
|
||||||
index e4c25fd880cef..531894fdec838 100644
|
|
||||||
--- a/Lib/ctypes/test/test_parameters.py
|
|
||||||
+++ b/Lib/ctypes/test/test_parameters.py
|
|
||||||
@@ -201,6 +201,49 @@ def __dict__(self):
|
|
||||||
with self.assertRaises(ZeroDivisionError):
|
|
||||||
WorseStruct().__setstate__({}, b'foo')
|
|
||||||
|
|
||||||
+ def test_parameter_repr(self):
|
|
||||||
+ from ctypes import (
|
|
||||||
+ c_bool,
|
|
||||||
+ c_char,
|
|
||||||
+ c_wchar,
|
|
||||||
+ c_byte,
|
|
||||||
+ c_ubyte,
|
|
||||||
+ c_short,
|
|
||||||
+ c_ushort,
|
|
||||||
+ c_int,
|
|
||||||
+ c_uint,
|
|
||||||
+ c_long,
|
|
||||||
+ c_ulong,
|
|
||||||
+ c_longlong,
|
|
||||||
+ c_ulonglong,
|
|
||||||
+ c_float,
|
|
||||||
+ c_double,
|
|
||||||
+ c_longdouble,
|
|
||||||
+ c_char_p,
|
|
||||||
+ c_wchar_p,
|
|
||||||
+ c_void_p,
|
|
||||||
+ )
|
|
||||||
+ self.assertRegex(repr(c_bool.from_param(True)), r"^<cparam '\?' at 0x[A-Fa-f0-9]+>$")
|
|
||||||
+ self.assertEqual(repr(c_char.from_param(97)), "<cparam 'c' ('a')>")
|
|
||||||
+ self.assertRegex(repr(c_wchar.from_param('a')), r"^<cparam 'u' at 0x[A-Fa-f0-9]+>$")
|
|
||||||
+ self.assertEqual(repr(c_byte.from_param(98)), "<cparam 'b' (98)>")
|
|
||||||
+ self.assertEqual(repr(c_ubyte.from_param(98)), "<cparam 'B' (98)>")
|
|
||||||
+ self.assertEqual(repr(c_short.from_param(511)), "<cparam 'h' (511)>")
|
|
||||||
+ self.assertEqual(repr(c_ushort.from_param(511)), "<cparam 'H' (511)>")
|
|
||||||
+ self.assertRegex(repr(c_int.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
|
|
||||||
+ self.assertRegex(repr(c_uint.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
|
|
||||||
+ self.assertRegex(repr(c_long.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
|
|
||||||
+ self.assertRegex(repr(c_ulong.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
|
|
||||||
+ self.assertRegex(repr(c_longlong.from_param(20000)), r"^<cparam '[liq]' \(20000\)>$")
|
|
||||||
+ self.assertRegex(repr(c_ulonglong.from_param(20000)), r"^<cparam '[LIQ]' \(20000\)>$")
|
|
||||||
+ self.assertEqual(repr(c_float.from_param(1.5)), "<cparam 'f' (1.5)>")
|
|
||||||
+ self.assertEqual(repr(c_double.from_param(1.5)), "<cparam 'd' (1.5)>")
|
|
||||||
+ self.assertEqual(repr(c_double.from_param(1e300)), "<cparam 'd' (1e+300)>")
|
|
||||||
+ self.assertRegex(repr(c_longdouble.from_param(1.5)), r"^<cparam ('d' \(1.5\)|'g' at 0x[A-Fa-f0-9]+)>$")
|
|
||||||
+ self.assertRegex(repr(c_char_p.from_param(b'hihi')), "^<cparam 'z' \(0x[A-Fa-f0-9]+\)>$")
|
|
||||||
+ self.assertRegex(repr(c_wchar_p.from_param('hihi')), "^<cparam 'Z' \(0x[A-Fa-f0-9]+\)>$")
|
|
||||||
+ self.assertRegex(repr(c_void_p.from_param(0x12)), r"^<cparam 'P' \(0x0*12\)>$")
|
|
||||||
+
|
|
||||||
################################################################
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
diff --git a/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
|
|
||||||
new file mode 100644
|
|
||||||
index 0000000000000..7df65a156feab
|
|
||||||
--- /dev/null
|
|
||||||
+++ b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
|
|
||||||
@@ -0,0 +1,2 @@
|
|
||||||
+Avoid static buffers when computing the repr of :class:`ctypes.c_double` and
|
|
||||||
+:class:`ctypes.c_longdouble` values.
|
|
||||||
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
|
|
||||||
index a9b8675cd951b..de75918d49f37 100644
|
|
||||||
--- a/Modules/_ctypes/callproc.c
|
|
||||||
+++ b/Modules/_ctypes/callproc.c
|
|
||||||
@@ -484,58 +484,47 @@ is_literal_char(unsigned char c)
|
|
||||||
static PyObject *
|
|
||||||
PyCArg_repr(PyCArgObject *self)
|
|
||||||
{
|
|
||||||
- char buffer[256];
|
|
||||||
switch(self->tag) {
|
|
||||||
case 'b':
|
|
||||||
case 'B':
|
|
||||||
- sprintf(buffer, "<cparam '%c' (%d)>",
|
|
||||||
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
|
|
||||||
self->tag, self->value.b);
|
|
||||||
- break;
|
|
||||||
case 'h':
|
|
||||||
case 'H':
|
|
||||||
- sprintf(buffer, "<cparam '%c' (%d)>",
|
|
||||||
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
|
|
||||||
self->tag, self->value.h);
|
|
||||||
- break;
|
|
||||||
case 'i':
|
|
||||||
case 'I':
|
|
||||||
- sprintf(buffer, "<cparam '%c' (%d)>",
|
|
||||||
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>",
|
|
||||||
self->tag, self->value.i);
|
|
||||||
- break;
|
|
||||||
case 'l':
|
|
||||||
case 'L':
|
|
||||||
- sprintf(buffer, "<cparam '%c' (%ld)>",
|
|
||||||
+ return PyUnicode_FromFormat("<cparam '%c' (%ld)>",
|
|
||||||
self->tag, self->value.l);
|
|
||||||
- break;
|
|
||||||
|
|
||||||
case 'q':
|
|
||||||
case 'Q':
|
|
||||||
- sprintf(buffer,
|
|
||||||
-#ifdef MS_WIN32
|
|
||||||
- "<cparam '%c' (%I64d)>",
|
|
||||||
-#else
|
|
||||||
- "<cparam '%c' (%lld)>",
|
|
||||||
-#endif
|
|
||||||
+ return PyUnicode_FromFormat("<cparam '%c' (%lld)>",
|
|
||||||
self->tag, self->value.q);
|
|
||||||
- break;
|
|
||||||
case 'd':
|
|
||||||
- sprintf(buffer, "<cparam '%c' (%f)>",
|
|
||||||
- self->tag, self->value.d);
|
|
||||||
- break;
|
|
||||||
- case 'f':
|
|
||||||
- sprintf(buffer, "<cparam '%c' (%f)>",
|
|
||||||
- self->tag, self->value.f);
|
|
||||||
- break;
|
|
||||||
-
|
|
||||||
+ case 'f': {
|
|
||||||
+ PyObject *f = PyFloat_FromDouble((self->tag == 'f') ? self->value.f : self->value.d);
|
|
||||||
+ if (f == NULL) {
|
|
||||||
+ return NULL;
|
|
||||||
+ }
|
|
||||||
+ PyObject *result = PyUnicode_FromFormat("<cparam '%c' (%R)>", self->tag, f);
|
|
||||||
+ Py_DECREF(f);
|
|
||||||
+ return result;
|
|
||||||
+ }
|
|
||||||
case 'c':
|
|
||||||
if (is_literal_char((unsigned char)self->value.c)) {
|
|
||||||
- sprintf(buffer, "<cparam '%c' ('%c')>",
|
|
||||||
+ return PyUnicode_FromFormat("<cparam '%c' ('%c')>",
|
|
||||||
self->tag, self->value.c);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
- sprintf(buffer, "<cparam '%c' ('\\x%02x')>",
|
|
||||||
+ return PyUnicode_FromFormat("<cparam '%c' ('\\x%02x')>",
|
|
||||||
self->tag, (unsigned char)self->value.c);
|
|
||||||
}
|
|
||||||
- break;
|
|
||||||
|
|
||||||
/* Hm, are these 'z' and 'Z' codes useful at all?
|
|
||||||
Shouldn't they be replaced by the functionality of c_string
|
|
||||||
@@ -544,22 +533,20 @@ PyCArg_repr(PyCArgObject *self)
|
|
||||||
case 'z':
|
|
||||||
case 'Z':
|
|
||||||
case 'P':
|
|
||||||
- sprintf(buffer, "<cparam '%c' (%p)>",
|
|
||||||
+ return PyUnicode_FromFormat("<cparam '%c' (%p)>",
|
|
||||||
self->tag, self->value.p);
|
|
||||||
break;
|
|
||||||
|
|
||||||
default:
|
|
||||||
if (is_literal_char((unsigned char)self->tag)) {
|
|
||||||
- sprintf(buffer, "<cparam '%c' at %p>",
|
|
||||||
+ return PyUnicode_FromFormat("<cparam '%c' at %p>",
|
|
||||||
(unsigned char)self->tag, (void *)self);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
- sprintf(buffer, "<cparam 0x%02x at %p>",
|
|
||||||
+ return PyUnicode_FromFormat("<cparam 0x%02x at %p>",
|
|
||||||
(unsigned char)self->tag, (void *)self);
|
|
||||||
}
|
|
||||||
- break;
|
|
||||||
}
|
|
||||||
- return PyUnicode_FromString(buffer);
|
|
||||||
}
|
|
||||||
|
|
||||||
static PyMemberDef PyCArgType_members[] = {
|
|
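The deleted 00357 patch (the bpo-42938 backport shown above) is dropped because Python 3.8.8 already ships the fix, so the ctypes param reprs can be spot-checked on the stock interpreter. A small illustration, with the caveat that type codes such as 'l' vs 'i' and pointer formatting vary by platform:

```python
from ctypes import c_char, c_double, c_void_p

# With the fix, the reprs are built with PyUnicode_FromFormat, so even huge
# doubles no longer risk overflowing a fixed-size C buffer.
print(repr(c_char.from_param(97)))       # <cparam 'c' ('a')>
print(repr(c_double.from_param(1e300)))  # <cparam 'd' (1e+300)>
print(repr(c_void_p.from_param(0x12)))   # <cparam 'P' (0x...12)>, platform-dependent width
```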
SOURCES/00359-CVE-2021-23336.patch (new file, 574 lines)
@@ -0,0 +1,574 @@
From a11d61081c3887c2b4c36e8726597e05f789c2e2 Mon Sep 17 00:00:00 2001
From: Lumir Balhar <lbalhar@redhat.com>
Date: Thu, 1 Apr 2021 08:18:07 +0200
Subject: [PATCH] CVE-2021-23336: Add `separator` argument to parse_qs; warn
 with default
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Partially backports https://bugs.python.org/issue42967 : [security] Address a web cache-poisoning issue reported in urllib.parse.parse_qsl().
However, this solution is different from the upstream solution in Python 3.6.13.

An optional argument separator is added to specify the separator.
It is recommended to set it to '&' or ';' to match the application or proxy in use.
The default can be set with an env variable or a config file.
If neither the argument, env var nor config file specifies a separator, "&" is used,
but a warning is raised if parse_qs is used on input that contains ';'.

Co-authors of the upstream change (who do not necessarily agree with this):
Co-authored-by: Adam Goldschmidt <adamgold7@gmail.com>
Co-authored-by: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
Co-authored-by: Éric Araujo <merwok@netwok.org>
---
 Doc/library/cgi.rst                           |   2 +-
 Doc/library/urllib.parse.rst                  |  12 +-
 Lib/cgi.py                                    |   4 +-
 Lib/test/test_cgi.py                          |  29 +++
 Lib/test/test_urlparse.py                     | 232 +++++++++++++++++-
 Lib/urllib/parse.py                           |  77 +++++-
 .../2021-02-14-15-59-16.bpo-42967.YApqDS.rst  |   1 +
 7 files changed, 340 insertions(+), 17 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Security/2021-02-14-15-59-16.bpo-42967.YApqDS.rst

diff --git a/Doc/library/cgi.rst b/Doc/library/cgi.rst
|
||||||
|
index 880074b..d8a6dc1 100644
|
||||||
|
--- a/Doc/library/cgi.rst
|
||||||
|
+++ b/Doc/library/cgi.rst
|
||||||
|
@@ -277,7 +277,7 @@ These are useful if you want more control, or if you want to employ some of the
|
||||||
|
algorithms implemented in this module in other circumstances.
|
||||||
|
|
||||||
|
|
||||||
|
-.. function:: parse(fp=None, environ=os.environ, keep_blank_values=False, strict_parsing=False, separator="&")
|
||||||
|
+.. function:: parse(fp=None, environ=os.environ, keep_blank_values=False, strict_parsing=False, separator=None)
|
||||||
|
|
||||||
|
Parse a query in the environment or from a file (the file defaults to
|
||||||
|
``sys.stdin``). The *keep_blank_values*, *strict_parsing* and *separator* parameters are
|
||||||
|
diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst
|
||||||
|
index fcad707..9bcef69 100644
|
||||||
|
--- a/Doc/library/urllib.parse.rst
|
||||||
|
+++ b/Doc/library/urllib.parse.rst
|
||||||
|
@@ -165,7 +165,7 @@ or on combining URL components into a URL string.
|
||||||
|
now raise :exc:`ValueError`.
|
||||||
|
|
||||||
|
|
||||||
|
-.. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator='&')
|
||||||
|
+.. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator=None)
|
||||||
|
|
||||||
|
Parse a query string given as a string argument (data of type
|
||||||
|
:mimetype:`application/x-www-form-urlencoded`). Data are returned as a
|
||||||
|
@@ -191,7 +191,13 @@ or on combining URL components into a URL string.
|
||||||
|
*max_num_fields* fields read.
|
||||||
|
|
||||||
|
The optional argument *separator* is the symbol to use for separating the
|
||||||
|
- query arguments. It defaults to ``&``.
|
||||||
|
+ query arguments. It is recommended to set it to ``'&'`` or ``';'``.
|
||||||
|
+ It defaults to ``'&'``; a warning is raised if this default is used.
|
||||||
|
+ This default may be changed with the following environment variable settings:
|
||||||
|
+
|
||||||
|
+ - ``PYTHON_URLLIB_QS_SEPARATOR='&'``: use only ``&`` as separator, without warning (as in Python 3.6.13+ or 3.10)
|
||||||
|
+ - ``PYTHON_URLLIB_QS_SEPARATOR=';'``: use only ``;`` as separator
|
||||||
|
+ - ``PYTHON_URLLIB_QS_SEPARATOR=legacy``: use both ``&`` and ``;`` (as in previous versions of Python)
|
||||||
|
|
||||||
|
Use the :func:`urllib.parse.urlencode` function (with the ``doseq``
|
||||||
|
parameter set to ``True``) to convert such dictionaries into query
|
||||||
|
@@ -236,7 +242,7 @@ or on combining URL components into a URL string.
|
||||||
|
*max_num_fields* fields read.
|
||||||
|
|
||||||
|
The optional argument *separator* is the symbol to use for separating the
|
||||||
|
- query arguments. It defaults to ``&``.
|
||||||
|
+ query arguments. It works as in :py:func:`parse_qs`.
|
||||||
|
|
||||||
|
Use the :func:`urllib.parse.urlencode` function to convert such lists of pairs into
|
||||||
|
query strings.
|
||||||
|
diff --git a/Lib/cgi.py b/Lib/cgi.py
|
||||||
|
index 1e880e5..d7b994b 100755
|
||||||
|
--- a/Lib/cgi.py
|
||||||
|
+++ b/Lib/cgi.py
|
||||||
|
@@ -116,7 +116,7 @@ log = initlog # The current logging function
|
||||||
|
maxlen = 0
|
||||||
|
|
||||||
|
def parse(fp=None, environ=os.environ, keep_blank_values=0,
|
||||||
|
- strict_parsing=0, separator='&'):
|
||||||
|
+ strict_parsing=0, separator=None):
|
||||||
|
"""Parse a query in the environment or from a file (default stdin)
|
||||||
|
|
||||||
|
Arguments, all optional:
|
||||||
|
@@ -319,7 +319,7 @@ class FieldStorage:
|
||||||
|
def __init__(self, fp=None, headers=None, outerboundary=b'',
|
||||||
|
environ=os.environ, keep_blank_values=0, strict_parsing=0,
|
||||||
|
limit=None, encoding='utf-8', errors='replace',
|
||||||
|
- max_num_fields=None, separator='&'):
|
||||||
|
+ max_num_fields=None, separator=None):
|
||||||
|
"""Constructor. Read multipart/* until last part.
|
||||||
|
|
||||||
|
Arguments, all optional:
|
||||||
|
diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py
|
||||||
|
index 4e1506a..49b6926 100644
|
||||||
|
--- a/Lib/test/test_cgi.py
|
||||||
|
+++ b/Lib/test/test_cgi.py
|
||||||
|
@@ -180,6 +180,35 @@ Content-Length: 3
|
||||||
|
|
||||||
|
env = {'QUERY_STRING': orig}
|
||||||
|
fs = cgi.FieldStorage(environ=env)
|
||||||
|
+ if isinstance(expect, dict):
|
||||||
|
+ # test dict interface
|
||||||
|
+ self.assertEqual(len(expect), len(fs))
|
||||||
|
+ self.assertCountEqual(expect.keys(), fs.keys())
|
||||||
|
+ self.assertEqual(fs.getvalue("nonexistent field", "default"), "default")
|
||||||
|
+ # test individual fields
|
||||||
|
+ for key in expect.keys():
|
||||||
|
+ expect_val = expect[key]
|
||||||
|
+ self.assertIn(key, fs)
|
||||||
|
+ if len(expect_val) > 1:
|
||||||
|
+ self.assertEqual(fs.getvalue(key), expect_val)
|
||||||
|
+ else:
|
||||||
|
+ self.assertEqual(fs.getvalue(key), expect_val[0])
|
||||||
|
+
|
||||||
|
+ def test_separator(self):
|
||||||
|
+ parse_semicolon = [
|
||||||
|
+ ("x=1;y=2.0", {'x': ['1'], 'y': ['2.0']}),
|
||||||
|
+ ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
|
||||||
|
+ (";", ValueError("bad query field: ''")),
|
||||||
|
+ (";;", ValueError("bad query field: ''")),
|
||||||
|
+ ("=;a", ValueError("bad query field: 'a'")),
|
||||||
|
+ (";b=a", ValueError("bad query field: ''")),
|
||||||
|
+ ("b;=a", ValueError("bad query field: 'b'")),
|
||||||
|
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
|
||||||
|
+ ("a=a+b;a=b+a", {'a': ['a b', 'b a']}),
|
||||||
|
+ ]
|
||||||
|
+ for orig, expect in parse_semicolon:
|
||||||
|
+ env = {'QUERY_STRING': orig}
|
||||||
|
+ fs = cgi.FieldStorage(separator=';', environ=env)
|
||||||
|
if isinstance(expect, dict):
|
||||||
|
# test dict interface
|
||||||
|
self.assertEqual(len(expect), len(fs))
|
||||||
|
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
|
||||||
|
index 90c8d69..90349ee 100644
|
||||||
|
--- a/Lib/test/test_urlparse.py
|
||||||
|
+++ b/Lib/test/test_urlparse.py
|
||||||
|
@@ -2,6 +2,11 @@ import sys
|
||||||
|
import unicodedata
|
||||||
|
import unittest
|
||||||
|
import urllib.parse
|
||||||
|
+from test.support import EnvironmentVarGuard
|
||||||
|
+from warnings import catch_warnings
|
||||||
|
+import tempfile
|
||||||
|
+import contextlib
|
||||||
|
+import os.path
|
||||||
|
|
||||||
|
RFC1808_BASE = "http://a/b/c/d;p?q#f"
|
||||||
|
RFC2396_BASE = "http://a/b/c/d;p?q"
|
||||||
|
@@ -32,10 +37,34 @@ parse_qsl_test_cases = [
|
||||||
|
(b"&a=b", [(b'a', b'b')]),
|
||||||
|
(b"a=a+b&b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
|
||||||
|
(b"a=1&a=2", [(b'a', b'1'), (b'a', b'2')]),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qsl_test_cases_semicolon = [
|
||||||
|
+ (";", []),
|
||||||
|
+ (";;", []),
|
||||||
|
+ (";a=b", [('a', 'b')]),
|
||||||
|
+ ("a=a+b;b=b+c", [('a', 'a b'), ('b', 'b c')]),
|
||||||
|
+ ("a=1;a=2", [('a', '1'), ('a', '2')]),
|
||||||
|
+ (b";", []),
|
||||||
|
+ (b";;", []),
|
||||||
|
+ (b";a=b", [(b'a', b'b')]),
|
||||||
|
+ (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
|
||||||
|
+ (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qsl_test_cases_legacy = [
|
||||||
|
+ (b"a=1;a=2&a=3", [(b'a', b'1'), (b'a', b'2'), (b'a', b'3')]),
|
||||||
|
+ (b"a=1;b=2&c=3", [(b'a', b'1'), (b'b', b'2'), (b'c', b'3')]),
|
||||||
|
+ (b"a=1&b=2&c=3;", [(b'a', b'1'), (b'b', b'2'), (b'c', b'3')]),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qsl_test_cases_warn = [
|
||||||
|
(";a=b", [(';a', 'b')]),
|
||||||
|
("a=a+b;b=b+c", [('a', 'a b;b=b c')]),
|
||||||
|
(b";a=b", [(b';a', b'b')]),
|
||||||
|
(b"a=a+b;b=b+c", [(b'a', b'a b;b=b c')]),
|
||||||
|
+ ("a=1;a=2&a=3", [('a', '1;a=2'), ('a', '3')]),
|
||||||
|
+ (b"a=1;a=2&a=3", [(b'a', b'1;a=2'), (b'a', b'3')]),
|
||||||
|
]
|
||||||
|
|
||||||
|
# Each parse_qs testcase is a two-tuple that contains
|
||||||
|
@@ -62,10 +91,37 @@ parse_qs_test_cases = [
|
||||||
|
(b"&a=b", {b'a': [b'b']}),
|
||||||
|
(b"a=a+b&b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
|
||||||
|
(b"a=1&a=2", {b'a': [b'1', b'2']}),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qs_test_cases_semicolon = [
|
||||||
|
+ (";", {}),
|
||||||
|
+ (";;", {}),
|
||||||
|
+ (";a=b", {'a': ['b']}),
|
||||||
|
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
|
||||||
|
+ ("a=1;a=2", {'a': ['1', '2']}),
|
||||||
|
+ (b";", {}),
|
||||||
|
+ (b";;", {}),
|
||||||
|
+ (b";a=b", {b'a': [b'b']}),
|
||||||
|
+ (b"a=a+b;b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
|
||||||
|
+ (b"a=1;a=2", {b'a': [b'1', b'2']}),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qs_test_cases_legacy = [
|
||||||
|
+ ("a=1;a=2&a=3", {'a': ['1', '2', '3']}),
|
||||||
|
+ ("a=1;b=2&c=3", {'a': ['1'], 'b': ['2'], 'c': ['3']}),
|
||||||
|
+ ("a=1&b=2&c=3;", {'a': ['1'], 'b': ['2'], 'c': ['3']}),
|
||||||
|
+ (b"a=1;a=2&a=3", {b'a': [b'1', b'2', b'3']}),
|
||||||
|
+ (b"a=1;b=2&c=3", {b'a': [b'1'], b'b': [b'2'], b'c': [b'3']}),
|
||||||
|
+ (b"a=1&b=2&c=3;", {b'a': [b'1'], b'b': [b'2'], b'c': [b'3']}),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qs_test_cases_warn = [
|
||||||
|
(";a=b", {';a': ['b']}),
|
||||||
|
("a=a+b;b=b+c", {'a': ['a b;b=b c']}),
|
||||||
|
(b";a=b", {b';a': [b'b']}),
|
||||||
|
(b"a=a+b;b=b+c", {b'a':[ b'a b;b=b c']}),
|
||||||
|
+ ("a=1;a=2&a=3", {'a': ['1;a=2', '3']}),
|
||||||
|
+ (b"a=1;a=2&a=3", {b'a': [b'1;a=2', b'3']}),
|
||||||
|
]
|
||||||
|
|
||||||
|
class UrlParseTestCase(unittest.TestCase):
|
||||||
|
@@ -123,23 +179,57 @@ class UrlParseTestCase(unittest.TestCase):
|
||||||
|
|
||||||
|
def test_qsl(self):
|
||||||
|
for orig, expect in parse_qsl_test_cases:
|
||||||
|
- result = urllib.parse.parse_qsl(orig, keep_blank_values=True)
|
||||||
|
+ result = urllib.parse.parse_qsl(orig, keep_blank_values=True, separator="&")
|
||||||
|
self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
expect_without_blanks = [v for v in expect if len(v[1])]
|
||||||
|
- result = urllib.parse.parse_qsl(orig, keep_blank_values=False)
|
||||||
|
+ result = urllib.parse.parse_qsl(orig, keep_blank_values=False, separator="&")
|
||||||
|
self.assertEqual(result, expect_without_blanks,
|
||||||
|
"Error parsing %r" % orig)
|
||||||
|
|
||||||
|
def test_qs(self):
|
||||||
|
for orig, expect in parse_qs_test_cases:
|
||||||
|
- result = urllib.parse.parse_qs(orig, keep_blank_values=True)
|
||||||
|
+ result = urllib.parse.parse_qs(orig, keep_blank_values=True, separator="&")
|
||||||
|
self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
expect_without_blanks = {v: expect[v]
|
||||||
|
for v in expect if len(expect[v][0])}
|
||||||
|
- result = urllib.parse.parse_qs(orig, keep_blank_values=False)
|
||||||
|
+ result = urllib.parse.parse_qs(orig, keep_blank_values=False, separator="&")
|
||||||
|
self.assertEqual(result, expect_without_blanks,
|
||||||
|
"Error parsing %r" % orig)
|
||||||
|
|
||||||
|
+ def test_qs_default_warn(self):
|
||||||
|
+ for orig, expect in parse_qs_test_cases_warn:
|
||||||
|
+ with self.subTest(orig=orig, expect=expect):
|
||||||
|
+ with catch_warnings(record=True) as w:
|
||||||
|
+ result = urllib.parse.parse_qs(orig, keep_blank_values=True)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 1)
|
||||||
|
+ self.assertEqual(w[0].category, urllib.parse._QueryStringSeparatorWarning)
|
||||||
|
+
|
||||||
|
+ def test_qsl_default_warn(self):
|
||||||
|
+ for orig, expect in parse_qsl_test_cases_warn:
|
||||||
|
+ with self.subTest(orig=orig, expect=expect):
|
||||||
|
+ with catch_warnings(record=True) as w:
|
||||||
|
+ result = urllib.parse.parse_qsl(orig, keep_blank_values=True)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 1)
|
||||||
|
+ self.assertEqual(w[0].category, urllib.parse._QueryStringSeparatorWarning)
|
||||||
|
+
|
||||||
|
+ def test_default_qs_no_warnings(self):
|
||||||
|
+ for orig, expect in parse_qs_test_cases:
|
||||||
|
+ with self.subTest(orig=orig, expect=expect):
|
||||||
|
+ with catch_warnings(record=True) as w:
|
||||||
|
+ result = urllib.parse.parse_qs(orig, keep_blank_values=True)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_default_qsl_no_warnings(self):
|
||||||
|
+ for orig, expect in parse_qsl_test_cases:
|
||||||
|
+ with self.subTest(orig=orig, expect=expect):
|
||||||
|
+ with catch_warnings(record=True) as w:
|
||||||
|
+ result = urllib.parse.parse_qsl(orig, keep_blank_values=True)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
def test_roundtrips(self):
|
||||||
|
str_cases = [
|
||||||
|
('file:///tmp/junk.txt',
|
||||||
|
@@ -871,8 +961,8 @@ class UrlParseTestCase(unittest.TestCase):
|
||||||
|
|
||||||
|
def test_parse_qsl_max_num_fields(self):
|
||||||
|
with self.assertRaises(ValueError):
|
||||||
|
- urllib.parse.parse_qs('&'.join(['a=a']*11), max_num_fields=10)
|
||||||
|
- urllib.parse.parse_qs('&'.join(['a=a']*10), max_num_fields=10)
|
||||||
|
+ urllib.parse.parse_qs('&'.join(['a=a']*11), max_num_fields=10, separator='&')
|
||||||
|
+ urllib.parse.parse_qs('&'.join(['a=a']*10), max_num_fields=10, separator='&')
|
||||||
|
|
||||||
|
def test_parse_qs_separator(self):
|
||||||
|
parse_qs_semicolon_cases = [
|
||||||
|
@@ -912,6 +1002,136 @@ class UrlParseTestCase(unittest.TestCase):
|
||||||
|
self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
|
||||||
|
|
||||||
|
+ @contextlib.contextmanager
|
||||||
|
+ def _qsl_sep_config(self, sep):
|
||||||
|
+ """Context for the given parse_qsl default separator configured in config file"""
|
||||||
|
+ old_filename = urllib.parse._QS_SEPARATOR_CONFIG_FILENAME
|
||||||
|
+ urllib.parse._default_qs_separator = None
|
||||||
|
+ try:
|
||||||
|
+ with tempfile.TemporaryDirectory() as tmpdirname:
|
||||||
|
+ filename = os.path.join(tmpdirname, 'conf.cfg')
|
||||||
|
+ with open(filename, 'w') as file:
|
||||||
|
+ file.write(f'[parse_qs]\n')
|
||||||
|
+ file.write(f'PYTHON_URLLIB_QS_SEPARATOR = {sep}')
|
||||||
|
+ urllib.parse._QS_SEPARATOR_CONFIG_FILENAME = filename
|
||||||
|
+ yield
|
||||||
|
+ finally:
|
||||||
|
+ urllib.parse._QS_SEPARATOR_CONFIG_FILENAME = old_filename
|
||||||
|
+ urllib.parse._default_qs_separator = None
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_semicolon(self):
|
||||||
|
+ for orig, expect in parse_qs_test_cases_semicolon:
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='arg'):
|
||||||
|
+ result = urllib.parse.parse_qs(orig, separator=';')
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='env'):
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = ';'
|
||||||
|
+ result = urllib.parse.parse_qs(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='conf'):
|
||||||
|
+ with self._qsl_sep_config(';'), catch_warnings(record=True) as w:
|
||||||
|
+ result = urllib.parse.parse_qs(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_parse_qsl_separator_semicolon(self):
|
||||||
|
+ for orig, expect in parse_qsl_test_cases_semicolon:
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='arg'):
|
||||||
|
+ result = urllib.parse.parse_qsl(orig, separator=';')
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='env'):
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = ';'
|
||||||
|
+ result = urllib.parse.parse_qsl(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='conf'):
|
||||||
|
+ with self._qsl_sep_config(';'), catch_warnings(record=True) as w:
|
||||||
|
+ result = urllib.parse.parse_qsl(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_legacy(self):
|
||||||
|
+ for orig, expect in parse_qs_test_cases_legacy:
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='env'):
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = 'legacy'
|
||||||
|
+ result = urllib.parse.parse_qs(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='conf'):
|
||||||
|
+ with self._qsl_sep_config('legacy'), catch_warnings(record=True) as w:
|
||||||
|
+ result = urllib.parse.parse_qs(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_parse_qsl_separator_legacy(self):
|
||||||
|
+ for orig, expect in parse_qsl_test_cases_legacy:
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='env'):
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = 'legacy'
|
||||||
|
+ result = urllib.parse.parse_qsl(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+ with self.subTest(orig=orig, expect=expect, method='conf'):
|
||||||
|
+ with self._qsl_sep_config('legacy'), catch_warnings(record=True) as w:
|
||||||
|
+ result = urllib.parse.parse_qsl(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_bad_value_env_or_config(self):
|
||||||
|
+ for bad_sep in '', 'abc', 'safe', '&;', 'SEP':
|
||||||
|
+ with self.subTest(bad_sep, method='env'):
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = bad_sep
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urllib.parse.parse_qsl('a=1;b=2')
|
||||||
|
+ with self.subTest(bad_sep, method='conf'):
|
||||||
|
+ with self._qsl_sep_config('bad_sep'), catch_warnings(record=True) as w:
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urllib.parse.parse_qsl('a=1;b=2')
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_bad_value_arg(self):
|
||||||
|
+ for bad_sep in True, {}, '':
|
||||||
|
+ with self.subTest(bad_sep):
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urllib.parse.parse_qsl('a=1;b=2', separator=bad_sep)
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_num_fields(self):
|
||||||
|
+ for qs, sep in (
|
||||||
|
+ ('a&b&c', '&'),
|
||||||
|
+ ('a;b;c', ';'),
|
||||||
|
+ ('a&b;c', 'legacy'),
|
||||||
|
+ ):
|
||||||
|
+ with self.subTest(qs=qs, sep=sep):
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ if sep != 'legacy':
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urllib.parse.parse_qsl(qs, separator=sep, max_num_fields=2)
|
||||||
|
+ if sep:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = sep
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urllib.parse.parse_qsl(qs, max_num_fields=2)
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_priority(self):
|
||||||
|
+ # env variable trumps config file
|
||||||
|
+ with self._qsl_sep_config('~'), EnvironmentVarGuard() as environ:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = '!'
|
||||||
|
+ result = urllib.parse.parse_qs('a=1!b=2~c=3')
|
||||||
|
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
|
||||||
|
+ # argument trumps config file
|
||||||
|
+ with self._qsl_sep_config('~'):
|
||||||
|
+ result = urllib.parse.parse_qs('a=1$b=2~c=3', separator='$')
|
||||||
|
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
|
||||||
|
+ # argument trumps env variable
|
||||||
|
+ with EnvironmentVarGuard() as environ:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = '~'
|
||||||
|
+ result = urllib.parse.parse_qs('a=1$b=2~c=3', separator='$')
|
||||||
|
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
|
||||||
|
+
|
||||||
|
+
|
||||||
|
def test_urlencode_sequences(self):
|
||||||
|
# Other tests incidentally urlencode things; test non-covered cases:
|
||||||
|
# Sequence and object values.
|
||||||
|
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
|
||||||
|
index 0c1c94f..83638bb 100644
|
||||||
|
--- a/Lib/urllib/parse.py
|
||||||
|
+++ b/Lib/urllib/parse.py
|
||||||
|
@@ -28,6 +28,7 @@ test_urlparse.py provides a good indicator of parsing behavior.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
+import os
|
||||||
|
import sys
|
||||||
|
import collections
|
||||||
|
import warnings
|
||||||
|
@@ -650,7 +651,7 @@ def unquote(string, encoding='utf-8', errors='replace'):
|
||||||
|
|
||||||
|
|
||||||
|
def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
|
||||||
|
- encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
|
||||||
|
+ encoding='utf-8', errors='replace', max_num_fields=None, separator=None):
|
||||||
|
"""Parse a query given as a string argument.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
@@ -690,9 +691,16 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
|
||||||
|
parsed_result[name] = [value]
|
||||||
|
return parsed_result
|
||||||
|
|
||||||
|
+class _QueryStringSeparatorWarning(RuntimeWarning):
|
||||||
|
+ """Warning for using default `separator` in parse_qs or parse_qsl"""
|
||||||
|
+
|
||||||
|
+# The default "separator" for parse_qsl can be specified in a config file.
|
||||||
|
+# It's cached after first read.
|
||||||
|
+_QS_SEPARATOR_CONFIG_FILENAME = '/etc/python/urllib.cfg'
|
||||||
|
+_default_qs_separator = None
|
||||||
|
|
||||||
|
def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
|
||||||
|
- encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
|
||||||
|
+ encoding='utf-8', errors='replace', max_num_fields=None, separator=None):
|
||||||
|
"""Parse a query given as a string argument.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
@@ -722,18 +730,77 @@ def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
|
||||||
|
"""
|
||||||
|
qs, _coerce_result = _coerce_args(qs)
|
||||||
|
|
||||||
|
- if not separator or (not isinstance(separator, (str, bytes))):
|
||||||
|
+ if isinstance(separator, bytes):
|
||||||
|
+ separator = separator.decode('ascii')
|
||||||
|
+
|
||||||
|
+ if (not separator or (not isinstance(separator, (str, bytes)))) and separator is not None:
|
||||||
|
raise ValueError("Separator must be of type string or bytes.")
|
||||||
|
|
||||||
|
+ # Used when both "&" and ";" act as separators. (Need a non-string value.)
|
||||||
|
+ _legacy = object()
|
||||||
|
+
|
||||||
|
+ if separator is None:
|
||||||
|
+ global _default_qs_separator
|
||||||
|
+ separator = _default_qs_separator
|
||||||
|
+ envvar_name = 'PYTHON_URLLIB_QS_SEPARATOR'
|
||||||
|
+ if separator is None:
|
||||||
|
+ # Set default separator from environment variable
|
||||||
|
+ separator = os.environ.get(envvar_name)
|
||||||
|
+ config_source = 'environment variable'
|
||||||
|
+ if separator is None:
|
||||||
|
+ # Set default separator from the configuration file
|
||||||
|
+ try:
|
||||||
|
+ file = open(_QS_SEPARATOR_CONFIG_FILENAME)
|
||||||
|
+ except FileNotFoundError:
|
||||||
|
+ pass
|
||||||
|
+ else:
|
||||||
|
+ with file:
|
||||||
|
+ import configparser
|
||||||
|
+ config = configparser.ConfigParser(
|
||||||
|
+ interpolation=None,
|
||||||
|
+ comment_prefixes=('#', ),
|
||||||
|
+ )
|
||||||
|
+ config.read_file(file)
|
||||||
|
+ separator = config.get('parse_qs', envvar_name, fallback=None)
|
||||||
|
+ _default_qs_separator = separator
|
||||||
|
+ config_source = _QS_SEPARATOR_CONFIG_FILENAME
|
||||||
|
+ if separator is None:
|
||||||
|
+ # The default is '&', but warn if not specified explicitly
|
||||||
|
+ if ';' in qs:
|
||||||
|
+ from warnings import warn
|
||||||
|
+ warn("The default separator of urllib.parse.parse_qsl and "
|
||||||
|
+ + "parse_qs was changed to '&' to avoid a web cache "
|
||||||
|
+ + "poisoning issue (CVE-2021-23336). "
|
||||||
|
+ + "By default, semicolons no longer act as query field "
|
||||||
|
+ + "separators. "
|
||||||
|
+ + "See https://access.redhat.com/articles/5860431 for "
|
||||||
|
+ + "more details.",
|
||||||
|
+ _QueryStringSeparatorWarning, stacklevel=2)
|
||||||
|
+ separator = '&'
|
||||||
|
+ elif separator == 'legacy':
|
||||||
|
+ separator = _legacy
|
||||||
|
+ elif len(separator) != 1:
|
||||||
|
+ raise ValueError(
|
||||||
|
+ f'{envvar_name} (from {config_source}) must contain '
|
||||||
|
+ + '1 character, or "legacy". See '
|
||||||
|
+ + 'https://access.redhat.com/articles/5860431 for more details.'
|
||||||
|
+ )
|
||||||
|
+
|
||||||
|
# If max_num_fields is defined then check that the number of fields
|
||||||
|
# is less than max_num_fields. This prevents a memory exhaustion DOS
|
||||||
|
# attack via post bodies with many fields.
|
||||||
|
if max_num_fields is not None:
|
||||||
|
- num_fields = 1 + qs.count(separator)
|
||||||
|
+ if separator is _legacy:
|
||||||
|
+ num_fields = 1 + qs.count('&') + qs.count(';')
|
||||||
|
+ else:
|
||||||
|
+ num_fields = 1 + qs.count(separator)
|
||||||
|
if max_num_fields < num_fields:
|
||||||
|
raise ValueError('Max number of fields exceeded')
|
||||||
|
|
||||||
|
- pairs = [s1 for s1 in qs.split(separator)]
|
||||||
|
+ if separator is _legacy:
|
||||||
|
+ pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
|
||||||
|
+ else:
|
||||||
|
+ pairs = [s1 for s1 in qs.split(separator)]
|
||||||
|
r = []
|
||||||
|
for name_value in pairs:
|
||||||
|
if not name_value and not strict_parsing:
|
||||||
|
diff --git a/Misc/NEWS.d/next/Security/2021-02-14-15-59-16.bpo-42967.YApqDS.rst b/Misc/NEWS.d/next/Security/2021-02-14-15-59-16.bpo-42967.YApqDS.rst
|
||||||
|
new file mode 100644
|
||||||
|
index 0000000..bc82c96
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/Misc/NEWS.d/next/Security/2021-02-14-15-59-16.bpo-42967.YApqDS.rst
|
||||||
|
@@ -0,0 +1 @@
|
||||||
|
+Make it possible to fix web cache poisoning vulnerability by allowing the user to choose a custom separator query args.
|
||||||
|
--
|
||||||
|
2.30.2
|
||||||
|
|
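On an interpreter carrying the 00359 patch above, the separator handling can be exercised as follows; the env variable name and warning behaviour come from the patch itself, while the sample query string is only for illustration:

```python
import os
import warnings
from urllib.parse import parse_qs

query = "a=1;b=2&c=3"

# Explicit separator: fields split on ';', no warning is emitted.
print(parse_qs(query, separator=";"))   # {'a': ['1'], 'b': ['2&c=3']}

# Default separator ('&') on input containing ';' triggers the downstream warning.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    print(parse_qs(query))              # {'a': ['1;b=2'], 'c': ['3']}
    print([w.category.__name__ for w in caught])

# Process-wide opt-in to the old "split on both" behaviour (downstream builds only).
os.environ["PYTHON_URLLIB_QS_SEPARATOR"] = "legacy"
print(parse_qs(query))                  # {'a': ['1'], 'b': ['2'], 'c': ['3']}
```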
SOURCES/00360-CVE-2021-3426.patch (new file, 100 lines)
@@ -0,0 +1,100 @@
From 7e38d3309e0a5a7b9e23ef933aef0079c6e317f7 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
 <31488909+miss-islington@users.noreply.github.com>
Date: Mon, 29 Mar 2021 06:02:40 -0700
Subject: [PATCH] bpo-42988: Remove the pydoc getfile feature (GH-25015)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

CVE-2021-3426: Remove the "getfile" feature of the pydoc module which
could be abused to read arbitrary files on the disk (directory
traversal vulnerability). Moreover, even source code of Python
modules can contain sensitive data like passwords. Vulnerability
reported by David Schwörer.
(cherry picked from commit 9b999479c0022edfc9835a8a1f06e046f3881048)

Co-authored-by: Victor Stinner <vstinner@python.org>
---
 Lib/pydoc.py                                  | 18 ------------------
 Lib/test/test_pydoc.py                        |  6 ------
 .../2021-03-24-14-16-56.bpo-42988.P2aNco.rst  |  4 ++++
 3 files changed, 4 insertions(+), 24 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Security/2021-03-24-14-16-56.bpo-42988.P2aNco.rst

diff --git a/Lib/pydoc.py b/Lib/pydoc.py
|
||||||
|
index dc3377d68f8caa..afec613dd85a06 100644
|
||||||
|
--- a/Lib/pydoc.py
|
||||||
|
+++ b/Lib/pydoc.py
|
||||||
|
@@ -2364,9 +2364,6 @@ def page(self, title, contents):
|
||||||
|
%s</head><body bgcolor="#f0f0f8">%s<div style="clear:both;padding-top:.5em;">%s</div>
|
||||||
|
</body></html>''' % (title, css_link, html_navbar(), contents)
|
||||||
|
|
||||||
|
- def filelink(self, url, path):
|
||||||
|
- return '<a href="getfile?key=%s">%s</a>' % (url, path)
|
||||||
|
-
|
||||||
|
|
||||||
|
html = _HTMLDoc()
|
||||||
|
|
||||||
|
@@ -2452,19 +2449,6 @@ def bltinlink(name):
|
||||||
|
'key = %s' % key, '#ffffff', '#ee77aa', '<br>'.join(results))
|
||||||
|
return 'Search Results', contents
|
||||||
|
|
||||||
|
- def html_getfile(path):
|
||||||
|
- """Get and display a source file listing safely."""
|
||||||
|
- path = urllib.parse.unquote(path)
|
||||||
|
- with tokenize.open(path) as fp:
|
||||||
|
- lines = html.escape(fp.read())
|
||||||
|
- body = '<pre>%s</pre>' % lines
|
||||||
|
- heading = html.heading(
|
||||||
|
- '<big><big><strong>File Listing</strong></big></big>',
|
||||||
|
- '#ffffff', '#7799ee')
|
||||||
|
- contents = heading + html.bigsection(
|
||||||
|
- 'File: %s' % path, '#ffffff', '#ee77aa', body)
|
||||||
|
- return 'getfile %s' % path, contents
|
||||||
|
-
|
||||||
|
def html_topics():
|
||||||
|
"""Index of topic texts available."""
|
||||||
|
|
||||||
|
@@ -2556,8 +2540,6 @@ def get_html_page(url):
|
||||||
|
op, _, url = url.partition('=')
|
||||||
|
if op == "search?key":
|
||||||
|
title, content = html_search(url)
|
||||||
|
- elif op == "getfile?key":
|
||||||
|
- title, content = html_getfile(url)
|
||||||
|
elif op == "topic?key":
|
||||||
|
# try topics first, then objects.
|
||||||
|
try:
|
||||||
|
diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py
|
||||||
|
index c80477c50f0980..72ed8a93b712b0 100644
|
||||||
|
--- a/Lib/test/test_pydoc.py
|
||||||
|
+++ b/Lib/test/test_pydoc.py
|
||||||
|
@@ -1360,18 +1360,12 @@ def test_url_requests(self):
|
||||||
|
("topic?key=def", "Pydoc: KEYWORD def"),
|
||||||
|
("topic?key=STRINGS", "Pydoc: TOPIC STRINGS"),
|
||||||
|
("foobar", "Pydoc: Error - foobar"),
|
||||||
|
- ("getfile?key=foobar", "Pydoc: Error - getfile?key=foobar"),
|
||||||
|
]
|
||||||
|
|
||||||
|
with self.restrict_walk_packages():
|
||||||
|
for url, title in requests:
|
||||||
|
self.call_url_handler(url, title)
|
||||||
|
|
||||||
|
- path = string.__file__
|
||||||
|
- title = "Pydoc: getfile " + path
|
||||||
|
- url = "getfile?key=" + path
|
||||||
|
- self.call_url_handler(url, title)
|
||||||
|
-
|
||||||
|
|
||||||
|
class TestHelper(unittest.TestCase):
|
||||||
|
def test_keywords(self):
|
||||||
|
diff --git a/Misc/NEWS.d/next/Security/2021-03-24-14-16-56.bpo-42988.P2aNco.rst b/Misc/NEWS.d/next/Security/2021-03-24-14-16-56.bpo-42988.P2aNco.rst
|
||||||
|
new file mode 100644
|
||||||
|
index 00000000000000..4b42dd05305a83
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/Misc/NEWS.d/next/Security/2021-03-24-14-16-56.bpo-42988.P2aNco.rst
|
||||||
|
@@ -0,0 +1,4 @@
|
||||||
|
+CVE-2021-3426: Remove the ``getfile`` feature of the :mod:`pydoc` module which
|
||||||
|
+could be abused to read arbitrary files on the disk (directory traversal
|
||||||
|
+vulnerability). Moreover, even source code of Python modules can contain
|
||||||
|
+sensitive data like passwords. Vulnerability reported by David Schwörer.
|
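After the 00360 patch, a getfile request falls through to pydoc's generic error page. The check below uses pydoc's private `_url_handler` (the helper the removed test exercised indirectly), so treat the exact call as an assumption rather than a documented API:

```python
import pydoc

# On a patched interpreter the "getfile" op is unknown, so the handler
# renders an error page instead of returning the file's contents.
html = pydoc._url_handler("getfile?key=/etc/passwd", "text/html")
print("Error" in html)   # expected: True once the getfile feature is removed
```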
SOURCES/00365-CVE-2021-29921.patch (new file, 62 lines)
@@ -0,0 +1,62 @@
diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst
|
||||||
|
index 2cdfddb..d464d2a 100644
|
||||||
|
--- a/Doc/library/ipaddress.rst
|
||||||
|
+++ b/Doc/library/ipaddress.rst
|
||||||
|
@@ -104,8 +104,7 @@ write code that handles both IP versions correctly. Address objects are
|
||||||
|
1. A string in decimal-dot notation, consisting of four decimal integers in
|
||||||
|
the inclusive range 0--255, separated by dots (e.g. ``192.168.0.1``). Each
|
||||||
|
integer represents an octet (byte) in the address. Leading zeroes are
|
||||||
|
- tolerated only for values less than 8 (as there is no ambiguity
|
||||||
|
- between the decimal and octal interpretations of such strings).
|
||||||
|
+ not tolerated to prevent confusion with octal notation.
|
||||||
|
2. An integer that fits into 32 bits.
|
||||||
|
3. An integer packed into a :class:`bytes` object of length 4 (most
|
||||||
|
significant octet first).
|
||||||
|
diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py
|
||||||
|
index 28b7b61..d351f07 100644
|
||||||
|
--- a/Lib/ipaddress.py
|
||||||
|
+++ b/Lib/ipaddress.py
|
||||||
|
@@ -1173,6 +1173,11 @@ class _BaseV4:
|
||||||
|
if len(octet_str) > 3:
|
||||||
|
msg = "At most 3 characters permitted in %r"
|
||||||
|
raise ValueError(msg % octet_str)
|
||||||
|
+ # Handle leading zeros as strict as glibc's inet_pton()
|
||||||
|
+ # See security bug bpo-36384
|
||||||
|
+ if octet_str != '0' and octet_str[0] == '0':
|
||||||
|
+ msg = "Leading zeros are not permitted in %r"
|
||||||
|
+ raise ValueError(msg % octet_str)
|
||||||
|
# Convert to integer (we know digits are legal)
|
||||||
|
octet_int = int(octet_str, 10)
|
||||||
|
if octet_int > 255:
|
||||||
|
diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py
|
||||||
|
index 2f1c5b6..1297b83 100644
|
||||||
|
--- a/Lib/test/test_ipaddress.py
|
||||||
|
+++ b/Lib/test/test_ipaddress.py
|
||||||
|
@@ -97,10 +97,23 @@ class CommonTestMixin:
|
||||||
|
class CommonTestMixin_v4(CommonTestMixin):
|
||||||
|
|
||||||
|
def test_leading_zeros(self):
|
||||||
|
- self.assertInstancesEqual("000.000.000.000", "0.0.0.0")
|
||||||
|
- self.assertInstancesEqual("192.168.000.001", "192.168.0.1")
|
||||||
|
- self.assertInstancesEqual("016.016.016.016", "16.16.16.16")
|
||||||
|
- self.assertInstancesEqual("001.000.008.016", "1.0.8.16")
|
||||||
|
+ # bpo-36384: no leading zeros to avoid ambiguity with octal notation
|
||||||
|
+ msg = "Leading zeros are not permitted in '\d+'"
|
||||||
|
+ addresses = [
|
||||||
|
+ "000.000.000.000",
|
||||||
|
+ "192.168.000.001",
|
||||||
|
+ "016.016.016.016",
|
||||||
|
+ "192.168.000.001",
|
||||||
|
+ "001.000.008.016",
|
||||||
|
+ "01.2.3.40",
|
||||||
|
+ "1.02.3.40",
|
||||||
|
+ "1.2.03.40",
|
||||||
|
+ "1.2.3.040",
|
||||||
|
+ ]
|
||||||
|
+ for address in addresses:
|
||||||
|
+ with self.subTest(address=address):
|
||||||
|
+ with self.assertAddressError(msg):
|
||||||
|
+ self.factory(address)
|
||||||
|
|
||||||
|
def test_int(self):
|
||||||
|
self.assertInstancesEqual(0, "0.0.0.0")
|
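With the 00365 patch applied, IPv4 octets with leading zeros are rejected instead of being silently reinterpreted; a quick illustration:

```python
import ipaddress

# Accepted: no leading zeros.
print(ipaddress.ip_address("192.168.0.1"))

# Rejected after the patch: leading zeros look like octal notation.
for bad in ("192.168.000.001", "016.016.016.016"):
    try:
        ipaddress.ip_address(bad)
    except ValueError as exc:
        print(f"{bad!r} rejected: {exc}")
```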
@@ -13,11 +13,11 @@ URL: https://www.python.org/

 # WARNING When rebasing to a new Python version,
 # remember to update the python3-docs package as well
-%global general_version %{pybasever}.6
+%global general_version %{pybasever}.8
 #global prerel ...
 %global upstream_version %{general_version}%{?prerel}
 Version: %{general_version}%{?prerel:~%{prerel}}
-Release: 3%{?dist}
+Release: 4%{?dist}
 License: Python

 # Exclude i686 arch. Due to a modularity issue it's being added to the
@@ -350,11 +350,24 @@ Patch329: 00329-fips.patch
 # a nightmare because it's basically a binary file.
 Patch353: 00353-architecture-names-upstream-downstream.patch

-# 00357 #
-# Security fix for CVE-2021-3177
-# Stack-based buffer overflow in PyCArg_repr in _ctypes/callproc.c
-# Resolves upstream: https://bugs.python.org/issue42938
-Patch357: 00357-CVE-2021-3177.patch
+# 00359 #
+# CVE-2021-23336 python: Web Cache Poisoning via urllib.parse.parse_qsl and
+# urllib.parse.parse_qs by using a semicolon in query parameters
+# Upstream: https://bugs.python.org/issue42967
+# Main BZ: https://bugzilla.redhat.com/show_bug.cgi?id=1928904
+Patch359: 00359-CVE-2021-23336.patch
+
+# 00360 #
+# CVE-2021-3426: information disclosure via pydoc
+# Upstream: https://bugs.python.org/issue42988
+# Main BZ: https://bugzilla.redhat.com/show_bug.cgi?id=1935913
+Patch360: 00360-CVE-2021-3426.patch
+
+# 00365 #
+# CVE-2021-29921: Improper input validation of octal strings in the ipaddress module
+# Upstream: https://bugs.python.org/issue36384
+# Main bugzilla: https://bugzilla.redhat.com/show_bug.cgi?id=1957458
+Patch365: 00365-CVE-2021-29921.patch

 # (New patches go here ^^^)
 #
@@ -382,10 +395,10 @@ Provides: python%{pybasever} = %{version}-%{release}
 # the possible alternatives
 Provides: alternative-for(python)

-# Runtime require alternatives
-Requires: %{_sbindir}/alternatives
-Requires(post): %{_sbindir}/alternatives
-Requires(postun): %{_sbindir}/alternatives
+# Require alternatives version that implements the --keep-foreign flag
+Requires: alternatives >= 1.19.1-1
+Requires(post): alternatives >= 1.19.1-1
+Requires(postun): alternatives >= 1.19.1-1

 %if %{without flatpackage}

@ -512,6 +525,9 @@ BuildRequires: python-rpm-macros
|
|||||||
# But we want them when packages BuildRequire python3-devel
|
# But we want them when packages BuildRequire python3-devel
|
||||||
Requires: (python-rpm-macros if rpm-build)
|
Requires: (python-rpm-macros if rpm-build)
|
||||||
Requires: (python3-rpm-macros if rpm-build)
|
Requires: (python3-rpm-macros if rpm-build)
|
||||||
|
|
||||||
|
# Require alternatives version that implements the --keep-foreign flag
|
||||||
|
Requires(postun): alternatives >= 1.19.1-1
|
||||||
# python38 installs the alternatives master symlink to which we attach a slave
|
# python38 installs the alternatives master symlink to which we attach a slave
|
||||||
Requires(post): python38
|
Requires(post): python38
|
||||||
Requires(postun): python38
|
Requires(postun): python38
|
||||||
@ -550,6 +566,8 @@ Requires: %{name}-tkinter = %{version}-%{release}
|
|||||||
|
|
||||||
%{?python_provide:%python_provide python38-idle}
|
%{?python_provide:%python_provide python38-idle}
|
||||||
|
|
||||||
|
# Require alternatives version that implements the --keep-foreign flag
|
||||||
|
Requires(postun): alternatives >= 1.19.1-1
|
||||||
# python38 installs the alternatives master symlink to which we attach a slave
|
# python38 installs the alternatives master symlink to which we attach a slave
|
||||||
Requires(post): python38
|
Requires(post): python38
|
||||||
Requires(postun): python38
|
Requires(postun): python38
|
||||||
@ -606,6 +624,9 @@ Requires: %{name}-devel%{?_isa} = %{version}-%{release}
|
|||||||
Requires: %{name}-test%{?_isa} = %{version}-%{release}
|
Requires: %{name}-test%{?_isa} = %{version}-%{release}
|
||||||
Requires: %{name}-tkinter%{?_isa} = %{version}-%{release}
|
Requires: %{name}-tkinter%{?_isa} = %{version}-%{release}
|
||||||
Requires: %{name}-idle%{?_isa} = %{version}-%{release}
|
Requires: %{name}-idle%{?_isa} = %{version}-%{release}
|
||||||
|
|
||||||
|
# Require alternatives version that implements the --keep-foreign flag
|
||||||
|
Requires(postun): alternatives >= 1.19.1-1
|
||||||
# python38 installs the alternatives master symlink to which we attach a slave
|
# python38 installs the alternatives master symlink to which we attach a slave
|
||||||
Requires(post): python38
|
Requires(post): python38
|
||||||
Requires(postun): python38
|
Requires(postun): python38
|
||||||
@@ -703,8 +724,9 @@ rm Lib/ensurepip/_bundled/*.whl
 %patch328 -p1
 %patch329 -p1
 %patch353 -p1
-%patch357 -p1
+%patch359 -p1
+%patch360 -p1
+%patch365 -p1

 # Remove files that should be generated by the build
 # (This is after patching, so that we can use patches directly from upstream)
@ -1224,15 +1246,15 @@ fi
|
|||||||
%postun
|
%postun
|
||||||
# Do this only during uninstall process (not during update)
|
# Do this only during uninstall process (not during update)
|
||||||
if [ $1 -eq 0 ]; then
|
if [ $1 -eq 0 ]; then
|
||||||
alternatives --remove python \
|
alternatives --keep-foreign --remove python \
|
||||||
%{_bindir}/python3.8
|
%{_bindir}/python3.8
|
||||||
|
|
||||||
alternatives --remove python3 \
|
alternatives --keep-foreign --remove python3 \
|
||||||
%{_bindir}/python3.8
|
%{_bindir}/python3.8
|
||||||
|
|
||||||
# Remove link python → python3 if no other python3.* exists
|
# Remove link python → python3 if no other python3.* exists
|
||||||
if ! alternatives --display python3 > /dev/null; then
|
if ! alternatives --display python3 > /dev/null; then
|
||||||
alternatives --remove python \
|
alternatives --keep-foreign --remove python \
|
||||||
%{_bindir}/python3
|
%{_bindir}/python3
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
@ -1247,7 +1269,7 @@ alternatives --add-slave python3 %{_bindir}/python3.8 \
|
|||||||
%postun devel
|
%postun devel
|
||||||
# Do this only during uninstall process (not during update)
|
# Do this only during uninstall process (not during update)
|
||||||
if [ $1 -eq 0 ]; then
|
if [ $1 -eq 0 ]; then
|
||||||
alternatives --remove-slave python3 %{_bindir}/python3.8 \
|
alternatives --keep-foreign --remove-slave python3 %{_bindir}/python3.8 \
|
||||||
python3-config
|
python3-config
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@ -1265,9 +1287,9 @@ alternatives --add-slave python3 %{_bindir}/python3.8 \
|
|||||||
%postun debug
|
%postun debug
|
||||||
# Do this only during uninstall process (not during update)
|
# Do this only during uninstall process (not during update)
|
||||||
if [ $1 -eq 0 ]; then
|
if [ $1 -eq 0 ]; then
|
||||||
alternatives --remove-slave python3 %{_bindir}/python3.8 \
|
alternatives --keep-foreign --remove-slave python3 %{_bindir}/python3.8 \
|
||||||
python3-debug
|
python3-debug
|
||||||
alternatives --remove-slave python3 %{_bindir}/python3.8 \
|
alternatives --keep-foreign --remove-slave python3 %{_bindir}/python3.8 \
|
||||||
python3-debug-config
|
python3-debug-config
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@ -1281,7 +1303,7 @@ alternatives --add-slave python3 %{_bindir}/python3.8 \
|
|||||||
%postun idle
|
%postun idle
|
||||||
# Do this only during uninstall process (not during update)
|
# Do this only during uninstall process (not during update)
|
||||||
if [ $1 -eq 0 ]; then
|
if [ $1 -eq 0 ]; then
|
||||||
alternatives --remove-slave python3 %{_bindir}/python3.8 \
|
alternatives --keep-foreign --remove-slave python3 %{_bindir}/python3.8 \
|
||||||
idle3
|
idle3
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -1793,6 +1815,22 @@ fi
 # ======================================================

 %changelog
+* Mon Aug 02 2021 Tomas Orsava <torsava@redhat.com> - 3.8.8-4
+- Adjusted the postun scriptlets to enable upgrading to RHEL 9
+- Resolves: rhbz#1933055
+
+* Tue Jul 27 2021 Charalampos Stratakis <cstratak@redhat.com> - 3.8.8-3
+- Security fix for CVE-2021-29921: Leading zeros in IPv4 addresses are no longer tolerated
+  Resolves: rhbz#1957458
+
+* Fri Apr 30 2021 Charalampos Stratakis <cstratak@redhat.com> - 3.8.8-2
+- Security fix for CVE-2021-3426: information disclosure via pydoc
+  Resolves: rhbz#1935913
+
+* Mon Mar 15 2021 Lumír Balhar <lbalhar@redhat.com> - 3.8.8-1
+- Update to 3.8.8 and fix CVE-2021-23336
+  Resolves: rhbz#1928904
+
 * Fri Jan 22 2021 Charalampos Stratakis <cstratak@redhat.com> - 3.8.6-3
 - Security fix for CVE-2021-3177
   Resolves: rhbz#1919161