Import rpm: d4a05a5353d5b6b665297fbd4771399357b1359b

commit 966b47a9e5

1 .gitignore (vendored) Normal file
@@ -0,0 +1 @@
SOURCES/Python-2.7.18-noexe.tar.xz

28 00001-pydocnogui.patch Normal file
@@ -0,0 +1,28 @@
diff -up Python-2.7.3/Lib/pydoc.py.no_gui Python-2.7.3/Lib/pydoc.py
--- Python-2.7.3/Lib/pydoc.py.no_gui 2012-04-09 19:07:31.000000000 -0400
+++ Python-2.7.3/Lib/pydoc.py 2013-02-19 13:48:44.480054515 -0500
@@ -19,9 +19,6 @@ of all available modules.
local machine to generate documentation web pages. Port number 0 can be
used to get an arbitrary unused port.

-For platforms without a command line, "pydoc -g" starts the HTTP server
-and also pops up a little window for controlling it.
-
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".

@@ -2346,13 +2340,10 @@ def cli():
Start an HTTP server on the given port on the local machine. Port
number 0 can be used to get an arbitrary unused port.

-%s -g
- Pop up a graphical interface for finding and serving documentation.
-
%s -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '%s', it is treated as a filename; if
it names a directory, documentation is written for all the contents.
-""" % (cmd, os.sep, cmd, cmd, cmd, cmd, os.sep)
+""" % (cmd, os.sep, cmd, cmd, cmd, os.sep)

if __name__ == '__main__': cli()

21 00010-2.7.13-binutils-no-dep.patch Normal file
@@ -0,0 +1,21 @@
diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py
index ab10ec5..923d1b7 100644
--- a/Lib/ctypes/util.py
+++ b/Lib/ctypes/util.py
@@ -140,11 +140,15 @@ elif os.name == "posix":
# assuming GNU binutils / ELF
if not f:
return None
- cmd = 'if ! type objdump >/dev/null 2>&1; then exit; fi;' \
+ cmd = 'if ! type objdump >/dev/null 2>&1; then exit 10; fi;' \
'objdump -p -j .dynamic 2>/dev/null "$1"'
proc = subprocess.Popen((cmd, '_get_soname', f), shell=True,
stdout=subprocess.PIPE)
[dump, _] = proc.communicate()
+ if proc.returncode == 10:
+ return os.path.basename(f) # This is good for GLibc, I think,
+ # and a dep on binutils is big (for
+ # live CDs).
res = re.search(br'\sSONAME\s+([^\s]+)', dump)
if not res:
return None

198 00055-systemtap.patch Normal file
@@ -0,0 +1,198 @@
diff -up Python-2.7rc1/configure.ac.systemtap Python-2.7rc1/configure.ac
--- Python-2.7rc1/configure.ac.systemtap 2010-06-06 10:53:15.514975012 -0400
+++ Python-2.7rc1/configure.ac 2010-06-06 10:53:15.520974361 -0400
@@ -2616,6 +2616,38 @@ if test "$with_valgrind" != no; then
)
fi

+# Check for dtrace support
+AC_MSG_CHECKING(for --with-dtrace)
+AC_ARG_WITH(dtrace,
+ AC_HELP_STRING(--with(out)-dtrace, disable/enable dtrace support))
+
+if test ! -z "$with_dtrace"
+then
+ if dtrace -G -o /dev/null -s $srcdir/Include/pydtrace.d 2>/dev/null
+ then
+ AC_DEFINE(WITH_DTRACE, 1,
+ [Define if you want to compile in Dtrace support])
+ with_dtrace="Sun"
+ DTRACEOBJS="Python/dtrace.o"
+ DTRADEHDRS=""
+ elif dtrace -h -o /dev/null -s $srcdir/Include/pydtrace.d
+ then
+ AC_DEFINE(WITH_DTRACE, 1,
+ [Define if you want to compile in Dtrace support])
+ with_dtrace="Apple"
+ DTRACEOBJS=""
+ DTRADEHDRS="pydtrace.h"
+ else
+ with_dtrace="no"
+ fi
+else
+ with_dtrace="no"
+fi
+
+AC_MSG_RESULT($with_dtrace)
+AC_SUBST(DTRACEOBJS)
+AC_SUBST(DTRACEHDRS)
+
# Check for --with-wctype-functions
AC_MSG_CHECKING(for --with-wctype-functions)
AC_ARG_WITH(wctype-functions,
diff -up Python-2.7rc1/Include/pydtrace.d.systemtap Python-2.7rc1/Include/pydtrace.d
--- Python-2.7rc1/Include/pydtrace.d.systemtap 2010-06-06 10:53:15.520974361 -0400
+++ Python-2.7rc1/Include/pydtrace.d 2010-06-06 10:53:15.520974361 -0400
@@ -0,0 +1,10 @@
+provider python {
+ probe function__entry(const char *, const char *, int);
+ probe function__return(const char *, const char *, int);
+};
+
+#pragma D attributes Evolving/Evolving/Common provider python provider
+#pragma D attributes Private/Private/Common provider python module
+#pragma D attributes Private/Private/Common provider python function
+#pragma D attributes Evolving/Evolving/Common provider python name
+#pragma D attributes Evolving/Evolving/Common provider python args
diff -up Python-2.7rc1/Makefile.pre.in.systemtap Python-2.7rc1/Makefile.pre.in
--- Python-2.7rc1/Makefile.pre.in.systemtap 2010-06-06 10:53:15.488978775 -0400
+++ Python-2.7rc1/Makefile.pre.in 2010-06-06 11:05:30.411100568 -0400
@@ -298,6 +298,7 @@ PYTHON_OBJS= \
Python/formatter_unicode.o \
Python/formatter_string.o \
Python/$(DYNLOADFILE) \
+ @DTRACEOBJS@ \
$(LIBOBJS) \
$(MACHDEP_OBJS) \
$(THREADOBJ)
@@ -599,6 +600,18 @@ Python/formatter_unicode.o: $(srcdir)/Py
Python/formatter_string.o: $(srcdir)/Python/formatter_string.c \
$(STRINGLIB_HEADERS)

+# Only needed with --with-dtrace
+buildinclude:
+ mkdir -p Include
+
+Include/pydtrace.h: buildinclude $(srcdir)/Include/pydtrace.d
+ dtrace -o $@ $(DFLAGS) -C -h -s $(srcdir)/Include/pydtrace.d
+
+Python/ceval.o: Include/pydtrace.h
+
+Python/dtrace.o: buildinclude $(srcdir)/Include/pydtrace.d Python/ceval.o
+ dtrace -o $@ $(DFLAGS) -C -G -s $(srcdir)/Include/pydtrace.d Python/ceval.o
+
############################################################################
# Header files

@@ -1251,7 +1264,7 @@ Python/thread.o: @THREADHEADERS@
.PHONY: frameworkinstall frameworkinstallframework frameworkinstallstructure
.PHONY: frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools
.PHONY: frameworkaltinstallunixtools recheck clean clobber distclean
-.PHONY: smelly funny patchcheck altmaninstall commoninstall
+.PHONY: smelly funny patchcheck altmaninstall commoninstall buildinclude
.PHONY: gdbhooks

# IF YOU PUT ANYTHING HERE IT WILL GO AWAY
diff -up Python-2.7rc1/pyconfig.h.in.systemtap Python-2.7rc1/pyconfig.h.in
--- Python-2.7rc1/pyconfig.h.in.systemtap 2010-05-08 07:04:18.000000000 -0400
+++ Python-2.7rc1/pyconfig.h.in 2010-06-06 10:53:15.521974070 -0400
@@ -1074,6 +1074,9 @@
/* Define if you want documentation strings in extension modules */
#undef WITH_DOC_STRINGS

+/* Define if you want to compile in Dtrace support */
+#undef WITH_DTRACE
+
/* Define if you want to use the new-style (Openstep, Rhapsody, MacOS) dynamic
linker (dyld) instead of the old-style (NextStep) dynamic linker (rld).
Dyld is necessary to support frameworks. */
diff -up Python-2.7rc1/Python/ceval.c.systemtap Python-2.7rc1/Python/ceval.c
--- Python-2.7rc1/Python/ceval.c.systemtap 2010-05-09 10:46:46.000000000 -0400
+++ Python-2.7rc1/Python/ceval.c 2010-06-06 11:08:40.683100500 -0400
@@ -19,6 +19,10 @@

#include <ctype.h>

+#ifdef WITH_DTRACE
+#include "pydtrace.h"
+#endif
+
#ifndef WITH_TSC

#define READ_TIMESTAMP(var)
@@ -671,6 +675,55 @@ PyEval_EvalCode(PyCodeObject *co, PyObje
NULL);
}

+#ifdef WITH_DTRACE
+static void
+dtrace_entry(PyFrameObject *f)
+{
+ const char *filename;
+ const char *fname;
+ int lineno;
+
+ filename = PyString_AsString(f->f_code->co_filename);
+ fname = PyString_AsString(f->f_code->co_name);
+ lineno = PyCode_Addr2Line(f->f_code, f->f_lasti);
+
+ PYTHON_FUNCTION_ENTRY((char *)filename, (char *)fname, lineno);
+
+ /*
+ * Currently a USDT tail-call will not receive the correct arguments.
+ * Disable the tail call here.
+ */
+#if defined(__sparc)
+ asm("nop");
+#endif
+}
+
+static void
+dtrace_return(PyFrameObject *f)
+{
+ const char *filename;
+ const char *fname;
+ int lineno;
+
+ filename = PyString_AsString(f->f_code->co_filename);
+ fname = PyString_AsString(f->f_code->co_name);
+ lineno = PyCode_Addr2Line(f->f_code, f->f_lasti);
+ PYTHON_FUNCTION_RETURN((char *)filename, (char *)fname, lineno);
+
+ /*
+ * Currently a USDT tail-call will not receive the correct arguments.
+ * Disable the tail call here.
+ */
+#if defined(__sparc)
+ asm("nop");
+#endif
+}
+#else
+#define PYTHON_FUNCTION_ENTRY_ENABLED() 0
+#define PYTHON_FUNCTION_RETURN_ENABLED() 0
+#define dtrace_entry(f)
+#define dtrace_return(f)
+#endif

/* Interpreter main loop */

@@ -909,6 +962,9 @@ PyEval_EvalFrameEx(PyFrameObject *f, int
}
}

+ if (PYTHON_FUNCTION_ENTRY_ENABLED())
+ dtrace_entry(f);
+
co = f->f_code;
names = co->co_names;
consts = co->co_consts;
@@ -3000,6 +3056,9 @@ fast_yield:

/* pop frame */
exit_eval_frame:
+ if (PYTHON_FUNCTION_RETURN_ENABLED())
+ dtrace_return(f);
+
Py_LeaveRecursiveCall();
tstate->frame = f->f_back;

193 00102-2.7.13-lib64.patch Normal file
@@ -0,0 +1,193 @@
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index b9f1c6c..7b23714 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -42,14 +42,14 @@ else:
INSTALL_SCHEMES = {
'unix_prefix': {
'purelib': '$base/lib/python$py_version_short/site-packages',
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
+ 'platlib': '$platbase/lib64/python$py_version_short/site-packages',
'headers': '$base/include/python$py_version_short/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
},
'unix_home': {
'purelib': '$base/lib/python',
- 'platlib': '$base/lib/python',
+ 'platlib': '$base/lib64/python',
'headers': '$base/include/python/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 031f809..ec5d584 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -120,8 +120,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
prefix = plat_specific and EXEC_PREFIX or PREFIX

if os.name == "posix":
+ if plat_specific or standard_lib:
+ lib = "lib64"
+ else:
+ lib = "lib"
libpython = os.path.join(prefix,
- "lib", "python" + get_python_version())
+ lib, "python" + get_python_version())
if standard_lib:
return libpython
else:
diff --git a/Lib/site.py b/Lib/site.py
index c360802..868b7cb 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -288,12 +288,16 @@ def getsitepackages():
if sys.platform in ('os2emx', 'riscos'):
sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
elif os.sep == '/':
+ sitepackages.append(os.path.join(prefix, "lib64",
+ "python" + sys.version[:3],
+ "site-packages"))
sitepackages.append(os.path.join(prefix, "lib",
"python" + sys.version[:3],
"site-packages"))
sitepackages.append(os.path.join(prefix, "lib", "site-python"))
else:
sitepackages.append(prefix)
+ sitepackages.append(os.path.join(prefix, "lib64", "site-packages"))
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
return sitepackages

diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
index b4384ee..349f688 100644
--- a/Lib/test/test_site.py
+++ b/Lib/test/test_site.py
@@ -254,17 +254,20 @@ class HelperFunctionsTests(unittest.TestCase):
self.assertEqual(dirs[0], wanted)
elif os.sep == '/':
# OS X, Linux, FreeBSD, etc
- self.assertEqual(len(dirs), 2)
- wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3],
+ self.assertEqual(len(dirs), 3)
+ wanted = os.path.join('xoxo', 'lib64', 'python' + sys.version[:3],
'site-packages')
self.assertEqual(dirs[0], wanted)
- wanted = os.path.join('xoxo', 'lib', 'site-python')
+ wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3],
+ 'site-packages')
self.assertEqual(dirs[1], wanted)
+ wanted = os.path.join('xoxo', 'lib', 'site-python')
+ self.assertEqual(dirs[2], wanted)
else:
# other platforms
self.assertEqual(len(dirs), 2)
self.assertEqual(dirs[0], 'xoxo')
- wanted = os.path.join('xoxo', 'lib', 'site-packages')
+ wanted = os.path.join('xoxo', 'lib64', 'site-packages')
self.assertEqual(dirs[1], wanted)

def test_no_home_directory(self):
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 4f59dd3..877698c 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -110,7 +110,7 @@ LIBDIR= @libdir@
MANDIR= @mandir@
INCLUDEDIR= @includedir@
CONFINCLUDEDIR= $(exec_prefix)/include
-SCRIPTDIR= $(prefix)/lib
+SCRIPTDIR= $(prefix)/lib64

# Detailed destination directories
BINLIBDEST= $(LIBDIR)/python$(VERSION)
diff --git a/Modules/Setup.dist b/Modules/Setup.dist
index 2cf35a9..c4c88cb 100644
--- a/Modules/Setup.dist
+++ b/Modules/Setup.dist
@@ -231,7 +231,7 @@ crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems
# Some more UNIX dependent modules -- off by default, since these
# are not supported by all UNIX systems:

-nis nismodule.c -lnsl -ltirpc -I/usr/include/tirpc -I/usr/include/nsl -L/usr/lib/nsl
+nis nismodule.c -lnsl -ltirpc -I/usr/include/tirpc -I/usr/include/nsl -L/usr/lib64/nsl
termios termios.c # Steen Lumholt's termios module
resource resource.c # Jeremy Hylton's rlimit interface

@@ -416,7 +416,7 @@ gdbm gdbmmodule.c -lgdbm
# Edit the variables DB and DBLIBVERto point to the db top directory
# and the subdirectory of PORT where you built it.
DBINC=/usr/include/libdb
-DBLIB=/usr/lib
+DBLIB=/usr/lib64
_bsddb _bsddb.c -I$(DBINC) -L$(DBLIB) -ldb

# Historical Berkeley DB 1.85
@@ -462,7 +462,7 @@ cPickle cPickle.c
# Andrew Kuchling's zlib module.
# This require zlib 1.1.3 (or later).
# See http://www.gzip.org/zlib/
-zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz
+zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib64 -lz

# Interface to the Expat XML parser
# More information on Expat can be found at www.libexpat.org.
diff --git a/Modules/getpath.c b/Modules/getpath.c
index fd33a01..c5c86fd 100644
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -108,7 +108,7 @@ static char prefix[MAXPATHLEN+1];
static char exec_prefix[MAXPATHLEN+1];
static char progpath[MAXPATHLEN+1];
static char *module_search_path = NULL;
-static char lib_python[] = "lib/python" VERSION;
+static char lib_python[] = "lib64/python" VERSION;

static void
reduce(char *dir)
@@ -548,7 +548,7 @@ calculate_path(void)
fprintf(stderr,
"Could not find platform dependent libraries <exec_prefix>\n");
strncpy(exec_prefix, EXEC_PREFIX, MAXPATHLEN);
- joinpath(exec_prefix, "lib/lib-dynload");
+ joinpath(exec_prefix, "lib64/lib-dynload");
}
/* If we found EXEC_PREFIX do *not* reduce it! (Yet.) */

diff --git a/setup.py b/setup.py
index 0288a6b..7905f6f 100644
--- a/setup.py
+++ b/setup.py
@@ -456,7 +456,7 @@ class PyBuildExt(build_ext):
def detect_modules(self):
# Ensure that /usr/local is always used
if not cross_compiling:
- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
+ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64')
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
if cross_compiling:
self.add_gcc_paths()
@@ -782,11 +782,11 @@ class PyBuildExt(build_ext):
elif curses_library:
readline_libs.append(curses_library)
elif self.compiler.find_library_file(lib_dirs +
- ['/usr/lib/termcap'],
+ ['/usr/lib64/termcap'],
'termcap'):
readline_libs.append('termcap')
exts.append( Extension('readline', ['readline.c'],
- library_dirs=['/usr/lib/termcap'],
+ library_dirs=['/usr/lib64/termcap'],
extra_link_args=readline_extra_link_args,
libraries=readline_libs) )
else:
@@ -821,8 +821,8 @@ class PyBuildExt(build_ext):
if krb5_h:
ssl_incs += krb5_h
ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
- ['/usr/local/ssl/lib',
- '/usr/contrib/ssl/lib/'
+ ['/usr/local/ssl/lib64',
+ '/usr/contrib/ssl/lib64/'
] )

if (ssl_incs is not None and

13 00104-lib64-fix-for-test_install.patch Normal file
@@ -0,0 +1,13 @@
--- Python-2.7.2/Lib/distutils/tests/test_install.py.lib64 2011-09-08 17:51:57.851405376 -0400
+++ Python-2.7.2/Lib/distutils/tests/test_install.py 2011-09-08 18:40:46.754205096 -0400
@@ -41,8 +41,9 @@ class InstallTestCase(support.TempdirMan
self.assertEqual(got, expected)

libdir = os.path.join(destination, "lib", "python")
+ platlibdir = os.path.join(destination, "lib64", "python")
check_path(cmd.install_lib, libdir)
- check_path(cmd.install_platlib, libdir)
+ check_path(cmd.install_platlib, platlibdir)
check_path(cmd.install_purelib, libdir)
check_path(cmd.install_headers,
os.path.join(destination, "include", "python", "foopkg"))

50 00111-no-static-lib.patch Normal file
@@ -0,0 +1,50 @@
diff -up Python-2.7.6/Makefile.pre.in.no-static-lib Python-2.7.6/Makefile.pre.in
--- Python-2.7.6/Makefile.pre.in.no-static-lib 2014-01-29 13:58:32.933226720 +0100
+++ Python-2.7.6/Makefile.pre.in 2014-01-29 14:10:25.002247272 +0100
@@ -437,7 +437,7 @@ coverage:


# Build the interpreter
-$(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY)
+$(BUILDPYTHON): Modules/python.o $(LDLIBRARY)
$(LINKCC) $(CFLAGS) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
Modules/python.o \
$(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
@@ -464,18 +464,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.tx
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build

-# Build static library
-# avoid long command lines, same as LIBRARY_OBJS
-$(LIBRARY): $(LIBRARY_OBJS)
- -rm -f $@
- $(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o
- $(AR) $(ARFLAGS) $@ $(PARSER_OBJS)
- $(AR) $(ARFLAGS) $@ $(OBJECT_OBJS)
- $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS)
- $(AR) $(ARFLAGS) $@ $(MODULE_OBJS) $(SIGNAL_OBJS)
- $(AR) $(ARFLAGS) $@ $(MODOBJS)
- $(RANLIB) $@
-
libpython$(VERSION).so: $(LIBRARY_OBJS)
if test $(INSTSONAME) != $(LDLIBRARY); then \
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
@@ -1097,18 +1085,6 @@ libainstall: all python-config
else true; \
fi; \
done
- @if test -d $(LIBRARY); then :; else \
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
- if test "$(SO)" = .dll; then \
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
- else \
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- $(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- fi; \
- else \
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \
- fi; \
- fi
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
$(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in

324 00112-2.7.13-debug-build.patch Normal file
@@ -0,0 +1,324 @@
From 898f93aa206e577dfe854c59bc62d0cea09cd5ed Mon Sep 17 00:00:00 2001
From: Tomas Orsava <torsava@redhat.com>
Date: Tue, 10 Jan 2017 16:19:50 +0100
Subject: [PATCH] Patch to support building both optimized vs debug stacks DSO
 ABIs,

sharing the same .py and .pyc files, using "_d.so" to signify a debug build of
an extension module.
---
Lib/distutils/command/build_ext.py | 7 ++++-
Lib/distutils/sysconfig.py | 5 ++--
Lib/distutils/tests/test_install.py | 3 +-
Makefile.pre.in | 56 ++++++++++++++++++++-----------------
Misc/python-config.in | 2 +-
Modules/makesetup | 2 +-
Python/dynload_shlib.c | 11 ++++++--
Python/sysmodule.c | 6 ++++
configure.ac | 14 ++++++++--
9 files changed, 69 insertions(+), 37 deletions(-)

diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index 2c68be3..029d144 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -677,7 +677,10 @@ class build_ext (Command):
so_ext = get_config_var('SO')
if os.name == 'nt' and self.debug:
return os.path.join(*ext_path) + '_d' + so_ext
- return os.path.join(*ext_path) + so_ext
+
+ # Similarly, extensions in debug mode are named 'module_d.so', to
+ # avoid adding the _d to the SO config variable:
+ return os.path.join(*ext_path) + (sys.pydebug and "_d" or "") + so_ext

def get_export_symbols (self, ext):
"""Return the list of symbols that a shared extension has to
@@ -762,6 +765,8 @@ class build_ext (Command):
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ if sys.pydebug:
+ pythonlib += '_d'
return ext.libraries + [pythonlib]
else:
return ext.libraries
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 3e7f077..ec5d584 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -90,7 +90,8 @@ def get_python_inc(plat_specific=0, prefix=None):
# Include is located in the srcdir
inc_dir = os.path.join(srcdir, "Include")
return inc_dir
- return os.path.join(prefix, "include", "python" + get_python_version())
+ return os.path.join(prefix, "include",
+ "python" + get_python_version() + (sys.pydebug and '-debug' or ''))
elif os.name == "nt":
return os.path.join(prefix, "include")
elif os.name == "os2":
@@ -248,7 +249,7 @@ def get_makefile_filename():
if python_build:
return os.path.join(project_base, "Makefile")
lib_dir = get_python_lib(plat_specific=1, standard_lib=1)
- return os.path.join(lib_dir, "config", "Makefile")
+ return os.path.join(lib_dir, "config" + (sys.pydebug and "-debug" or ""), "Makefile")


def parse_config_h(fp, g=None):
diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
index 78fac46..d1d0931 100644
--- a/Lib/distutils/tests/test_install.py
+++ b/Lib/distutils/tests/test_install.py
@@ -20,8 +20,9 @@ from distutils.tests import support


def _make_ext_name(modname):
- if os.name == 'nt' and sys.executable.endswith('_d.exe'):
+ if sys.pydebug:
modname += '_d'
+
return modname + sysconfig.get_config_var('SO')


diff --git a/Makefile.pre.in b/Makefile.pre.in
index 997a2fc..467e782 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -116,8 +116,8 @@ SCRIPTDIR= $(prefix)/lib64
# Detailed destination directories
BINLIBDEST= $(LIBDIR)/python$(VERSION)
LIBDEST= $(SCRIPTDIR)/python$(VERSION)
-INCLUDEPY= $(INCLUDEDIR)/python$(VERSION)
-CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(VERSION)
+INCLUDEPY= $(INCLUDEDIR)/python$(VERSION)$(DEBUG_SUFFIX)
+CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(VERSION)$(DEBUG_SUFFIX)
LIBP= $(LIBDIR)/python$(VERSION)

# Symbols used for using shared libraries
@@ -131,6 +131,12 @@ DESTSHARED= $(BINLIBDEST)/lib-dynload
EXE= @EXEEXT@
BUILDEXE= @BUILDEXEEXT@

+# DEBUG_EXT is used by ELF files (names and SONAMEs); it will be "_d" for a debug build
+# DEBUG_SUFFIX is used by filesystem paths; it will be "-debug" for a debug build
+# Both will be empty in an optimized build
+DEBUG_EXT= @DEBUG_EXT@
+DEBUG_SUFFIX= @DEBUG_SUFFIX@
+
# Short name and location for Mac OS X Python framework
UNIVERSALSDK=@UNIVERSALSDK@
PYTHONFRAMEWORK= @PYTHONFRAMEWORK@
@@ -197,8 +203,8 @@ LIBOBJDIR= Python/
LIBOBJS= @LIBOBJS@
UNICODE_OBJS= @UNICODE_OBJS@

-PYTHON= python$(EXE)
-BUILDPYTHON= python$(BUILDEXE)
+PYTHON= python$(DEBUG_SUFFIX)$(EXE)
+BUILDPYTHON= python$(DEBUG_SUFFIX)$(BUILDEXE)

PYTHON_FOR_REGEN=@PYTHON_FOR_REGEN@
PYTHON_FOR_BUILD=@PYTHON_FOR_BUILD@
@@ -547,7 +553,7 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build

-libpython$(VERSION).so: $(LIBRARY_OBJS)
+libpython$(VERSION)$(DEBUG_EXT).so: $(LIBRARY_OBJS)
if test $(INSTSONAME) != $(LDLIBRARY); then \
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
$(LN) -f $(INSTSONAME) $@; \
@@ -954,18 +960,18 @@ bininstall: altbininstall
then rm -f $(DESTDIR)$(BINDIR)/$(PYTHON); \
else true; \
fi
- (cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(EXE) $(PYTHON))
- -rm -f $(DESTDIR)$(BINDIR)/python2$(EXE)
- (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(EXE) python2$(EXE))
- -rm -f $(DESTDIR)$(BINDIR)/python2-config
- (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)-config python2-config)
- -rm -f $(DESTDIR)$(BINDIR)/python-config
- (cd $(DESTDIR)$(BINDIR); $(LN) -s python2-config python-config)
+ (cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(DEBUG_SUFFIX)$(EXE) $(PYTHON))
+ -rm -f $(DESTDIR)$(BINDIR)/python2$(DEBUG_SUFFIX)$(EXE)
+ (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(DEBUG_SUFFIX)$(EXE) python2$(DEBUG_SUFFIX)$(EXE))
+ -rm -f $(DESTDIR)$(BINDIR)/python2$(DEBUG_SUFFIX)-config
+ (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(DEBUG_SUFFIX)-config python2$(DEBUG_SUFFIX)-config)
+ -rm -f $(DESTDIR)$(BINDIR)/python$(DEBUG_SUFFIX)-config
+ (cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(DEBUG_SUFFIX)-config python$(DEBUG_SUFFIX)-config)
-test -d $(DESTDIR)$(LIBPC) || $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(LIBPC)
- -rm -f $(DESTDIR)$(LIBPC)/python2.pc
- (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION).pc python2.pc)
- -rm -f $(DESTDIR)$(LIBPC)/python.pc
- (cd $(DESTDIR)$(LIBPC); $(LN) -s python2.pc python.pc)
+ -rm -f $(DESTDIR)$(LIBPC)/python2$(DEBUG_SUFFIX).pc
+ (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION)$(DEBUG_SUFFIX).pc python2$(DEBUG_SUFFIX).pc)
+ -rm -f $(DESTDIR)$(LIBPC)/python$(DEBUG_SUFFIX).pc
+ (cd $(DESTDIR)$(LIBPC); $(LN) -s python2$(DEBUG_SUFFIX).pc python$(DEBUG_SUFFIX).pc)

# Install the interpreter with $(VERSION) affixed
# This goes into $(exec_prefix)
@@ -978,7 +984,7 @@ altbininstall: $(BUILDPYTHON)
else true; \
fi; \
done
- $(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE)
+ $(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(VERSION)$(DEBUG_SUFFIX)$(EXE)
if test -f $(LDLIBRARY); then \
if test -n "$(DLLLIBRARY)" ; then \
$(INSTALL_SHARED) $(DLLLIBRARY) $(DESTDIR)$(BINDIR); \
@@ -1148,10 +1154,11 @@ $(srcdir)/Lib/$(PLATDIR):
fi; \
cd $(srcdir)/Lib/$(PLATDIR); $(RUNSHARED) ./regen

-python-config: $(srcdir)/Misc/python-config.in
+python$(DEBUG_SUFFIX)-config: $(srcdir)/Misc/python-config.in
# Substitution happens here, as the completely-expanded BINDIR
# is not available in configure
- sed -e "s,@EXENAME@,$(BINDIR)/python$(VERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config
+ sed -e "s,@EXENAME@,$(BINDIR)/python$(VERSION)$(DEBUG_SUFFIX)$(EXE)," < $(srcdir)/Misc/python-config.in >python$(DEBUG_SUFFIX)-config
+

# Install the include files
INCLDIRSTOMAKE=$(INCLUDEDIR) $(CONFINCLUDEDIR) $(INCLUDEPY) $(CONFINCLUDEPY)
@@ -1172,13 +1179,13 @@ inclinstall:
$(INSTALL_DATA) pyconfig.h $(DESTDIR)$(CONFINCLUDEPY)/pyconfig.h

# Install the library and miscellaneous stuff needed for extending/embedding
-# This goes into $(exec_prefix)
-LIBPL=		$(LIBP)/config
+# This goes into $(exec_prefix)$(DEBUG_SUFFIX)
+LIBPL=		$(LIBP)/config$(DEBUG_SUFFIX)

# pkgconfig directory
LIBPC= $(LIBDIR)/pkgconfig

-libainstall: @DEF_MAKE_RULE@ python-config
+libainstall: @DEF_MAKE_RULE@ python$(DEBUG_SUFFIX)-config
@for i in $(LIBDIR) $(LIBP) $(LIBPL) $(LIBPC); \
do \
if test ! -d $(DESTDIR)$$i; then \
@@ -1194,11 +1201,10 @@ libainstall: all python-config
$(INSTALL_DATA) Modules/Setup $(DESTDIR)$(LIBPL)/Setup
$(INSTALL_DATA) Modules/Setup.local $(DESTDIR)$(LIBPL)/Setup.local
$(INSTALL_DATA) Modules/Setup.config $(DESTDIR)$(LIBPL)/Setup.config
- $(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION).pc
+ $(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION)$(DEBUG_SUFFIX).pc
$(INSTALL_SCRIPT) $(srcdir)/Modules/makesetup $(DESTDIR)$(LIBPL)/makesetup
$(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh
- $(INSTALL_SCRIPT) python-config $(DESTDIR)$(BINDIR)/python$(VERSION)-config
- rm python-config
+ $(INSTALL_SCRIPT) python$(DEBUG_SUFFIX)-config $(DESTDIR)$(BINDIR)/python$(VERSION)$(DEBUG_SUFFIX)-config
@if [ -s Modules/python.exp -a \
"`echo $(MACHDEP) | sed 's/^\(...\).*/\1/'`" = "aix" ]; then \
echo; echo "Installing support files for building shared extension modules on AIX:"; \
diff --git a/Misc/python-config.in b/Misc/python-config.in
index a09e07c..c1691ef 100644
--- a/Misc/python-config.in
+++ b/Misc/python-config.in
@@ -44,7 +44,7 @@ for opt in opt_flags:
print ' '.join(flags)

elif opt in ('--libs', '--ldflags'):
- libs = ['-lpython' + pyver]
+ libs = ['-lpython' + pyver + (sys.pydebug and "_d" or "")]
libs += getvar('LIBS').split()
libs += getvar('SYSLIBS').split()
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
diff --git a/Modules/makesetup b/Modules/makesetup
index 1bffcbf..f0bc743 100755
--- a/Modules/makesetup
+++ b/Modules/makesetup
@@ -233,7 +233,7 @@ sed -e 's/[ ]*#.*//' -e '/^[ ]*$/d' |
*$mod.o*) base=$mod;;
*) base=${mod}module;;
esac
- file="$srcdir/$base\$(SO)"
+ file="$srcdir/$base\$(DEBUG_EXT)\$(SO)"
case $doconfig in
no) SHAREDMODS="$SHAREDMODS $file";;
esac
diff --git a/Python/dynload_shlib.c b/Python/dynload_shlib.c
index 17ebab1..02a94aa 100644
--- a/Python/dynload_shlib.c
+++ b/Python/dynload_shlib.c
@@ -46,11 +46,16 @@ const struct filedescr _PyImport_DynLoadFiletab[] = {
{"module.exe", "rb", C_EXTENSION},
{"MODULE.EXE", "rb", C_EXTENSION},
#else
+#ifdef Py_DEBUG
+ {"_d.so", "rb", C_EXTENSION},
+ {"module_d.so", "rb", C_EXTENSION},
+#else
{".so", "rb", C_EXTENSION},
{"module.so", "rb", C_EXTENSION},
-#endif
-#endif
-#endif
+#endif /* Py_DEBUG */
+#endif /* __VMS */
+#endif /* defined(PYOS_OS2) && defined(PYCC_GCC) */
+#endif /* __CYGWIN__ */
{0, 0}
};

diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index aeff38a..183e3cc 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -1524,6 +1524,12 @@ _PySys_Init(void)
PyString_FromString("legacy"));
#endif

+#ifdef Py_DEBUG
+ PyDict_SetItemString(sysdict, "pydebug", Py_True);
+#else
+ PyDict_SetItemString(sysdict, "pydebug", Py_False);
+#endif
+
#undef SET_SYS_FROM_STRING
if (PyErr_Occurred())
return NULL;
diff --git a/configure.ac b/configure.ac
index 0a902c7..5caedb7 100644
--- a/configure.ac
+++ b/configure.ac
@@ -764,7 +764,7 @@ AC_SUBST(LIBRARY)
AC_MSG_CHECKING(LIBRARY)
if test -z "$LIBRARY"
then
- LIBRARY='libpython$(VERSION).a'
+ LIBRARY='libpython$(VERSION)$(DEBUG_EXT).a'
fi
AC_MSG_RESULT($LIBRARY)

@@ -910,8 +910,8 @@ if test $enable_shared = "yes"; then
INSTSONAME="$LDLIBRARY".$SOVERSION
;;
Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*)
- LDLIBRARY='libpython$(VERSION).so'
- BLDLIBRARY='-L. -lpython$(VERSION)'
+ LDLIBRARY='libpython$(VERSION)$(DEBUG_EXT).so'
+ BLDLIBRARY='-L. -lpython$(VERSION)$(DEBUG_EXT)'
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
case $ac_sys_system in
FreeBSD*)
@@ -1040,6 +1040,14 @@ else AC_MSG_RESULT(no); Py_DEBUG='false'
fi],
[AC_MSG_RESULT(no)])

+if test "$Py_DEBUG" = 'true'
+then
+ DEBUG_EXT=_d
+ DEBUG_SUFFIX=-debug
+fi
+AC_SUBST(DEBUG_EXT)
+AC_SUBST(DEBUG_SUFFIX)
+
# XXX Shouldn't the code above that fiddles with BASECFLAGS and OPT be
# merged with this chunk of code?

--
2.11.0

50 00113-more-configuration-flags.patch Normal file
@@ -0,0 +1,50 @@
diff -up Python-2.6.5/configure.ac.more-configuration-flags Python-2.6.5/configure.ac
--- Python-2.6.5/configure.ac.more-configuration-flags 2010-05-24 18:51:25.410111792 -0400
+++ Python-2.6.5/configure.ac 2010-05-24 18:59:23.954986388 -0400
@@ -2515,6 +2515,30 @@ else AC_MSG_RESULT(no)
fi],
[AC_MSG_RESULT(no)])

+AC_MSG_CHECKING(for --with-count-allocs)
+AC_ARG_WITH(count-allocs,
+[ --with(out)count-allocs enable/disable per-type instance accounting], [
+if test "$withval" != no
+then
+ AC_DEFINE(COUNT_ALLOCS, 1,
+ [Define to keep records of the number of instances of each type])
+ AC_MSG_RESULT(yes)
+else AC_MSG_RESULT(no)
+fi],
+[AC_MSG_RESULT(no)])
+
+AC_MSG_CHECKING(for --with-call-profile)
+AC_ARG_WITH(call-profile,
+[ --with(out)-call-profile enable/disable statistics on function call invocation], [
+if test "$withval" != no
+then
+ AC_DEFINE(CALL_PROFILE, 1,
+ [Define to keep records on function call invocation])
+ AC_MSG_RESULT(yes)
+else AC_MSG_RESULT(no)
+fi],
+[AC_MSG_RESULT(no)])
+
# Check for Python-specific malloc support
AC_MSG_CHECKING(for --with-pymalloc)
AC_ARG_WITH(pymalloc,
diff -up Python-2.6.5/pyconfig.h.in.more-configuration-flags Python-2.6.5/pyconfig.h.in
--- Python-2.6.5/pyconfig.h.in.more-configuration-flags 2010-05-24 18:51:45.677988086 -0400
+++ Python-2.6.5/pyconfig.h.in 2010-05-24 19:00:44.163987730 -0400
@@ -1019,6 +1019,12 @@
/* Define to profile with the Pentium timestamp counter */
#undef WITH_TSC

+/* Define to keep records of the number of instances of each type */
+#undef COUNT_ALLOCS
+
+/* Define to keep records on function call invocation */
+#undef CALL_PROFILE
+
/* Define if you want pymalloc to be disabled when running under valgrind */
#undef WITH_VALGRIND

47 00114-statvfs-f_flag-constants.patch Normal file
@@ -0,0 +1,47 @@
diff -up Python-2.7rc1/Modules/posixmodule.c.statvfs-f-flag-constants Python-2.7rc1/Modules/posixmodule.c
--- Python-2.7rc1/Modules/posixmodule.c.statvfs-f-flag-constants 2010-05-15 17:45:30.000000000 -0400
+++ Python-2.7rc1/Modules/posixmodule.c 2010-06-07 22:54:16.162068624 -0400
@@ -9174,6 +9174,43 @@ all_ins(PyObject *d)
#endif
#endif

+ /* These came from statvfs.h */
+#ifdef ST_RDONLY
+ if (ins(d, "ST_RDONLY", (long)ST_RDONLY)) return -1;
+#endif /* ST_RDONLY */
+#ifdef ST_NOSUID
+ if (ins(d, "ST_NOSUID", (long)ST_NOSUID)) return -1;
+#endif /* ST_NOSUID */
+
+ /* GNU extensions */
+#ifdef ST_NODEV
+ if (ins(d, "ST_NODEV", (long)ST_NODEV)) return -1;
+#endif /* ST_NODEV */
+#ifdef ST_NOEXEC
+ if (ins(d, "ST_NOEXEC", (long)ST_NOEXEC)) return -1;
+#endif /* ST_NOEXEC */
+#ifdef ST_SYNCHRONOUS
+ if (ins(d, "ST_SYNCHRONOUS", (long)ST_SYNCHRONOUS)) return -1;
+#endif /* ST_SYNCHRONOUS */
+#ifdef ST_MANDLOCK
+ if (ins(d, "ST_MANDLOCK", (long)ST_MANDLOCK)) return -1;
+#endif /* ST_MANDLOCK */
+#ifdef ST_WRITE
+ if (ins(d, "ST_WRITE", (long)ST_WRITE)) return -1;
+#endif /* ST_WRITE */
+#ifdef ST_APPEND
+ if (ins(d, "ST_APPEND", (long)ST_APPEND)) return -1;
+#endif /* ST_APPEND */
+#ifdef ST_NOATIME
+ if (ins(d, "ST_NOATIME", (long)ST_NOATIME)) return -1;
+#endif /* ST_NOATIME */
+#ifdef ST_NODIRATIME
+ if (ins(d, "ST_NODIRATIME", (long)ST_NODIRATIME)) return -1;
+#endif /* ST_NODIRATIME */
+#ifdef ST_RELATIME
+ if (ins(d, "ST_RELATIME", (long)ST_RELATIME)) return -1;
+#endif /* ST_RELATIME */
+
#if defined(PYOS_OS2)
if (insertvalues(d)) return -1;
#endif

13 00121-add-Modules-to-build-path.patch Normal file
@@ -0,0 +1,13 @@
--- Python-2.7.5/Lib/site.py.orig 2013-05-16 12:47:55.000000000 +0200
+++ Python-2.7.5/Lib/site.py 2013-05-16 12:56:20.089058109 +0200
@@ -529,6 +529,10 @@ def main():

abs__file__()
known_paths = removeduppaths()
+ from sysconfig import is_python_build
+ if is_python_build():
+ from _sysconfigdata import build_time_vars
+ sys.path.append(os.path.join(build_time_vars['abs_builddir'], 'Modules'))
if ENABLE_USER_SITE is None:
ENABLE_USER_SITE = check_enableusersite()
known_paths = addusersitepackages(known_paths)

11 00131-disable-tests-in-test_io.patch Normal file
@@ -0,0 +1,11 @@
diff -up Python-2.7.2/Lib/test/test_io.py.disable-tests-in-test_io Python-2.7.2/Lib/test/test_io.py
--- Python-2.7.2/Lib/test/test_io.py.disable-tests-in-test_io 2011-09-01 14:18:45.963304089 -0400
+++ Python-2.7.2/Lib/test/test_io.py 2011-09-01 15:08:53.796098413 -0400
@@ -2669,6 +2669,7 @@ class SignalsTest(unittest.TestCase):
self.check_interrupted_read_retry(lambda x: x,
mode="r")

+ @unittest.skip('rhbz#732998')
@unittest.skipUnless(threading, 'Threading required for this test.')
def check_interrupted_write_retry(self, item, **fdopen_kwargs):
"""Check that a buffered write, when it gets interrupted (either

|
68
00132-add-rpmbuild-hooks-to-unittest.patch
Normal file
68
00132-add-rpmbuild-hooks-to-unittest.patch
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
diff -up Python-2.7.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-2.7.2/Lib/unittest/case.py
|
||||||
|
--- Python-2.7.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-08 14:45:47.677169191 -0400
|
||||||
|
+++ Python-2.7.2/Lib/unittest/case.py 2011-09-08 16:01:36.287858159 -0400
|
||||||
|
@@ -1,6 +1,7 @@
|
||||||
|
"""Test case implementation"""
|
||||||
|
|
||||||
|
import collections
|
||||||
|
+import os
|
||||||
|
import sys
|
||||||
|
import functools
|
||||||
|
import difflib
|
||||||
|
@@ -94,6 +95,43 @@ def expectedFailure(func):
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
+# Non-standard/downstream-only hooks for handling issues with specific test
|
||||||
|
+# cases:
|
||||||
|
+
|
||||||
|
+def _skipInRpmBuild(reason):
|
||||||
|
+ """
|
||||||
|
+ Non-standard/downstream-only decorator for marking a specific unit test
|
||||||
|
+ to be skipped when run within the %check of an rpmbuild.
|
||||||
|
+
|
||||||
|
+ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within
|
||||||
|
+ the environment, and has no effect otherwise.
|
||||||
|
+ """
|
||||||
|
+ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
|
||||||
|
+ return skip(reason)
|
||||||
|
+ else:
|
||||||
|
+ return _id
|
||||||
|
+
|
||||||
|
+def _expectedFailureInRpmBuild(func):
|
||||||
|
+ """
|
||||||
|
+ Non-standard/downstream-only decorator for marking a specific unit test
|
||||||
|
+ as expected to fail within the %check of an rpmbuild.
|
||||||
|
+
|
||||||
|
+ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within
|
||||||
|
+ the environment, and has no effect otherwise.
|
||||||
|
+ """
|
||||||
|
+ @functools.wraps(func)
|
||||||
|
+ def wrapper(*args, **kwargs):
|
||||||
|
+ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
|
||||||
|
+ try:
|
||||||
|
+ func(*args, **kwargs)
|
||||||
|
+ except Exception:
|
||||||
|
+ raise _ExpectedFailure(sys.exc_info())
|
||||||
|
+ raise _UnexpectedSuccess
|
||||||
|
+ else:
|
||||||
|
+ # Call directly:
|
||||||
|
+ func(*args, **kwargs)
|
||||||
|
+ return wrapper
|
||||||
|
+
|
||||||
|
class _AssertRaisesContext(object):
|
||||||
|
"""A context manager used to implement TestCase.assertRaises* methods."""
|
||||||
|
|
||||||
|
diff -up Python-2.7.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-2.7.2/Lib/unittest/__init__.py
|
||||||
|
--- Python-2.7.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-08 14:59:39.534112310 -0400
|
||||||
|
+++ Python-2.7.2/Lib/unittest/__init__.py 2011-09-08 15:07:09.191081562 -0400
|
||||||
|
@@ -57,7 +57,8 @@ __unittest = True
|
||||||
|
|
||||||
|
from .result import TestResult
|
||||||
|
from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf,
|
||||||
|
- skipUnless, expectedFailure)
|
||||||
|
+ skipUnless, expectedFailure,
|
||||||
|
+ _skipInRpmBuild, _expectedFailureInRpmBuild)
|
||||||
|
from .suite import BaseTestSuite, TestSuite
|
||||||
|
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
|
||||||
|
findTestCases)
|
13 00133-skip-test_dl.patch Normal file
@@ -0,0 +1,13 @@
diff -up Python-2.7.2/Lib/test/test_dl.py.skip-test_dl Python-2.7.2/Lib/test/test_dl.py
--- Python-2.7.2/Lib/test/test_dl.py.skip-test_dl 2011-09-08 15:18:40.529034289 -0400
+++ Python-2.7.2/Lib/test/test_dl.py 2011-09-08 16:29:45.184742670 -0400
@@ -13,6 +13,9 @@ sharedlibs = [
('/usr/lib/libc.dylib', 'getpid'),
]

+# (also, "dl" is deprecated in favor of ctypes)
+@unittest._skipInRpmBuild('fails on 64-bit builds: '
+ 'module dl requires sizeof(int) == sizeof(long) == sizeof(char*)')
def test_main():
for s, func in sharedlibs:
try:

11 00136-skip-tests-of-seeking-stdin-in-rpmbuild.patch Normal file
@@ -0,0 +1,11 @@
diff -up Python-2.7.6/Lib/test/test_file2k.py.stdin-test Python-2.7.6/Lib/test/test_file2k.py
--- Python-2.7.6/Lib/test/test_file2k.py.stdin-test 2013-11-10 08:36:40.000000000 +0100
+++ Python-2.7.6/Lib/test/test_file2k.py 2014-01-29 14:28:01.029488055 +0100
@@ -223,6 +223,7 @@ class OtherFileTests(unittest.TestCase):
else:
f.close()

+ @unittest._skipInRpmBuild('seems not to raise the exception when run in Koji')
def testStdinSeek(self):
if sys.platform == 'osf1V5':
# This causes the interpreter to exit on OSF1 v5.1.

12 00137-skip-distutils-tests-that-fail-in-rpmbuild.patch Normal file
@@ -0,0 +1,12 @@
diff -up Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py.mark-tests-that-fail-in-rpmbuild Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py
--- Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py.mark-tests-that-fail-in-rpmbuild 2012-04-09 19:07:29.000000000 -0400
+++ Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py 2012-04-13 00:20:08.223819263 -0400
@@ -24,6 +24,7 @@ setup(name='foo', version='0.1', py_modu

"""

+@unittest._skipInRpmBuild("don't try to nest one rpm build inside another rpm build")
class BuildRpmTestCase(support.TempdirManager,
support.EnvironGuard,
support.LoggingSilencer,
diff -up Python-2.7.3/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild Python-2.7.3/Lib/distutils/tests/test_build_ext.py

68 00138-fix-distutils-tests-in-debug-build.patch Normal file
@@ -0,0 +1,68 @@
diff -up Python-2.7.2/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild Python-2.7.2/Lib/distutils/tests/test_build_ext.py
--- Python-2.7.2/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild 2011-09-08 16:07:25.033834312 -0400
+++ Python-2.7.2/Lib/distutils/tests/test_build_ext.py 2011-09-08 17:43:15.656441082 -0400
@@ -330,6 +332,7 @@ class BuildExtTestCase(support.TempdirMa
self.assertEqual(lastdir, 'bar')

def test_ext_fullpath(self):
+ debug_ext = sysconfig.get_config_var("DEBUG_EXT")
ext = sysconfig.get_config_vars()['SO']
dist = Distribution()
cmd = build_ext(dist)
@@ -337,14 +340,14 @@ class BuildExtTestCase(support.TempdirMa
cmd.distribution.package_dir = {'': 'src'}
cmd.distribution.packages = ['lxml', 'lxml.html']
curdir = os.getcwd()
- wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + debug_ext + ext)
path = cmd.get_ext_fullpath('lxml.etree')
self.assertEqual(wanted, path)

# building lxml.etree not inplace
cmd.inplace = 0
cmd.build_lib = os.path.join(curdir, 'tmpdir')
- wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
+ wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + debug_ext + ext)
path = cmd.get_ext_fullpath('lxml.etree')
self.assertEqual(wanted, path)

@@ -354,13 +357,13 @@ class BuildExtTestCase(support.TempdirMa
cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
path = cmd.get_ext_fullpath('twisted.runner.portmap')
wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
- 'portmap' + ext)
+ 'portmap' + debug_ext + ext)
self.assertEqual(wanted, path)

# building twisted.runner.portmap inplace
cmd.inplace = 1
path = cmd.get_ext_fullpath('twisted.runner.portmap')
- wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
+ wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + debug_ext + ext)
self.assertEqual(wanted, path)

def test_build_ext_inplace(self):
@@ -373,8 +376,9 @@ class BuildExtTestCase(support.TempdirMa
cmd.distribution.package_dir = {'': 'src'}
cmd.distribution.packages = ['lxml', 'lxml.html']
curdir = os.getcwd()
+ debug_ext = sysconfig.get_config_var("DEBUG_EXT")
ext = sysconfig.get_config_var("SO")
- wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + debug_ext + ext)
path = cmd.get_ext_fullpath('lxml.etree')
self.assertEqual(wanted, path)

@@ -412,10 +416,11 @@ class BuildExtTestCase(support.TempdirMa
dist = Distribution({'name': 'UpdateManager'})
cmd = build_ext(dist)
cmd.ensure_finalized()
+ debug_ext = sysconfig.get_config_var("DEBUG_EXT")
ext = sysconfig.get_config_var("SO")
ext_name = os.path.join('UpdateManager', 'fdsend')
ext_path = cmd.get_ext_fullpath(ext_name)
- wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + ext)
+ wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + debug_ext + ext)
self.assertEqual(ext_path, wanted)

@unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows')

11 00139-skip-test_float-known-failure-on-arm.patch Normal file
@@ -0,0 +1,11 @@
diff -up Python-2.7.2/Lib/test/test_float.py.skip-test_float-known-failure-on-arm Python-2.7.2/Lib/test/test_float.py
--- Python-2.7.2/Lib/test/test_float.py.skip-test_float-known-failure-on-arm 2011-09-08 19:34:09.000986128 -0400
+++ Python-2.7.2/Lib/test/test_float.py 2011-09-08 19:34:57.969982779 -0400
@@ -1072,6 +1072,7 @@ class HexFloatTestCase(unittest.TestCase
self.identical(got, expected)


+ @unittest.skip('Known failure on ARM: http://bugs.python.org/issue8265')
def test_from_hex(self):
MIN = self.MIN;
MAX = self.MAX;

11 00140-skip-test_ctypes-known-failure-on-sparc.patch Normal file
@@ -0,0 +1,11 @@
diff -up Python-2.7.2/Lib/ctypes/test/test_callbacks.py.skip-test_ctypes-known-failure-on-sparc Python-2.7.2/Lib/ctypes/test/test_callbacks.py
--- Python-2.7.2/Lib/ctypes/test/test_callbacks.py.skip-test_ctypes-known-failure-on-sparc 2011-09-08 19:42:35.541951490 -0400
+++ Python-2.7.2/Lib/ctypes/test/test_callbacks.py 2011-09-08 19:43:40.676947036 -0400
@@ -67,6 +67,7 @@ class Callbacks(unittest.TestCase):
self.check_type(c_longlong, 42)
self.check_type(c_longlong, -42)

+ @unittest.skip('Known failure on Sparc: http://bugs.python.org/issue8314')
def test_ulonglong(self):
# test some 64-bit values, with and without msb set.
self.check_type(c_ulonglong, 10955412242170339782)

22
00142-skip-failing-pty-tests-in-rpmbuild.patch
Normal file
@ -0,0 +1,22 @@
diff -up Python-2.7.6/Lib/test/test_openpty.py.tty-fail Python-2.7.6/Lib/test/test_openpty.py
--- Python-2.7.6/Lib/test/test_openpty.py.tty-fail 2014-01-29 14:31:43.761343267 +0100
+++ Python-2.7.6/Lib/test/test_openpty.py 2014-01-29 14:32:19.284090165 +0100
@@ -8,6 +8,7 @@ if not hasattr(os, "openpty"):


class OpenptyTest(unittest.TestCase):
+ @unittest._skipInRpmBuild('sometimes fails in Koji, possibly due to a mock issue (rhbz#714627)')
def test(self):
master, slave = os.openpty()
self.addCleanup(os.close, master)
diff -up Python-2.7.6/Lib/test/test_pty.py.tty-fail Python-2.7.6/Lib/test/test_pty.py
--- Python-2.7.6/Lib/test/test_pty.py.tty-fail 2013-11-10 08:36:40.000000000 +0100
+++ Python-2.7.6/Lib/test/test_pty.py 2014-01-29 14:31:43.761343267 +0100
@@ -111,6 +111,7 @@ class PtyTest(unittest.TestCase):
os.close(master_fd)


+ @unittest._skipInRpmBuild('sometimes fails in Koji, possibly due to a mock issue (rhbz#714627)')
def test_fork(self):
debug("calling pty.fork()")
pid, master_fd = pty.fork()
58
00143-tsc-on-ppc.patch
Normal file
@ -0,0 +1,58 @@
diff -up Python-2.7.2/Python/ceval.c.tsc-on-ppc Python-2.7.2/Python/ceval.c
--- Python-2.7.2/Python/ceval.c.tsc-on-ppc 2011-08-23 14:59:48.051300849 -0400
+++ Python-2.7.2/Python/ceval.c 2011-08-23 15:33:25.412162902 -0400
@@ -37,24 +37,42 @@ typedef unsigned long long uint64;
*/
#if defined(__ppc__) || defined (__powerpc__)

-#define READ_TIMESTAMP(var) ppc_getcounter(&var)
+#if defined( __powerpc64__) || defined(__LP64__)
+/* 64-bit PowerPC */
+#define READ_TIMESTAMP(var) ppc64_getcounter(&var)
+static void
+ppc64_getcounter(uint64 *v)
+{
+ /* On 64-bit PowerPC we can read the 64-bit timebase directly into a
+ 64-bit register */
+ uint64 timebase;
+#ifdef _ARCH_PWR4
+ asm volatile ("mfspr %0,268" : "=r" (timebase));
+#else
+ asm volatile ("mftb %0" : "=r" (timebase));
+#endif
+ *v = timebase;
+}
+
+#else
+/* 32-bit PowerPC */
+#define READ_TIMESTAMP(var) ppc32_getcounter(&var)

static void
-ppc_getcounter(uint64 *v)
+ppc32_getcounter(uint64 *v)
{
- register unsigned long tbu, tb, tbu2;
+ union { long long ll; long ii[2]; } u;
+ long tmp;

loop:
- asm volatile ("mftbu %0" : "=r" (tbu) );
- asm volatile ("mftb %0" : "=r" (tb) );
- asm volatile ("mftbu %0" : "=r" (tbu2));
- if (__builtin_expect(tbu != tbu2, 0)) goto loop;
-
- /* The slightly peculiar way of writing the next lines is
- compiled better by GCC than any other way I tried. */
- ((long*)(v))[0] = tbu;
- ((long*)(v))[1] = tb;
+ asm volatile ("mftbu %0" : "=r" (u.ii[0]) );
+ asm volatile ("mftb %0" : "=r" (u.ii[1]) );
+ asm volatile ("mftbu %0" : "=r" (tmp));
+ if (__builtin_expect(u.ii[0] != tmp, 0)) goto loop;
+
+ *v = u.ll;
}
+#endif /* powerpc 32/64 bit */

#elif defined(__i386__)

12
00144-no-gdbm.patch
Normal file
@ -0,0 +1,12 @@
diff -up Python-2.7.2/Modules/Setup.dist.no-gdbm Python-2.7.2/Modules/Setup.dist
--- Python-2.7.2/Modules/Setup.dist.no-gdbm 2011-09-13 14:25:43.496095926 -0400
+++ Python-2.7.2/Modules/Setup.dist 2011-09-13 14:25:46.491095724 -0400
@@ -396,7 +396,7 @@ dl dlmodule.c
#
# First, look at Setup.config; configure may have set this for you.

-gdbm gdbmmodule.c -lgdbm
+# gdbm gdbmmodule.c -lgdbm


# Sleepycat Berkeley DB interface.
844
00146-hashlib-fips.patch
Normal file
@ -0,0 +1,844 @@
|
|||||||
|
From ece76465680b0df5b3fce7bf8ff1ff0253933889 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Petr Viktorin <pviktori@redhat.com>
|
||||||
|
Date: Mon, 2 Sep 2019 17:33:29 +0200
|
||||||
|
Subject: [PATCH 01/11] Remove HASH_OBJ_CONSTRUCTOR
|
||||||
|
|
||||||
|
See https://github.com/python/cpython/commit/c7e219132aff1e21cb9ccb0a9b570dc6c750039b
|
||||||
|
---
|
||||||
|
Modules/_hashopenssl.c | 59 ------------------------------------------
|
||||||
|
1 file changed, 59 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
|
||||||
|
index 78445ebabdd3..cb81e9765251 100644
|
||||||
|
--- a/Modules/_hashopenssl.c
|
||||||
|
+++ b/Modules/_hashopenssl.c
|
||||||
|
@@ -48,10 +48,6 @@
|
||||||
|
* to allow the user to optimize based on the platform they're using. */
|
||||||
|
#define HASHLIB_GIL_MINSIZE 2048
|
||||||
|
|
||||||
|
-#ifndef HASH_OBJ_CONSTRUCTOR
|
||||||
|
-#define HASH_OBJ_CONSTRUCTOR 0
|
||||||
|
-#endif
|
||||||
|
-
|
||||||
|
#if defined(OPENSSL_VERSION_NUMBER) && (OPENSSL_VERSION_NUMBER >= 0x00908000)
|
||||||
|
#define _OPENSSL_SUPPORTS_SHA2
|
||||||
|
#endif
|
||||||
|
@@ -384,53 +380,6 @@ EVP_repr(PyObject *self)
|
||||||
|
return PyString_FromString(buf);
|
||||||
|
}
|
||||||
|
|
||||||
|
-#if HASH_OBJ_CONSTRUCTOR
|
||||||
|
-static int
|
||||||
|
-EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds)
|
||||||
|
-{
|
||||||
|
- static char *kwlist[] = {"name", "string", NULL};
|
||||||
|
- PyObject *name_obj = NULL;
|
||||||
|
- Py_buffer view = { 0 };
|
||||||
|
- char *nameStr;
|
||||||
|
- const EVP_MD *digest;
|
||||||
|
-
|
||||||
|
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|s*:HASH", kwlist,
|
||||||
|
- &name_obj, &view)) {
|
||||||
|
- return -1;
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- if (!PyArg_Parse(name_obj, "s", &nameStr)) {
|
||||||
|
- PyErr_SetString(PyExc_TypeError, "name must be a string");
|
||||||
|
- PyBuffer_Release(&view);
|
||||||
|
- return -1;
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- digest = EVP_get_digestbyname(nameStr);
|
||||||
|
- if (!digest) {
|
||||||
|
- PyErr_SetString(PyExc_ValueError, "unknown hash function");
|
||||||
|
- PyBuffer_Release(&view);
|
||||||
|
- return -1;
|
||||||
|
- }
|
||||||
|
- EVP_DigestInit(self->ctx, digest);
|
||||||
|
-
|
||||||
|
- self->name = name_obj;
|
||||||
|
- Py_INCREF(self->name);
|
||||||
|
-
|
||||||
|
- if (view.obj) {
|
||||||
|
- if (view.len >= HASHLIB_GIL_MINSIZE) {
|
||||||
|
- Py_BEGIN_ALLOW_THREADS
|
||||||
|
- EVP_hash(self, view.buf, view.len);
|
||||||
|
- Py_END_ALLOW_THREADS
|
||||||
|
- } else {
|
||||||
|
- EVP_hash(self, view.buf, view.len);
|
||||||
|
- }
|
||||||
|
- PyBuffer_Release(&view);
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- return 0;
|
||||||
|
-}
|
||||||
|
-#endif
|
||||||
|
-
|
||||||
|
|
||||||
|
PyDoc_STRVAR(hashtype_doc,
|
||||||
|
"A hash represents the object used to calculate a checksum of a\n\
|
||||||
|
@@ -487,9 +436,6 @@ static PyTypeObject EVPtype = {
|
||||||
|
0, /* tp_descr_set */
|
||||||
|
0, /* tp_dictoffset */
|
||||||
|
#endif
|
||||||
|
-#if HASH_OBJ_CONSTRUCTOR
|
||||||
|
- (initproc)EVP_tp_init, /* tp_init */
|
||||||
|
-#endif
|
||||||
|
};
|
||||||
|
|
||||||
|
static PyObject *
|
||||||
|
@@ -928,11 +874,6 @@ init_hashlib(void)
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
-#if HASH_OBJ_CONSTRUCTOR
|
||||||
|
- Py_INCREF(&EVPtype);
|
||||||
|
- PyModule_AddObject(m, "HASH", (PyObject *)&EVPtype);
|
||||||
|
-#endif
|
||||||
|
-
|
||||||
|
/* these constants are used by the convenience constructors */
|
||||||
|
INIT_CONSTRUCTOR_CONSTANTS(md5);
|
||||||
|
INIT_CONSTRUCTOR_CONSTANTS(sha1);
|
||||||
|
|
||||||
|
From d7339af75678c760f6d6c0eb455b0eb889c22574 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Petr Viktorin <pviktori@redhat.com>
|
||||||
|
Date: Mon, 2 Sep 2019 18:02:25 +0200
|
||||||
|
Subject: [PATCH 02/11] Add the usedforsecurity argument to _hashopenssl
|
||||||
|
|
||||||
|
---
|
||||||
|
Modules/_hashopenssl.c | 63 ++++++++++++++++++++++++++++++++----------
|
||||||
|
1 file changed, 48 insertions(+), 15 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
|
||||||
|
index cb81e9765251..f2dbc095cc66 100644
|
||||||
|
--- a/Modules/_hashopenssl.c
|
||||||
|
+++ b/Modules/_hashopenssl.c
|
||||||
|
@@ -441,7 +441,7 @@ static PyTypeObject EVPtype = {
|
||||||
|
static PyObject *
|
||||||
|
EVPnew(PyObject *name_obj,
|
||||||
|
const EVP_MD *digest, const EVP_MD_CTX *initial_ctx,
|
||||||
|
- const unsigned char *cp, Py_ssize_t len)
|
||||||
|
+ const unsigned char *cp, Py_ssize_t len, int usedforsecurity)
|
||||||
|
{
|
||||||
|
EVPobject *self;
|
||||||
|
|
||||||
|
@@ -456,7 +456,23 @@ EVPnew(PyObject *name_obj,
|
||||||
|
if (initial_ctx) {
|
||||||
|
EVP_MD_CTX_copy(self->ctx, initial_ctx);
|
||||||
|
} else {
|
||||||
|
- EVP_DigestInit(self->ctx, digest);
|
||||||
|
+ EVP_MD_CTX_init(self->ctx);
|
||||||
|
+
|
||||||
|
+ /*
|
||||||
|
+ If the user has declared that this digest is being used in a
|
||||||
|
+ non-security role (e.g. indexing into a data structure), set
|
||||||
|
+ the exception flag for openssl to allow it
|
||||||
|
+ */
|
||||||
|
+ if (!usedforsecurity) {
|
||||||
|
+#ifdef EVP_MD_CTX_FLAG_NON_FIPS_ALLOW
|
||||||
|
+ EVP_MD_CTX_set_flags(self->ctx, EVP_MD_CTX_FLAG_NON_FIPS_ALLOW);
|
||||||
|
+#endif
|
||||||
|
+ }
|
||||||
|
+ if (!EVP_DigestInit_ex(self->ctx, digest, NULL)) {
|
||||||
|
+ _setException(PyExc_ValueError);
|
||||||
|
+ Py_DECREF(self);
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cp && len) {
|
||||||
|
@@ -485,15 +501,16 @@ The MD5 and SHA1 algorithms are always supported.\n");
|
||||||
|
static PyObject *
|
||||||
|
EVP_new(PyObject *self, PyObject *args, PyObject *kwdict)
|
||||||
|
{
|
||||||
|
- static char *kwlist[] = {"name", "string", NULL};
|
||||||
|
+ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL};
|
||||||
|
PyObject *name_obj = NULL;
|
||||||
|
Py_buffer view = { 0 };
|
||||||
|
PyObject *ret_obj;
|
||||||
|
char *name;
|
||||||
|
const EVP_MD *digest;
|
||||||
|
+ int usedforsecurity = 1;
|
||||||
|
|
||||||
|
- if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|s*:new", kwlist,
|
||||||
|
- &name_obj, &view)) {
|
||||||
|
+ if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|s*i:new", kwlist,
|
||||||
|
+ &name_obj, &view, &usedforsecurity)) {
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -506,7 +523,7 @@ EVP_new(PyObject *self, PyObject *args, PyObject *kwdict)
|
||||||
|
digest = EVP_get_digestbyname(name);
|
||||||
|
|
||||||
|
ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf,
|
||||||
|
- view.len);
|
||||||
|
+ view.len, usedforsecurity);
|
||||||
|
PyBuffer_Release(&view);
|
||||||
|
|
||||||
|
return ret_obj;
|
||||||
|
@@ -771,30 +788,46 @@ generate_hash_name_list(void)
|
||||||
|
* the generic one passing it a python string and are noticeably
|
||||||
|
* faster than calling a python new() wrapper. Thats important for
|
||||||
|
* code that wants to make hashes of a bunch of small strings.
|
||||||
|
+ *
|
||||||
|
+ * For usedforsecurity=False, the optimization is not used.
|
||||||
|
*/
|
||||||
|
#define GEN_CONSTRUCTOR(NAME) \
|
||||||
|
static PyObject * \
|
||||||
|
- EVP_new_ ## NAME (PyObject *self, PyObject *args) \
|
||||||
|
+ EVP_new_ ## NAME (PyObject *self, PyObject *args, PyObject *kwdict) \
|
||||||
|
{ \
|
||||||
|
+ static char *kwlist[] = {"string", "usedforsecurity", NULL}; \
|
||||||
|
Py_buffer view = { 0 }; \
|
||||||
|
PyObject *ret_obj; \
|
||||||
|
+ int usedforsecurity=1; \
|
||||||
|
\
|
||||||
|
- if (!PyArg_ParseTuple(args, "|s*:" #NAME , &view)) { \
|
||||||
|
+ if (!PyArg_ParseTupleAndKeywords( \
|
||||||
|
+ args, kwdict, "|s*i:" #NAME, kwlist, \
|
||||||
|
+ &view, &usedforsecurity \
|
||||||
|
+ )) { \
|
||||||
|
return NULL; \
|
||||||
|
} \
|
||||||
|
- \
|
||||||
|
- ret_obj = EVPnew( \
|
||||||
|
- CONST_ ## NAME ## _name_obj, \
|
||||||
|
- NULL, \
|
||||||
|
- CONST_new_ ## NAME ## _ctx_p, \
|
||||||
|
- (unsigned char*)view.buf, view.len); \
|
||||||
|
+ if (usedforsecurity == 0) { \
|
||||||
|
+ ret_obj = EVPnew( \
|
||||||
|
+ CONST_ ## NAME ## _name_obj, \
|
||||||
|
+ EVP_get_digestbyname(#NAME), \
|
||||||
|
+ NULL, \
|
||||||
|
+ (unsigned char*)view.buf, view.len, \
|
||||||
|
+ usedforsecurity); \
|
||||||
|
+ } else { \
|
||||||
|
+ ret_obj = EVPnew( \
|
||||||
|
+ CONST_ ## NAME ## _name_obj, \
|
||||||
|
+ NULL, \
|
||||||
|
+ CONST_new_ ## NAME ## _ctx_p, \
|
||||||
|
+ (unsigned char*)view.buf, view.len, \
|
||||||
|
+ usedforsecurity); \
|
||||||
|
+ } \
|
||||||
|
PyBuffer_Release(&view); \
|
||||||
|
return ret_obj; \
|
||||||
|
}
|
||||||
|
|
||||||
|
/* a PyMethodDef structure for the constructor */
|
||||||
|
#define CONSTRUCTOR_METH_DEF(NAME) \
|
||||||
|
- {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \
|
||||||
|
+ {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS|METH_KEYWORDS, \
|
||||||
|
PyDoc_STR("Returns a " #NAME \
|
||||||
|
" hash object; optionally initialized with a string") \
|
||||||
|
}
|
||||||
|
|
||||||
|
From c8102e61fb3ade364d4bb7f2fe3f3452e2018ecd Mon Sep 17 00:00:00 2001
|
||||||
|
From: David Malcolm <dmalcolm@redhat.com>
|
||||||
|
Date: Mon, 2 Sep 2019 17:59:53 +0200
|
||||||
|
Subject: [PATCH 03/11] hashlib.py: Avoid the builtin constructor
|
||||||
|
|
||||||
|
---
|
||||||
|
Lib/hashlib.py | 58 +++++++++++++-------------------------------------
|
||||||
|
1 file changed, 15 insertions(+), 43 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
|
||||||
|
index bbd06b9996ee..404ed6891fb9 100644
|
||||||
|
--- a/Lib/hashlib.py
|
||||||
|
+++ b/Lib/hashlib.py
|
||||||
|
@@ -69,65 +69,37 @@
|
||||||
|
'pbkdf2_hmac')
|
||||||
|
|
||||||
|
|
||||||
|
-def __get_builtin_constructor(name):
|
||||||
|
- try:
|
||||||
|
- if name in ('SHA1', 'sha1'):
|
||||||
|
- import _sha
|
||||||
|
- return _sha.new
|
||||||
|
- elif name in ('MD5', 'md5'):
|
||||||
|
- import _md5
|
||||||
|
- return _md5.new
|
||||||
|
- elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'):
|
||||||
|
- import _sha256
|
||||||
|
- bs = name[3:]
|
||||||
|
- if bs == '256':
|
||||||
|
- return _sha256.sha256
|
||||||
|
- elif bs == '224':
|
||||||
|
- return _sha256.sha224
|
||||||
|
- elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'):
|
||||||
|
- import _sha512
|
||||||
|
- bs = name[3:]
|
||||||
|
- if bs == '512':
|
||||||
|
- return _sha512.sha512
|
||||||
|
- elif bs == '384':
|
||||||
|
- return _sha512.sha384
|
||||||
|
- except ImportError:
|
||||||
|
- pass # no extension module, this hash is unsupported.
|
||||||
|
-
|
||||||
|
- raise ValueError('unsupported hash type ' + name)
|
||||||
|
-
|
||||||
|
-
|
||||||
|
def __get_openssl_constructor(name):
|
||||||
|
try:
|
||||||
|
f = getattr(_hashlib, 'openssl_' + name)
|
||||||
|
# Allow the C module to raise ValueError. The function will be
|
||||||
|
# defined but the hash not actually available thanks to OpenSSL.
|
||||||
|
- f()
|
||||||
|
+ #
|
||||||
|
+ # We pass "usedforsecurity=False" to disable FIPS-based restrictions:
|
||||||
|
+ # at this stage we're merely seeing if the function is callable,
|
||||||
|
+ # rather than using it for actual work.
|
||||||
|
+ f(usedforsecurity=False)
|
||||||
|
# Use the C function directly (very fast)
|
||||||
|
return f
|
||||||
|
except (AttributeError, ValueError):
|
||||||
|
- return __get_builtin_constructor(name)
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-def __py_new(name, string=''):
|
||||||
|
- """new(name, string='') - Return a new hashing object using the named algorithm;
|
||||||
|
- optionally initialized with a string.
|
||||||
|
- """
|
||||||
|
- return __get_builtin_constructor(name)(string)
|
||||||
|
+ raise
|
||||||
|
|
||||||
|
|
||||||
|
-def __hash_new(name, string=''):
|
||||||
|
- """new(name, string='') - Return a new hashing object using the named algorithm;
|
||||||
|
- optionally initialized with a string.
|
||||||
|
+def __hash_new(name, string='', usedforsecurity=True):
|
||||||
|
+ """new(name, string='', usedforsecurity=True) - Return a new hashing object
|
||||||
|
+ using the named algorithm; optionally initialized with a string.
|
||||||
|
+
|
||||||
|
+ Override 'usedforsecurity' to False when using for non-security purposes in
|
||||||
|
+ a FIPS environment
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
- return _hashlib.new(name, string)
|
||||||
|
+ return _hashlib.new(name, string, usedforsecurity)
|
||||||
|
except ValueError:
|
||||||
|
# If the _hashlib module (OpenSSL) doesn't support the named
|
||||||
|
# hash, try using our builtin implementations.
|
||||||
|
# This allows for SHA224/256 and SHA384/512 support even though
|
||||||
|
# the OpenSSL library prior to 0.9.8 doesn't provide them.
|
||||||
|
- return __get_builtin_constructor(name)(string)
|
||||||
|
+ raise
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
@@ -218,4 +190,4 @@ def prf(msg, inner=inner, outer=outer):
|
||||||
|
|
||||||
|
# Cleanup locals()
|
||||||
|
del __always_supported, __func_name, __get_hash
|
||||||
|
-del __py_new, __hash_new, __get_openssl_constructor
|
||||||
|
+del __hash_new, __get_openssl_constructor
|
||||||
|
|
||||||
|
From 2ade3e5a6c5732c0692c4cc2235a2bbe0948f50b Mon Sep 17 00:00:00 2001
|
||||||
|
From: David Malcolm <dmalcolm@redhat.com>
|
||||||
|
Date: Mon, 2 Sep 2019 17:56:46 +0200
|
||||||
|
Subject: [PATCH 04/11] Adjust docstrings & comments
|
||||||
|
|
||||||
|
---
|
||||||
|
Lib/hashlib.py | 29 ++++++++++++++++++++++-------
|
||||||
|
Modules/_hashopenssl.c | 9 ++++++++-
|
||||||
|
2 files changed, 30 insertions(+), 8 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
|
||||||
|
index 404ed6891fb9..46d0b470ab4a 100644
|
||||||
|
--- a/Lib/hashlib.py
|
||||||
|
+++ b/Lib/hashlib.py
|
||||||
|
@@ -6,9 +6,12 @@
|
||||||
|
|
||||||
|
__doc__ = """hashlib module - A common interface to many hash functions.
|
||||||
|
|
||||||
|
-new(name, string='') - returns a new hash object implementing the
|
||||||
|
- given hash function; initializing the hash
|
||||||
|
- using the given string data.
|
||||||
|
+new(name, string='', usedforsecurity=True)
|
||||||
|
+ - returns a new hash object implementing the given hash function;
|
||||||
|
+ initializing the hash using the given string data.
|
||||||
|
+
|
||||||
|
+ "usedforsecurity" is a non-standard extension for better supporting
|
||||||
|
+ FIPS-compliant environments (see below)
|
||||||
|
|
||||||
|
Named constructor functions are also available, these are much faster
|
||||||
|
than using new():
|
||||||
|
@@ -25,6 +28,20 @@
|
||||||
|
Choose your hash function wisely. Some have known collision weaknesses.
|
||||||
|
sha384 and sha512 will be slow on 32 bit platforms.
|
||||||
|
|
||||||
|
+Our implementation of hashlib uses OpenSSL.
|
||||||
|
+
|
||||||
|
+OpenSSL has a "FIPS mode", which, if enabled, may restrict the available hashes
|
||||||
|
+to only those that are compliant with FIPS regulations. For example, it may
|
||||||
|
+deny the use of MD5, on the grounds that this is not secure for uses such as
|
||||||
|
+authentication, system integrity checking, or digital signatures.
|
||||||
|
+
|
||||||
|
+If you need to use such a hash for non-security purposes (such as indexing into
|
||||||
|
+a data structure for speed), you can override the keyword argument
|
||||||
|
+"usedforsecurity" from True to False to signify that your code is not relying
|
||||||
|
+on the hash for security purposes, and this will allow the hash to be usable
|
||||||
|
+even in FIPS mode. This is not a standard feature of Python 2.7's hashlib, and
|
||||||
|
+is included here to better support FIPS mode.
|
||||||
|
+
|
||||||
|
Hash objects have these methods:
|
||||||
|
- update(arg): Update the hash object with the string arg. Repeated calls
|
||||||
|
are equivalent to a single call with the concatenation of all
|
||||||
|
@@ -82,6 +99,7 @@ def __get_openssl_constructor(name):
|
||||||
|
# Use the C function directly (very fast)
|
||||||
|
return f
|
||||||
|
except (AttributeError, ValueError):
|
||||||
|
+ # RHEL only: Fallbacks removed; we always use OpenSSL for hashes.
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
@@ -95,10 +113,7 @@ def __hash_new(name, string='', usedforsecurity=True):
|
||||||
|
try:
|
||||||
|
return _hashlib.new(name, string, usedforsecurity)
|
||||||
|
except ValueError:
|
||||||
|
- # If the _hashlib module (OpenSSL) doesn't support the named
|
||||||
|
- # hash, try using our builtin implementations.
|
||||||
|
- # This allows for SHA224/256 and SHA384/512 support even though
|
||||||
|
- # the OpenSSL library prior to 0.9.8 doesn't provide them.
|
||||||
|
+ # RHEL only: Fallbacks removed; we always use OpenSSL for hashes.
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
|
||||||
|
index f2dbc095cc66..d24432e048bf 100644
|
||||||
|
--- a/Modules/_hashopenssl.c
|
||||||
|
+++ b/Modules/_hashopenssl.c
|
||||||
|
@@ -496,7 +496,14 @@ PyDoc_STRVAR(EVP_new__doc__,
|
||||||
|
An optional string argument may be provided and will be\n\
|
||||||
|
automatically hashed.\n\
|
||||||
|
\n\
|
||||||
|
-The MD5 and SHA1 algorithms are always supported.\n");
|
||||||
|
+The MD5 and SHA1 algorithms are always supported.\n \
|
||||||
|
+\n\
|
||||||
|
+An optional \"usedforsecurity=True\" keyword argument is provided for use in\n\
|
||||||
|
+environments that enforce FIPS-based restrictions. Some implementations of\n\
|
||||||
|
+OpenSSL can be configured to prevent the usage of non-secure algorithms (such\n\
|
||||||
|
+as MD5). If you have a non-security use for these algorithms (e.g. a hash\n\
|
||||||
|
+table), you can override this argument by marking the callsite as\n\
|
||||||
|
+\"usedforsecurity=False\".");
|
||||||
|
|
||||||
|
static PyObject *
|
||||||
|
EVP_new(PyObject *self, PyObject *args, PyObject *kwdict)
|
||||||
|
|
||||||
|
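
The patch above documents the downstream "usedforsecurity" keyword that this build adds to hashlib and _hashopenssl. A short sketch of the intended call pattern on the patched interpreter (the keyword is not part of upstream Python 2.7):

import hashlib

# Non-security use (e.g. a cache key): permitted even under FIPS restrictions.
cache_key = hashlib.md5(b'cache key material', usedforsecurity=False).hexdigest()

# Security use keeps the default usedforsecurity=True and a FIPS-approved digest.
signature = hashlib.new('sha256', b'payload').hexdigest()
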
From 6698e1d84c3f19bbb4438b2b2c78a5ef8bd5ad42 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Petr Viktorin <pviktori@redhat.com>
|
||||||
|
Date: Thu, 29 Aug 2019 10:25:28 +0200
|
||||||
|
Subject: [PATCH 05/11] Expose OpenSSL FIPS_mode as _hashlib.get_fips_mode
|
||||||
|
|
||||||
|
---
|
||||||
|
Modules/_hashopenssl.c | 22 ++++++++++++++++++++++
|
||||||
|
1 file changed, 22 insertions(+)
|
||||||
|
|
||||||
|
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
|
||||||
|
index d24432e048bf..74f9ab9ec150 100644
|
||||||
|
--- a/Modules/_hashopenssl.c
|
||||||
|
+++ b/Modules/_hashopenssl.c
|
||||||
|
@@ -860,10 +860,32 @@ GEN_CONSTRUCTOR(sha384)
|
||||||
|
GEN_CONSTRUCTOR(sha512)
|
||||||
|
#endif
|
||||||
|
|
||||||
|
+static PyObject *
|
||||||
|
+_hashlib_get_fips_mode(PyObject *module, PyObject *unused)
|
||||||
|
+{
|
||||||
|
+ // XXX: This function skips error checking.
|
||||||
|
+ // This is only appropriate for RHEL.
|
||||||
|
+
|
||||||
|
+ // From the OpenSSL docs:
|
||||||
|
+ // "If the library was built without support of the FIPS Object Module,
|
||||||
|
+ // then the function will return 0 with an error code of
|
||||||
|
+ // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)."
|
||||||
|
+ // In RHEL:
|
||||||
|
+ // * we do build with FIPS, so the function always succeeds
|
||||||
|
+ // * even if it didn't, people seem used to errors being left on the
|
||||||
|
+ // OpenSSL error stack.
|
||||||
|
+
|
||||||
|
+ // For more info, see:
|
||||||
|
+ // https://bugzilla.redhat.com/show_bug.cgi?id=1745499
|
||||||
|
+
|
||||||
|
+ return PyInt_FromLong(FIPS_mode());
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
/* List of functions exported by this module */
|
||||||
|
|
||||||
|
static struct PyMethodDef EVP_functions[] = {
|
||||||
|
{"new", (PyCFunction)EVP_new, METH_VARARGS|METH_KEYWORDS, EVP_new__doc__},
|
||||||
|
+ {"get_fips_mode", (PyCFunction)_hashlib_get_fips_mode, METH_NOARGS, NULL},
|
||||||
|
CONSTRUCTOR_METH_DEF(md5),
|
||||||
|
CONSTRUCTOR_METH_DEF(sha1),
|
||||||
|
#ifdef _OPENSSL_SUPPORTS_SHA2
|
||||||
|
|
||||||
|
From 9a8833619658c6be5ca72c60189a64da05536d85 Mon Sep 17 00:00:00 2001
|
||||||
|
From: David Malcolm <dmalcolm@redhat.com>
|
||||||
|
Date: Mon, 2 Sep 2019 18:00:26 +0200
|
||||||
|
Subject: [PATCH 06/11] Adjust tests
|
||||||
|
|
||||||
|
---
|
||||||
|
Lib/test/test_hashlib.py | 118 ++++++++++++++++++++++++---------------
|
||||||
|
1 file changed, 74 insertions(+), 44 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
|
||||||
|
index b8d6388feaf9..b03fc84f82b4 100644
|
||||||
|
--- a/Lib/test/test_hashlib.py
|
||||||
|
+++ b/Lib/test/test_hashlib.py
|
||||||
|
@@ -34,6 +34,8 @@ def hexstr(s):
|
||||||
|
r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
|
||||||
|
return r
|
||||||
|
|
||||||
|
+from _hashlib import get_fips_mode
|
||||||
|
+
|
||||||
|
|
||||||
|
class HashLibTestCase(unittest.TestCase):
|
||||||
|
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
|
||||||
|
@@ -63,10 +65,10 @@ def __init__(self, *args, **kwargs):
|
||||||
|
# of hashlib.new given the algorithm name.
|
||||||
|
for algorithm, constructors in self.constructors_to_test.items():
|
||||||
|
constructors.add(getattr(hashlib, algorithm))
|
||||||
|
- def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
|
||||||
|
+ def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, usedforsecurity=True):
|
||||||
|
if data is None:
|
||||||
|
- return hashlib.new(_alg)
|
||||||
|
- return hashlib.new(_alg, data)
|
||||||
|
+ return hashlib.new(_alg, usedforsecurity=usedforsecurity)
|
||||||
|
+ return hashlib.new(_alg, data, usedforsecurity=usedforsecurity)
|
||||||
|
constructors.add(_test_algorithm_via_hashlib_new)
|
||||||
|
|
||||||
|
_hashlib = self._conditional_import_module('_hashlib')
|
||||||
|
@@ -80,28 +82,13 @@ def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
|
||||||
|
if constructor:
|
||||||
|
constructors.add(constructor)
|
||||||
|
|
||||||
|
- _md5 = self._conditional_import_module('_md5')
|
||||||
|
- if _md5:
|
||||||
|
- self.constructors_to_test['md5'].add(_md5.new)
|
||||||
|
- _sha = self._conditional_import_module('_sha')
|
||||||
|
- if _sha:
|
||||||
|
- self.constructors_to_test['sha1'].add(_sha.new)
|
||||||
|
- _sha256 = self._conditional_import_module('_sha256')
|
||||||
|
- if _sha256:
|
||||||
|
- self.constructors_to_test['sha224'].add(_sha256.sha224)
|
||||||
|
- self.constructors_to_test['sha256'].add(_sha256.sha256)
|
||||||
|
- _sha512 = self._conditional_import_module('_sha512')
|
||||||
|
- if _sha512:
|
||||||
|
- self.constructors_to_test['sha384'].add(_sha512.sha384)
|
||||||
|
- self.constructors_to_test['sha512'].add(_sha512.sha512)
|
||||||
|
-
|
||||||
|
super(HashLibTestCase, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def test_hash_array(self):
|
||||||
|
a = array.array("b", range(10))
|
||||||
|
constructors = self.constructors_to_test.itervalues()
|
||||||
|
for cons in itertools.chain.from_iterable(constructors):
|
||||||
|
- c = cons(a)
|
||||||
|
+ c = cons(a, usedforsecurity=False)
|
||||||
|
c.hexdigest()
|
||||||
|
|
||||||
|
def test_algorithms_attribute(self):
|
||||||
|
@@ -122,28 +109,9 @@ def test_unknown_hash(self):
|
||||||
|
self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
|
||||||
|
self.assertRaises(TypeError, hashlib.new, 1)
|
||||||
|
|
||||||
|
- def test_get_builtin_constructor(self):
|
||||||
|
- get_builtin_constructor = hashlib.__dict__[
|
||||||
|
- '__get_builtin_constructor']
|
||||||
|
- self.assertRaises(ValueError, get_builtin_constructor, 'test')
|
||||||
|
- try:
|
||||||
|
- import _md5
|
||||||
|
- except ImportError:
|
||||||
|
- pass
|
||||||
|
- # This forces an ImportError for "import _md5" statements
|
||||||
|
- sys.modules['_md5'] = None
|
||||||
|
- try:
|
||||||
|
- self.assertRaises(ValueError, get_builtin_constructor, 'md5')
|
||||||
|
- finally:
|
||||||
|
- if '_md5' in locals():
|
||||||
|
- sys.modules['_md5'] = _md5
|
||||||
|
- else:
|
||||||
|
- del sys.modules['_md5']
|
||||||
|
- self.assertRaises(TypeError, get_builtin_constructor, 3)
|
||||||
|
-
|
||||||
|
def test_hexdigest(self):
|
||||||
|
for name in self.supported_hash_names:
|
||||||
|
- h = hashlib.new(name)
|
||||||
|
+ h = hashlib.new(name, usedforsecurity=False)
|
||||||
|
self.assertTrue(hexstr(h.digest()) == h.hexdigest())
|
||||||
|
|
||||||
|
def test_large_update(self):
|
||||||
|
@@ -153,16 +121,16 @@ def test_large_update(self):
|
||||||
|
abcs = aas + bees + cees
|
||||||
|
|
||||||
|
for name in self.supported_hash_names:
|
||||||
|
- m1 = hashlib.new(name)
|
||||||
|
+ m1 = hashlib.new(name, usedforsecurity=False)
|
||||||
|
m1.update(aas)
|
||||||
|
m1.update(bees)
|
||||||
|
m1.update(cees)
|
||||||
|
|
||||||
|
- m2 = hashlib.new(name)
|
||||||
|
+ m2 = hashlib.new(name, usedforsecurity=False)
|
||||||
|
m2.update(abcs)
|
||||||
|
self.assertEqual(m1.digest(), m2.digest(), name+' update problem.')
|
||||||
|
|
||||||
|
- m3 = hashlib.new(name, abcs)
|
||||||
|
+ m3 = hashlib.new(name, abcs, usedforsecurity=False)
|
||||||
|
self.assertEqual(m1.digest(), m3.digest(), name+' new problem.')
|
||||||
|
|
||||||
|
def check(self, name, data, digest):
|
||||||
|
@@ -170,7 +138,7 @@ def check(self, name, data, digest):
|
||||||
|
# 2 is for hashlib.name(...) and hashlib.new(name, ...)
|
||||||
|
self.assertGreaterEqual(len(constructors), 2)
|
||||||
|
for hash_object_constructor in constructors:
|
||||||
|
- computed = hash_object_constructor(data).hexdigest()
|
||||||
|
+ computed = hash_object_constructor(data, usedforsecurity=False).hexdigest()
|
||||||
|
self.assertEqual(
|
||||||
|
computed, digest,
|
||||||
|
"Hash algorithm %s constructed using %s returned hexdigest"
|
||||||
|
@@ -195,7 +163,7 @@ def check_update(self, name, data, digest):
|
||||||
|
|
||||||
|
def check_unicode(self, algorithm_name):
|
||||||
|
# Unicode objects are not allowed as input.
|
||||||
|
- expected = hashlib.new(algorithm_name, str(u'spam')).hexdigest()
|
||||||
|
+ expected = hashlib.new(algorithm_name, str(u'spam'), usedforsecurity=False).hexdigest()
|
||||||
|
self.check(algorithm_name, u'spam', expected)
|
||||||
|
|
||||||
|
def test_unicode(self):
|
||||||
|
@@ -393,6 +361,68 @@ def hash_in_chunks(chunk_size):
|
||||||
|
|
||||||
|
self.assertEqual(expected_hash, hasher.hexdigest())
|
||||||
|
|
||||||
|
+ def test_issue9146(self):
|
||||||
|
+ # Ensure that various ways to use "MD5" from "hashlib" don't segfault:
|
||||||
|
+ m = hashlib.md5(usedforsecurity=False)
|
||||||
|
+ m.update(b'abc\n')
|
||||||
|
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||||
|
+
|
||||||
|
+ m = hashlib.new('md5', usedforsecurity=False)
|
||||||
|
+ m.update(b'abc\n')
|
||||||
|
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||||
|
+
|
||||||
|
+ m = hashlib.md5(b'abc\n', usedforsecurity=False)
|
||||||
|
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||||
|
+
|
||||||
|
+ m = hashlib.new('md5', b'abc\n', usedforsecurity=False)
|
||||||
|
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||||
|
+
|
||||||
|
+ def assertRaisesDisabledForFIPS(self, callable_obj=None, *args, **kwargs):
|
||||||
|
+ try:
|
||||||
|
+ callable_obj(*args, **kwargs)
|
||||||
|
+ except ValueError, e:
|
||||||
|
+ if not e.args[0].endswith('disabled for FIPS'):
|
||||||
|
+ self.fail('Incorrect exception raised')
|
||||||
|
+ else:
|
||||||
|
+ self.fail('Exception was not raised')
|
||||||
|
+
|
||||||
|
+ @unittest.skipUnless(get_fips_mode(),
|
||||||
|
+ 'FIPS enforcement required for this test.')
|
||||||
|
+ def test_hashlib_fips_mode(self):
|
||||||
|
+ # Ensure that we raise a ValueError on vanilla attempts to use MD5
|
||||||
|
+ # in hashlib in a FIPS-enforced setting:
|
||||||
|
+ self.assertRaisesDisabledForFIPS(hashlib.md5)
|
||||||
|
+ self.assertRaisesDisabledForFIPS(hashlib.new, 'md5')
|
||||||
|
+
|
||||||
|
+ @unittest.skipUnless(get_fips_mode(),
|
||||||
|
+ 'FIPS enforcement required for this test.')
|
||||||
|
+ def test_hashopenssl_fips_mode(self):
|
||||||
|
+ # Verify the _hashlib module's handling of md5:
|
||||||
|
+ import _hashlib
|
||||||
|
+
|
||||||
|
+ assert hasattr(_hashlib, 'openssl_md5')
|
||||||
|
+
|
||||||
|
+ # Ensure that _hashlib raises a ValueError on vanilla attempts to
|
||||||
|
+ # use MD5 in a FIPS-enforced setting:
|
||||||
|
+ self.assertRaisesDisabledForFIPS(_hashlib.openssl_md5)
|
||||||
|
+ self.assertRaisesDisabledForFIPS(_hashlib.new, 'md5')
|
||||||
|
+
|
||||||
|
+ # Ensure that in such a setting we can whitelist a callsite with
|
||||||
|
+ # usedforsecurity=False and have it succeed:
|
||||||
|
+ m = _hashlib.openssl_md5(usedforsecurity=False)
|
||||||
|
+ m.update('abc\n')
|
||||||
|
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||||
|
+
|
||||||
|
+ m = _hashlib.new('md5', usedforsecurity=False)
|
||||||
|
+ m.update('abc\n')
|
||||||
|
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||||
|
+
|
||||||
|
+ m = _hashlib.openssl_md5('abc\n', usedforsecurity=False)
|
||||||
|
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||||
|
+
|
||||||
|
+ m = _hashlib.new('md5', 'abc\n', usedforsecurity=False)
|
||||||
|
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||||
|
+
|
||||||
|
|
||||||
|
class KDFTests(unittest.TestCase):
|
||||||
|
pbkdf2_test_vectors = [
|
||||||
|
|
||||||
|
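
The test changes above gate FIPS-specific assertions on get_fips_mode(). The same skip pattern can be reused in downstream test code; a sketch assuming this build's patched _hashlib:

import unittest
import hashlib
from _hashlib import get_fips_mode

class FipsBehaviourTest(unittest.TestCase):
    @unittest.skipUnless(get_fips_mode(), 'FIPS enforcement required for this test.')
    def test_md5_rejected_by_default(self):
        # With FIPS enforcement on, a plain MD5 constructor raises ValueError.
        self.assertRaises(ValueError, hashlib.md5)
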
From 31e527aa4f57845dfb0c3dd4f0e9192af5a5b4e2 Mon Sep 17 00:00:00 2001
|
||||||
|
From: David Malcolm <dmalcolm@redhat.com>
|
||||||
|
Date: Mon, 2 Sep 2019 18:00:47 +0200
|
||||||
|
Subject: [PATCH 07/11] Don't build non-OpenSSL hash implementations
|
||||||
|
|
||||||
|
---
|
||||||
|
setup.py | 15 ---------------
|
||||||
|
1 file changed, 15 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/setup.py b/setup.py
|
||||||
|
index 33cecc687573..272d2f1b5bb8 100644
|
||||||
|
--- a/setup.py
|
||||||
|
+++ b/setup.py
|
||||||
|
@@ -874,21 +874,6 @@ def detect_modules(self):
|
||||||
|
print ("warning: openssl 0x%08x is too old for _hashlib" %
|
||||||
|
openssl_ver)
|
||||||
|
missing.append('_hashlib')
|
||||||
|
- if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
|
||||||
|
- # The _sha module implements the SHA1 hash algorithm.
|
||||||
|
- exts.append( Extension('_sha', ['shamodule.c']) )
|
||||||
|
- # The _md5 module implements the RSA Data Security, Inc. MD5
|
||||||
|
- # Message-Digest Algorithm, described in RFC 1321. The
|
||||||
|
- # necessary files md5.c and md5.h are included here.
|
||||||
|
- exts.append( Extension('_md5',
|
||||||
|
- sources = ['md5module.c', 'md5.c'],
|
||||||
|
- depends = ['md5.h']) )
|
||||||
|
-
|
||||||
|
- min_sha2_openssl_ver = 0x00908000
|
||||||
|
- if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
|
||||||
|
- # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
|
||||||
|
- exts.append( Extension('_sha256', ['sha256module.c']) )
|
||||||
|
- exts.append( Extension('_sha512', ['sha512module.c']) )
|
||||||
|
|
||||||
|
# Modules that provide persistent dictionary-like semantics. You will
|
||||||
|
# probably want to arrange for at least one of them to be available on
|
||||||
|
|
||||||
|
From e9cd6a63ce17a0120b1d017bf08f05f3ed223bb1 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Petr Viktorin <pviktori@redhat.com>
|
||||||
|
Date: Mon, 2 Sep 2019 18:33:22 +0200
|
||||||
|
Subject: [PATCH 08/11] Allow for errors in pre-created context creation
|
||||||
|
|
||||||
|
---
|
||||||
|
Modules/_hashopenssl.c | 6 ++++--
|
||||||
|
1 file changed, 4 insertions(+), 2 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
|
||||||
|
index 74f9ab9ec150..7609e9e490f0 100644
|
||||||
|
--- a/Modules/_hashopenssl.c
|
||||||
|
+++ b/Modules/_hashopenssl.c
|
||||||
|
@@ -813,7 +813,7 @@ generate_hash_name_list(void)
|
||||||
|
)) { \
|
||||||
|
return NULL; \
|
||||||
|
} \
|
||||||
|
- if (usedforsecurity == 0) { \
|
||||||
|
+ if (usedforsecurity == 0 || CONST_new_ ## NAME ## _ctx_p == NULL) { \
|
||||||
|
ret_obj = EVPnew( \
|
||||||
|
CONST_ ## NAME ## _name_obj, \
|
||||||
|
EVP_get_digestbyname(#NAME), \
|
||||||
|
@@ -846,7 +846,9 @@ generate_hash_name_list(void)
|
||||||
|
CONST_ ## NAME ## _name_obj = PyString_FromString(#NAME); \
|
||||||
|
if (EVP_get_digestbyname(#NAME)) { \
|
||||||
|
CONST_new_ ## NAME ## _ctx_p = EVP_MD_CTX_new(); \
|
||||||
|
- EVP_DigestInit(CONST_new_ ## NAME ## _ctx_p, EVP_get_digestbyname(#NAME)); \
|
||||||
|
+ if (!EVP_DigestInit(CONST_new_ ## NAME ## _ctx_p, EVP_get_digestbyname(#NAME))) { \
|
||||||
|
+ CONST_new_ ## NAME ## _ctx_p = NULL; \
|
||||||
|
+ } \
|
||||||
|
} \
|
||||||
|
} \
|
||||||
|
} while (0);
|
||||||
|
|
||||||
|
From d0465ea1c07f24067b4d6f60f73a29c82f2ad03f Mon Sep 17 00:00:00 2001
|
||||||
|
From: David Malcolm <dmalcolm@redhat.com>
|
||||||
|
Date: Mon, 2 Sep 2019 18:40:08 +0200
|
||||||
|
Subject: [PATCH 09/11] use SHA-256 rather than MD5 in
|
||||||
|
multiprocessing.connection (patch 169; rhbz#879695)
|
||||||
|
|
||||||
|
---
|
||||||
|
Lib/multiprocessing/connection.py | 12 ++++++++++--
|
||||||
|
1 file changed, 10 insertions(+), 2 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
|
||||||
|
index 645a26f069ea..d4dc6ac19d53 100644
|
||||||
|
--- a/Lib/multiprocessing/connection.py
|
||||||
|
+++ b/Lib/multiprocessing/connection.py
|
||||||
|
@@ -56,6 +56,10 @@
|
||||||
|
# A very generous timeout when it comes to local connections...
|
||||||
|
CONNECTION_TIMEOUT = 20.
|
||||||
|
|
||||||
|
+# The hmac module implicitly defaults to using MD5.
|
||||||
|
+# Support using a stronger algorithm for the challenge/response code:
|
||||||
|
+HMAC_DIGEST_NAME='sha256'
|
||||||
|
+
|
||||||
|
_mmap_counter = itertools.count()
|
||||||
|
|
||||||
|
default_family = 'AF_INET'
|
||||||
|
@@ -413,12 +417,16 @@ def PipeClient(address):
|
||||||
|
WELCOME = b'#WELCOME#'
|
||||||
|
FAILURE = b'#FAILURE#'
|
||||||
|
|
||||||
|
+def get_digestmod_for_hmac():
|
||||||
|
+ import hashlib
|
||||||
|
+ return getattr(hashlib, HMAC_DIGEST_NAME)
|
||||||
|
+
|
||||||
|
def deliver_challenge(connection, authkey):
|
||||||
|
import hmac
|
||||||
|
assert isinstance(authkey, bytes)
|
||||||
|
message = os.urandom(MESSAGE_LENGTH)
|
||||||
|
connection.send_bytes(CHALLENGE + message)
|
||||||
|
- digest = hmac.new(authkey, message).digest()
|
||||||
|
+ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest()
|
||||||
|
response = connection.recv_bytes(256) # reject large message
|
||||||
|
if response == digest:
|
||||||
|
connection.send_bytes(WELCOME)
|
||||||
|
@@ -432,7 +440,7 @@ def answer_challenge(connection, authkey):
|
||||||
|
message = connection.recv_bytes(256) # reject large message
|
||||||
|
assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message
|
||||||
|
message = message[len(CHALLENGE):]
|
||||||
|
- digest = hmac.new(authkey, message).digest()
|
||||||
|
+ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest()
|
||||||
|
connection.send_bytes(digest)
|
||||||
|
response = connection.recv_bytes(256) # reject large message
|
||||||
|
if response != WELCOME:
|
||||||
|
|
||||||
|
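
The patch above replaces the hmac module's implicit MD5 default with an explicit SHA-256 digestmod for multiprocessing's challenge/response handshake. The same construction with the standard-library hmac API, as a stand-alone sketch:

import hmac
import hashlib
import os

authkey = b'shared process auth key'
message = os.urandom(20)
digest = hmac.new(authkey, message, hashlib.sha256).digest()   # explicit digestmod, no MD5 default
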
From 82b181a2c55be0f0766fdf1f0a3e950d22fe0602 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Petr Viktorin <pviktori@redhat.com>
|
||||||
|
Date: Mon, 19 Aug 2019 13:59:40 +0200
|
||||||
|
Subject: [PATCH 10/11] Make uuid.uuid3 work (using libuuid via ctypes)
|
||||||
|
|
||||||
|
---
|
||||||
|
Lib/uuid.py | 8 ++++++++
|
||||||
|
1 file changed, 8 insertions(+)
|
||||||
|
|
||||||
|
diff --git a/Lib/uuid.py b/Lib/uuid.py
|
||||||
|
index 80d33c0bd83f..bfb7477b5f58 100644
|
||||||
|
--- a/Lib/uuid.py
|
||||||
|
+++ b/Lib/uuid.py
|
||||||
|
@@ -455,6 +455,7 @@ def _netbios_getnode():
|
||||||
|
|
||||||
|
# If ctypes is available, use it to find system routines for UUID generation.
|
||||||
|
_uuid_generate_time = _UuidCreate = None
|
||||||
|
+_uuid_generate_md5 = None
|
||||||
|
try:
|
||||||
|
import ctypes, ctypes.util
|
||||||
|
import sys
|
||||||
|
@@ -471,6 +472,8 @@ def _netbios_getnode():
|
||||||
|
continue
|
||||||
|
if hasattr(lib, 'uuid_generate_time'):
|
||||||
|
_uuid_generate_time = lib.uuid_generate_time
|
||||||
|
+ # The library that has uuid_generate_time should have md5 too.
|
||||||
|
+ _uuid_generate_md5 = getattr(lib, 'uuid_generate_md5')
|
||||||
|
break
|
||||||
|
del _libnames
|
||||||
|
|
||||||
|
@@ -595,6 +598,11 @@ def uuid1(node=None, clock_seq=None):
|
||||||
|
|
||||||
|
def uuid3(namespace, name):
|
||||||
|
"""Generate a UUID from the MD5 hash of a namespace UUID and a name."""
|
||||||
|
+ if _uuid_generate_md5:
|
||||||
|
+ _buffer = ctypes.create_string_buffer(16)
|
||||||
|
+ _uuid_generate_md5(_buffer, namespace.bytes, name, len(name))
|
||||||
|
+ return UUID(bytes=_buffer.raw)
|
||||||
|
+
|
||||||
|
from hashlib import md5
|
||||||
|
hash = md5(namespace.bytes + name).digest()
|
||||||
|
return UUID(bytes=hash[:16], version=3)
|
||||||
|
|
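
With the libuuid fast path above, uuid.uuid3 keeps working even when hashlib's MD5 is blocked by FIPS mode, and callers use it exactly as before; a brief usage sketch:

import uuid

print(uuid.uuid3(uuid.NAMESPACE_DNS, 'example.org'))   # deterministic name-based (MD5) UUID
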
761
00147-add-debug-malloc-stats.patch
Normal file
@ -0,0 +1,761 @@
|
|||||||
|
diff --git a/Include/dictobject.h b/Include/dictobject.h
|
||||||
|
index 5a1e9fe..da89cec 100644
|
||||||
|
--- a/Include/dictobject.h
|
||||||
|
+++ b/Include/dictobject.h
|
||||||
|
@@ -154,6 +154,8 @@ PyAPI_FUNC(PyObject *) PyDict_GetItemString(PyObject *dp, const char *key);
|
||||||
|
PyAPI_FUNC(int) PyDict_SetItemString(PyObject *dp, const char *key, PyObject *item);
|
||||||
|
PyAPI_FUNC(int) PyDict_DelItemString(PyObject *dp, const char *key);
|
||||||
|
|
||||||
|
+PyAPI_FUNC(void) _PyDict_DebugMallocStats(FILE *out);
|
||||||
|
+
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
diff --git a/Include/floatobject.h b/Include/floatobject.h
|
||||||
|
index 54e8825..33c6ac0 100644
|
||||||
|
--- a/Include/floatobject.h
|
||||||
|
+++ b/Include/floatobject.h
|
||||||
|
@@ -132,6 +132,7 @@ PyAPI_FUNC(PyObject *) _PyFloat_FormatAdvanced(PyObject *obj,
|
||||||
|
failure. Used in builtin_round in bltinmodule.c. */
|
||||||
|
PyAPI_FUNC(PyObject *) _Py_double_round(double x, int ndigits);
|
||||||
|
|
||||||
|
+PyAPI_FUNC(void) _PyFloat_DebugMallocStats(FILE* out);
|
||||||
|
|
||||||
|
|
||||||
|
#ifdef __cplusplus
|
||||||
|
diff --git a/Include/frameobject.h b/Include/frameobject.h
|
||||||
|
index 3460379..db89a4a 100644
|
||||||
|
--- a/Include/frameobject.h
|
||||||
|
+++ b/Include/frameobject.h
|
||||||
|
@@ -80,6 +80,8 @@ PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *);
|
||||||
|
|
||||||
|
PyAPI_FUNC(int) PyFrame_ClearFreeList(void);
|
||||||
|
|
||||||
|
+PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out);
|
||||||
|
+
|
||||||
|
/* Return the line of code the frame is currently executing. */
|
||||||
|
PyAPI_FUNC(int) PyFrame_GetLineNumber(PyFrameObject *);
|
||||||
|
|
||||||
|
diff --git a/Include/intobject.h b/Include/intobject.h
|
||||||
|
index d198574..60cb9e0 100644
|
||||||
|
--- a/Include/intobject.h
|
||||||
|
+++ b/Include/intobject.h
|
||||||
|
@@ -78,6 +78,8 @@ PyAPI_FUNC(PyObject *) _PyInt_FormatAdvanced(PyObject *obj,
|
||||||
|
char *format_spec,
|
||||||
|
Py_ssize_t format_spec_len);
|
||||||
|
|
||||||
|
+PyAPI_FUNC(void) _PyInt_DebugMallocStats(FILE *out);
|
||||||
|
+
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
diff --git a/Include/listobject.h b/Include/listobject.h
|
||||||
|
index f19b1c5..7fccb47 100644
|
||||||
|
--- a/Include/listobject.h
|
||||||
|
+++ b/Include/listobject.h
|
||||||
|
@@ -62,6 +62,8 @@ PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *);
|
||||||
|
#define PyList_SET_ITEM(op, i, v) (((PyListObject *)(op))->ob_item[i] = (v))
|
||||||
|
#define PyList_GET_SIZE(op) Py_SIZE(op)
|
||||||
|
|
||||||
|
+PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out);
|
||||||
|
+
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
diff --git a/Include/methodobject.h b/Include/methodobject.h
|
||||||
|
index 6e160b6..1944517 100644
|
||||||
|
--- a/Include/methodobject.h
|
||||||
|
+++ b/Include/methodobject.h
|
||||||
|
@@ -87,6 +87,10 @@ typedef struct {
|
||||||
|
|
||||||
|
PyAPI_FUNC(int) PyCFunction_ClearFreeList(void);
|
||||||
|
|
||||||
|
+PyAPI_FUNC(void) _PyCFunction_DebugMallocStats(FILE *out);
|
||||||
|
+PyAPI_FUNC(void) _PyMethod_DebugMallocStats(FILE *out);
|
||||||
|
+
|
||||||
|
+
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
diff --git a/Include/object.h b/Include/object.h
|
||||||
|
index 807b241..a9d2079 100644
|
||||||
|
--- a/Include/object.h
|
||||||
|
+++ b/Include/object.h
|
||||||
|
@@ -1040,6 +1040,13 @@ PyAPI_FUNC(void) _PyTrash_thread_destroy_chain(void);
|
||||||
|
_PyTrash_thread_deposit_object((PyObject*)op); \
|
||||||
|
} while (0);
|
||||||
|
|
||||||
|
+PyAPI_FUNC(void)
|
||||||
|
+_PyDebugAllocatorStats(FILE *out, const char *block_name, int num_blocks,
|
||||||
|
+ size_t sizeof_block);
|
||||||
|
+
|
||||||
|
+PyAPI_FUNC(void)
|
||||||
|
+_PyObject_DebugTypeStats(FILE *out);
|
||||||
|
+
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
diff --git a/Include/objimpl.h b/Include/objimpl.h
|
||||||
|
index cbf6bc3..8c14ab8 100644
|
||||||
|
--- a/Include/objimpl.h
|
||||||
|
+++ b/Include/objimpl.h
|
||||||
|
@@ -101,13 +101,13 @@ PyAPI_FUNC(void) PyObject_Free(void *);
|
||||||
|
|
||||||
|
/* Macros */
|
||||||
|
#ifdef WITH_PYMALLOC
|
||||||
|
+PyAPI_FUNC(void) _PyObject_DebugMallocStats(FILE *out);
|
||||||
|
#ifdef PYMALLOC_DEBUG /* WITH_PYMALLOC && PYMALLOC_DEBUG */
|
||||||
|
PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes);
|
||||||
|
PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes);
|
||||||
|
PyAPI_FUNC(void) _PyObject_DebugFree(void *p);
|
||||||
|
PyAPI_FUNC(void) _PyObject_DebugDumpAddress(const void *p);
|
||||||
|
PyAPI_FUNC(void) _PyObject_DebugCheckAddress(const void *p);
|
||||||
|
-PyAPI_FUNC(void) _PyObject_DebugMallocStats(void);
|
||||||
|
PyAPI_FUNC(void *) _PyObject_DebugMallocApi(char api, size_t nbytes);
|
||||||
|
PyAPI_FUNC(void *) _PyObject_DebugReallocApi(char api, void *p, size_t nbytes);
|
||||||
|
PyAPI_FUNC(void) _PyObject_DebugFreeApi(char api, void *p);
|
||||||
|
diff --git a/Include/setobject.h b/Include/setobject.h
|
||||||
|
index 52b07d5..73a37b6 100644
|
||||||
|
--- a/Include/setobject.h
|
||||||
|
+++ b/Include/setobject.h
|
||||||
|
@@ -92,6 +92,7 @@ PyAPI_FUNC(int) _PySet_Next(PyObject *set, Py_ssize_t *pos, PyObject **key);
|
||||||
|
PyAPI_FUNC(int) _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key, long *hash);
|
||||||
|
PyAPI_FUNC(PyObject *) PySet_Pop(PyObject *set);
|
||||||
|
PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable);
|
||||||
|
+PyAPI_FUNC(void) _PySet_DebugMallocStats(FILE *out);
|
||||||
|
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
diff --git a/Include/stringobject.h b/Include/stringobject.h
|
||||||
|
index 12cc093..0a5fbd1 100644
|
||||||
|
--- a/Include/stringobject.h
|
||||||
|
+++ b/Include/stringobject.h
|
||||||
|
@@ -204,6 +204,8 @@ PyAPI_FUNC(PyObject *) _PyBytes_FormatAdvanced(PyObject *obj,
|
||||||
|
char *format_spec,
|
||||||
|
Py_ssize_t format_spec_len);
|
||||||
|
|
||||||
|
+PyAPI_FUNC(void) _PyString_DebugMallocStats(FILE *out);
|
||||||
|
+
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
diff --git a/Include/tupleobject.h b/Include/tupleobject.h
|
||||||
|
index a5ab733..27e6ca6 100644
|
||||||
|
--- a/Include/tupleobject.h
|
||||||
|
+++ b/Include/tupleobject.h
|
||||||
|
@@ -54,6 +54,7 @@ PyAPI_FUNC(void) _PyTuple_MaybeUntrack(PyObject *);
|
||||||
|
#define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v)
|
||||||
|
|
||||||
|
PyAPI_FUNC(int) PyTuple_ClearFreeList(void);
|
||||||
|
+PyAPI_FUNC(void) _PyTuple_DebugMallocStats(FILE *out);
|
||||||
|
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h
|
||||||
|
index 7781f96..321bd20 100644
|
||||||
|
--- a/Include/unicodeobject.h
|
||||||
|
+++ b/Include/unicodeobject.h
|
||||||
|
@@ -1406,6 +1406,8 @@ PyAPI_FUNC(int) _PyUnicode_IsAlpha(
|
||||||
|
Py_UNICODE ch /* Unicode character */
|
||||||
|
);
|
||||||
|
|
||||||
|
+PyAPI_FUNC(void) _PyUnicode_DebugMallocStats(FILE *out);
|
||||||
|
+
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
|
||||||
|
index 9342716..8eeb5ab 100644
|
||||||
|
--- a/Lib/test/test_sys.py
|
||||||
|
+++ b/Lib/test/test_sys.py
|
||||||
|
@@ -487,6 +487,32 @@ class SysModuleTest(unittest.TestCase):
|
||||||
|
p.wait()
|
||||||
|
self.assertIn(executable, ["''", repr(sys.executable)])
|
||||||
|
|
||||||
|
+ def test_debugmallocstats(self):
|
||||||
|
+ # Test sys._debugmallocstats()
|
||||||
|
+
|
||||||
|
+ import subprocess
|
||||||
|
+
|
||||||
|
+ # Verify the default of writing to stderr:
|
||||||
|
+ p = subprocess.Popen([sys.executable,
|
||||||
|
+ '-c', 'import sys; sys._debugmallocstats()'],
|
||||||
|
+ stderr=subprocess.PIPE)
|
||||||
|
+ out, err = p.communicate()
|
||||||
|
+ p.wait()
|
||||||
|
+ self.assertIn("arenas allocated current", err)
|
||||||
|
+
|
||||||
|
+ # Verify that we can redirect the output to a file (not a file-like
|
||||||
|
+ # object, though):
|
||||||
|
+ with open('mallocstats.txt', 'w') as out:
|
||||||
|
+ sys._debugmallocstats(out)
|
||||||
|
+ result = open('mallocstats.txt').read()
|
||||||
|
+ self.assertIn("arenas allocated current", result)
|
||||||
|
+ os.unlink('mallocstats.txt')
|
||||||
|
+
|
||||||
|
+ # Verify that the destination must be a file:
|
||||||
|
+ with self.assertRaises(TypeError):
|
||||||
|
+ sys._debugmallocstats(42)
|
||||||
|
+
|
||||||
|
+
|
||||||
|
@test.test_support.cpython_only
|
||||||
|
class SizeofTest(unittest.TestCase):
|
||||||
|
|
||||||
|
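
The test added above documents the downstream sys._debugmallocstats() entry point: with no argument the allocator summary goes to stderr, and it also accepts a real file object. A minimal sketch of both call forms, assuming this patched build:

import sys

sys._debugmallocstats()                      # summary written to stderr
with open('mallocstats.txt', 'w') as out:
    sys._debugmallocstats(out)               # or written to a real file object
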
diff --git a/Objects/classobject.c b/Objects/classobject.c
|
||||||
|
index 02d7cfd..1c44a47 100644
|
||||||
|
--- a/Objects/classobject.c
|
||||||
|
+++ b/Objects/classobject.c
|
||||||
|
@@ -2691,3 +2691,12 @@ PyMethod_Fini(void)
|
||||||
|
{
|
||||||
|
(void)PyMethod_ClearFreeList();
|
||||||
|
}
|
||||||
|
+
|
||||||
|
+/* Print summary info about the state of the optimized allocator */
|
||||||
|
+void
|
||||||
|
+_PyMethod_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ _PyDebugAllocatorStats(out,
|
||||||
|
+ "free PyMethodObject",
|
||||||
|
+ numfree, sizeof(PyMethodObject));
|
||||||
|
+}
|
||||||
|
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
|
||||||
|
index c544ecd..89ca39c 100644
|
||||||
|
--- a/Objects/dictobject.c
|
||||||
|
+++ b/Objects/dictobject.c
|
||||||
|
@@ -225,6 +225,15 @@ show_track(void)
|
||||||
|
static PyDictObject *free_list[PyDict_MAXFREELIST];
|
||||||
|
static int numfree = 0;
|
||||||
|
|
||||||
|
+/* Print summary info about the state of the optimized allocator */
|
||||||
|
+void
|
||||||
|
+_PyDict_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ _PyDebugAllocatorStats(out,
|
||||||
|
+ "free PyDictObject", numfree, sizeof(PyDictObject));
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
+
|
||||||
|
void
|
||||||
|
PyDict_Fini(void)
|
||||||
|
{
|
||||||
|
diff --git a/Objects/floatobject.c b/Objects/floatobject.c
|
||||||
|
index 5954d39..02acc8c 100644
|
||||||
|
--- a/Objects/floatobject.c
|
||||||
|
+++ b/Objects/floatobject.c
|
||||||
|
@@ -34,6 +34,22 @@ typedef struct _floatblock PyFloatBlock;
|
||||||
|
static PyFloatBlock *block_list = NULL;
|
||||||
|
static PyFloatObject *free_list = NULL;
|
||||||
|
|
||||||
|
+/* Print summary info about the state of the optimized allocator */
|
||||||
|
+void
|
||||||
|
+_PyFloat_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ int num_blocks = 0;
|
||||||
|
+ PyFloatBlock *block;
|
||||||
|
+
|
||||||
|
+ /* Walk the block list, counting */
|
||||||
|
+ for (block = block_list; block ; block = block->next) {
|
||||||
|
+ num_blocks++;
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
+ _PyDebugAllocatorStats(out,
|
||||||
|
+ "PyFloatBlock", num_blocks, sizeof(PyFloatBlock));
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
static PyFloatObject *
|
||||||
|
fill_free_list(void)
|
||||||
|
{
|
||||||
|
diff --git a/Objects/frameobject.c b/Objects/frameobject.c
|
||||||
|
index 4c91dd0..03a66dc 100644
|
||||||
|
--- a/Objects/frameobject.c
|
||||||
|
+++ b/Objects/frameobject.c
|
||||||
|
@@ -1019,3 +1019,13 @@ PyFrame_Fini(void)
|
||||||
|
Py_XDECREF(builtin_object);
|
||||||
|
builtin_object = NULL;
|
||||||
|
}
|
||||||
|
+
|
||||||
|
+/* Print summary info about the state of the optimized allocator */
|
||||||
|
+void
|
||||||
|
+_PyFrame_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ _PyDebugAllocatorStats(out,
|
||||||
|
+ "free PyFrameObject",
|
||||||
|
+ numfree, sizeof(PyFrameObject));
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
diff --git a/Objects/intobject.c b/Objects/intobject.c
|
||||||
|
index 9b27c35..703fa5a 100644
|
||||||
|
--- a/Objects/intobject.c
|
||||||
|
+++ b/Objects/intobject.c
|
||||||
|
@@ -44,6 +44,23 @@ typedef struct _intblock PyIntBlock;
|
||||||
|
static PyIntBlock *block_list = NULL;
|
||||||
|
static PyIntObject *free_list = NULL;
|
||||||
|
|
||||||
|
+
|
||||||
|
+/* Print summary info about the state of the optimized allocator */
|
||||||
|
+void
|
||||||
|
+_PyInt_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ int num_blocks = 0;
|
||||||
|
+ PyIntBlock *block;
|
||||||
|
+
|
||||||
|
+ /* Walk the block list, counting */
|
||||||
|
+ for (block = block_list; block ; block = block->next) {
|
||||||
|
+ num_blocks++;
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
+ _PyDebugAllocatorStats(out,
|
||||||
|
+ "PyIntBlock", num_blocks, sizeof(PyIntBlock));
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
static PyIntObject *
|
||||||
|
fill_free_list(void)
|
||||||
|
{
|
||||||
|
diff --git a/Objects/listobject.c b/Objects/listobject.c
|
||||||
|
index 24eff76..38848bd 100644
|
||||||
|
--- a/Objects/listobject.c
|
||||||
|
+++ b/Objects/listobject.c
|
||||||
|
@@ -109,6 +109,15 @@ PyList_Fini(void)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
+/* Print summary info about the state of the optimized allocator */
|
||||||
|
+void
|
||||||
|
+_PyList_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ _PyDebugAllocatorStats(out,
|
||||||
|
+ "free PyListObject",
|
||||||
|
+ numfree, sizeof(PyListObject));
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
PyObject *
|
||||||
|
PyList_New(Py_ssize_t size)
|
||||||
|
{
|
||||||
|
diff --git a/Objects/methodobject.c b/Objects/methodobject.c
|
||||||
|
index c1a99ab..ea5df77 100644
|
||||||
|
--- a/Objects/methodobject.c
|
||||||
|
+++ b/Objects/methodobject.c
|
||||||
|
@@ -412,6 +412,15 @@ PyCFunction_Fini(void)
|
||||||
|
(void)PyCFunction_ClearFreeList();
|
||||||
|
}
|
||||||
|
|
||||||
|
+/* Print summary info about the state of the optimized allocator */
|
||||||
|
+void
|
||||||
|
+_PyCFunction_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ _PyDebugAllocatorStats(out,
|
||||||
|
+ "free PyCFunction",
|
||||||
|
+ numfree, sizeof(PyCFunction));
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
/* PyCFunction_New() is now just a macro that calls PyCFunction_NewEx(),
|
||||||
|
but it's part of the API so we need to keep a function around that
|
||||||
|
existing C extensions can call.
|
||||||
|
diff --git a/Objects/object.c b/Objects/object.c
|
||||||
|
index 65366b0..acef3ce 100644
|
||||||
|
--- a/Objects/object.c
|
||||||
|
+++ b/Objects/object.c
|
||||||
|
@@ -2360,6 +2360,23 @@ PyMem_Free(void *p)
|
||||||
|
PyMem_FREE(p);
|
||||||
|
}
|
||||||
|
|
||||||
|
+void
|
||||||
|
+_PyObject_DebugTypeStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ _PyString_DebugMallocStats(out);
|
||||||
|
+ _PyCFunction_DebugMallocStats(out);
|
||||||
|
+ _PyDict_DebugMallocStats(out);
|
||||||
|
+ _PyFloat_DebugMallocStats(out);
|
||||||
|
+ _PyFrame_DebugMallocStats(out);
|
||||||
|
+ _PyInt_DebugMallocStats(out);
|
||||||
|
+ _PyList_DebugMallocStats(out);
|
||||||
|
+ _PyMethod_DebugMallocStats(out);
|
||||||
|
+ _PySet_DebugMallocStats(out);
|
||||||
|
+ _PyTuple_DebugMallocStats(out);
|
||||||
|
+#if Py_USING_UNICODE
|
||||||
|
+ _PyUnicode_DebugMallocStats(out);
|
||||||
|
+#endif
|
||||||
|
+}
|
||||||
|
|
||||||
|
/* These methods are used to control infinite recursion in repr, str, print,
|
||||||
|
etc. Container objects that may recursively contain themselves,
|
||||||
|
diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c
|
||||||
|
index 0778c85..f049f5c 100644
|
||||||
|
--- a/Objects/obmalloc.c
|
||||||
|
+++ b/Objects/obmalloc.c
|
||||||
|
@@ -541,12 +541,10 @@ static struct arena_object* usable_arenas = NULL;
|
||||||
|
/* Number of arenas allocated that haven't been free()'d. */
|
||||||
|
static size_t narenas_currently_allocated = 0;
|
||||||
|
|
||||||
|
-#ifdef PYMALLOC_DEBUG
|
||||||
|
/* Total number of times malloc() called to allocate an arena. */
|
||||||
|
static size_t ntimes_arena_allocated = 0;
|
||||||
|
/* High water mark (max value ever seen) for narenas_currently_allocated. */
|
||||||
|
static size_t narenas_highwater = 0;
|
||||||
|
-#endif
|
||||||
|
|
||||||
|
/* Allocate a new arena. If we run out of memory, return NULL. Else
|
||||||
|
* allocate a new arena, and return the address of an arena_object
|
||||||
|
@@ -563,7 +561,7 @@ new_arena(void)
|
||||||
|
|
||||||
|
#ifdef PYMALLOC_DEBUG
|
||||||
|
if (Py_GETENV("PYTHONMALLOCSTATS"))
|
||||||
|
- _PyObject_DebugMallocStats();
|
||||||
|
+ _PyObject_DebugMallocStats(stderr);
|
||||||
|
#endif
|
||||||
|
if (unused_arena_objects == NULL) {
|
||||||
|
uint i;
|
||||||
|
@@ -631,11 +629,9 @@ new_arena(void)
|
||||||
|
arenaobj->address = (uptr)address;
|
||||||
|
|
||||||
|
++narenas_currently_allocated;
|
||||||
|
-#ifdef PYMALLOC_DEBUG
|
||||||
|
++ntimes_arena_allocated;
|
||||||
|
if (narenas_currently_allocated > narenas_highwater)
|
||||||
|
narenas_highwater = narenas_currently_allocated;
|
||||||
|
-#endif
|
||||||
|
arenaobj->freepools = NULL;
|
||||||
|
/* pool_address <- first pool-aligned address in the arena
|
||||||
|
nfreepools <- number of whole pools that fit after alignment */
|
||||||
|
@@ -1796,17 +1792,19 @@ _PyObject_DebugDumpAddress(const void *p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
+#endif /* PYMALLOC_DEBUG */
|
||||||
|
+
|
||||||
|
static size_t
|
||||||
|
-printone(const char* msg, size_t value)
|
||||||
|
+printone(FILE *out, const char* msg, size_t value)
|
||||||
|
{
|
||||||
|
int i, k;
|
||||||
|
char buf[100];
|
||||||
|
size_t origvalue = value;
|
||||||
|
|
||||||
|
- fputs(msg, stderr);
|
||||||
|
+ fputs(msg, out);
|
||||||
|
for (i = (int)strlen(msg); i < 35; ++i)
|
||||||
|
- fputc(' ', stderr);
|
||||||
|
- fputc('=', stderr);
|
||||||
|
+ fputc(' ', out);
|
||||||
|
+ fputc('=', out);
|
||||||
|
|
||||||
|
/* Write the value with commas. */
|
||||||
|
i = 22;
|
||||||
|
@@ -1827,17 +1825,32 @@ printone(const char* msg, size_t value)
|
||||||
|
|
||||||
|
while (i >= 0)
|
||||||
|
buf[i--] = ' ';
|
||||||
|
- fputs(buf, stderr);
|
||||||
|
+ fputs(buf, out);
|
||||||
|
|
||||||
|
return origvalue;
|
||||||
|
}
|
||||||
|
|
||||||
|
-/* Print summary info to stderr about the state of pymalloc's structures.
|
||||||
|
+void
|
||||||
|
+_PyDebugAllocatorStats(FILE *out,
|
||||||
|
+ const char *block_name, int num_blocks, size_t sizeof_block)
|
||||||
|
+{
|
||||||
|
+ char buf1[128];
|
||||||
|
+ char buf2[128];
|
||||||
|
+ PyOS_snprintf(buf1, sizeof(buf1),
|
||||||
|
+ "%d %ss * %zd bytes each",
|
||||||
|
+ num_blocks, block_name, sizeof_block);
|
||||||
|
+ PyOS_snprintf(buf2, sizeof(buf2),
|
||||||
|
+ "%48s ", buf1);
|
||||||
|
+ (void)printone(out, buf2, num_blocks * sizeof_block);
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+/* Print summary info to "out" about the state of pymalloc's structures.
|
||||||
|
* In Py_DEBUG mode, also perform some expensive internal consistency
|
||||||
|
* checks.
|
||||||
|
*/
|
||||||
|
void
|
||||||
|
-_PyObject_DebugMallocStats(void)
|
||||||
|
+_PyObject_DebugMallocStats(FILE *out)
|
||||||
|
{
|
||||||
|
uint i;
|
||||||
|
const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT;
|
||||||
|
@@ -1866,7 +1879,7 @@ _PyObject_DebugMallocStats(void)
|
||||||
|
size_t total;
|
||||||
|
char buf[128];
|
||||||
|
|
||||||
|
- fprintf(stderr, "Small block threshold = %d, in %u size classes.\n",
|
||||||
|
+ fprintf(out, "Small block threshold = %d, in %u size classes.\n",
|
||||||
|
SMALL_REQUEST_THRESHOLD, numclasses);
|
||||||
|
|
||||||
|
for (i = 0; i < numclasses; ++i)
|
||||||
|
@@ -1920,10 +1933,10 @@ _PyObject_DebugMallocStats(void)
|
||||||
|
}
|
||||||
|
assert(narenas == narenas_currently_allocated);
|
||||||
|
|
||||||
|
- fputc('\n', stderr);
|
||||||
|
+ fputc('\n', out);
|
||||||
|
fputs("class size num pools blocks in use avail blocks\n"
|
||||||
|
"----- ---- --------- ------------- ------------\n",
|
||||||
|
- stderr);
|
||||||
|
+ out);
|
||||||
|
|
||||||
|
for (i = 0; i < numclasses; ++i) {
|
||||||
|
size_t p = numpools[i];
|
||||||
|
@@ -1934,7 +1947,7 @@ _PyObject_DebugMallocStats(void)
|
||||||
|
assert(b == 0 && f == 0);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
- fprintf(stderr, "%5u %6u "
|
||||||
|
+ fprintf(out, "%5u %6u "
|
||||||
|
"%11" PY_FORMAT_SIZE_T "u "
|
||||||
|
"%15" PY_FORMAT_SIZE_T "u "
|
||||||
|
"%13" PY_FORMAT_SIZE_T "u\n",
|
||||||
|
@@ -1944,36 +1957,35 @@ _PyObject_DebugMallocStats(void)
|
||||||
|
pool_header_bytes += p * POOL_OVERHEAD;
|
||||||
|
quantization += p * ((POOL_SIZE - POOL_OVERHEAD) % size);
|
||||||
|
}
|
||||||
|
- fputc('\n', stderr);
|
||||||
|
- (void)printone("# times object malloc called", serialno);
|
||||||
|
-
|
||||||
|
- (void)printone("# arenas allocated total", ntimes_arena_allocated);
|
||||||
|
- (void)printone("# arenas reclaimed", ntimes_arena_allocated - narenas);
|
||||||
|
- (void)printone("# arenas highwater mark", narenas_highwater);
|
||||||
|
- (void)printone("# arenas allocated current", narenas);
|
||||||
|
+ fputc('\n', out);
|
||||||
|
+#ifdef PYMALLOC_DEBUG
|
||||||
|
+ (void)printone(out, "# times object malloc called", serialno);
|
||||||
|
+#endif
|
||||||
|
+ (void)printone(out, "# arenas allocated total", ntimes_arena_allocated);
|
||||||
|
+ (void)printone(out, "# arenas reclaimed", ntimes_arena_allocated - narenas);
|
||||||
|
+ (void)printone(out, "# arenas highwater mark", narenas_highwater);
|
||||||
|
+ (void)printone(out, "# arenas allocated current", narenas);
|
||||||
|
|
||||||
|
PyOS_snprintf(buf, sizeof(buf),
|
||||||
|
"%" PY_FORMAT_SIZE_T "u arenas * %d bytes/arena",
|
||||||
|
narenas, ARENA_SIZE);
|
||||||
|
- (void)printone(buf, narenas * ARENA_SIZE);
|
||||||
|
+ (void)printone(out, buf, narenas * ARENA_SIZE);
|
||||||
|
|
||||||
|
- fputc('\n', stderr);
|
||||||
|
+ fputc('\n', out);
|
||||||
|
|
||||||
|
- total = printone("# bytes in allocated blocks", allocated_bytes);
|
||||||
|
- total += printone("# bytes in available blocks", available_bytes);
|
||||||
|
+ total = printone(out, "# bytes in allocated blocks", allocated_bytes);
|
||||||
|
+ total += printone(out, "# bytes in available blocks", available_bytes);
|
||||||
|
|
||||||
|
PyOS_snprintf(buf, sizeof(buf),
|
||||||
|
"%u unused pools * %d bytes", numfreepools, POOL_SIZE);
|
||||||
|
- total += printone(buf, (size_t)numfreepools * POOL_SIZE);
|
||||||
|
+ total += printone(out, buf, (size_t)numfreepools * POOL_SIZE);
|
||||||
|
|
||||||
|
- total += printone("# bytes lost to pool headers", pool_header_bytes);
|
||||||
|
- total += printone("# bytes lost to quantization", quantization);
|
||||||
|
- total += printone("# bytes lost to arena alignment", arena_alignment);
|
||||||
|
- (void)printone("Total", total);
|
||||||
|
+ total += printone(out, "# bytes lost to pool headers", pool_header_bytes);
|
||||||
|
+ total += printone(out, "# bytes lost to quantization", quantization);
|
||||||
|
+ total += printone(out, "# bytes lost to arena alignment", arena_alignment);
|
||||||
|
+ (void)printone(out, "Total", total);
|
||||||
|
}
|
||||||
|
|
||||||
|
-#endif /* PYMALLOC_DEBUG */
|
||||||
|
-
|
||||||
|
#ifdef Py_USING_MEMORY_DEBUGGER
|
||||||
|
/* Make this function last so gcc won't inline it since the definition is
|
||||||
|
* after the reference.
|
||||||
|
diff --git a/Objects/setobject.c b/Objects/setobject.c
|
||||||
|
index 31da3db..da086ab 100644
|
||||||
|
--- a/Objects/setobject.c
|
||||||
|
+++ b/Objects/setobject.c
|
||||||
|
@@ -1087,6 +1087,16 @@ PySet_Fini(void)
|
||||||
|
Py_CLEAR(emptyfrozenset);
|
||||||
|
}
|
||||||
|
|
||||||
|
+/* Print summary info about the state of the optimized allocator */
|
||||||
|
+void
|
||||||
|
+_PySet_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ _PyDebugAllocatorStats(out,
|
||||||
|
+ "free PySetObject",
|
||||||
|
+ numfree, sizeof(PySetObject));
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
+
|
||||||
|
static PyObject *
|
||||||
|
set_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
|
||||||
|
{
|
||||||
|
diff --git a/Objects/stringobject.c b/Objects/stringobject.c
|
||||||
|
index c47d32f..b1ffa24 100644
|
||||||
|
--- a/Objects/stringobject.c
|
||||||
|
+++ b/Objects/stringobject.c
|
||||||
|
@@ -4880,3 +4880,43 @@ void _Py_ReleaseInternedStrings(void)
|
||||||
|
PyDict_Clear(interned);
|
||||||
|
Py_CLEAR(interned);
|
||||||
|
}
|
||||||
|
+
|
||||||
|
+void _PyString_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ ssize_t i;
|
||||||
|
+ int num_immortal = 0, num_mortal = 0;
|
||||||
|
+ ssize_t immortal_size = 0, mortal_size = 0;
|
||||||
|
+
|
||||||
|
+ if (interned == NULL || !PyDict_Check(interned))
|
||||||
|
+ return;
|
||||||
|
+
|
||||||
|
+ for (i = 0; i <= ((PyDictObject*)interned)->ma_mask; i++) {
|
||||||
|
+ PyDictEntry *ep = ((PyDictObject*)interned)->ma_table + i;
|
||||||
|
+ PyObject *pvalue = ep->me_value;
|
||||||
|
+ if (pvalue != NULL) {
|
||||||
|
+ PyStringObject *s = (PyStringObject *)ep->me_key;
|
||||||
|
+
|
||||||
|
+ switch (s->ob_sstate) {
|
||||||
|
+ case SSTATE_NOT_INTERNED:
|
||||||
|
+ /* XXX Shouldn't happen */
|
||||||
|
+ break;
|
||||||
|
+ case SSTATE_INTERNED_IMMORTAL:
|
||||||
|
+ num_immortal ++;
|
||||||
|
+ immortal_size += s->ob_size;
|
||||||
|
+ break;
|
||||||
|
+ case SSTATE_INTERNED_MORTAL:
|
||||||
|
+ num_mortal ++;
|
||||||
|
+ mortal_size += s->ob_size;
|
||||||
|
+ break;
|
||||||
|
+ default:
|
||||||
|
+ Py_FatalError("Inconsistent interned string state.");
|
||||||
|
+ }
|
||||||
|
+ }
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
+ fprintf(out, "%d mortal interned strings\n", num_mortal);
|
||||||
|
+ fprintf(out, "%d immortal interned strings\n", num_immortal);
|
||||||
|
+ fprintf(out, "total size of all interned strings: "
|
||||||
|
+ "%zi/%zi "
|
||||||
|
+ "mortal/immortal\n", mortal_size, immortal_size);
|
||||||
|
+}
|
||||||
|
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c
|
||||||
|
index 6f4b18c..e8e4490 100644
|
||||||
|
--- a/Objects/tupleobject.c
|
||||||
|
+++ b/Objects/tupleobject.c
|
||||||
|
@@ -44,6 +44,22 @@ show_track(void)
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
+/* Print summary info about the state of the optimized allocator */
|
||||||
|
+void
|
||||||
|
+_PyTuple_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+#if PyTuple_MAXSAVESIZE > 0
|
||||||
|
+ int i;
|
||||||
|
+ char buf[128];
|
||||||
|
+ for (i = 1; i < PyTuple_MAXSAVESIZE; i++) {
|
||||||
|
+ PyOS_snprintf(buf, sizeof(buf),
|
||||||
|
+ "free %d-sized PyTupleObject", i);
|
||||||
|
+ _PyDebugAllocatorStats(out,
|
||||||
|
+ buf,
|
||||||
|
+ numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i));
|
||||||
|
+ }
|
||||||
|
+#endif
|
||||||
|
+}
|
||||||
|
|
||||||
|
PyObject *
|
||||||
|
PyTuple_New(register Py_ssize_t size)
|
||||||
|
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
|
||||||
|
index a859fa0..b6ff83e 100644
|
||||||
|
--- a/Objects/unicodeobject.c
|
||||||
|
+++ b/Objects/unicodeobject.c
|
||||||
|
@@ -9018,6 +9018,12 @@ _PyUnicode_Fini(void)
|
||||||
|
(void)PyUnicode_ClearFreeList();
|
||||||
|
}
|
||||||
|
|
||||||
|
+void _PyUnicode_DebugMallocStats(FILE *out)
|
||||||
|
+{
|
||||||
|
+ _PyDebugAllocatorStats(out, "free PyUnicodeObject", numfree,
|
||||||
|
+ sizeof(PyUnicodeObject));
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
|
||||||
|
index b686317..c3b3e17 100644
|
||||||
|
--- a/Python/pythonrun.c
|
||||||
|
+++ b/Python/pythonrun.c
|
||||||
|
@@ -605,7 +605,7 @@ Py_Finalize(void)
|
||||||
|
#endif /* Py_TRACE_REFS */
|
||||||
|
#ifdef PYMALLOC_DEBUG
|
||||||
|
if (Py_GETENV("PYTHONMALLOCSTATS"))
|
||||||
|
- _PyObject_DebugMallocStats();
|
||||||
|
+ _PyObject_DebugMallocStats(stderr);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
call_ll_exitfuncs();
|
||||||
|
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
|
||||||
|
index 22238ba..60624f2 100644
|
||||||
|
--- a/Python/sysmodule.c
|
||||||
|
+++ b/Python/sysmodule.c
|
||||||
|
@@ -890,6 +890,57 @@ a 11-tuple where the entries in the tuple are counts of:\n\
|
||||||
|
extern "C" {
|
||||||
|
#endif
|
||||||
|
|
||||||
|
+static PyObject *
|
||||||
|
+sys_debugmallocstats(PyObject *self, PyObject *args)
|
||||||
|
+{
|
||||||
|
+ PyObject *file = NULL;
|
||||||
|
+ FILE *fp;
|
||||||
|
+
|
||||||
|
+ if (!PyArg_ParseTuple(args, "|O!",
|
||||||
|
+ &PyFile_Type, &file)) {
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
+ if (!file) {
|
||||||
|
+ /* Default to sys.stderr: */
|
||||||
|
+ file = PySys_GetObject("stderr");
|
||||||
|
+ if (!file) {
|
||||||
|
+ PyErr_SetString(PyExc_ValueError, "sys.stderr not set");
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
+ if (!PyFile_Check(file)) {
|
||||||
|
+ PyErr_SetString(PyExc_TypeError, "sys.stderr is not a file");
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
+ Py_INCREF(file);
|
||||||
|
+ /* OK, we now own a ref on non-NULL "file" */
|
||||||
|
+
|
||||||
|
+ fp = PyFile_AsFile(file);
|
||||||
|
+ if (!fp) {
|
||||||
|
+ PyErr_SetString(PyExc_ValueError, "file is closed");
|
||||||
|
+ Py_DECREF(file);
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
+ _PyObject_DebugMallocStats(fp);
|
||||||
|
+ fputc('\n', fp);
|
||||||
|
+ _PyObject_DebugTypeStats(fp);
|
||||||
|
+
|
||||||
|
+ Py_DECREF(file);
|
||||||
|
+
|
||||||
|
+ Py_RETURN_NONE;
|
||||||
|
+}
|
||||||
|
+PyDoc_STRVAR(debugmallocstats_doc,
|
||||||
|
+"_debugmallocstats([file])\n\
|
||||||
|
+\n\
|
||||||
|
+Print summary info to the given file (or sys.stderr) about the state of\n\
|
||||||
|
+pymalloc's structures.\n\
|
||||||
|
+\n\
|
||||||
|
+In Py_DEBUG mode, also perform some expensive internal consistency\n\
|
||||||
|
+checks.\n\
|
||||||
|
+");
|
||||||
|
+
|
||||||
|
#ifdef Py_TRACE_REFS
|
||||||
|
/* Defined in objects.c because it uses static globals if that file */
|
||||||
|
extern PyObject *_Py_GetObjects(PyObject *, PyObject *);
|
||||||
|
@@ -988,6 +1039,8 @@ static PyMethodDef sys_methods[] = {
|
||||||
|
{"settrace", sys_settrace, METH_O, settrace_doc},
|
||||||
|
{"gettrace", sys_gettrace, METH_NOARGS, gettrace_doc},
|
||||||
|
{"call_tracing", sys_call_tracing, METH_VARARGS, call_tracing_doc},
|
||||||
|
+ {"_debugmallocstats", sys_debugmallocstats, METH_VARARGS,
|
||||||
|
+ debugmallocstats_doc},
|
||||||
|
{NULL, NULL} /* sentinel */
|
||||||
|
};
|
||||||
|
|
15
00155-avoid-ctypes-thunks.patch
Normal file
@ -0,0 +1,15 @@
diff -up Python-2.7.3/Lib/ctypes/__init__.py.rhbz814391 Python-2.7.3/Lib/ctypes/__init__.py
--- Python-2.7.3/Lib/ctypes/__init__.py.rhbz814391 2012-04-20 14:51:19.390990244 -0400
+++ Python-2.7.3/Lib/ctypes/__init__.py 2012-04-20 14:51:45.141668316 -0400
@@ -272,11 +272,6 @@ def _reset_cache():
# _SimpleCData.c_char_p_from_param
POINTER(c_char).from_param = c_char_p.from_param
_pointer_type_cache[None] = c_void_p
- # XXX for whatever reasons, creating the first instance of a callback
- # function is needed for the unittests on Win64 to succeed. This MAY
- # be a compiler bug, since the problem occurs only when _ctypes is
- # compiled with the MS SDK compiler. Or an uninitialized variable?
- CFUNCTYPE(c_int)(lambda: None)

try:
from _ctypes import set_conversion_mode
57
00156-gdb-autoload-safepath.patch
Normal file
@ -0,0 +1,57 @@
diff -up Python-2.7.3/Lib/test/test_gdb.py.gdb-autoload-safepath Python-2.7.3/Lib/test/test_gdb.py
--- Python-2.7.3/Lib/test/test_gdb.py.gdb-autoload-safepath 2012-04-30 15:53:57.254045220 -0400
+++ Python-2.7.3/Lib/test/test_gdb.py 2012-04-30 16:19:19.569941124 -0400
@@ -54,6 +54,19 @@ def gdb_has_frame_select():

HAS_PYUP_PYDOWN = gdb_has_frame_select()

+def gdb_has_autoload_safepath():
+ # Recent GDBs will only auto-load scripts from certain safe
+ # locations, so we will need to turn off this protection.
+ # However, if the GDB doesn't have it, then the following
+ # command will generate noise on stderr (rhbz#817072):
+ cmd = "--eval-command=set auto-load safe-path /"
+ p = subprocess.Popen(["gdb", "--batch", cmd],
+ stderr=subprocess.PIPE)
+ _, stderr = p.communicate()
+ return '"on" or "off" expected.' not in stderr
+
+HAS_AUTOLOAD_SAFEPATH = gdb_has_autoload_safepath()
+
class DebuggerTests(unittest.TestCase):

"""Test that the debugger can debug Python."""
diff -up Python-2.7.10/Lib/test/test_gdb.py.ms Python-2.7.10/Lib/test/test_gdb.py
--- Python-2.7.10/Lib/test/test_gdb.py.ms 2015-05-25 17:00:25.028462615 +0200
+++ Python-2.7.10/Lib/test/test_gdb.py 2015-05-25 17:01:53.166359822 +0200
@@ -153,6 +153,17 @@ class DebuggerTests(unittest.TestCase):

'run']

+ if HAS_AUTOLOAD_SAFEPATH:
+ # Recent GDBs will only auto-load scripts from certain safe
+ # locations.
+ # Where necessary, turn off this protection to ensure that
+ # our -gdb.py script can be loaded - but not on earlier gdb builds
+ # as this would generate noise on stderr (rhbz#817072):
+ init_commands = ['set auto-load safe-path /']
+ else:
+ init_commands = []
+
+
# GDB as of 7.4 onwards can distinguish between the
# value of a variable at entry vs current value:
# http://sourceware.org/gdb/onlinedocs/gdb/Variables.html
@@ -167,10 +178,11 @@ class DebuggerTests(unittest.TestCase):
else:
commands += ['backtrace']

- # print commands
+ # print init_commands

# Use "commands" to generate the arguments with which to invoke "gdb":
args = ["gdb", "--batch", "-nx"]
+ args += ['--init-eval-command=%s' % cmd for cmd in init_commands]
args += ['--eval-command=%s' % cmd for cmd in commands]
args += ["--args",
sys.executable]
292
00165-crypt-module-salt-backport.patch
Normal file
@ -0,0 +1,292 @@
|
|||||||
|
diff --git a/Doc/library/crypt.rst b/Doc/library/crypt.rst
|
||||||
|
index 91464ef..6ee64d6 100644
|
||||||
|
--- a/Doc/library/crypt.rst
|
||||||
|
+++ b/Doc/library/crypt.rst
|
||||||
|
@@ -16,9 +16,9 @@
|
||||||
|
|
||||||
|
This module implements an interface to the :manpage:`crypt(3)` routine, which is
|
||||||
|
a one-way hash function based upon a modified DES algorithm; see the Unix man
|
||||||
|
-page for further details. Possible uses include allowing Python scripts to
|
||||||
|
-accept typed passwords from the user, or attempting to crack Unix passwords with
|
||||||
|
-a dictionary.
|
||||||
|
+page for further details. Possible uses include storing hashed passwords
|
||||||
|
+so you can check passwords without storing the actual password, or attempting
|
||||||
|
+to crack Unix passwords with a dictionary.
|
||||||
|
|
||||||
|
.. index:: single: crypt(3)
|
||||||
|
|
||||||
|
@@ -27,15 +27,81 @@ the :manpage:`crypt(3)` routine in the running system. Therefore, any
|
||||||
|
extensions available on the current implementation will also be available on
|
||||||
|
this module.
|
||||||
|
|
||||||
|
+Hashing Methods
|
||||||
|
+---------------
|
||||||
|
|
||||||
|
-.. function:: crypt(word, salt)
|
||||||
|
+The :mod:`crypt` module defines the list of hashing methods (not all methods
|
||||||
|
+are available on all platforms):
|
||||||
|
+
|
||||||
|
+.. data:: METHOD_SHA512
|
||||||
|
+
|
||||||
|
+ A Modular Crypt Format method with 16 character salt and 86 character
|
||||||
|
+ hash. This is the strongest method.
|
||||||
|
+
|
||||||
|
+.. versionadded:: 3.3
|
||||||
|
+
|
||||||
|
+.. data:: METHOD_SHA256
|
||||||
|
+
|
||||||
|
+ Another Modular Crypt Format method with 16 character salt and 43
|
||||||
|
+ character hash.
|
||||||
|
+
|
||||||
|
+.. versionadded:: 3.3
|
||||||
|
+
|
||||||
|
+.. data:: METHOD_MD5
|
||||||
|
+
|
||||||
|
+ Another Modular Crypt Format method with 8 character salt and 22
|
||||||
|
+ character hash.
|
||||||
|
+
|
||||||
|
+.. versionadded:: 3.3
|
||||||
|
+
|
||||||
|
+.. data:: METHOD_CRYPT
|
||||||
|
+
|
||||||
|
+ The traditional method with a 2 character salt and 13 characters of
|
||||||
|
+ hash. This is the weakest method.
|
||||||
|
+
|
||||||
|
+.. versionadded:: 3.3
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+Module Attributes
|
||||||
|
+-----------------
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+.. attribute:: methods
|
||||||
|
+
|
||||||
|
+ A list of available password hashing algorithms, as
|
||||||
|
+ ``crypt.METHOD_*`` objects. This list is sorted from strongest to
|
||||||
|
+ weakest, and is guaranteed to have at least ``crypt.METHOD_CRYPT``.
|
||||||
|
+
|
||||||
|
+.. versionadded:: 3.3
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+Module Functions
|
||||||
|
+----------------
|
||||||
|
+
|
||||||
|
+The :mod:`crypt` module defines the following functions:
|
||||||
|
+
|
||||||
|
+.. function:: crypt(word, salt=None)
|
||||||
|
|
||||||
|
*word* will usually be a user's password as typed at a prompt or in a graphical
|
||||||
|
- interface. *salt* is usually a random two-character string which will be used
|
||||||
|
- to perturb the DES algorithm in one of 4096 ways. The characters in *salt* must
|
||||||
|
- be in the set ``[./a-zA-Z0-9]``. Returns the hashed password as a string, which
|
||||||
|
- will be composed of characters from the same alphabet as the salt (the first two
|
||||||
|
- characters represent the salt itself).
|
||||||
|
+ interface. The optional *salt* is either a string as returned from
|
||||||
|
+ :func:`mksalt`, one of the ``crypt.METHOD_*`` values (though not all
|
||||||
|
+ may be available on all platforms), or a full encrypted password
|
||||||
|
+ including salt, as returned by this function. If *salt* is not
|
||||||
|
+ provided, the strongest method will be used (as returned by
|
||||||
|
+ :func:`methods`.
|
||||||
|
+
|
||||||
|
+ Checking a password is usually done by passing the plain-text password
|
||||||
|
+ as *word* and the full results of a previous :func:`crypt` call,
|
||||||
|
+ which should be the same as the results of this call.
|
||||||
|
+
|
||||||
|
+ *salt* (either a random 2 or 16 character string, possibly prefixed with
|
||||||
|
+ ``$digit$`` to indicate the method) which will be used to perturb the
|
||||||
|
+ encryption algorithm. The characters in *salt* must be in the set
|
||||||
|
+ ``[./a-zA-Z0-9]``, with the exception of Modular Crypt Format which
|
||||||
|
+ prefixes a ``$digit$``.
|
||||||
|
+
|
||||||
|
+ Returns the hashed password as a string, which will be composed of
|
||||||
|
+ characters from the same alphabet as the salt.
|
||||||
|
|
||||||
|
.. index:: single: crypt(3)
|
||||||
|
|
||||||
|
@@ -43,6 +109,27 @@ this module.
|
||||||
|
different sizes in the *salt*, it is recommended to use the full crypted
|
||||||
|
password as salt when checking for a password.
|
||||||
|
|
||||||
|
+.. versionchanged:: 3.3
|
||||||
|
+ Before version 3.3, *salt* must be specified as a string and cannot
|
||||||
|
+ accept ``crypt.METHOD_*`` values (which don't exist anyway).
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+.. function:: mksalt(method=None)
|
||||||
|
+
|
||||||
|
+ Return a randomly generated salt of the specified method. If no
|
||||||
|
+ *method* is given, the strongest method available as returned by
|
||||||
|
+ :func:`methods` is used.
|
||||||
|
+
|
||||||
|
+ The return value is a string either of 2 characters in length for
|
||||||
|
+ ``crypt.METHOD_CRYPT``, or 19 characters starting with ``$digit$`` and
|
||||||
|
+ 16 random characters from the set ``[./a-zA-Z0-9]``, suitable for
|
||||||
|
+ passing as the *salt* argument to :func:`crypt`.
|
||||||
|
+
|
||||||
|
+.. versionadded:: 3.3
|
||||||
|
+
|
||||||
|
+Examples
|
||||||
|
+--------
|
||||||
|
+
|
||||||
|
A simple example illustrating typical use::
|
||||||
|
|
||||||
|
import crypt, getpass, pwd
|
||||||
|
@@ -59,3 +146,11 @@ A simple example illustrating typical use::
|
||||||
|
else:
|
||||||
|
return 1
|
||||||
|
|
||||||
|
+To generate a hash of a password using the strongest available method and
|
||||||
|
+check it against the original::
|
||||||
|
+
|
||||||
|
+ import crypt
|
||||||
|
+
|
||||||
|
+ hashed = crypt.crypt(plaintext)
|
||||||
|
+ if hashed != crypt.crypt(plaintext, hashed):
|
||||||
|
+ raise "Hashed version doesn't validate against original"
|
||||||
|
diff --git a/Lib/crypt.py b/Lib/crypt.py
|
||||||
|
new file mode 100644
|
||||||
|
index 0000000..bf0a416
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/Lib/crypt.py
|
||||||
|
@@ -0,0 +1,71 @@
|
||||||
|
+"""Wrapper to the POSIX crypt library call and associated functionality.
|
||||||
|
+
|
||||||
|
+Note that the ``methods`` and ``METHOD_*`` attributes are non-standard
|
||||||
|
+extensions to Python 2.7, backported from 3.3"""
|
||||||
|
+
|
||||||
|
+import _crypt
|
||||||
|
+import string as _string
|
||||||
|
+from random import SystemRandom as _SystemRandom
|
||||||
|
+from collections import namedtuple as _namedtuple
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+_saltchars = _string.ascii_letters + _string.digits + './'
|
||||||
|
+_sr = _SystemRandom()
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+class _Method(_namedtuple('_Method', 'name ident salt_chars total_size')):
|
||||||
|
+
|
||||||
|
+ """Class representing a salt method per the Modular Crypt Format or the
|
||||||
|
+ legacy 2-character crypt method."""
|
||||||
|
+
|
||||||
|
+ def __repr__(self):
|
||||||
|
+ return '<crypt.METHOD_%s>' % self.name
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+def mksalt(method=None):
|
||||||
|
+ """Generate a salt for the specified method.
|
||||||
|
+
|
||||||
|
+ If not specified, the strongest available method will be used.
|
||||||
|
+
|
||||||
|
+ This is a non-standard extension to Python 2.7, backported from 3.3
|
||||||
|
+ """
|
||||||
|
+ if method is None:
|
||||||
|
+ method = methods[0]
|
||||||
|
+ s = '$%s$' % method.ident if method.ident else ''
|
||||||
|
+ s += ''.join(_sr.sample(_saltchars, method.salt_chars))
|
||||||
|
+ return s
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+def crypt(word, salt=None):
|
||||||
|
+ """Return a string representing the one-way hash of a password, with a salt
|
||||||
|
+ prepended.
|
||||||
|
+
|
||||||
|
+ If ``salt`` is not specified or is ``None``, the strongest
|
||||||
|
+ available method will be selected and a salt generated. Otherwise,
|
||||||
|
+ ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as
|
||||||
|
+ returned by ``crypt.mksalt()``.
|
||||||
|
+
|
||||||
|
+ Note that these are non-standard extensions to Python 2.7's crypt.crypt()
|
||||||
|
+ entrypoint, backported from 3.3: the standard Python 2.7 crypt.crypt()
|
||||||
|
+ entrypoint requires two strings as the parameters, and does not support
|
||||||
|
+ keyword arguments.
|
||||||
|
+ """
|
||||||
|
+ if salt is None or isinstance(salt, _Method):
|
||||||
|
+ salt = mksalt(salt)
|
||||||
|
+ return _crypt.crypt(word, salt)
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+# available salting/crypto methods
|
||||||
|
+METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
|
||||||
|
+METHOD_MD5 = _Method('MD5', '1', 8, 34)
|
||||||
|
+METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
|
||||||
|
+METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
|
||||||
|
+
|
||||||
|
+methods = []
|
||||||
|
+for _method in (METHOD_SHA512, METHOD_SHA256, METHOD_MD5):
|
||||||
|
+ _result = crypt('', _method)
|
||||||
|
+ if _result and len(_result) == _method.total_size:
|
||||||
|
+ methods.append(_method)
|
||||||
|
+methods.append(METHOD_CRYPT)
|
||||||
|
+del _result, _method
|
||||||
|
+
|
||||||
|
diff --git a/Lib/test/test_crypt.py b/Lib/test/test_crypt.py
|
||||||
|
index 7cd9c71..b061a55 100644
|
||||||
|
--- a/Lib/test/test_crypt.py
|
||||||
|
+++ b/Lib/test/test_crypt.py
|
||||||
|
@@ -16,6 +16,25 @@ class CryptTestCase(unittest.TestCase):
|
||||||
|
self.assertEqual(cr2, cr)
|
||||||
|
|
||||||
|
|
||||||
|
+ def test_salt(self):
|
||||||
|
+ self.assertEqual(len(crypt._saltchars), 64)
|
||||||
|
+ for method in crypt.methods:
|
||||||
|
+ salt = crypt.mksalt(method)
|
||||||
|
+ self.assertEqual(len(salt),
|
||||||
|
+ method.salt_chars + (3 if method.ident else 0))
|
||||||
|
+
|
||||||
|
+ def test_saltedcrypt(self):
|
||||||
|
+ for method in crypt.methods:
|
||||||
|
+ pw = crypt.crypt('assword', method)
|
||||||
|
+ self.assertEqual(len(pw), method.total_size)
|
||||||
|
+ pw = crypt.crypt('assword', crypt.mksalt(method))
|
||||||
|
+ self.assertEqual(len(pw), method.total_size)
|
||||||
|
+
|
||||||
|
+ def test_methods(self):
|
||||||
|
+ # Gurantee that METHOD_CRYPT is the last method in crypt.methods.
|
||||||
|
+ self.assertTrue(len(crypt.methods) >= 1)
|
||||||
|
+ self.assertEqual(crypt.METHOD_CRYPT, crypt.methods[-1])
|
||||||
|
+
|
||||||
|
def test_main():
|
||||||
|
test_support.run_unittest(CryptTestCase)
|
||||||
|
|
||||||
|
diff --git a/Modules/Setup.dist b/Modules/Setup.dist
|
||||||
|
index 2712f06..3ea4f0c 100644
|
||||||
|
--- a/Modules/Setup.dist
|
||||||
|
+++ b/Modules/Setup.dist
|
||||||
|
@@ -225,7 +225,7 @@ _ssl _ssl.c \
|
||||||
|
#
|
||||||
|
# First, look at Setup.config; configure may have set this for you.
|
||||||
|
|
||||||
|
-crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems
|
||||||
|
+_crypt _cryptmodule.c -lcrypt # crypt(3); needs -lcrypt on some systems
|
||||||
|
|
||||||
|
|
||||||
|
# Some more UNIX dependent modules -- off by default, since these
|
||||||
|
diff --git a/Modules/cryptmodule.c b/Modules/cryptmodule.c
|
||||||
|
index 76de54f..7c69ca6 100644
|
||||||
|
--- a/Modules/cryptmodule.c
|
||||||
|
+++ b/Modules/cryptmodule.c
|
||||||
|
@@ -43,7 +43,7 @@ static PyMethodDef crypt_methods[] = {
|
||||||
|
};
|
||||||
|
|
||||||
|
PyMODINIT_FUNC
|
||||||
|
-initcrypt(void)
|
||||||
|
+init_crypt(void)
|
||||||
|
{
|
||||||
|
- Py_InitModule("crypt", crypt_methods);
|
||||||
|
+ Py_InitModule("_crypt", crypt_methods);
|
||||||
|
}
|
||||||
|
diff --git a/setup.py b/setup.py
|
||||||
|
index b787487..c60ac35 100644
|
||||||
|
--- a/setup.py
|
||||||
|
+++ b/setup.py
|
||||||
|
@@ -798,7 +798,7 @@ class PyBuildExt(build_ext):
|
||||||
|
libs = ['crypt']
|
||||||
|
else:
|
||||||
|
libs = []
|
||||||
|
- exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
|
||||||
|
+ exts.append( Extension('_crypt', ['_cryptmodule.c'], libraries=libs) )
|
||||||
|
|
||||||
|
# CSV files
|
||||||
|
exts.append( Extension('_csv', ['_csv.c']) )
|
@ -0,0 +1,47 @@
|
|||||||
|
diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py
|
||||||
|
index 3354b34..10ba0e5 100644
|
||||||
|
--- a/Lib/test/test_gdb.py
|
||||||
|
+++ b/Lib/test/test_gdb.py
|
||||||
|
@@ -725,11 +725,10 @@ class PyListTests(DebuggerTests):
|
||||||
|
' 2 \n'
|
||||||
|
' 3 def foo(a, b, c):\n',
|
||||||
|
bt)
|
||||||
|
-
|
||||||
|
+@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
|
||||||
|
+@unittest.skipIf(python_is_optimized(),
|
||||||
|
+ "Python was compiled with optimizations")
|
||||||
|
class StackNavigationTests(DebuggerTests):
|
||||||
|
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
|
||||||
|
- @unittest.skipIf(python_is_optimized(),
|
||||||
|
- "Python was compiled with optimizations")
|
||||||
|
def test_pyup_command(self):
|
||||||
|
'Verify that the "py-up" command works'
|
||||||
|
bt = self.get_stack_trace(script=self.get_sample_script(),
|
||||||
|
@@ -740,7 +739,6 @@ class StackNavigationTests(DebuggerTests):
|
||||||
|
baz\(a, b, c\)
|
||||||
|
$''')
|
||||||
|
|
||||||
|
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
|
||||||
|
def test_down_at_bottom(self):
|
||||||
|
'Verify handling of "py-down" at the bottom of the stack'
|
||||||
|
bt = self.get_stack_trace(script=self.get_sample_script(),
|
||||||
|
@@ -748,9 +746,6 @@ $''')
|
||||||
|
self.assertEndsWith(bt,
|
||||||
|
'Unable to find a newer python frame\n')
|
||||||
|
|
||||||
|
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
|
||||||
|
- @unittest.skipIf(python_is_optimized(),
|
||||||
|
- "Python was compiled with optimizations")
|
||||||
|
def test_up_at_top(self):
|
||||||
|
'Verify handling of "py-up" at the top of the stack'
|
||||||
|
bt = self.get_stack_trace(script=self.get_sample_script(),
|
||||||
|
@@ -758,9 +753,6 @@ $''')
|
||||||
|
self.assertEndsWith(bt,
|
||||||
|
'Unable to find an older python frame\n')
|
||||||
|
|
||||||
|
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
|
||||||
|
- @unittest.skipIf(python_is_optimized(),
|
||||||
|
- "Python was compiled with optimizations")
|
||||||
|
def test_up_then_down(self):
|
||||||
|
'Verify "py-up" followed by "py-down"'
|
||||||
|
bt = self.get_stack_trace(script=self.get_sample_script(),
|
279
00170-gc-assertions.patch
Normal file
@ -0,0 +1,279 @@
|
|||||||
|
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
|
||||||
|
index 7e47b2d..12a210d 100644
|
||||||
|
--- a/Lib/test/test_gc.py
|
||||||
|
+++ b/Lib/test/test_gc.py
|
||||||
|
@@ -1,7 +1,8 @@
|
||||||
|
import unittest
|
||||||
|
from test.support import (verbose, run_unittest, start_threads,
|
||||||
|
- requires_type_collecting)
|
||||||
|
+ requires_type_collecting, import_module)
|
||||||
|
import sys
|
||||||
|
+import sysconfig
|
||||||
|
import time
|
||||||
|
import gc
|
||||||
|
import weakref
|
||||||
|
@@ -39,6 +40,8 @@ class GC_Detector(object):
|
||||||
|
self.wr = weakref.ref(C1055820(666), it_happened)
|
||||||
|
|
||||||
|
|
||||||
|
+BUILT_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS'])
|
||||||
|
+
|
||||||
|
### Tests
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
@@ -537,6 +540,49 @@ class GCTests(unittest.TestCase):
|
||||||
|
# would be damaged, with an empty __dict__.
|
||||||
|
self.assertEqual(x, None)
|
||||||
|
|
||||||
|
+ @unittest.skipIf(BUILT_WITH_NDEBUG,
|
||||||
|
+ 'built with -NDEBUG')
|
||||||
|
+ def test_refcount_errors(self):
|
||||||
|
+ # Verify the "handling" of objects with broken refcounts
|
||||||
|
+
|
||||||
|
+ import_module("ctypes") #skip if not supported
|
||||||
|
+
|
||||||
|
+ import subprocess
|
||||||
|
+ code = '''if 1:
|
||||||
|
+ a = []
|
||||||
|
+ b = [a]
|
||||||
|
+
|
||||||
|
+ # Simulate the refcount of "a" being too low (compared to the
|
||||||
|
+ # references held on it by live data), but keeping it above zero
|
||||||
|
+ # (to avoid deallocating it):
|
||||||
|
+ import ctypes
|
||||||
|
+ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a))
|
||||||
|
+
|
||||||
|
+ # The garbage collector should now have a fatal error when it reaches
|
||||||
|
+ # the broken object:
|
||||||
|
+ import gc
|
||||||
|
+ gc.collect()
|
||||||
|
+ '''
|
||||||
|
+ p = subprocess.Popen([sys.executable, "-c", code],
|
||||||
|
+ stdout=subprocess.PIPE,
|
||||||
|
+ stderr=subprocess.PIPE)
|
||||||
|
+ stdout, stderr = p.communicate()
|
||||||
|
+ p.stdout.close()
|
||||||
|
+ p.stderr.close()
|
||||||
|
+ # Verify that stderr has a useful error message:
|
||||||
|
+ self.assertRegexpMatches(stderr,
|
||||||
|
+ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "gc->gc.gc_refs != 0" failed.')
|
||||||
|
+ self.assertRegexpMatches(stderr,
|
||||||
|
+ b'refcount was too small')
|
||||||
|
+ self.assertRegexpMatches(stderr,
|
||||||
|
+ b'object : \[\]')
|
||||||
|
+ self.assertRegexpMatches(stderr,
|
||||||
|
+ b'type : list')
|
||||||
|
+ self.assertRegexpMatches(stderr,
|
||||||
|
+ b'refcount: 1')
|
||||||
|
+ self.assertRegexpMatches(stderr,
|
||||||
|
+ b'address : 0x[0-9a-f]+')
|
||||||
|
+
|
||||||
|
class GCTogglingTests(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
gc.enable()
|
||||||
|
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
|
||||||
|
index 916e481..0233ce2 100644
|
||||||
|
--- a/Modules/gcmodule.c
|
||||||
|
+++ b/Modules/gcmodule.c
|
||||||
|
@@ -21,6 +21,73 @@
|
||||||
|
#include "Python.h"
|
||||||
|
#include "frameobject.h" /* for PyFrame_ClearFreeList */
|
||||||
|
|
||||||
|
+/*
|
||||||
|
+ Define a pair of assertion macros.
|
||||||
|
+
|
||||||
|
+ These work like the regular C assert(), in that they will abort the
|
||||||
|
+ process with a message on stderr if the given condition fails to hold,
|
||||||
|
+ but compile away to nothing if NDEBUG is defined.
|
||||||
|
+
|
||||||
|
+ However, before aborting, Python will also try to call _PyObject_Dump() on
|
||||||
|
+ the given object. This may be of use when investigating bugs in which a
|
||||||
|
+ particular object is corrupt (e.g. buggy a tp_visit method in an extension
|
||||||
|
+ module breaking the garbage collector), to help locate the broken objects.
|
||||||
|
+
|
||||||
|
+ The WITH_MSG variant allows you to supply an additional message that Python
|
||||||
|
+ will attempt to print to stderr, after the object dump.
|
||||||
|
+*/
|
||||||
|
+#ifdef NDEBUG
|
||||||
|
+/* No debugging: compile away the assertions: */
|
||||||
|
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0)
|
||||||
|
+#else
|
||||||
|
+/* With debugging: generate checks: */
|
||||||
|
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \
|
||||||
|
+ ((expr) \
|
||||||
|
+ ? (void)(0) \
|
||||||
|
+ : _PyObject_AssertFailed((obj), \
|
||||||
|
+ (msg), \
|
||||||
|
+ (__STRING(expr)), \
|
||||||
|
+ (__FILE__), \
|
||||||
|
+ (__LINE__), \
|
||||||
|
+ (__PRETTY_FUNCTION__)))
|
||||||
|
+#endif
|
||||||
|
+
|
||||||
|
+#define PyObject_ASSERT(obj, expr) \
|
||||||
|
+ PyObject_ASSERT_WITH_MSG(obj, expr, NULL)
|
||||||
|
+
|
||||||
|
+static void _PyObject_AssertFailed(PyObject *, const char *,
|
||||||
|
+ const char *, const char *, int,
|
||||||
|
+ const char *);
|
||||||
|
+
|
||||||
|
+static void
|
||||||
|
+_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr,
|
||||||
|
+ const char *file, int line, const char *function)
|
||||||
|
+{
|
||||||
|
+ fprintf(stderr,
|
||||||
|
+ "%s:%d: %s: Assertion \"%s\" failed.\n",
|
||||||
|
+ file, line, function, expr);
|
||||||
|
+ if (msg) {
|
||||||
|
+ fprintf(stderr, "%s\n", msg);
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
+ fflush(stderr);
|
||||||
|
+
|
||||||
|
+ if (obj) {
|
||||||
|
+ /* This might succeed or fail, but we're about to abort, so at least
|
||||||
|
+ try to provide any extra info we can: */
|
||||||
|
+ _PyObject_Dump(obj);
|
||||||
|
+ }
|
||||||
|
+ else {
|
||||||
|
+ fprintf(stderr, "NULL object\n");
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
+ fflush(stdout);
|
||||||
|
+ fflush(stderr);
|
||||||
|
+
|
||||||
|
+ /* Terminate the process: */
|
||||||
|
+ abort();
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
/* Get an object's GC head */
|
||||||
|
#define AS_GC(o) ((PyGC_Head *)(o)-1)
|
||||||
|
|
||||||
|
@@ -328,7 +395,8 @@ update_refs(PyGC_Head *containers)
|
||||||
|
{
|
||||||
|
PyGC_Head *gc = containers->gc.gc_next;
|
||||||
|
for (; gc != containers; gc = gc->gc.gc_next) {
|
||||||
|
- assert(gc->gc.gc_refs == GC_REACHABLE);
|
||||||
|
+ PyObject_ASSERT(FROM_GC(gc),
|
||||||
|
+ gc->gc.gc_refs == GC_REACHABLE);
|
||||||
|
gc->gc.gc_refs = Py_REFCNT(FROM_GC(gc));
|
||||||
|
/* Python's cyclic gc should never see an incoming refcount
|
||||||
|
* of 0: if something decref'ed to 0, it should have been
|
||||||
|
@@ -348,7 +416,8 @@ update_refs(PyGC_Head *containers)
|
||||||
|
* so serious that maybe this should be a release-build
|
||||||
|
* check instead of an assert?
|
||||||
|
*/
|
||||||
|
- assert(gc->gc.gc_refs != 0);
|
||||||
|
+ PyObject_ASSERT(FROM_GC(gc),
|
||||||
|
+ gc->gc.gc_refs != 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -363,7 +432,9 @@ visit_decref(PyObject *op, void *data)
|
||||||
|
* generation being collected, which can be recognized
|
||||||
|
* because only they have positive gc_refs.
|
||||||
|
*/
|
||||||
|
- assert(gc->gc.gc_refs != 0); /* else refcount was too small */
|
||||||
|
+ PyObject_ASSERT_WITH_MSG(FROM_GC(gc),
|
||||||
|
+ gc->gc.gc_refs != 0,
|
||||||
|
+ "refcount was too small");
|
||||||
|
if (gc->gc.gc_refs > 0)
|
||||||
|
gc->gc.gc_refs--;
|
||||||
|
}
|
||||||
|
@@ -423,9 +494,10 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
|
||||||
|
* If gc_refs == GC_UNTRACKED, it must be ignored.
|
||||||
|
*/
|
||||||
|
else {
|
||||||
|
- assert(gc_refs > 0
|
||||||
|
- || gc_refs == GC_REACHABLE
|
||||||
|
- || gc_refs == GC_UNTRACKED);
|
||||||
|
+ PyObject_ASSERT(FROM_GC(gc),
|
||||||
|
+ gc_refs > 0
|
||||||
|
+ || gc_refs == GC_REACHABLE
|
||||||
|
+ || gc_refs == GC_UNTRACKED);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
@@ -467,7 +539,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
|
||||||
|
*/
|
||||||
|
PyObject *op = FROM_GC(gc);
|
||||||
|
traverseproc traverse = Py_TYPE(op)->tp_traverse;
|
||||||
|
- assert(gc->gc.gc_refs > 0);
|
||||||
|
+ PyObject_ASSERT(op, gc->gc.gc_refs > 0);
|
||||||
|
gc->gc.gc_refs = GC_REACHABLE;
|
||||||
|
(void) traverse(op,
|
||||||
|
(visitproc)visit_reachable,
|
||||||
|
@@ -545,7 +617,8 @@ move_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)
|
||||||
|
for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) {
|
||||||
|
PyObject *op = FROM_GC(gc);
|
||||||
|
|
||||||
|
- assert(IS_TENTATIVELY_UNREACHABLE(op));
|
||||||
|
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
|
||||||
|
+
|
||||||
|
next = gc->gc.gc_next;
|
||||||
|
|
||||||
|
if (has_finalizer(op)) {
|
||||||
|
@@ -621,7 +694,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||||
|
PyWeakReference **wrlist;
|
||||||
|
|
||||||
|
op = FROM_GC(gc);
|
||||||
|
- assert(IS_TENTATIVELY_UNREACHABLE(op));
|
||||||
|
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
|
||||||
|
next = gc->gc.gc_next;
|
||||||
|
|
||||||
|
if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op)))
|
||||||
|
@@ -642,9 +715,9 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||||
|
* the callback pointer intact. Obscure: it also
|
||||||
|
* changes *wrlist.
|
||||||
|
*/
|
||||||
|
- assert(wr->wr_object == op);
|
||||||
|
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == op);
|
||||||
|
_PyWeakref_ClearRef(wr);
|
||||||
|
- assert(wr->wr_object == Py_None);
|
||||||
|
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None);
|
||||||
|
if (wr->wr_callback == NULL)
|
||||||
|
continue; /* no callback */
|
||||||
|
|
||||||
|
@@ -678,7 +751,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||||
|
*/
|
||||||
|
if (IS_TENTATIVELY_UNREACHABLE(wr))
|
||||||
|
continue;
|
||||||
|
- assert(IS_REACHABLE(wr));
|
||||||
|
+ PyObject_ASSERT(op, IS_REACHABLE(wr));
|
||||||
|
|
||||||
|
/* Create a new reference so that wr can't go away
|
||||||
|
* before we can process it again.
|
||||||
|
@@ -687,7 +760,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||||
|
|
||||||
|
/* Move wr to wrcb_to_call, for the next pass. */
|
||||||
|
wrasgc = AS_GC(wr);
|
||||||
|
- assert(wrasgc != next); /* wrasgc is reachable, but
|
||||||
|
+ PyObject_ASSERT(op, wrasgc != next);
|
||||||
|
+ /* wrasgc is reachable, but
|
||||||
|
next isn't, so they can't
|
||||||
|
be the same */
|
||||||
|
gc_list_move(wrasgc, &wrcb_to_call);
|
||||||
|
@@ -703,11 +777,11 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||||
|
|
||||||
|
gc = wrcb_to_call.gc.gc_next;
|
||||||
|
op = FROM_GC(gc);
|
||||||
|
- assert(IS_REACHABLE(op));
|
||||||
|
- assert(PyWeakref_Check(op));
|
||||||
|
+ PyObject_ASSERT(op, IS_REACHABLE(op));
|
||||||
|
+ PyObject_ASSERT(op, PyWeakref_Check(op));
|
||||||
|
wr = (PyWeakReference *)op;
|
||||||
|
callback = wr->wr_callback;
|
||||||
|
- assert(callback != NULL);
|
||||||
|
+ PyObject_ASSERT(op, callback != NULL);
|
||||||
|
|
||||||
|
/* copy-paste of weakrefobject.c's handle_callback() */
|
||||||
|
temp = PyObject_CallFunctionObjArgs(callback, wr, NULL);
|
||||||
|
@@ -810,7 +884,7 @@ delete_garbage(PyGC_Head *collectable, PyGC_Head *old)
|
||||||
|
PyGC_Head *gc = collectable->gc.gc_next;
|
||||||
|
PyObject *op = FROM_GC(gc);
|
||||||
|
|
||||||
|
- assert(IS_TENTATIVELY_UNREACHABLE(op));
|
||||||
|
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
|
||||||
|
if (debug & DEBUG_SAVEALL) {
|
||||||
|
PyList_Append(garbage, op);
|
||||||
|
}
|
28
00174-fix-for-usr-move.patch
Normal file
@ -0,0 +1,28 @@
diff -up Python-2.7.3/Modules/getpath.c.fix-for-usr-move Python-2.7.3/Modules/getpath.c
--- Python-2.7.3/Modules/getpath.c.fix-for-usr-move 2013-03-06 14:25:32.801828698 -0500
+++ Python-2.7.3/Modules/getpath.c 2013-03-06 15:59:30.872443168 -0500
@@ -510,6 +510,24 @@ calculate_path(void)
MAXPATHLEN bytes long.
*/

+ /*
+ Workaround for rhbz#817554, where an empty argv0_path erroneously
+ locates "prefix" as "/lib[64]/python2.7" due to it finding
+ "/lib[64]/python2.7/os.py" via the /lib -> /usr/lib symlink for
+ https://fedoraproject.org/wiki/Features/UsrMove
+ */
+ if (argv0_path[0] == '\0' && 0 == strcmp(prog, "cmpi_swig")) {
+ /*
+ We have an empty argv0_path, presumably because prog aka
+ Py_GetProgramName() was not found on $PATH.
+
+ Set argv0_path to "/usr/" so that search_for_prefix() and
+ search_for_exec_prefix() don't erroneously pick up
+ on /lib/ via the UsrMove symlink:
+ */
+ strcpy(argv0_path, "/usr/");
+ }
+
if (!(pfound = search_for_prefix(argv0_path, home))) {
if (!Py_FrozenFlag)
fprintf(stderr,
13
00180-python-add-support-for-ppc64p7.patch
Normal file
@ -0,0 +1,13 @@
diff --git a/config.sub b/config.sub
index 3478c1f..e422173 100755
--- a/config.sub
+++ b/config.sub
@@ -1040,7 +1040,7 @@ case $basic_machine in
;;
ppc64) basic_machine=powerpc64-unknown
;;
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
;;
ppc64le | powerpc64little)
basic_machine=powerpc64le-unknown
70
00181-allow-arbitrary-timeout-in-condition-wait.patch
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
diff --git a/Lib/threading.py b/Lib/threading.py
|
||||||
|
index cb49c4a..c9795a5 100644
|
||||||
|
--- a/Lib/threading.py
|
||||||
|
+++ b/Lib/threading.py
|
||||||
|
@@ -305,7 +305,7 @@ class _Condition(_Verbose):
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
|
- def wait(self, timeout=None):
|
||||||
|
+ def wait(self, timeout=None, balancing=True):
|
||||||
|
"""Wait until notified or until a timeout occurs.
|
||||||
|
|
||||||
|
If the calling thread has not acquired the lock when this method is
|
||||||
|
@@ -354,7 +354,10 @@ class _Condition(_Verbose):
|
||||||
|
remaining = endtime - _time()
|
||||||
|
if remaining <= 0:
|
||||||
|
break
|
||||||
|
- delay = min(delay * 2, remaining, .05)
|
||||||
|
+ if balancing:
|
||||||
|
+ delay = min(delay * 2, remaining, 0.05)
|
||||||
|
+ else:
|
||||||
|
+ delay = remaining
|
||||||
|
_sleep(delay)
|
||||||
|
if not gotit:
|
||||||
|
if __debug__:
|
||||||
|
@@ -599,7 +602,7 @@ class _Event(_Verbose):
|
||||||
|
with self.__cond:
|
||||||
|
self.__flag = False
|
||||||
|
|
||||||
|
- def wait(self, timeout=None):
|
||||||
|
+ def wait(self, timeout=None, balancing=True):
|
||||||
|
"""Block until the internal flag is true.
|
||||||
|
|
||||||
|
If the internal flag is true on entry, return immediately. Otherwise,
|
||||||
|
@@ -617,7 +620,7 @@ class _Event(_Verbose):
|
||||||
|
"""
|
||||||
|
with self.__cond:
|
||||||
|
if not self.__flag:
|
||||||
|
- self.__cond.wait(timeout)
|
||||||
|
+ self.__cond.wait(timeout, balancing)
|
||||||
|
return self.__flag
|
||||||
|
|
||||||
|
# Helper to generate new thread names
|
||||||
|
@@ -908,7 +911,7 @@ class Thread(_Verbose):
|
||||||
|
if 'dummy_threading' not in _sys.modules:
|
||||||
|
raise
|
||||||
|
|
||||||
|
- def join(self, timeout=None):
|
||||||
|
+ def join(self, timeout=None, balancing=True):
|
||||||
|
"""Wait until the thread terminates.
|
||||||
|
|
||||||
|
This blocks the calling thread until the thread whose join() method is
|
||||||
|
@@ -957,7 +960,7 @@ class Thread(_Verbose):
|
||||||
|
if __debug__:
|
||||||
|
self._note("%s.join(): timed out", self)
|
||||||
|
break
|
||||||
|
- self.__block.wait(delay)
|
||||||
|
+ self.__block.wait(delay, balancing)
|
||||||
|
else:
|
||||||
|
if __debug__:
|
||||||
|
self._note("%s.join(): thread stopped", self)
|
||||||
|
@@ -1143,7 +1146,7 @@ class _DummyThread(Thread):
|
||||||
|
def _set_daemon(self):
|
||||||
|
return True
|
||||||
|
|
||||||
|
- def join(self, timeout=None):
|
||||||
|
+ def join(self, timeout=None, balancing=True):
|
||||||
|
assert False, "cannot join a dummy thread"
|
||||||
|
|
||||||
|
|
12
00185-urllib2-honors-noproxy-for-ftp.patch
Normal file
@ -0,0 +1,12 @@
diff -up Python-2.7.5/Lib/urllib2.py.orig Python-2.7.5/Lib/urllib2.py
--- Python-2.7.5/Lib/urllib2.py.orig 2013-07-17 12:22:58.595525622 +0200
+++ Python-2.7.5/Lib/urllib2.py 2013-07-17 12:19:59.875898030 +0200
@@ -728,6 +728,8 @@ class ProxyHandler(BaseHandler):
if proxy_type is None:
proxy_type = orig_type

+ req.get_host()
+
if req.host and proxy_bypass(req.host):
return None

|
25
00187-add-RPATH-to-pyexpat.patch
Normal file
@ -0,0 +1,25 @@
diff -r e8b8279ca118 setup.py
--- a/setup.py Sun Jul 21 21:57:52 2013 -0400
+++ b/setup.py Tue Aug 20 09:45:31 2013 +0200
@@ -1480,12 +1480,21 @@
'expat/xmltok_impl.h'
]

+ # Add an explicit RPATH to pyexpat.so pointing at the directory
+ # containing the system expat (which has the extra XML_SetHashSalt
+ # symbol), to avoid an ImportError with a link error if there's an
+ # LD_LIBRARY_PATH containing a "vanilla" build of expat (without the
+ # symbol) (rhbz#833271):
+ EXPAT_RPATH = '/usr/lib64' if sys.maxint == 0x7fffffffffffffff else '/usr/lib'
+
+
exts.append(Extension('pyexpat',
define_macros = define_macros,
include_dirs = expat_inc,
libraries = expat_lib,
sources = ['pyexpat.c'] + expat_sources,
depends = expat_depends,
+ extra_link_args = ['-Wl,-rpath,%s' % EXPAT_RPATH]
))

# Fredrik Lundh's cElementTree module. Note that this also
|
70
00189-use-rpm-wheels.patch
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
|
||||||
|
index 5021ebf..29a7d1b 100644
|
||||||
|
--- a/Lib/ensurepip/__init__.py
|
||||||
|
+++ b/Lib/ensurepip/__init__.py
|
||||||
|
@@ -1,9 +1,10 @@
|
||||||
|
#!/usr/bin/env python2
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
+import distutils.version
|
||||||
|
+import glob
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
-import pkgutil
|
||||||
|
import shutil
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
@@ -12,9 +13,19 @@ import tempfile
|
||||||
|
__all__ = ["version", "bootstrap"]
|
||||||
|
|
||||||
|
|
||||||
|
-_SETUPTOOLS_VERSION = "41.2.0"
|
||||||
|
+_WHEEL_DIR = "/usr/share/python{}-wheels/".format(sys.version_info[0])
|
||||||
|
|
||||||
|
-_PIP_VERSION = "19.2.3"
|
||||||
|
+def _get_most_recent_wheel_version(pkg):
|
||||||
|
+ prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg))
|
||||||
|
+ suffix = "-py2.py3-none-any.whl"
|
||||||
|
+ pattern = "{}*{}".format(prefix, suffix)
|
||||||
|
+ versions = (p[len(prefix):-len(suffix)] for p in glob.glob(pattern))
|
||||||
|
+ return str(max(versions, key=distutils.version.LooseVersion))
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools")
|
||||||
|
+
|
||||||
|
+_PIP_VERSION = _get_most_recent_wheel_version("pip")
|
||||||
|
|
||||||
|
_PROJECTS = [
|
||||||
|
("setuptools", _SETUPTOOLS_VERSION),
|
||||||
|
@@ -28,8 +39,13 @@ def _run_pip(args, additional_paths=None):
|
||||||
|
sys.path = additional_paths + sys.path
|
||||||
|
|
||||||
|
# Install the bundled software
|
||||||
|
- import pip._internal
|
||||||
|
- return pip._internal.main(args)
|
||||||
|
+ try:
|
||||||
|
+ # pip 10
|
||||||
|
+ from pip._internal import main
|
||||||
|
+ except ImportError:
|
||||||
|
+ # pip 9
|
||||||
|
+ from pip import main
|
||||||
|
+ return main(args)
|
||||||
|
|
||||||
|
|
||||||
|
def version():
|
||||||
|
@@ -100,12 +116,9 @@ def _bootstrap(root=None, upgrade=False, user=False,
|
||||||
|
additional_paths = []
|
||||||
|
for project, version in _PROJECTS:
|
||||||
|
wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
|
||||||
|
- whl = pkgutil.get_data(
|
||||||
|
- "ensurepip",
|
||||||
|
- "_bundled/{}".format(wheel_name),
|
||||||
|
- )
|
||||||
|
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
|
||||||
|
- fp.write(whl)
|
||||||
|
+ with open(os.path.join(_WHEEL_DIR, wheel_name), "rb") as sfp:
|
||||||
|
+ with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
|
||||||
|
+ fp.write(sfp.read())
|
||||||
|
|
||||||
|
additional_paths.append(os.path.join(tmpdir, wheel_name))
|
||||||
|
|
12
00191-disable-NOOP.patch
Normal file
@ -0,0 +1,12 @@
diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py
index 1bb6690..28ed25d 100644
--- a/Lib/test/test_smtplib.py
+++ b/Lib/test/test_smtplib.py
@@ -182,6 +182,7 @@ class DebuggingServerTests(unittest.TestCase):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.quit()

+ @unittest._skipInRpmBuild("Does not work in network-free environment")
def testNOOP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
expected = (250, 'Ok')
|
11
00193-enable-loading-sqlite-extensions.patch
Normal file
@ -0,0 +1,11 @@
--- Python-2.7.5/setup.py.orig 2013-05-11 20:32:54.000000000 -0700
+++ Python-2.7.5/setup.py 2014-02-18 14:16:07.999004901 -0800
@@ -1168,7 +1168,7 @@ class PyBuildExt(build_ext):
sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))

# Comment this out if you want the sqlite3 module to be able to load extensions.
- sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
+ #sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))

if host_platform == 'darwin':
# In every directory on the search path search for a dynamic
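With SQLITE_OMIT_LOAD_EXTENSION commented out as above, the sqlite3 module is compiled with extension loading available at runtime. A rough usage sketch; the extension file name is hypothetical:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.enable_load_extension(True)         # only works when built without SQLITE_OMIT_LOAD_EXTENSION
    conn.load_extension("./fts_example.so")  # hypothetical extension library
    conn.enable_load_extension(False)        # disable again once loading is done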
20
00257-threading-wait-clamp-remaining-time.patch
Normal file
@ -0,0 +1,20 @@
diff --git a/Lib/threading.py b/Lib/threading.py
index e4c7f35..91b3849 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -351,13 +351,14 @@ class _Condition(_Verbose):
gotit = waiter.acquire(0)
if gotit:
break
- remaining = endtime - _time()
+ remaining = min(endtime - _time(), timeout)
if remaining <= 0:
break
if balancing:
delay = min(delay * 2, remaining, 0.05)
else:
delay = remaining
+ endtime = _time() + remaining
_sleep(delay)
if not gotit:
if __debug__:
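The hunk above clamps the remaining wait time to the originally requested timeout and re-derives the deadline from it, so a jump of the system clock cannot make Condition.wait() poll much longer than asked. A stand-alone sketch of the same backoff loop, not the real threading internals:

    import time

    def wait_for(predicate, timeout):
        # Poll `predicate` with exponential backoff, sleeping at most 50 ms per step.
        delay = 0.0005
        endtime = time.time() + timeout
        while not predicate():
            remaining = min(endtime - time.time(), timeout)  # clamp, as in the patch
            if remaining <= 0:
                return False
            delay = min(delay * 2, remaining, 0.05)
            endtime = time.time() + remaining                # re-derive the deadline
            time.sleep(delay)
        return True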
53
00288-ambiguous-python-version-rpmbuild-warn.patch
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
diff -U3 -r Python-2.7.14.orig/Lib/site.py Python-2.7.14/Lib/site.py
|
||||||
|
--- Python-2.7.14.orig/Lib/site.py 2018-01-29 15:05:04.517599815 +0100
|
||||||
|
+++ Python-2.7.14/Lib/site.py 2018-01-30 09:13:17.305270500 +0100
|
||||||
|
@@ -515,6 +515,41 @@
|
||||||
|
"'import usercustomize' failed; use -v for traceback"
|
||||||
|
|
||||||
|
|
||||||
|
+def handle_ambiguous_python_version():
|
||||||
|
+ """Warn or fail if /usr/bin/python is used
|
||||||
|
+
|
||||||
|
+ Behavior depends on the value of PYTHON_DISALLOW_AMBIGUOUS_VERSION:
|
||||||
|
+ - "warn" - print warning to stderr
|
||||||
|
+ - "1" - print error and exit with positive exit code
|
||||||
|
+ - otherwise: do nothing
|
||||||
|
+
|
||||||
|
+ This is a Fedora modification, see the Change page for details:
|
||||||
|
+ See https://fedoraproject.org/wiki/Changes/Avoid_usr_bin_python_in_RPM_Build
|
||||||
|
+ """
|
||||||
|
+ if sys.executable == "/usr/bin/python":
|
||||||
|
+ setting = os.environ.get("PYTHON_DISALLOW_AMBIGUOUS_VERSION")
|
||||||
|
+ if setting == 'warn':
|
||||||
|
+ print>>sys.stderr, (
|
||||||
|
+ "DEPRECATION WARNING: python2 invoked with /usr/bin/python.\n"
|
||||||
|
+ " Use /usr/bin/python3 or /usr/bin/python2\n"
|
||||||
|
+ " /usr/bin/python will be removed or switched to Python 3"
|
||||||
|
+ " in the future.\n"
|
||||||
|
+ " If you cannot make the switch now, please follow"
|
||||||
|
+ " instructions at"
|
||||||
|
+ " https://fedoraproject.org/wiki/Changes/"
|
||||||
|
+ "Avoid_usr_bin_python_in_RPM_Build#Quick_Opt-Out")
|
||||||
|
+ elif setting == '1':
|
||||||
|
+ print>>sys.stderr, (
|
||||||
|
+ "ERROR: python2 invoked with /usr/bin/python.\n"
|
||||||
|
+ " Use /usr/bin/python3 or /usr/bin/python2\n"
|
||||||
|
+ " /usr/bin/python will be switched to Python 3"
|
||||||
|
+ " in the future.\n"
|
||||||
|
+ " More details are at"
|
||||||
|
+ " https://fedoraproject.org/wiki/Changes/"
|
||||||
|
+ "Avoid_usr_bin_python_in_RPM_Build#Quick_Opt-Out")
|
||||||
|
+ exit(1)
|
||||||
|
+
|
||||||
|
+
|
||||||
|
def main():
|
||||||
|
global ENABLE_USER_SITE
|
||||||
|
|
||||||
|
@@ -543,6 +578,7 @@
|
||||||
|
# this module is run as a script, because this code is executed twice.
|
||||||
|
if hasattr(sys, "setdefaultencoding"):
|
||||||
|
del sys.setdefaultencoding
|
||||||
|
+ handle_ambiguous_python_version()
|
||||||
|
|
||||||
|
main()
|
||||||
|
|
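The site.py hook above reads only sys.executable and one environment variable, so its effect can be checked with a small driver script; the interpreter path below assumes a Fedora/RHEL layout where /usr/bin/python is the ambiguous name:

    import os
    import subprocess

    env = dict(os.environ, PYTHON_DISALLOW_AMBIGUOUS_VERSION="warn")
    # Prints the DEPRECATION WARNING to stderr, then runs the script normally.
    subprocess.call(["/usr/bin/python", "-c", "print('hi')"], env=env)

    env["PYTHON_DISALLOW_AMBIGUOUS_VERSION"] = "1"
    # Prints the ERROR text and exits non-zero before the script runs.
    rc = subprocess.call(["/usr/bin/python", "-c", "print('hi')"], env=env)
    print(rc)   # expected to be 1 on a patched interpreter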
69
00289-disable-nis-detection.patch
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
diff --git a/setup.py b/setup.py
|
||||||
|
index 585e380..9993f11 100644
|
||||||
|
--- a/setup.py
|
||||||
|
+++ b/setup.py
|
||||||
|
@@ -1346,11 +1346,7 @@ class PyBuildExt(build_ext):
|
||||||
|
else:
|
||||||
|
missing.append('resource')
|
||||||
|
|
||||||
|
- nis = self._detect_nis(inc_dirs, lib_dirs)
|
||||||
|
- if nis is not None:
|
||||||
|
- exts.append(nis)
|
||||||
|
- else:
|
||||||
|
- missing.append('nis')
|
||||||
|
+ # nis (Sun yellow pages) is handled in Setup.dist
|
||||||
|
|
||||||
|
# Curses support, requiring the System V version of curses, often
|
||||||
|
# provided by the ncurses library.
|
||||||
|
@@ -2162,51 +2158,6 @@ class PyBuildExt(build_ext):
|
||||||
|
# for dlopen, see bpo-32647
|
||||||
|
ext.libraries.append('dl')
|
||||||
|
|
||||||
|
- def _detect_nis(self, inc_dirs, lib_dirs):
|
||||||
|
- if host_platform in {'win32', 'cygwin', 'qnx6'}:
|
||||||
|
- return None
|
||||||
|
-
|
||||||
|
- libs = []
|
||||||
|
- library_dirs = []
|
||||||
|
- includes_dirs = []
|
||||||
|
-
|
||||||
|
- # bpo-32521: glibc has deprecated Sun RPC for some time. Fedora 28
|
||||||
|
- # moved headers and libraries to libtirpc and libnsl. The headers
|
||||||
|
- # are in tircp and nsl sub directories.
|
||||||
|
- rpcsvc_inc = find_file(
|
||||||
|
- 'rpcsvc/yp_prot.h', inc_dirs,
|
||||||
|
- [os.path.join(inc_dir, 'nsl') for inc_dir in inc_dirs]
|
||||||
|
- )
|
||||||
|
- rpc_inc = find_file(
|
||||||
|
- 'rpc/rpc.h', inc_dirs,
|
||||||
|
- [os.path.join(inc_dir, 'tirpc') for inc_dir in inc_dirs]
|
||||||
|
- )
|
||||||
|
- if rpcsvc_inc is None or rpc_inc is None:
|
||||||
|
- # not found
|
||||||
|
- return None
|
||||||
|
- includes_dirs.extend(rpcsvc_inc)
|
||||||
|
- includes_dirs.extend(rpc_inc)
|
||||||
|
-
|
||||||
|
- if self.compiler.find_library_file(lib_dirs, 'nsl'):
|
||||||
|
- libs.append('nsl')
|
||||||
|
- else:
|
||||||
|
- # libnsl-devel: check for libnsl in nsl/ subdirectory
|
||||||
|
- nsl_dirs = [os.path.join(lib_dir, 'nsl') for lib_dir in lib_dirs]
|
||||||
|
- libnsl = self.compiler.find_library_file(nsl_dirs, 'nsl')
|
||||||
|
- if libnsl is not None:
|
||||||
|
- library_dirs.append(os.path.dirname(libnsl))
|
||||||
|
- libs.append('nsl')
|
||||||
|
-
|
||||||
|
- if self.compiler.find_library_file(lib_dirs, 'tirpc'):
|
||||||
|
- libs.append('tirpc')
|
||||||
|
-
|
||||||
|
- return Extension(
|
||||||
|
- 'nis', ['nismodule.c'],
|
||||||
|
- libraries=libs,
|
||||||
|
- library_dirs=library_dirs,
|
||||||
|
- include_dirs=includes_dirs
|
||||||
|
- )
|
||||||
|
-
|
||||||
|
|
||||||
|
class PyBuildInstall(install):
|
||||||
|
# Suppress the warning about installation into the lib_dynload
|
70
00351-cve-2019-20907-fix-infinite-loop-in-tarfile.patch
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
From b099ce737f6e6cc9f3a1bf756af78eaa1c1480cd Mon Sep 17 00:00:00 2001
|
||||||
|
From: Rishi <rishi_devan@mail.com>
|
||||||
|
Date: Wed, 15 Jul 2020 13:51:00 +0200
|
||||||
|
Subject: [PATCH] 00351-cve-2019-20907-fix-infinite-loop-in-tarfile.patch
|
||||||
|
|
||||||
|
00351 #
|
||||||
|
Avoid infinite loop when reading specially crafted TAR files using the tarfile module
|
||||||
|
(CVE-2019-20907).
|
||||||
|
See: https://bugs.python.org/issue39017
|
||||||
|
---
|
||||||
|
Lib/tarfile.py | 2 ++
|
||||||
|
Lib/test/recursion.tar | Bin 0 -> 516 bytes
|
||||||
|
Lib/test/test_tarfile.py | 7 +++++++
|
||||||
|
.../2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst | 1 +
|
||||||
|
4 files changed, 10 insertions(+)
|
||||||
|
create mode 100644 Lib/test/recursion.tar
|
||||||
|
create mode 100644 Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
|
||||||
|
|
||||||
|
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
|
||||||
|
index adf91d5..574a6bb 100644
|
||||||
|
--- a/Lib/tarfile.py
|
||||||
|
+++ b/Lib/tarfile.py
|
||||||
|
@@ -1400,6 +1400,8 @@ class TarInfo(object):
|
||||||
|
|
||||||
|
length, keyword = match.groups()
|
||||||
|
length = int(length)
|
||||||
|
+ if length == 0:
|
||||||
|
+ raise InvalidHeaderError("invalid header")
|
||||||
|
value = buf[match.end(2) + 1:match.start(1) + length - 1]
|
||||||
|
|
||||||
|
keyword = keyword.decode("utf8")
|
||||||
|
diff --git a/Lib/test/recursion.tar b/Lib/test/recursion.tar
|
||||||
|
new file mode 100644
|
||||||
|
index 0000000000000000000000000000000000000000..b8237251964983f54ed1966297e887636cd0c5f4
|
||||||
|
GIT binary patch
|
||||||
|
literal 516
|
||||||
|
zcmYdFPRz+kEn=W0Fn}74P8%Xw3X=l~85kIuo0>8xq$A1Gm}!7)KUsFc41m#O8A5+e
|
||||||
|
I1_}|j06>QaCIA2c
|
||||||
|
|
||||||
|
literal 0
|
||||||
|
HcmV?d00001
|
||||||
|
|
||||||
|
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
|
||||||
|
index 89bd738..4592156 100644
|
||||||
|
--- a/Lib/test/test_tarfile.py
|
||||||
|
+++ b/Lib/test/test_tarfile.py
|
||||||
|
@@ -325,6 +325,13 @@ class CommonReadTest(ReadTest):
|
||||||
|
class MiscReadTest(CommonReadTest):
|
||||||
|
taropen = tarfile.TarFile.taropen
|
||||||
|
|
||||||
|
+ def test_length_zero_header(self):
|
||||||
|
+ # bpo-39017 (CVE-2019-20907): reading a zero-length header should fail
|
||||||
|
+ # with an exception
|
||||||
|
+ with self.assertRaisesRegexp(tarfile.ReadError, "file could not be opened successfully"):
|
||||||
|
+ with tarfile.open(support.findfile('recursion.tar')) as tar:
|
||||||
|
+ pass
|
||||||
|
+
|
||||||
|
def test_no_name_argument(self):
|
||||||
|
with open(self.tarname, "rb") as fobj:
|
||||||
|
tar = tarfile.open(fileobj=fobj, mode=self.mode)
|
||||||
|
diff --git a/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst b/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
|
||||||
|
new file mode 100644
|
||||||
|
index 0000000..ad26676
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst
|
||||||
|
@@ -0,0 +1 @@
|
||||||
|
+Avoid infinite loop when reading specially crafted TAR files using the tarfile module (CVE-2019-20907).
|
||||||
|
--
|
||||||
|
2.25.4
|
||||||
|
|
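After this tarfile change, a pax extended header record that claims a length of 0 raises InvalidHeaderError instead of looping forever, and opening such an archive fails with ReadError, which is what the new test checks. Roughly, assuming the crafted recursion.tar from the patch is on disk:

    import tarfile

    try:
        tarfile.open("recursion.tar")   # crafted archive added by the patch
    except tarfile.ReadError as exc:
        print(exc)                      # "file could not be opened successfully"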
@ -0,0 +1,88 @@
|
|||||||
|
diff --git a/Lib/httplib.py b/Lib/httplib.py
|
||||||
|
index fcc4152..a636774 100644
|
||||||
|
--- a/Lib/httplib.py
|
||||||
|
+++ b/Lib/httplib.py
|
||||||
|
@@ -257,6 +257,10 @@ _contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f-\xff]')
|
||||||
|
# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
|
||||||
|
# We are more lenient for assumed real world compatibility purposes.
|
||||||
|
|
||||||
|
+# These characters are not allowed within HTTP method names
|
||||||
|
+# to prevent http header injection.
|
||||||
|
+_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]')
|
||||||
|
+
|
||||||
|
# We always set the Content-Length header for these methods because some
|
||||||
|
# servers will otherwise respond with a 411
|
||||||
|
_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
|
||||||
|
@@ -935,6 +939,8 @@ class HTTPConnection:
|
||||||
|
else:
|
||||||
|
raise CannotSendRequest()
|
||||||
|
|
||||||
|
+ self._validate_method(method)
|
||||||
|
+
|
||||||
|
# Save the method for use later in the response phase
|
||||||
|
self._method = method
|
||||||
|
|
||||||
|
@@ -1020,6 +1026,16 @@ class HTTPConnection:
|
||||||
|
# On Python 2, request is already encoded (default)
|
||||||
|
return request
|
||||||
|
|
||||||
|
+ def _validate_method(self, method):
|
||||||
|
+ """Validate a method name for putrequest."""
|
||||||
|
+ # prevent http header injection
|
||||||
|
+ match = _contains_disallowed_method_pchar_re.search(method)
|
||||||
|
+ if match:
|
||||||
|
+ raise ValueError(
|
||||||
|
+ "method can't contain control characters. %r "
|
||||||
|
+ "(found at least %r)"
|
||||||
|
+ % (method, match.group()))
|
||||||
|
+
|
||||||
|
def _validate_path(self, url):
|
||||||
|
"""Validate a url for putrequest."""
|
||||||
|
# Prevent CVE-2019-9740.
|
||||||
|
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
|
||||||
|
index d8a57f7..96a61dd 100644
|
||||||
|
--- a/Lib/test/test_httplib.py
|
||||||
|
+++ b/Lib/test/test_httplib.py
|
||||||
|
@@ -385,6 +385,29 @@ class HeaderTests(TestCase):
|
||||||
|
conn.putheader(name, value)
|
||||||
|
|
||||||
|
|
||||||
|
+class HttpMethodTests(TestCase):
|
||||||
|
+ def test_invalid_method_names(self):
|
||||||
|
+ methods = (
|
||||||
|
+ 'GET\r',
|
||||||
|
+ 'POST\n',
|
||||||
|
+ 'PUT\n\r',
|
||||||
|
+ 'POST\nValue',
|
||||||
|
+ 'POST\nHOST:abc',
|
||||||
|
+ 'GET\nrHost:abc\n',
|
||||||
|
+ 'POST\rRemainder:\r',
|
||||||
|
+ 'GET\rHOST:\n',
|
||||||
|
+ '\nPUT'
|
||||||
|
+ )
|
||||||
|
+
|
||||||
|
+ for method in methods:
|
||||||
|
+ with self.assertRaisesRegexp(
|
||||||
|
+ ValueError, "method can't contain control characters"):
|
||||||
|
+ conn = httplib.HTTPConnection('example.com')
|
||||||
|
+ conn.sock = FakeSocket(None)
|
||||||
|
+ conn.request(method=method, url="/")
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
class BasicTest(TestCase):
|
||||||
|
def test_status_lines(self):
|
||||||
|
# Test HTTP status lines
|
||||||
|
@@ -1009,9 +1032,9 @@ class TunnelTests(TestCase):
|
||||||
|
|
||||||
|
@test_support.reap_threads
|
||||||
|
def test_main(verbose=None):
|
||||||
|
- test_support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest,
|
||||||
|
- HTTPTest, HTTPSTest, SourceAddressTest,
|
||||||
|
- TunnelTests)
|
||||||
|
+ test_support.run_unittest(HeaderTests, OfflineTest, HttpMethodTests,
|
||||||
|
+ BasicTest, TimeoutTest, HTTPTest, HTTPSTest,
|
||||||
|
+ SourceAddressTest, TunnelTests)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
test_main()
|
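The httplib hunks above validate the request method before anything is written to the socket, matching the new HttpMethodTests; a control character in the method now fails early, so no connection attempt is made:

    import httplib

    conn = httplib.HTTPConnection("example.com")
    try:
        conn.request("GET\r", "/")   # CR in the method could otherwise inject headers
    except ValueError as exc:
        print(exc)                   # method can't contain control characters. ...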
42
00355-CVE-2020-27619.patch
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
diff --git a/Lib/test/multibytecodec_support.py b/Lib/test/multibytecodec_support.py
|
||||||
|
index 5b2329b6d84..53b5d64d453 100644
|
||||||
|
--- a/Lib/test/multibytecodec_support.py
|
||||||
|
+++ b/Lib/test/multibytecodec_support.py
|
||||||
|
@@ -279,30 +279,22 @@ class TestBase_Mapping(unittest.TestCase):
|
||||||
|
self._test_mapping_file_plain()
|
||||||
|
|
||||||
|
def _test_mapping_file_plain(self):
|
||||||
|
- _unichr = lambda c: eval("u'\\U%08x'" % int(c, 16))
|
||||||
|
- unichrs = lambda s: u''.join(_unichr(c) for c in s.split('+'))
|
||||||
|
+ def unichrs(s):
|
||||||
|
+ return ''.join(unichr(int(x, 16)) for x in s.split('+'))
|
||||||
|
urt_wa = {}
|
||||||
|
|
||||||
|
with self.open_mapping_file() as f:
|
||||||
|
for line in f:
|
||||||
|
if not line:
|
||||||
|
break
|
||||||
|
- data = line.split('#')[0].strip().split()
|
||||||
|
+ data = line.split('#')[0].split()
|
||||||
|
if len(data) != 2:
|
||||||
|
continue
|
||||||
|
|
||||||
|
- csetval = eval(data[0])
|
||||||
|
- if csetval <= 0x7F:
|
||||||
|
- csetch = chr(csetval & 0xff)
|
||||||
|
- elif csetval >= 0x1000000:
|
||||||
|
- csetch = chr(csetval >> 24) + chr((csetval >> 16) & 0xff) + \
|
||||||
|
- chr((csetval >> 8) & 0xff) + chr(csetval & 0xff)
|
||||||
|
- elif csetval >= 0x10000:
|
||||||
|
- csetch = chr(csetval >> 16) + \
|
||||||
|
- chr((csetval >> 8) & 0xff) + chr(csetval & 0xff)
|
||||||
|
- elif csetval >= 0x100:
|
||||||
|
- csetch = chr(csetval >> 8) + chr(csetval & 0xff)
|
||||||
|
- else:
|
||||||
|
+ if data[0][:2] != '0x':
|
||||||
|
+ self.fail("Invalid line: {!r}".format(line))
|
||||||
|
+ csetch = bytes.fromhex(data[0][2:])
|
||||||
|
+ if len(csetch) == 1 and 0x80 <= csetch[0]:
|
||||||
|
continue
|
||||||
|
|
||||||
|
unich = unichrs(data[1])
|
181
00357-CVE-2021-3177.patch
Normal file
@ -0,0 +1,181 @@
|
|||||||
|
commit 30e41798f40c684be57d7ccfebf5c6ad94c0ff97
|
||||||
|
Author: Petr Viktorin <pviktori@redhat.com>
|
||||||
|
Date: Wed Jan 20 15:21:43 2021 +0100
|
||||||
|
|
||||||
|
CVE-2021-3177: Replace snprintf with Python unicode formatting in ctypes param reprs
|
||||||
|
|
||||||
|
Backport of Python3 commit 916610ef90a0d0761f08747f7b0905541f0977c7:
|
||||||
|
https://bugs.python.org/issue42938
|
||||||
|
https://github.com/python/cpython/pull/24239
|
||||||
|
|
||||||
|
diff --git a/Lib/ctypes/test/test_parameters.py b/Lib/ctypes/test/test_parameters.py
|
||||||
|
index 23c1b6e2259..77300d71ae1 100644
|
||||||
|
--- a/Lib/ctypes/test/test_parameters.py
|
||||||
|
+++ b/Lib/ctypes/test/test_parameters.py
|
||||||
|
@@ -206,6 +206,49 @@ class SimpleTypesTestCase(unittest.TestCase):
|
||||||
|
with self.assertRaises(ZeroDivisionError):
|
||||||
|
WorseStruct().__setstate__({}, b'foo')
|
||||||
|
|
||||||
|
+ def test_parameter_repr(self):
|
||||||
|
+ from ctypes import (
|
||||||
|
+ c_bool,
|
||||||
|
+ c_char,
|
||||||
|
+ c_wchar,
|
||||||
|
+ c_byte,
|
||||||
|
+ c_ubyte,
|
||||||
|
+ c_short,
|
||||||
|
+ c_ushort,
|
||||||
|
+ c_int,
|
||||||
|
+ c_uint,
|
||||||
|
+ c_long,
|
||||||
|
+ c_ulong,
|
||||||
|
+ c_longlong,
|
||||||
|
+ c_ulonglong,
|
||||||
|
+ c_float,
|
||||||
|
+ c_double,
|
||||||
|
+ c_longdouble,
|
||||||
|
+ c_char_p,
|
||||||
|
+ c_wchar_p,
|
||||||
|
+ c_void_p,
|
||||||
|
+ )
|
||||||
|
+ self.assertRegexpMatches(repr(c_bool.from_param(True)), r"^<cparam '\?' at 0x[A-Fa-f0-9]+>$")
|
||||||
|
+ self.assertEqual(repr(c_char.from_param('a')), "<cparam 'c' ('a')>")
|
||||||
|
+ self.assertRegexpMatches(repr(c_wchar.from_param('a')), r"^<cparam 'u' at 0x[A-Fa-f0-9]+>$")
|
||||||
|
+ self.assertEqual(repr(c_byte.from_param(98)), "<cparam 'b' (98)>")
|
||||||
|
+ self.assertEqual(repr(c_ubyte.from_param(98)), "<cparam 'B' (98)>")
|
||||||
|
+ self.assertEqual(repr(c_short.from_param(511)), "<cparam 'h' (511)>")
|
||||||
|
+ self.assertEqual(repr(c_ushort.from_param(511)), "<cparam 'H' (511)>")
|
||||||
|
+ self.assertRegexpMatches(repr(c_int.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
|
||||||
|
+ self.assertRegexpMatches(repr(c_uint.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
|
||||||
|
+ self.assertRegexpMatches(repr(c_long.from_param(20000)), r"^<cparam '[li]' \(20000\)>$")
|
||||||
|
+ self.assertRegexpMatches(repr(c_ulong.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$")
|
||||||
|
+ self.assertRegexpMatches(repr(c_longlong.from_param(20000)), r"^<cparam '[liq]' \(20000\)>$")
|
||||||
|
+ self.assertRegexpMatches(repr(c_ulonglong.from_param(20000)), r"^<cparam '[LIQ]' \(20000\)>$")
|
||||||
|
+ self.assertEqual(repr(c_float.from_param(1.5)), "<cparam 'f' (1.5)>")
|
||||||
|
+ self.assertEqual(repr(c_double.from_param(1.5)), "<cparam 'd' (1.5)>")
|
||||||
|
+ self.assertEqual(repr(c_double.from_param(1e300)), "<cparam 'd' (1e+300)>")
|
||||||
|
+ self.assertRegexpMatches(repr(c_longdouble.from_param(1.5)), r"^<cparam ('d' \(1.5\)|'g' at 0x[A-Fa-f0-9]+)>$")
|
||||||
|
+ self.assertRegexpMatches(repr(c_char_p.from_param(b'hihi')), "^<cparam 'z' \(0x[A-Fa-f0-9]+\)>$")
|
||||||
|
+ self.assertRegexpMatches(repr(c_wchar_p.from_param('hihi')), "^<cparam 'Z' \(0x[A-Fa-f0-9]+\)>$")
|
||||||
|
+ self.assertRegexpMatches(repr(c_void_p.from_param(0x12)), r"^<cparam 'P' \(0x0*12\)>$")
|
||||||
|
+
|
||||||
|
################################################################
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
diff --git a/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
|
||||||
|
new file mode 100644
|
||||||
|
index 00000000000..7df65a156fe
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst
|
||||||
|
@@ -0,0 +1,2 @@
|
||||||
|
+Avoid static buffers when computing the repr of :class:`ctypes.c_double` and
|
||||||
|
+:class:`ctypes.c_longdouble` values.
|
||||||
|
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
|
||||||
|
index 066fefc0cca..5cc3c4cf685 100644
|
||||||
|
--- a/Modules/_ctypes/callproc.c
|
||||||
|
+++ b/Modules/_ctypes/callproc.c
|
||||||
|
@@ -460,50 +460,62 @@ PyCArg_dealloc(PyCArgObject *self)
|
||||||
|
static PyObject *
|
||||||
|
PyCArg_repr(PyCArgObject *self)
|
||||||
|
{
|
||||||
|
- char buffer[256];
|
||||||
|
switch(self->tag) {
|
||||||
|
case 'b':
|
||||||
|
case 'B':
|
||||||
|
- sprintf(buffer, "<cparam '%c' (%d)>",
|
||||||
|
+ return PyString_FromFormat("<cparam '%c' (%d)>",
|
||||||
|
self->tag, self->value.b);
|
||||||
|
- break;
|
||||||
|
case 'h':
|
||||||
|
case 'H':
|
||||||
|
- sprintf(buffer, "<cparam '%c' (%d)>",
|
||||||
|
+ return PyString_FromFormat("<cparam '%c' (%d)>",
|
||||||
|
self->tag, self->value.h);
|
||||||
|
- break;
|
||||||
|
case 'i':
|
||||||
|
case 'I':
|
||||||
|
- sprintf(buffer, "<cparam '%c' (%d)>",
|
||||||
|
+ return PyString_FromFormat("<cparam '%c' (%d)>",
|
||||||
|
self->tag, self->value.i);
|
||||||
|
- break;
|
||||||
|
case 'l':
|
||||||
|
case 'L':
|
||||||
|
- sprintf(buffer, "<cparam '%c' (%ld)>",
|
||||||
|
+ return PyString_FromFormat("<cparam '%c' (%ld)>",
|
||||||
|
self->tag, self->value.l);
|
||||||
|
- break;
|
||||||
|
|
||||||
|
#ifdef HAVE_LONG_LONG
|
||||||
|
case 'q':
|
||||||
|
case 'Q':
|
||||||
|
- sprintf(buffer,
|
||||||
|
- "<cparam '%c' (%" PY_FORMAT_LONG_LONG "d)>",
|
||||||
|
+ return PyString_FromFormat("<cparam '%c' (%lld)>",
|
||||||
|
self->tag, self->value.q);
|
||||||
|
- break;
|
||||||
|
#endif
|
||||||
|
case 'd':
|
||||||
|
- sprintf(buffer, "<cparam '%c' (%f)>",
|
||||||
|
- self->tag, self->value.d);
|
||||||
|
- break;
|
||||||
|
- case 'f':
|
||||||
|
- sprintf(buffer, "<cparam '%c' (%f)>",
|
||||||
|
- self->tag, self->value.f);
|
||||||
|
- break;
|
||||||
|
-
|
||||||
|
+ case 'f': {
|
||||||
|
+ PyObject *s = PyString_FromFormat("<cparam '%c' (", self->tag);
|
||||||
|
+ if (s == NULL) {
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
+ PyObject *f = PyFloat_FromDouble((self->tag == 'f') ? self->value.f : self->value.d);
|
||||||
|
+ if (f == NULL) {
|
||||||
|
+ Py_DECREF(s);
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
+ PyObject *r = PyObject_Repr(f);
|
||||||
|
+ Py_DECREF(f);
|
||||||
|
+ if (r == NULL) {
|
||||||
|
+ Py_DECREF(s);
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
+ PyString_ConcatAndDel(&s, r);
|
||||||
|
+ if (s == NULL) {
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
+ r = PyString_FromString(")>");
|
||||||
|
+ if (r == NULL) {
|
||||||
|
+ Py_DECREF(s);
|
||||||
|
+ return NULL;
|
||||||
|
+ }
|
||||||
|
+ PyString_ConcatAndDel(&s, r);
|
||||||
|
+ return s;
|
||||||
|
+ }
|
||||||
|
case 'c':
|
||||||
|
- sprintf(buffer, "<cparam '%c' (%c)>",
|
||||||
|
+ return PyString_FromFormat("<cparam '%c' ('%c')>",
|
||||||
|
self->tag, self->value.c);
|
||||||
|
- break;
|
||||||
|
|
||||||
|
/* Hm, are these 'z' and 'Z' codes useful at all?
|
||||||
|
Shouldn't they be replaced by the functionality of c_string
|
||||||
|
@@ -512,16 +524,13 @@ PyCArg_repr(PyCArgObject *self)
|
||||||
|
case 'z':
|
||||||
|
case 'Z':
|
||||||
|
case 'P':
|
||||||
|
- sprintf(buffer, "<cparam '%c' (%p)>",
|
||||||
|
+ return PyUnicode_FromFormat("<cparam '%c' (%p)>",
|
||||||
|
self->tag, self->value.p);
|
||||||
|
- break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
- sprintf(buffer, "<cparam '%c' at %p>",
|
||||||
|
- self->tag, self);
|
||||||
|
- break;
|
||||||
|
+ return PyString_FromFormat("<cparam '%c' at %p>",
|
||||||
|
+ (unsigned char)self->tag, (void *)self);
|
||||||
|
}
|
||||||
|
- return PyString_FromString(buffer);
|
||||||
|
}
|
||||||
|
|
||||||
|
static PyMemberDef PyCArgType_members[] = {
|
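With the fixed-size buffer replaced by PyString_FromFormat and float repr above, cparam reprs can no longer overflow. The values below are taken from the new test_parameter_repr and assume a patched Python 2.7:

    from ctypes import c_char, c_double, c_ushort

    print(repr(c_ushort.from_param(511)))    # <cparam 'H' (511)>
    print(repr(c_char.from_param('a')))      # <cparam 'c' ('a')>
    print(repr(c_double.from_param(1.5)))    # <cparam 'd' (1.5)>
    print(repr(c_double.from_param(1e300)))  # <cparam 'd' (1e+300)>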
707
00359-CVE-2021-23336.patch
Normal file
@ -0,0 +1,707 @@
|
|||||||
|
From 976a4010aa4e450855dce5fa4c865bcbdc86cccd Mon Sep 17 00:00:00 2001
|
||||||
|
From: Charalampos Stratakis <cstratak@redhat.com>
|
||||||
|
Date: Fri, 16 Apr 2021 18:02:00 +0200
|
||||||
|
Subject: [PATCH] CVE-2021-23336: Add `separator` argument to parse_qs; warn
|
||||||
|
with default
|
||||||
|
MIME-Version: 1.0
|
||||||
|
Content-Type: text/plain; charset=UTF-8
|
||||||
|
Content-Transfer-Encoding: 8bit
|
||||||
|
|
||||||
|
Partially backports https://bugs.python.org/issue42967 : [security] Address a web cache-poisoning issue reported in urllib.parse.parse_qsl().
|
||||||
|
|
||||||
|
Backported from the python3 branch.
|
||||||
|
However, this solution is different than the upstream solution in Python 3.
|
||||||
|
|
||||||
|
Based on the downstream solution for python 3.6.13 by Petr Viktorin.
|
||||||
|
|
||||||
|
An optional argument separator is added to specify the separator.
|
||||||
|
It is recommended to set it to '&' or ';' to match the application or proxy in use.
|
||||||
|
The default can be set with an env variable or a config file.
|
||||||
|
If neither the argument, env var or config file specifies a separator, "&" is used
|
||||||
|
but a warning is raised if parse_qs is used on input that contains ';'.
|
||||||
|
|
||||||
|
Co-authors of the downstream change:
|
||||||
|
Co-authored-by: Petr Viktorin <pviktori@redhat.com>
|
||||||
|
Co-authors of the upstream change (who do not necessarily agree with this):
|
||||||
|
Co-authored-by: Adam Goldschmidt <adamgold7@gmail.com>
|
||||||
|
Co-authored-by: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
|
||||||
|
Co-authored-by: Éric Araujo <merwok@netwok.org>
|
||||||
|
---
|
||||||
|
Doc/library/cgi.rst | 5 +-
|
||||||
|
Doc/library/urlparse.rst | 15 ++-
|
||||||
|
Lib/cgi.py | 34 +++---
|
||||||
|
Lib/test/test_cgi.py | 59 ++++++++++-
|
||||||
|
Lib/test/test_urlparse.py | 210 +++++++++++++++++++++++++++++++++++++-
|
||||||
|
Lib/urlparse.py | 78 +++++++++++++-
|
||||||
|
6 files changed, 369 insertions(+), 32 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Doc/library/cgi.rst b/Doc/library/cgi.rst
|
||||||
|
index ecd62c8c019..a96cd38717b 100644
|
||||||
|
--- a/Doc/library/cgi.rst
|
||||||
|
+++ b/Doc/library/cgi.rst
|
||||||
|
@@ -285,10 +285,10 @@ These are useful if you want more control, or if you want to employ some of the
|
||||||
|
algorithms implemented in this module in other circumstances.
|
||||||
|
|
||||||
|
|
||||||
|
-.. function:: parse(fp[, environ[, keep_blank_values[, strict_parsing]]])
|
||||||
|
+.. function:: parse(fp[, environ[, keep_blank_values[, strict_parsing[, separator]]]])
|
||||||
|
|
||||||
|
Parse a query in the environment or from a file (the file defaults to
|
||||||
|
- ``sys.stdin`` and environment defaults to ``os.environ``). The *keep_blank_values* and *strict_parsing* parameters are
|
||||||
|
+ ``sys.stdin`` and environment defaults to ``os.environ``). The *keep_blank_values*, *strict_parsing* and *separator* parameters are
|
||||||
|
passed to :func:`urlparse.parse_qs` unchanged.
|
||||||
|
|
||||||
|
|
||||||
|
@@ -316,7 +316,6 @@ algorithms implemented in this module in other circumstances.
|
||||||
|
Note that this does not parse nested multipart parts --- use
|
||||||
|
:class:`FieldStorage` for that.
|
||||||
|
|
||||||
|
-
|
||||||
|
.. function:: parse_header(string)
|
||||||
|
|
||||||
|
Parse a MIME header (such as :mailheader:`Content-Type`) into a main value and a
|
||||||
|
diff --git a/Doc/library/urlparse.rst b/Doc/library/urlparse.rst
|
||||||
|
index 0989c88c302..97d1119257c 100644
|
||||||
|
--- a/Doc/library/urlparse.rst
|
||||||
|
+++ b/Doc/library/urlparse.rst
|
||||||
|
@@ -136,7 +136,7 @@ The :mod:`urlparse` module defines the following functions:
|
||||||
|
now raise :exc:`ValueError`.
|
||||||
|
|
||||||
|
|
||||||
|
-.. function:: parse_qs(qs[, keep_blank_values[, strict_parsing[, max_num_fields]]])
|
||||||
|
+.. function:: parse_qs(qs[, keep_blank_values[, strict_parsing[, max_num_fields[, separator]]]])
|
||||||
|
|
||||||
|
Parse a query string given as a string argument (data of type
|
||||||
|
:mimetype:`application/x-www-form-urlencoded`). Data are returned as a
|
||||||
|
@@ -157,6 +157,15 @@ The :mod:`urlparse` module defines the following functions:
|
||||||
|
read. If set, then throws a :exc:`ValueError` if there are more than
|
||||||
|
*max_num_fields* fields read.
|
||||||
|
|
||||||
|
+ The optional argument *separator* is the symbol to use for separating the
|
||||||
|
+ query arguments. It is recommended to set it to ``'&'`` or ``';'``.
|
||||||
|
+ It defaults to ``'&'``; a warning is raised if this default is used.
|
||||||
|
+ This default may be changed with the following environment variable settings:
|
||||||
|
+
|
||||||
|
+ - ``PYTHON_URLLIB_QS_SEPARATOR='&'``: use only ``&`` as separator, without warning (as in Python 3.6.13+ or 3.10)
|
||||||
|
+ - ``PYTHON_URLLIB_QS_SEPARATOR=';'``: use only ``;`` as separator
|
||||||
|
+ - ``PYTHON_URLLIB_QS_SEPARATOR=legacy``: use both ``&`` and ``;`` (as in previous versions of Python)
|
||||||
|
+
|
||||||
|
Use the :func:`urllib.urlencode` function to convert such dictionaries into
|
||||||
|
query strings.
|
||||||
|
|
||||||
|
@@ -186,6 +195,9 @@ The :mod:`urlparse` module defines the following functions:
|
||||||
|
read. If set, then throws a :exc:`ValueError` if there are more than
|
||||||
|
*max_num_fields* fields read.
|
||||||
|
|
||||||
|
+ The optional argument *separator* is the symbol to use for separating the
|
||||||
|
+ query arguments. It works as in :py:func:`parse_qs`.
|
||||||
|
+
|
||||||
|
Use the :func:`urllib.urlencode` function to convert such lists of pairs into
|
||||||
|
query strings.
|
||||||
|
|
||||||
|
@@ -195,6 +207,7 @@ The :mod:`urlparse` module defines the following functions:
|
||||||
|
.. versionchanged:: 2.7.16
|
||||||
|
Added *max_num_fields* parameter.
|
||||||
|
|
||||||
|
+
|
||||||
|
.. function:: urlunparse(parts)
|
||||||
|
|
||||||
|
Construct a URL from a tuple as returned by ``urlparse()``. The *parts* argument
|
||||||
|
diff --git a/Lib/cgi.py b/Lib/cgi.py
|
||||||
|
index 5b903e03477..1421f2d90e0 100755
|
||||||
|
--- a/Lib/cgi.py
|
||||||
|
+++ b/Lib/cgi.py
|
||||||
|
@@ -121,7 +121,8 @@ log = initlog # The current logging function
|
||||||
|
# 0 ==> unlimited input
|
||||||
|
maxlen = 0
|
||||||
|
|
||||||
|
-def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
|
||||||
|
+def parse(fp=None, environ=os.environ, keep_blank_values=0,
|
||||||
|
+ strict_parsing=0, separator=None):
|
||||||
|
"""Parse a query in the environment or from a file (default stdin)
|
||||||
|
|
||||||
|
Arguments, all optional:
|
||||||
|
@@ -140,6 +141,8 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
|
||||||
|
strict_parsing: flag indicating what to do with parsing errors.
|
||||||
|
If false (the default), errors are silently ignored.
|
||||||
|
If true, errors raise a ValueError exception.
|
||||||
|
+
|
||||||
|
+ separator: str. The symbol to use for separating the query arguments.
|
||||||
|
"""
|
||||||
|
if fp is None:
|
||||||
|
fp = sys.stdin
|
||||||
|
@@ -171,25 +174,26 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
|
||||||
|
else:
|
||||||
|
qs = ""
|
||||||
|
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
|
||||||
|
- return urlparse.parse_qs(qs, keep_blank_values, strict_parsing)
|
||||||
|
+ return urlparse.parse_qs(qs, keep_blank_values, strict_parsing, separator=separator)
|
||||||
|
|
||||||
|
|
||||||
|
# parse query string function called from urlparse,
|
||||||
|
# this is done in order to maintain backward compatibility.
|
||||||
|
|
||||||
|
-def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
|
||||||
|
+def parse_qs(qs, keep_blank_values=0, strict_parsing=0, separator=None):
|
||||||
|
"""Parse a query given as a string argument."""
|
||||||
|
warn("cgi.parse_qs is deprecated, use urlparse.parse_qs instead",
|
||||||
|
PendingDeprecationWarning, 2)
|
||||||
|
- return urlparse.parse_qs(qs, keep_blank_values, strict_parsing)
|
||||||
|
+ return urlparse.parse_qs(qs, keep_blank_values, strict_parsing,
|
||||||
|
+ separator=separator)
|
||||||
|
|
||||||
|
|
||||||
|
-def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
|
||||||
|
+def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None, separator=None):
|
||||||
|
"""Parse a query given as a string argument."""
|
||||||
|
warn("cgi.parse_qsl is deprecated, use urlparse.parse_qsl instead",
|
||||||
|
PendingDeprecationWarning, 2)
|
||||||
|
return urlparse.parse_qsl(qs, keep_blank_values, strict_parsing,
|
||||||
|
- max_num_fields)
|
||||||
|
+ max_num_fields, separator=separator)
|
||||||
|
|
||||||
|
def parse_multipart(fp, pdict):
|
||||||
|
"""Parse multipart input.
|
||||||
|
@@ -288,7 +292,6 @@ def parse_multipart(fp, pdict):
|
||||||
|
|
||||||
|
return partdict
|
||||||
|
|
||||||
|
-
|
||||||
|
def _parseparam(s):
|
||||||
|
while s[:1] == ';':
|
||||||
|
s = s[1:]
|
||||||
|
@@ -395,7 +398,7 @@ class FieldStorage:
|
||||||
|
|
||||||
|
def __init__(self, fp=None, headers=None, outerboundary="",
|
||||||
|
environ=os.environ, keep_blank_values=0, strict_parsing=0,
|
||||||
|
- max_num_fields=None):
|
||||||
|
+ max_num_fields=None, separator=None):
|
||||||
|
"""Constructor. Read multipart/* until last part.
|
||||||
|
|
||||||
|
Arguments, all optional:
|
||||||
|
@@ -430,6 +433,7 @@ class FieldStorage:
|
||||||
|
self.keep_blank_values = keep_blank_values
|
||||||
|
self.strict_parsing = strict_parsing
|
||||||
|
self.max_num_fields = max_num_fields
|
||||||
|
+ self.separator = separator
|
||||||
|
if 'REQUEST_METHOD' in environ:
|
||||||
|
method = environ['REQUEST_METHOD'].upper()
|
||||||
|
self.qs_on_post = None
|
||||||
|
@@ -613,7 +617,8 @@ class FieldStorage:
|
||||||
|
if self.qs_on_post:
|
||||||
|
qs += '&' + self.qs_on_post
|
||||||
|
query = urlparse.parse_qsl(qs, self.keep_blank_values,
|
||||||
|
- self.strict_parsing, self.max_num_fields)
|
||||||
|
+ self.strict_parsing, self.max_num_fields,
|
||||||
|
+ self.separator)
|
||||||
|
self.list = [MiniFieldStorage(key, value) for key, value in query]
|
||||||
|
self.skip_lines()
|
||||||
|
|
||||||
|
@@ -629,7 +634,8 @@ class FieldStorage:
|
||||||
|
query = urlparse.parse_qsl(self.qs_on_post,
|
||||||
|
self.keep_blank_values,
|
||||||
|
self.strict_parsing,
|
||||||
|
- self.max_num_fields)
|
||||||
|
+ self.max_num_fields,
|
||||||
|
+ self.separator)
|
||||||
|
self.list.extend(MiniFieldStorage(key, value)
|
||||||
|
for key, value in query)
|
||||||
|
FieldStorageClass = None
|
||||||
|
@@ -649,7 +655,8 @@ class FieldStorage:
|
||||||
|
headers = rfc822.Message(self.fp)
|
||||||
|
part = klass(self.fp, headers, ib,
|
||||||
|
environ, keep_blank_values, strict_parsing,
|
||||||
|
- max_num_fields)
|
||||||
|
+ max_num_fields,
|
||||||
|
+ separator=self.separator)
|
||||||
|
|
||||||
|
if max_num_fields is not None:
|
||||||
|
max_num_fields -= 1
|
||||||
|
@@ -817,10 +824,11 @@ class FormContentDict(UserDict.UserDict):
|
||||||
|
form.dict == {key: [val, val, ...], ...}
|
||||||
|
|
||||||
|
"""
|
||||||
|
- def __init__(self, environ=os.environ, keep_blank_values=0, strict_parsing=0):
|
||||||
|
+ def __init__(self, environ=os.environ, keep_blank_values=0, strict_parsing=0, separator=None):
|
||||||
|
self.dict = self.data = parse(environ=environ,
|
||||||
|
keep_blank_values=keep_blank_values,
|
||||||
|
- strict_parsing=strict_parsing)
|
||||||
|
+ strict_parsing=strict_parsing,
|
||||||
|
+ separator=separator)
|
||||||
|
self.query_string = environ['QUERY_STRING']
|
||||||
|
|
||||||
|
|
||||||
|
diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py
|
||||||
|
index 743c2afbd4c..9956ea9d4e8 100644
|
||||||
|
--- a/Lib/test/test_cgi.py
|
||||||
|
+++ b/Lib/test/test_cgi.py
|
||||||
|
@@ -61,12 +61,9 @@ parse_strict_test_cases = [
|
||||||
|
("", ValueError("bad query field: ''")),
|
||||||
|
("&", ValueError("bad query field: ''")),
|
||||||
|
("&&", ValueError("bad query field: ''")),
|
||||||
|
- (";", ValueError("bad query field: ''")),
|
||||||
|
- (";&;", ValueError("bad query field: ''")),
|
||||||
|
# Should the next few really be valid?
|
||||||
|
("=", {}),
|
||||||
|
("=&=", {}),
|
||||||
|
- ("=;=", {}),
|
||||||
|
# This rest seem to make sense
|
||||||
|
("=a", {'': ['a']}),
|
||||||
|
("&=a", ValueError("bad query field: ''")),
|
||||||
|
@@ -81,8 +78,6 @@ parse_strict_test_cases = [
|
||||||
|
("a=a+b&b=b+c", {'a': ['a b'], 'b': ['b c']}),
|
||||||
|
("a=a+b&a=b+a", {'a': ['a b', 'b a']}),
|
||||||
|
("x=1&y=2.0&z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
|
||||||
|
- ("x=1;y=2.0&z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
|
||||||
|
- ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
|
||||||
|
("Hbc5161168c542333633315dee1182227:key_store_seqid=400006&cuyer=r&view=bustomer&order_id=0bb2e248638833d48cb7fed300000f1b&expire=964546263&lobale=en-US&kid=130003.300038&ss=env",
|
||||||
|
{'Hbc5161168c542333633315dee1182227:key_store_seqid': ['400006'],
|
||||||
|
'cuyer': ['r'],
|
||||||
|
@@ -177,6 +172,60 @@ class CgiTests(unittest.TestCase):
|
||||||
|
self.assertItemsEqual(sd.items(),
|
||||||
|
first_second_elts(expect.items()))
|
||||||
|
|
||||||
|
+ def test_separator(self):
|
||||||
|
+ parse_semicolon = [
|
||||||
|
+ ("x=1;y=2.0", {'x': ['1'], 'y': ['2.0']}),
|
||||||
|
+ ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
|
||||||
|
+ (";", ValueError("bad query field: ''")),
|
||||||
|
+ (";;", ValueError("bad query field: ''")),
|
||||||
|
+ ("=;a", ValueError("bad query field: 'a'")),
|
||||||
|
+ (";b=a", ValueError("bad query field: ''")),
|
||||||
|
+ ("b;=a", ValueError("bad query field: 'b'")),
|
||||||
|
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
|
||||||
|
+ ("a=a+b;a=b+a", {'a': ['a b', 'b a']}),
|
||||||
|
+ ]
|
||||||
|
+ for orig, expect in parse_semicolon:
|
||||||
|
+ env = {'QUERY_STRING': orig}
|
||||||
|
+ fcd = cgi.FormContentDict(env, separator=';')
|
||||||
|
+ sd = cgi.SvFormContentDict(env, separator=';')
|
||||||
|
+ fs = cgi.FieldStorage(environ=env, separator=';')
|
||||||
|
+ if isinstance(expect, dict):
|
||||||
|
+ # test dict interface
|
||||||
|
+ self.assertEqual(len(expect), len(fcd))
|
||||||
|
+ self.assertItemsEqual(expect.keys(), fcd.keys())
|
||||||
|
+ self.assertItemsEqual(expect.values(), fcd.values())
|
||||||
|
+ self.assertItemsEqual(expect.items(), fcd.items())
|
||||||
|
+ self.assertEqual(fcd.get("nonexistent field", "default"), "default")
|
||||||
|
+ self.assertEqual(len(sd), len(fs))
|
||||||
|
+ self.assertItemsEqual(sd.keys(), fs.keys())
|
||||||
|
+ self.assertEqual(fs.getvalue("nonexistent field", "default"), "default")
|
||||||
|
+ # test individual fields
|
||||||
|
+ for key in expect.keys():
|
||||||
|
+ expect_val = expect[key]
|
||||||
|
+ self.assertTrue(fcd.has_key(key))
|
||||||
|
+ self.assertItemsEqual(fcd[key], expect[key])
|
||||||
|
+ self.assertEqual(fcd.get(key, "default"), fcd[key])
|
||||||
|
+ self.assertTrue(fs.has_key(key))
|
||||||
|
+ if len(expect_val) > 1:
|
||||||
|
+ single_value = 0
|
||||||
|
+ else:
|
||||||
|
+ single_value = 1
|
||||||
|
+ try:
|
||||||
|
+ val = sd[key]
|
||||||
|
+ except IndexError:
|
||||||
|
+ self.assertFalse(single_value)
|
||||||
|
+ self.assertEqual(fs.getvalue(key), expect_val)
|
||||||
|
+ else:
|
||||||
|
+ self.assertTrue(single_value)
|
||||||
|
+ self.assertEqual(val, expect_val[0])
|
||||||
|
+ self.assertEqual(fs.getvalue(key), expect_val[0])
|
||||||
|
+ self.assertItemsEqual(sd.getlist(key), expect_val)
|
||||||
|
+ if single_value:
|
||||||
|
+ self.assertItemsEqual(sd.values(),
|
||||||
|
+ first_elts(expect.values()))
|
||||||
|
+ self.assertItemsEqual(sd.items(),
|
||||||
|
+ first_second_elts(expect.items()))
|
||||||
|
+
|
||||||
|
def test_weird_formcontentdict(self):
|
||||||
|
# Test the weird FormContentDict classes
|
||||||
|
env = {'QUERY_STRING': "x=1&y=2.0&z=2-3.%2b0&1=1abc"}
|
||||||
|
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
|
||||||
|
index 86c4a0595c4..21875bb2991 100644
|
||||||
|
--- a/Lib/test/test_urlparse.py
|
||||||
|
+++ b/Lib/test/test_urlparse.py
|
||||||
|
@@ -3,6 +3,12 @@ import sys
|
||||||
|
import unicodedata
|
||||||
|
import unittest
|
||||||
|
import urlparse
|
||||||
|
+from test.support import EnvironmentVarGuard
|
||||||
|
+from warnings import catch_warnings, filterwarnings
|
||||||
|
+import tempfile
|
||||||
|
+import contextlib
|
||||||
|
+import os.path
|
||||||
|
+import shutil
|
||||||
|
|
||||||
|
RFC1808_BASE = "http://a/b/c/d;p?q#f"
|
||||||
|
RFC2396_BASE = "http://a/b/c/d;p?q"
|
||||||
|
@@ -24,16 +30,29 @@ parse_qsl_test_cases = [
|
||||||
|
("&a=b", [('a', 'b')]),
|
||||||
|
("a=a+b&b=b+c", [('a', 'a b'), ('b', 'b c')]),
|
||||||
|
("a=1&a=2", [('a', '1'), ('a', '2')]),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qsl_test_cases_semicolon = [
|
||||||
|
(";", []),
|
||||||
|
(";;", []),
|
||||||
|
(";a=b", [('a', 'b')]),
|
||||||
|
("a=a+b;b=b+c", [('a', 'a b'), ('b', 'b c')]),
|
||||||
|
("a=1;a=2", [('a', '1'), ('a', '2')]),
|
||||||
|
- (b";", []),
|
||||||
|
- (b";;", []),
|
||||||
|
- (b";a=b", [(b'a', b'b')]),
|
||||||
|
- (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
|
||||||
|
- (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qsl_test_cases_legacy = [
|
||||||
|
+ ("a=1;a=2&a=3", [('a', '1'), ('a', '2'), ('a', '3')]),
|
||||||
|
+ ("a=1;b=2&c=3", [('a', '1'), ('b', '2'), ('c', '3')]),
|
||||||
|
+ ("a=1&b=2&c=3;", [('a', '1'), ('b', '2'), ('c', '3')]),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qsl_test_cases_warn = [
|
||||||
|
+ (";a=b", [(';a', 'b')]),
|
||||||
|
+ ("a=a+b;b=b+c", [('a', 'a b;b=b c')]),
|
||||||
|
+ (b";a=b", [(b';a', b'b')]),
|
||||||
|
+ (b"a=a+b;b=b+c", [(b'a', b'a b;b=b c')]),
|
||||||
|
+ ("a=1;a=2&a=3", [('a', '1;a=2'), ('a', '3')]),
|
||||||
|
+ (b"a=1;a=2&a=3", [(b'a', b'1;a=2'), (b'a', b'3')]),
|
||||||
|
]
|
||||||
|
|
||||||
|
parse_qs_test_cases = [
|
||||||
|
@@ -57,6 +76,9 @@ parse_qs_test_cases = [
|
||||||
|
(b"&a=b", {b'a': [b'b']}),
|
||||||
|
(b"a=a+b&b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
|
||||||
|
(b"a=1&a=2", {b'a': [b'1', b'2']}),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qs_test_cases_semicolon = [
|
||||||
|
(";", {}),
|
||||||
|
(";;", {}),
|
||||||
|
(";a=b", {'a': ['b']}),
|
||||||
|
@@ -69,6 +91,24 @@ parse_qs_test_cases = [
|
||||||
|
(b"a=1;a=2", {b'a': [b'1', b'2']}),
|
||||||
|
]
|
||||||
|
|
||||||
|
+parse_qs_test_cases_legacy = [
|
||||||
|
+ ("a=1;a=2&a=3", {'a': ['1', '2', '3']}),
|
||||||
|
+ ("a=1;b=2&c=3", {'a': ['1'], 'b': ['2'], 'c': ['3']}),
|
||||||
|
+ ("a=1&b=2&c=3;", {'a': ['1'], 'b': ['2'], 'c': ['3']}),
|
||||||
|
+ (b"a=1;a=2&a=3", {b'a': [b'1', b'2', b'3']}),
|
||||||
|
+ (b"a=1;b=2&c=3", {b'a': [b'1'], b'b': [b'2'], b'c': [b'3']}),
|
||||||
|
+ (b"a=1&b=2&c=3;", {b'a': [b'1'], b'b': [b'2'], b'c': [b'3']}),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
+parse_qs_test_cases_warn = [
|
||||||
|
+ (";a=b", {';a': ['b']}),
|
||||||
|
+ ("a=a+b;b=b+c", {'a': ['a b;b=b c']}),
|
||||||
|
+ (b";a=b", {b';a': [b'b']}),
|
||||||
|
+ (b"a=a+b;b=b+c", {b'a':[ b'a b;b=b c']}),
|
||||||
|
+ ("a=1;a=2&a=3", {'a': ['1;a=2', '3']}),
|
||||||
|
+ (b"a=1;a=2&a=3", {b'a': [b'1;a=2', b'3']}),
|
||||||
|
+]
|
||||||
|
+
|
||||||
|
class UrlParseTestCase(unittest.TestCase):
|
||||||
|
|
||||||
|
def checkRoundtrips(self, url, parsed, split):
|
||||||
|
@@ -141,6 +181,40 @@ class UrlParseTestCase(unittest.TestCase):
|
||||||
|
self.assertEqual(result, expect_without_blanks,
|
||||||
|
"Error parsing %r" % orig)
|
||||||
|
|
||||||
|
+ def test_qs_default_warn(self):
|
||||||
|
+ for orig, expect in parse_qs_test_cases_warn:
|
||||||
|
+ with catch_warnings(record=True) as w:
|
||||||
|
+ filterwarnings(action='always',
|
||||||
|
+ category=urlparse._QueryStringSeparatorWarning)
|
||||||
|
+ result = urlparse.parse_qs(orig, keep_blank_values=True)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 1)
|
||||||
|
+ self.assertEqual(w[0].category, urlparse._QueryStringSeparatorWarning)
|
||||||
|
+
|
||||||
|
+ def test_qsl_default_warn(self):
|
||||||
|
+ for orig, expect in parse_qsl_test_cases_warn:
|
||||||
|
+ with catch_warnings(record=True) as w:
|
||||||
|
+ filterwarnings(action='always',
|
||||||
|
+ category=urlparse._QueryStringSeparatorWarning)
|
||||||
|
+ result = urlparse.parse_qsl(orig, keep_blank_values=True)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 1)
|
||||||
|
+ self.assertEqual(w[0].category, urlparse._QueryStringSeparatorWarning)
|
||||||
|
+
|
||||||
|
+ def test_default_qs_no_warnings(self):
|
||||||
|
+ for orig, expect in parse_qs_test_cases:
|
||||||
|
+ with catch_warnings(record=True) as w:
|
||||||
|
+ result = urlparse.parse_qs(orig, keep_blank_values=True)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_default_qsl_no_warnings(self):
|
||||||
|
+ for orig, expect in parse_qsl_test_cases:
|
||||||
|
+ with catch_warnings(record=True) as w:
|
||||||
|
+ result = urlparse.parse_qsl(orig, keep_blank_values=True)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
def test_roundtrips(self):
|
||||||
|
testcases = [
|
||||||
|
('file:///tmp/junk.txt',
|
||||||
|
@@ -626,6 +700,132 @@ class UrlParseTestCase(unittest.TestCase):
|
||||||
|
self.assertEqual(urlparse.urlparse("http://www.python.org:80"),
|
||||||
|
('http','www.python.org:80','','','',''))
|
||||||
|
|
||||||
|
+ def test_parse_qs_separator_bytes(self):
|
||||||
|
+ expected = {b'a': [b'1'], b'b': [b'2']}
|
||||||
|
+
|
||||||
|
+ result = urlparse.parse_qs(b'a=1;b=2', separator=b';')
|
||||||
|
+ self.assertEqual(result, expected)
|
||||||
|
+ result = urlparse.parse_qs(b'a=1;b=2', separator=';')
|
||||||
|
+ self.assertEqual(result, expected)
|
||||||
|
+ result = urlparse.parse_qs('a=1;b=2', separator=';')
|
||||||
|
+ self.assertEqual(result, {'a': ['1'], 'b': ['2']})
|
||||||
|
+
|
||||||
|
+ @contextlib.contextmanager
|
||||||
|
+ def _qsl_sep_config(self, sep):
|
||||||
|
+ """Context for the given parse_qsl default separator configured in config file"""
|
||||||
|
+ old_filename = urlparse._QS_SEPARATOR_CONFIG_FILENAME
|
||||||
|
+ urlparse._default_qs_separator = None
|
||||||
|
+ try:
|
||||||
|
+ tmpdirname = tempfile.mkdtemp()
|
||||||
|
+ filename = os.path.join(tmpdirname, 'conf.cfg')
|
||||||
|
+ with open(filename, 'w') as file:
|
||||||
|
+ file.write('[parse_qs]\n')
|
||||||
|
+ file.write('PYTHON_URLLIB_QS_SEPARATOR = {}'.format(sep))
|
||||||
|
+ urlparse._QS_SEPARATOR_CONFIG_FILENAME = filename
|
||||||
|
+ yield
|
||||||
|
+ finally:
|
||||||
|
+ urlparse._QS_SEPARATOR_CONFIG_FILENAME = old_filename
|
||||||
|
+ urlparse._default_qs_separator = None
|
||||||
|
+ shutil.rmtree(tmpdirname)
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_semicolon(self):
|
||||||
|
+ for orig, expect in parse_qs_test_cases_semicolon:
|
||||||
|
+ result = urlparse.parse_qs(orig, separator=';')
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = ';'
|
||||||
|
+ result = urlparse.parse_qs(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+ with self._qsl_sep_config(';'), catch_warnings(record=True) as w:
|
||||||
|
+ result = urlparse.parse_qs(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_parse_qsl_separator_semicolon(self):
|
||||||
|
+ for orig, expect in parse_qsl_test_cases_semicolon:
|
||||||
|
+ result = urlparse.parse_qsl(orig, separator=';')
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = ';'
|
||||||
|
+ result = urlparse.parse_qsl(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+ with self._qsl_sep_config(';'), catch_warnings(record=True) as w:
|
||||||
|
+ result = urlparse.parse_qsl(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_legacy(self):
|
||||||
|
+ for orig, expect in parse_qs_test_cases_legacy:
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = 'legacy'
|
||||||
|
+ result = urlparse.parse_qs(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+ with self._qsl_sep_config('legacy'), catch_warnings(record=True) as w:
|
||||||
|
+ result = urlparse.parse_qs(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_parse_qsl_separator_legacy(self):
|
||||||
|
+ for orig, expect in parse_qsl_test_cases_legacy:
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = 'legacy'
|
||||||
|
+ result = urlparse.parse_qsl(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+ with self._qsl_sep_config('legacy'), catch_warnings(record=True) as w:
|
||||||
|
+ result = urlparse.parse_qsl(orig)
|
||||||
|
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
|
||||||
|
+ self.assertEqual(len(w), 0)
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_bad_value_env_or_config(self):
|
||||||
|
+ for bad_sep in '', 'abc', 'safe', '&;', 'SEP':
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = bad_sep
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urlparse.parse_qsl('a=1;b=2')
|
||||||
|
+ with self._qsl_sep_config('bad_sep'), catch_warnings(record=True) as w:
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urlparse.parse_qsl('a=1;b=2')
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_bad_value_arg(self):
|
||||||
|
+ for bad_sep in True, {}, '':
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urlparse.parse_qsl('a=1;b=2', separator=bad_sep)
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_num_fields(self):
|
||||||
|
+ for qs, sep in (
|
||||||
|
+ ('a&b&c', '&'),
|
||||||
|
+ ('a;b;c', ';'),
|
||||||
|
+ ('a&b;c', 'legacy'),
|
||||||
|
+ ):
|
||||||
|
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
|
||||||
|
+ if sep != 'legacy':
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urlparse.parse_qsl(qs, separator=sep, max_num_fields=2)
|
||||||
|
+ if sep:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = sep
|
||||||
|
+ with self.assertRaises(ValueError):
|
||||||
|
+ urlparse.parse_qsl(qs, max_num_fields=2)
|
||||||
|
+
|
||||||
|
+ def test_parse_qs_separator_priority(self):
|
||||||
|
+ # env variable trumps config file
|
||||||
|
+ with self._qsl_sep_config('~'), EnvironmentVarGuard() as environ:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = '!'
|
||||||
|
+ result = urlparse.parse_qs('a=1!b=2~c=3')
|
||||||
|
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
|
||||||
|
+ # argument trumps config file
|
||||||
|
+ with self._qsl_sep_config('~'):
|
||||||
|
+ result = urlparse.parse_qs('a=1$b=2~c=3', separator='$')
|
||||||
|
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
|
||||||
|
+ # argument trumps env variable
|
||||||
|
+ with EnvironmentVarGuard() as environ:
|
||||||
|
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = '~'
|
||||||
|
+ result = urlparse.parse_qs('a=1$b=2~c=3', separator='$')
|
||||||
|
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
|
||||||
|
+
|
||||||
|
def test_urlsplit_normalization(self):
|
||||||
|
# Certain characters should never occur in the netloc,
|
||||||
|
# including under normalization.
|
||||||
|
diff --git a/Lib/urlparse.py b/Lib/urlparse.py
|
||||||
|
index 798b467b605..69504d8fd93 100644
|
||||||
|
--- a/Lib/urlparse.py
|
||||||
|
+++ b/Lib/urlparse.py
|
||||||
|
@@ -29,6 +29,7 @@ test_urlparse.py provides a good indicator of parsing behavior.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
+import os
|
||||||
|
|
||||||
|
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
|
||||||
|
"urlsplit", "urlunsplit", "parse_qs", "parse_qsl"]
|
||||||
|
@@ -382,7 +383,8 @@ def unquote(s):
|
||||||
|
append(item)
|
||||||
|
return ''.join(res)
|
||||||
|
|
||||||
|
-def parse_qs(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
|
||||||
|
+def parse_qs(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None,
|
||||||
|
+ separator=None):
|
||||||
|
"""Parse a query given as a string argument.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
@@ -405,14 +407,23 @@ def parse_qs(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
|
||||||
|
"""
|
||||||
|
dict = {}
|
||||||
|
for name, value in parse_qsl(qs, keep_blank_values, strict_parsing,
|
||||||
|
- max_num_fields):
|
||||||
|
+ max_num_fields, separator):
|
||||||
|
if name in dict:
|
||||||
|
dict[name].append(value)
|
||||||
|
else:
|
||||||
|
dict[name] = [value]
|
||||||
|
return dict
|
||||||
|
|
||||||
|
-def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
|
||||||
|
+class _QueryStringSeparatorWarning(RuntimeWarning):
|
||||||
|
+ """Warning for using default `separator` in parse_qs or parse_qsl"""
|
||||||
|
+
|
||||||
|
+# The default "separator" for parse_qsl can be specified in a config file.
|
||||||
|
+# It's cached after first read.
|
||||||
|
+_QS_SEPARATOR_CONFIG_FILENAME = '/etc/python/urllib.cfg'
|
||||||
|
+_default_qs_separator = None
|
||||||
|
+
|
||||||
|
+def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None,
|
||||||
|
+ separator=None):
|
||||||
|
"""Parse a query given as a string argument.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
@@ -434,15 +445,72 @@ def parse_qsl(qs, keep_blank_values=0, strict_parsing=0, max_num_fields=None):
|
||||||
|
|
||||||
|
Returns a list, as G-d intended.
|
||||||
|
"""
|
||||||
|
+
|
||||||
|
+ if (not separator or (not isinstance(separator, (str, bytes)))) and separator is not None:
|
||||||
|
+ raise ValueError("Separator must be of type string or bytes.")
|
||||||
|
+
|
||||||
|
+ # Used when both "&" and ";" act as separators. (Need a non-string value.)
|
||||||
|
+ _legacy = object()
|
||||||
|
+
|
||||||
|
+ if separator is None:
|
||||||
|
+ global _default_qs_separator
|
||||||
|
+ separator = _default_qs_separator
|
||||||
|
+ envvar_name = 'PYTHON_URLLIB_QS_SEPARATOR'
|
||||||
|
+ if separator is None:
|
||||||
|
+ # Set default separator from environment variable
|
||||||
|
+ separator = os.environ.get(envvar_name)
|
||||||
|
+ config_source = 'environment variable'
|
||||||
|
+ if separator is None:
|
||||||
|
+ # Set default separator from the configuration file
|
||||||
|
+ try:
|
||||||
|
+ file = open(_QS_SEPARATOR_CONFIG_FILENAME)
|
||||||
|
+ except EnvironmentError:
|
||||||
|
+ pass
|
||||||
|
+ else:
|
||||||
|
+ with file:
|
||||||
|
+ import ConfigParser
|
||||||
|
+ config = ConfigParser.ConfigParser()
|
||||||
|
+ config.readfp(file)
|
||||||
|
+ separator = config.get('parse_qs', envvar_name)
|
||||||
|
+ _default_qs_separator = separator
|
||||||
|
+ config_source = _QS_SEPARATOR_CONFIG_FILENAME
|
||||||
|
+ if separator is None:
|
||||||
|
+ # The default is '&', but warn if not specified explicitly
|
||||||
|
+ if ';' in qs:
|
||||||
|
+ from warnings import warn
|
||||||
|
+ warn("The default separator of urlparse.parse_qsl and "
|
||||||
|
+ + "parse_qs was changed to '&' to avoid a web cache "
|
||||||
|
+ + "poisoning issue (CVE-2021-23336). "
|
||||||
|
+ + "By default, semicolons no longer act as query field "
|
||||||
|
+ + "separators. "
|
||||||
|
+ + "See https://access.redhat.com/articles/5860431 for "
|
||||||
|
+ + "more details.",
|
||||||
|
+ _QueryStringSeparatorWarning, stacklevel=2)
|
||||||
|
+ separator = '&'
|
||||||
|
+ elif separator == 'legacy':
|
||||||
|
+ separator = _legacy
|
||||||
|
+ elif len(separator) != 1:
|
||||||
|
+ raise ValueError(
|
||||||
|
+ '{} (from {}) must contain '.format(envvar_name, config_source)
|
||||||
|
+ + '1 character, or "legacy". See '
|
||||||
|
+ + 'https://access.redhat.com/articles/5860431 for more details.'
|
||||||
|
+ )
|
||||||
|
+
|
||||||
|
# If max_num_fields is defined then check that the number of fields
|
||||||
|
# is less than max_num_fields. This prevents a memory exhaustion DOS
|
||||||
|
# attack via post bodies with many fields.
|
||||||
|
if max_num_fields is not None:
|
||||||
|
- num_fields = 1 + qs.count('&') + qs.count(';')
|
||||||
|
+ if separator is _legacy:
|
||||||
|
+ num_fields = 1 + qs.count('&') + qs.count(';')
|
||||||
|
+ else:
|
||||||
|
+ num_fields = 1 + qs.count(separator)
|
||||||
|
if max_num_fields < num_fields:
|
||||||
|
raise ValueError('Max number of fields exceeded')
|
||||||
|
|
||||||
|
- pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
|
||||||
|
+ if separator is _legacy:
|
||||||
|
+ pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
|
||||||
|
+ else:
|
||||||
|
+ pairs = [s1 for s1 in qs.split(separator)]
|
||||||
|
r = []
|
||||||
|
for name_value in pairs:
|
||||||
|
if not name_value and not strict_parsing:
|
||||||
|
--
|
||||||
|
2.30.2
|
||||||
|
|
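The hunks above add an opt-in separator for parse_qs/parse_qsl. A minimal usage sketch follows (illustrative only, not part of the patch); it assumes the patched Python 2 urlparse module from this package, and the query strings are made-up values.

# Illustrative sketch of the separator support added by the patch above.
import os
import urlparse

# Per-call separator argument added by the patch:
print(urlparse.parse_qsl("a=1;b=2", separator=";"))   # [('a', '1'), ('b', '2')]

# Process-wide default via the environment variable read by the patch.
# The value is cached after the first parse_qs/parse_qsl call, so it must be
# set before the module is first used; 'legacy' makes both '&' and ';' split.
os.environ['PYTHON_URLLIB_QS_SEPARATOR'] = 'legacy'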
35
00366-CVE-2021-3733.patch
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Lumir Balhar <lbalhar@redhat.com>
|
||||||
|
Date: Tue, 14 Sep 2021 11:34:43 +0200
|
||||||
|
Subject: [PATCH] 00366-CVE-2021-3733.patch
|
||||||
|
|
||||||
|
00366 #
|
||||||
|
CVE-2021-3733: Fix ReDoS in urllib AbstractBasicAuthHandler
|
||||||
|
|
||||||
|
Fix Regular Expression Denial of Service (ReDoS) vulnerability in
|
||||||
|
urllib2.AbstractBasicAuthHandler. The ReDoS-vulnerable regex
|
||||||
|
has quadratic worst-case complexity and it can cause a denial of
|
||||||
|
service when identifying crafted invalid RFCs. This ReDoS issue is on
|
||||||
|
the client side and needs remote attackers to control the HTTP server.
|
||||||
|
|
||||||
|
Backported from Python 3 together with another backward-compatible
|
||||||
|
improvement of the regex from fix for CVE-2020-8492.
|
||||||
|
|
||||||
|
Co-authored-by: Yeting Li <liyt@ios.ac.cn>
|
||||||
|
---
|
||||||
|
Lib/urllib2.py | 2 +-
|
||||||
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||||
|
|
||||||
|
diff --git a/Lib/urllib2.py b/Lib/urllib2.py
|
||||||
|
index fd19e1ae943..e286583ecba 100644
|
||||||
|
--- a/Lib/urllib2.py
|
||||||
|
+++ b/Lib/urllib2.py
|
||||||
|
@@ -858,7 +858,7 @@ class AbstractBasicAuthHandler:
|
||||||
|
|
||||||
|
# allow for double- and single-quoted realm values
|
||||||
|
# (single quotes are a violation of the RFC, but appear in the wild)
|
||||||
|
- rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
|
||||||
|
+ rx = re.compile('(?:[^,]*,)*[ \t]*([^ \t,]+)[ \t]+'
|
||||||
|
'realm=(["\']?)([^"\']*)\\2', re.I)
|
||||||
|
|
||||||
|
# XXX could pre-emptively send auth info already accepted (RFC 2617,
|
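For illustration only (not part of the patch), the hardened realm-matching regex from the hunk above can be exercised against a made-up WWW-Authenticate value:

# Illustrative sketch: the tightened realm-matching regex from the hunk above.
import re

rx = re.compile('(?:[^,]*,)*[ \t]*([^ \t,]+)[ \t]+'
                'realm=(["\']?)([^"\']*)\\2', re.I)

m = rx.search('Basic realm="example.org"')
print(m.group(1), m.group(3))   # -> Basic example.org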
89
00368-CVE-2021-3737.patch
Normal file
@ -0,0 +1,89 @@
|
|||||||
|
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Lumir Balhar <lbalhar@redhat.com>
|
||||||
|
Date: Fri, 17 Sep 2021 07:56:50 +0200
|
||||||
|
Subject: [PATCH] 00368-CVE-2021-3737.patch
|
||||||
|
|
||||||
|
00368 #
|
||||||
|
CVE-2021-3737: http client infinite line reading (DoS) after a HTTP 100 Continue
|
||||||
|
|
||||||
|
Fixes a potential denial of service in http.client where it could get stuck reading
|
||||||
|
lines from a malicious server after a 100 Continue response.
|
||||||
|
|
||||||
|
Backported from Python 3.
|
||||||
|
|
||||||
|
Co-authored-by: Gregory P. Smith <greg@krypto.org>
|
||||||
|
Co-authored-by: Gen Xu <xgbarry@gmail.com>
|
||||||
|
---
|
||||||
|
Lib/httplib.py | 32 +++++++++++++++++++++++---------
|
||||||
|
Lib/test/test_httplib.py | 8 ++++++++
|
||||||
|
2 files changed, 31 insertions(+), 9 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Lib/httplib.py b/Lib/httplib.py
|
||||||
|
index a63677477d5..f9a27619e62 100644
|
||||||
|
--- a/Lib/httplib.py
|
||||||
|
+++ b/Lib/httplib.py
|
||||||
|
@@ -365,6 +365,25 @@ class HTTPMessage(mimetools.Message):
|
||||||
|
# It's not a header line; skip it and try the next line.
|
||||||
|
self.status = 'Non-header line where header expected'
|
||||||
|
|
||||||
|
+
|
||||||
|
+def _read_headers(fp):
|
||||||
|
+ """Reads potential header lines into a list from a file pointer.
|
||||||
|
+ Length of line is limited by _MAXLINE, and number of
|
||||||
|
+ headers is limited by _MAXHEADERS.
|
||||||
|
+ """
|
||||||
|
+ headers = []
|
||||||
|
+ while True:
|
||||||
|
+ line = fp.readline(_MAXLINE + 1)
|
||||||
|
+ if len(line) > _MAXLINE:
|
||||||
|
+ raise LineTooLong("header line")
|
||||||
|
+ headers.append(line)
|
||||||
|
+ if len(headers) > _MAXHEADERS:
|
||||||
|
+ raise HTTPException("got more than %d headers" % _MAXHEADERS)
|
||||||
|
+ if line in (b'\r\n', b'\n', b''):
|
||||||
|
+ break
|
||||||
|
+ return headers
|
||||||
|
+
|
||||||
|
+
|
||||||
|
class HTTPResponse:
|
||||||
|
|
||||||
|
# strict: If true, raise BadStatusLine if the status line can't be
|
||||||
|
@@ -453,15 +472,10 @@ class HTTPResponse:
|
||||||
|
if status != CONTINUE:
|
||||||
|
break
|
||||||
|
# skip the header from the 100 response
|
||||||
|
- while True:
|
||||||
|
- skip = self.fp.readline(_MAXLINE + 1)
|
||||||
|
- if len(skip) > _MAXLINE:
|
||||||
|
- raise LineTooLong("header line")
|
||||||
|
- skip = skip.strip()
|
||||||
|
- if not skip:
|
||||||
|
- break
|
||||||
|
- if self.debuglevel > 0:
|
||||||
|
- print "header:", skip
|
||||||
|
+ skipped_headers = _read_headers(self.fp)
|
||||||
|
+ if self.debuglevel > 0:
|
||||||
|
+ print("headers:", skipped_headers)
|
||||||
|
+ del skipped_headers
|
||||||
|
|
||||||
|
self.status = status
|
||||||
|
self.reason = reason.strip()
|
||||||
|
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
|
||||||
|
index b5fec9aa1ec..d05c0fc28d2 100644
|
||||||
|
--- a/Lib/test/test_httplib.py
|
||||||
|
+++ b/Lib/test/test_httplib.py
|
||||||
|
@@ -700,6 +700,14 @@ class BasicTest(TestCase):
|
||||||
|
resp = httplib.HTTPResponse(FakeSocket(body))
|
||||||
|
self.assertRaises(httplib.LineTooLong, resp.begin)
|
||||||
|
|
||||||
|
+ def test_overflowing_header_limit_after_100(self):
|
||||||
|
+ body = (
|
||||||
|
+ 'HTTP/1.1 100 OK\r\n'
|
||||||
|
+ 'r\n' * 32768
|
||||||
|
+ )
|
||||||
|
+ resp = httplib.HTTPResponse(FakeSocket(body))
|
||||||
|
+ self.assertRaises(httplib.HTTPException, resp.begin)
|
||||||
|
+
|
||||||
|
def test_overflowing_chunked_line(self):
|
||||||
|
body = (
|
||||||
|
'HTTP/1.1 200 OK\r\n'
|
80
00372-CVE-2021-4189.patch
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
diff --git a/Lib/ftplib.py b/Lib/ftplib.py
|
||||||
|
index 6644554..0550f0a 100644
|
||||||
|
--- a/Lib/ftplib.py
|
||||||
|
+++ b/Lib/ftplib.py
|
||||||
|
@@ -108,6 +108,8 @@ class FTP:
|
||||||
|
file = None
|
||||||
|
welcome = None
|
||||||
|
passiveserver = 1
|
||||||
|
+ # Disables https://bugs.python.org/issue43285 security if set to True.
|
||||||
|
+ trust_server_pasv_ipv4_address = False
|
||||||
|
|
||||||
|
# Initialization method (called by class instantiation).
|
||||||
|
# Initialize host to localhost, port to standard ftp port
|
||||||
|
@@ -310,8 +312,13 @@ class FTP:
|
||||||
|
return sock
|
||||||
|
|
||||||
|
def makepasv(self):
|
||||||
|
+ """Internal: Does the PASV or EPSV handshake -> (address, port)"""
|
||||||
|
if self.af == socket.AF_INET:
|
||||||
|
- host, port = parse227(self.sendcmd('PASV'))
|
||||||
|
+ untrusted_host, port = parse227(self.sendcmd('PASV'))
|
||||||
|
+ if self.trust_server_pasv_ipv4_address:
|
||||||
|
+ host = untrusted_host
|
||||||
|
+ else:
|
||||||
|
+ host = self.sock.getpeername()[0]
|
||||||
|
else:
|
||||||
|
host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername())
|
||||||
|
return host, port
|
||||||
|
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
|
||||||
|
index 8a3eb06..62a3f5e 100644
|
||||||
|
--- a/Lib/test/test_ftplib.py
|
||||||
|
+++ b/Lib/test/test_ftplib.py
|
||||||
|
@@ -67,6 +67,10 @@ class DummyFTPHandler(asynchat.async_chat):
|
||||||
|
self.rest = None
|
||||||
|
self.next_retr_data = RETR_DATA
|
||||||
|
self.push('220 welcome')
|
||||||
|
+ # We use this as the string IPv4 address to direct the client
|
||||||
|
+ # to in response to a PASV command. To test security behavior.
|
||||||
|
+ # https://bugs.python.org/issue43285/.
|
||||||
|
+ self.fake_pasv_server_ip = '252.253.254.255'
|
||||||
|
|
||||||
|
def collect_incoming_data(self, data):
|
||||||
|
self.in_buffer.append(data)
|
||||||
|
@@ -109,7 +113,8 @@ class DummyFTPHandler(asynchat.async_chat):
|
||||||
|
sock.bind((self.socket.getsockname()[0], 0))
|
||||||
|
sock.listen(5)
|
||||||
|
sock.settimeout(10)
|
||||||
|
- ip, port = sock.getsockname()[:2]
|
||||||
|
+ port = sock.getsockname()[1]
|
||||||
|
+ ip = self.fake_pasv_server_ip
|
||||||
|
ip = ip.replace('.', ',')
|
||||||
|
p1, p2 = divmod(port, 256)
|
||||||
|
self.push('227 entering passive mode (%s,%d,%d)' %(ip, p1, p2))
|
||||||
|
@@ -577,6 +582,26 @@ class TestFTPClass(TestCase):
|
||||||
|
# IPv4 is in use, just make sure send_epsv has not been used
|
||||||
|
self.assertEqual(self.server.handler_instance.last_received_cmd, 'pasv')
|
||||||
|
|
||||||
|
+ def test_makepasv_issue43285_security_disabled(self):
|
||||||
|
+ """Test the opt-in to the old vulnerable behavior."""
|
||||||
|
+ self.client.trust_server_pasv_ipv4_address = True
|
||||||
|
+ bad_host, port = self.client.makepasv()
|
||||||
|
+ self.assertEqual(
|
||||||
|
+ bad_host, self.server.handler_instance.fake_pasv_server_ip)
|
||||||
|
+ # Opening and closing a connection keeps the dummy server happy
|
||||||
|
+ # instead of timing out on accept.
|
||||||
|
+ socket.create_connection((self.client.sock.getpeername()[0], port),
|
||||||
|
+ timeout=TIMEOUT).close()
|
||||||
|
+
|
||||||
|
+ def test_makepasv_issue43285_security_enabled_default(self):
|
||||||
|
+ self.assertFalse(self.client.trust_server_pasv_ipv4_address)
|
||||||
|
+ trusted_host, port = self.client.makepasv()
|
||||||
|
+ self.assertNotEqual(
|
||||||
|
+ trusted_host, self.server.handler_instance.fake_pasv_server_ip)
|
||||||
|
+ # Opening and closing a connection keeps the dummy server happy
|
||||||
|
+ # instead of timing out on accept.
|
||||||
|
+ socket.create_connection((trusted_host, port), timeout=TIMEOUT).close()
|
||||||
|
+
|
||||||
|
def test_line_too_long(self):
|
||||||
|
self.assertRaises(ftplib.Error, self.client.sendcmd,
|
||||||
|
'x' * self.client.maxline * 2)
|
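A hedged usage sketch of the ftplib attribute introduced above (illustrative only; 'ftp.example.org' is a placeholder host):

# Illustrative sketch: opting back into the pre-fix PASV behaviour.
import ftplib

ftp = ftplib.FTP('ftp.example.org')
ftp.login()
# After the patch, the IPv4 address in the server's PASV reply is ignored and
# the control connection's peer address is reused. Setting the new attribute
# to True restores the old, trusting behaviour (the bpo-43285 opt-out).
ftp.trust_server_pasv_ipv4_address = True
ftp.retrlines('LIST')
ftp.quit()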
127
00377-CVE-2022-0391.patch
Normal file
@ -0,0 +1,127 @@
|
|||||||
|
diff --git a/Doc/library/urlparse.rst b/Doc/library/urlparse.rst
|
||||||
|
index 97d1119257c..c08c3dc8e8f 100644
|
||||||
|
--- a/Doc/library/urlparse.rst
|
||||||
|
+++ b/Doc/library/urlparse.rst
|
||||||
|
@@ -125,6 +125,9 @@ The :mod:`urlparse` module defines the following functions:
|
||||||
|
decomposed before parsing, or is not a Unicode string, no error will be
|
||||||
|
raised.
|
||||||
|
|
||||||
|
+ Following the `WHATWG spec`_ that updates RFC 3986, ASCII newline
|
||||||
|
+ ``\n``, ``\r`` and tab ``\t`` characters are stripped from the URL.
|
||||||
|
+
|
||||||
|
.. versionchanged:: 2.5
|
||||||
|
Added attributes to return value.
|
||||||
|
|
||||||
|
@@ -321,6 +324,10 @@ The :mod:`urlparse` module defines the following functions:
|
||||||
|
|
||||||
|
.. seealso::
|
||||||
|
|
||||||
|
+ `WHATWG`_ - URL Living standard
|
||||||
|
+ Working Group for the URL Standard that defines URLs, domains, IP addresses, the
|
||||||
|
+ application/x-www-form-urlencoded format, and their API.
|
||||||
|
+
|
||||||
|
:rfc:`3986` - Uniform Resource Identifiers
|
||||||
|
This is the current standard (STD66). Any changes to urlparse module
|
||||||
|
should conform to this. Certain deviations could be observed, which are
|
||||||
|
@@ -345,6 +352,7 @@ The :mod:`urlparse` module defines the following functions:
|
||||||
|
:rfc:`1738` - Uniform Resource Locators (URL)
|
||||||
|
This specifies the formal syntax and semantics of absolute URLs.
|
||||||
|
|
||||||
|
+.. _WHATWG: https://url.spec.whatwg.org/
|
||||||
|
|
||||||
|
.. _urlparse-result-object:
|
||||||
|
|
||||||
|
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
|
||||||
|
index 21875bb2991..16eefed56f6 100644
|
||||||
|
--- a/Lib/test/test_urlparse.py
|
||||||
|
+++ b/Lib/test/test_urlparse.py
|
||||||
|
@@ -618,6 +618,55 @@ class UrlParseTestCase(unittest.TestCase):
|
||||||
|
self.assertEqual(p1.path, '863-1234')
|
||||||
|
self.assertEqual(p1.params, 'phone-context=+1-914-555')
|
||||||
|
|
||||||
|
+ def test_urlsplit_remove_unsafe_bytes(self):
|
||||||
|
+ # Remove ASCII tabs and newlines from input, for http common case scenario.
|
||||||
|
+ url = "h\nttp://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
|
||||||
|
+ p = urlparse.urlsplit(url)
|
||||||
|
+ self.assertEqual(p.scheme, "http")
|
||||||
|
+ self.assertEqual(p.netloc, "www.python.org")
|
||||||
|
+ self.assertEqual(p.path, "/javascript:alert('msg')/")
|
||||||
|
+ self.assertEqual(p.query, "query=something")
|
||||||
|
+ self.assertEqual(p.fragment, "fragment")
|
||||||
|
+ self.assertEqual(p.username, None)
|
||||||
|
+ self.assertEqual(p.password, None)
|
||||||
|
+ self.assertEqual(p.hostname, "www.python.org")
|
||||||
|
+ self.assertEqual(p.port, None)
|
||||||
|
+ self.assertEqual(p.geturl(), "http://www.python.org/javascript:alert('msg')/?query=something#fragment")
|
||||||
|
+
|
||||||
|
+ # Remove ASCII tabs and newlines from input as bytes, for http common case scenario.
|
||||||
|
+ url = b"h\nttp://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
|
||||||
|
+ p = urlparse.urlsplit(url)
|
||||||
|
+ self.assertEqual(p.scheme, b"http")
|
||||||
|
+ self.assertEqual(p.netloc, b"www.python.org")
|
||||||
|
+ self.assertEqual(p.path, b"/javascript:alert('msg')/")
|
||||||
|
+ self.assertEqual(p.query, b"query=something")
|
||||||
|
+ self.assertEqual(p.fragment, b"fragment")
|
||||||
|
+ self.assertEqual(p.username, None)
|
||||||
|
+ self.assertEqual(p.password, None)
|
||||||
|
+ self.assertEqual(p.hostname, b"www.python.org")
|
||||||
|
+ self.assertEqual(p.port, None)
|
||||||
|
+ self.assertEqual(p.geturl(), b"http://www.python.org/javascript:alert('msg')/?query=something#fragment")
|
||||||
|
+
|
||||||
|
+ # any scheme
|
||||||
|
+ url = "x-new-scheme\t://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
|
||||||
|
+ p = urlparse.urlsplit(url)
|
||||||
|
+ self.assertEqual(p.geturl(), "x-new-scheme://www.python.org/javascript:alert('msg')/?query=something#fragment")
|
||||||
|
+
|
||||||
|
+ # Remove ASCII tabs and newlines from input as bytes, any scheme.
|
||||||
|
+ url = b"x-new-scheme\t://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
|
||||||
|
+ p = urlparse.urlsplit(url)
|
||||||
|
+ self.assertEqual(p.geturl(), b"x-new-scheme://www.python.org/javascript:alert('msg')/?query=something#fragment")
|
||||||
|
+
|
||||||
|
+ # Unsafe bytes is not returned from urlparse cache.
|
||||||
|
+ # scheme is stored after parsing, sending an scheme with unsafe bytes *will not* return an unsafe scheme
|
||||||
|
+ url = "https://www.python\n.org\t/java\nscript:\talert('msg\r\n')/?query\n=\tsomething#frag\nment"
|
||||||
|
+ scheme = "htt\nps"
|
||||||
|
+ for _ in range(2):
|
||||||
|
+ p = urlparse.urlsplit(url, scheme=scheme)
|
||||||
|
+ self.assertEqual(p.scheme, "https")
|
||||||
|
+ self.assertEqual(p.geturl(), "https://www.python.org/javascript:alert('msg')/?query=something#fragment")
|
||||||
|
+
|
||||||
|
+
|
||||||
|
|
||||||
|
def test_attributes_bad_port(self):
|
||||||
|
"""Check handling of non-integer ports."""
|
||||||
|
diff --git a/Lib/urlparse.py b/Lib/urlparse.py
|
||||||
|
index 69504d8fd93..6cc40a8d2fb 100644
|
||||||
|
--- a/Lib/urlparse.py
|
||||||
|
+++ b/Lib/urlparse.py
|
||||||
|
@@ -63,6 +63,9 @@ scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
|
||||||
|
'0123456789'
|
||||||
|
'+-.')
|
||||||
|
|
||||||
|
+# Unsafe bytes to be removed per WHATWG spec
|
||||||
|
+_UNSAFE_URL_BYTES_TO_REMOVE = ['\t', '\r', '\n']
|
||||||
|
+
|
||||||
|
MAX_CACHE_SIZE = 20
|
||||||
|
_parse_cache = {}
|
||||||
|
|
||||||
|
@@ -185,12 +188,19 @@ def _checknetloc(netloc):
|
||||||
|
"under NFKC normalization"
|
||||||
|
% netloc)
|
||||||
|
|
||||||
|
+def _remove_unsafe_bytes_from_url(url):
|
||||||
|
+ for b in _UNSAFE_URL_BYTES_TO_REMOVE:
|
||||||
|
+ url = url.replace(b, "")
|
||||||
|
+ return url
|
||||||
|
+
|
||||||
|
def urlsplit(url, scheme='', allow_fragments=True):
|
||||||
|
"""Parse a URL into 5 components:
|
||||||
|
<scheme>://<netloc>/<path>?<query>#<fragment>
|
||||||
|
Return a 5-tuple: (scheme, netloc, path, query, fragment).
|
||||||
|
Note that we don't break the components up in smaller bits
|
||||||
|
(e.g. netloc is a single string) and we don't expand % escapes."""
|
||||||
|
+ url = _remove_unsafe_bytes_from_url(url)
|
||||||
|
+ scheme = _remove_unsafe_bytes_from_url(scheme)
|
||||||
|
allow_fragments = bool(allow_fragments)
|
||||||
|
key = url, scheme, allow_fragments, type(url), type(scheme)
|
||||||
|
cached = _parse_cache.get(key, None)
|
94
00378-support-expat-2-4-5.patch
Normal file
94
00378-support-expat-2-4-5.patch
Normal file
@ -0,0 +1,94 @@
|
|||||||
|
From 35f5707b555d3bca92858de16760918e76463a1e Mon Sep 17 00:00:00 2001
|
||||||
|
From: Sebastian Pipping <sebastian@pipping.org>
|
||||||
|
Date: Mon, 21 Feb 2022 15:48:32 +0100
|
||||||
|
Subject: [PATCH] 00378-support-expat-2-4-5.patch
|
||||||
|
|
||||||
|
00378 #
|
||||||
|
Support expat 2.4.5
|
||||||
|
|
||||||
|
Curly brackets were never allowed in namespace URIs
|
||||||
|
according to RFC 3986, and so-called namespace-validating
|
||||||
|
XML parsers have the right to reject them as invalid URIs.
|
||||||
|
|
||||||
|
libexpat >=2.4.5 has become stricter in that regard due to
|
||||||
|
related security issues; with ET.XML instantiating a
|
||||||
|
namespace-aware parser under the hood, this test has no
|
||||||
|
future in CPython.
|
||||||
|
|
||||||
|
References:
|
||||||
|
- https://datatracker.ietf.org/doc/html/rfc3968
|
||||||
|
- https://www.w3.org/TR/xml-names/
|
||||||
|
|
||||||
|
Also, test_minidom.py: Support Expat >=2.4.5
|
||||||
|
|
||||||
|
Upstream: https://bugs.python.org/issue46811
|
||||||
|
|
||||||
|
Backported from Python 3.
|
||||||
|
|
||||||
|
Co-authored-by: Sebastian Pipping <sebastian@pipping.org>
|
||||||
|
---
|
||||||
|
Lib/test/test_minidom.py | 8 ++++++--
|
||||||
|
Lib/test/test_xml_etree.py | 6 ------
|
||||||
|
.../next/Library/2022-02-20-21-03-31.bpo-46811.8BxgdQ.rst | 1 +
|
||||||
|
3 files changed, 7 insertions(+), 8 deletions(-)
|
||||||
|
create mode 100644 Misc/NEWS.d/next/Library/2022-02-20-21-03-31.bpo-46811.8BxgdQ.rst
|
||||||
|
|
||||||
|
diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py
|
||||||
|
index 2eb6423..2c9a7a3 100644
|
||||||
|
--- a/Lib/test/test_minidom.py
|
||||||
|
+++ b/Lib/test/test_minidom.py
|
||||||
|
@@ -6,12 +6,14 @@ from StringIO import StringIO
|
||||||
|
from test import support
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
+import pyexpat
|
||||||
|
import xml.dom
|
||||||
|
import xml.dom.minidom
|
||||||
|
import xml.parsers.expat
|
||||||
|
|
||||||
|
from xml.dom.minidom import parse, Node, Document, parseString
|
||||||
|
from xml.dom.minidom import getDOMImplementation
|
||||||
|
+from xml.parsers.expat import ExpatError
|
||||||
|
|
||||||
|
|
||||||
|
tstfile = support.findfile("test.xml", subdir="xmltestdata")
|
||||||
|
@@ -1051,8 +1053,10 @@ class MinidomTest(unittest.TestCase):
|
||||||
|
|
||||||
|
# Verify that character decoding errors raise exceptions instead
|
||||||
|
# of crashing
|
||||||
|
- self.assertRaises(UnicodeDecodeError, parseString,
|
||||||
|
- '<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>')
|
||||||
|
+ self.assertRaises(ExpatError, parseString,
|
||||||
|
+ '<fran\xe7ais></fran\xe7ais>')
|
||||||
|
+ self.assertRaises(ExpatError, parseString,
|
||||||
|
+ '<franais>Comment \xe7a va ? Tr\xe8s bien ?</franais>')
|
||||||
|
|
||||||
|
doc.unlink()
|
||||||
|
|
||||||
|
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
|
||||||
|
index c75d55f..0855bc0 100644
|
||||||
|
--- a/Lib/test/test_xml_etree.py
|
||||||
|
+++ b/Lib/test/test_xml_etree.py
|
||||||
|
@@ -1482,12 +1482,6 @@ class BugsTest(unittest.TestCase):
|
||||||
|
b"<?xml version='1.0' encoding='ascii'?>\n"
|
||||||
|
b'<body>tãg</body>')
|
||||||
|
|
||||||
|
- def test_issue3151(self):
|
||||||
|
- e = ET.XML('<prefix:localname xmlns:prefix="${stuff}"/>')
|
||||||
|
- self.assertEqual(e.tag, '{${stuff}}localname')
|
||||||
|
- t = ET.ElementTree(e)
|
||||||
|
- self.assertEqual(ET.tostring(e), b'<ns0:localname xmlns:ns0="${stuff}" />')
|
||||||
|
-
|
||||||
|
def test_issue6565(self):
|
||||||
|
elem = ET.XML("<body><tag/></body>")
|
||||||
|
self.assertEqual(summarize_list(elem), ['tag'])
|
||||||
|
diff --git a/Misc/NEWS.d/next/Library/2022-02-20-21-03-31.bpo-46811.8BxgdQ.rst b/Misc/NEWS.d/next/Library/2022-02-20-21-03-31.bpo-46811.8BxgdQ.rst
|
||||||
|
new file mode 100644
|
||||||
|
index 0000000..6969bd1
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/Misc/NEWS.d/next/Library/2022-02-20-21-03-31.bpo-46811.8BxgdQ.rst
|
||||||
|
@@ -0,0 +1 @@
|
||||||
|
+Make test suite support Expat >=2.4.5
|
||||||
|
--
|
||||||
|
2.35.1
|
||||||
|
|
440
00382-cve-2015-20107.patch
Normal file
@ -0,0 +1,440 @@
|
|||||||
|
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Petr Viktorin <encukou@gmail.com>
|
||||||
|
Date: Fri, 3 Jun 2022 11:43:35 +0200
|
||||||
|
Subject: [PATCH] 00382-cve-2015-20107.patch
|
||||||
|
|
||||||
|
00382 #
|
||||||
|
Make mailcap refuse to match unsafe filenames/types/params (GH-91993)
|
||||||
|
|
||||||
|
Upstream: https://github.com/python/cpython/issues/68966
|
||||||
|
|
||||||
|
Tracker bug: https://bugzilla.redhat.com/show_bug.cgi?id=2075390
|
||||||
|
|
||||||
|
Backported from python3.
|
||||||
|
---
|
||||||
|
Doc/library/mailcap.rst | 12 +
|
||||||
|
Lib/mailcap.py | 29 +-
|
||||||
|
Lib/test/mailcap.txt | 39 +++
|
||||||
|
Lib/test/test_mailcap.py | 259 ++++++++++++++++++
|
||||||
|
...2-04-27-18-25-30.gh-issue-68966.gjS8zs.rst | 4 +
|
||||||
|
5 files changed, 341 insertions(+), 2 deletions(-)
|
||||||
|
create mode 100644 Lib/test/mailcap.txt
|
||||||
|
create mode 100644 Lib/test/test_mailcap.py
|
||||||
|
create mode 100644 Misc/NEWS.d/next/Security/2022-04-27-18-25-30.gh-issue-68966.gjS8zs.rst
|
||||||
|
|
||||||
|
diff --git a/Doc/library/mailcap.rst b/Doc/library/mailcap.rst
|
||||||
|
index 750d085796f..5f75ee6086e 100644
|
||||||
|
--- a/Doc/library/mailcap.rst
|
||||||
|
+++ b/Doc/library/mailcap.rst
|
||||||
|
@@ -54,6 +54,18 @@ standard. However, mailcap files are supported on most Unix systems.
|
||||||
|
use) to determine whether or not the mailcap line applies. :func:`findmatch`
|
||||||
|
will automatically check such conditions and skip the entry if the check fails.
|
||||||
|
|
||||||
|
+ .. versionchanged:: 3.11
|
||||||
|
+
|
||||||
|
+ To prevent security issues with shell metacharacters (symbols that have
|
||||||
|
+ special effects in a shell command line), ``findmatch`` will refuse
|
||||||
|
+ to inject ASCII characters other than alphanumerics and ``@+=:,./-_``
|
||||||
|
+ into the returned command line.
|
||||||
|
+
|
||||||
|
+ If a disallowed character appears in *filename*, ``findmatch`` will always
|
||||||
|
+ return ``(None, None)`` as if no entry was found.
|
||||||
|
+ If such a character appears elsewhere (a value in *plist* or in *MIMEtype*),
|
||||||
|
+ ``findmatch`` will ignore all mailcap entries which use that value.
|
||||||
|
+ A :mod:`warning <warnings>` will be raised in either case.
|
||||||
|
|
||||||
|
.. function:: getcaps()
|
||||||
|
|
||||||
|
diff --git a/Lib/mailcap.py b/Lib/mailcap.py
|
||||||
|
index 04077ba0db2..1108b447b1d 100644
|
||||||
|
--- a/Lib/mailcap.py
|
||||||
|
+++ b/Lib/mailcap.py
|
||||||
|
@@ -1,9 +1,18 @@
|
||||||
|
"""Mailcap file handling. See RFC 1524."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
+import warnings
|
||||||
|
+import re
|
||||||
|
|
||||||
|
__all__ = ["getcaps","findmatch"]
|
||||||
|
|
||||||
|
+
|
||||||
|
+_find_unsafe = re.compile(r'[^\xa1-\xff\w@+=:,./-]').search
|
||||||
|
+
|
||||||
|
+class UnsafeMailcapInput(Warning):
|
||||||
|
+ """Warning raised when refusing unsafe input"""
|
||||||
|
+
|
||||||
|
+
|
||||||
|
# Part 1: top-level interface.
|
||||||
|
|
||||||
|
def getcaps():
|
||||||
|
@@ -144,15 +153,22 @@ def findmatch(caps, MIMEtype, key='view', filename="/dev/null", plist=[]):
|
||||||
|
entry to use.
|
||||||
|
|
||||||
|
"""
|
||||||
|
+ if _find_unsafe(filename):
|
||||||
|
+ msg = "Refusing to use mailcap with filename %r. Use a safe temporary filename." % (filename,)
|
||||||
|
+ warnings.warn(msg, UnsafeMailcapInput)
|
||||||
|
+ return None, None
|
||||||
|
entries = lookup(caps, MIMEtype, key)
|
||||||
|
# XXX This code should somehow check for the needsterminal flag.
|
||||||
|
for e in entries:
|
||||||
|
if 'test' in e:
|
||||||
|
test = subst(e['test'], filename, plist)
|
||||||
|
+ if test is None:
|
||||||
|
+ continue
|
||||||
|
if test and os.system(test) != 0:
|
||||||
|
continue
|
||||||
|
command = subst(e[key], MIMEtype, filename, plist)
|
||||||
|
- return command, e
|
||||||
|
+ if command is not None:
|
||||||
|
+ return command, e
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
def lookup(caps, MIMEtype, key=None):
|
||||||
|
@@ -184,6 +200,10 @@ def subst(field, MIMEtype, filename, plist=[]):
|
||||||
|
elif c == 's':
|
||||||
|
res = res + filename
|
||||||
|
elif c == 't':
|
||||||
|
+ if _find_unsafe(MIMEtype):
|
||||||
|
+ msg = "Refusing to substitute MIME type %r into a shell command." % (MIMEtype,)
|
||||||
|
+ warnings.warn(msg, UnsafeMailcapInput)
|
||||||
|
+ return None
|
||||||
|
res = res + MIMEtype
|
||||||
|
elif c == '{':
|
||||||
|
start = i
|
||||||
|
@@ -191,7 +211,12 @@ def subst(field, MIMEtype, filename, plist=[]):
|
||||||
|
i = i+1
|
||||||
|
name = field[start:i]
|
||||||
|
i = i+1
|
||||||
|
- res = res + findparam(name, plist)
|
||||||
|
+ param = findparam(name, plist)
|
||||||
|
+ if _find_unsafe(param):
|
||||||
|
+ msg = "Refusing to substitute parameter %r (%s) into a shell command" % (param, name)
|
||||||
|
+ warnings.warn(msg, UnsafeMailcapInput)
|
||||||
|
+ return None
|
||||||
|
+ res = res + param
|
||||||
|
# XXX To do:
|
||||||
|
# %n == number of parts if type is multipart/*
|
||||||
|
# %F == list of alternating type and filename for parts
|
||||||
|
diff --git a/Lib/test/mailcap.txt b/Lib/test/mailcap.txt
|
||||||
|
new file mode 100644
|
||||||
|
index 00000000000..08a76e65941
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/Lib/test/mailcap.txt
|
||||||
|
@@ -0,0 +1,39 @@
|
||||||
|
+# Mailcap file for test_mailcap; based on RFC 1524
|
||||||
|
+# Referred to by test_mailcap.py
|
||||||
|
+
|
||||||
|
+#
|
||||||
|
+# This is a comment.
|
||||||
|
+#
|
||||||
|
+
|
||||||
|
+application/frame; showframe %s; print="cat %s | lp"
|
||||||
|
+application/postscript; ps-to-terminal %s;\
|
||||||
|
+ needsterminal
|
||||||
|
+application/postscript; ps-to-terminal %s; \
|
||||||
|
+ compose=idraw %s
|
||||||
|
+application/x-dvi; xdvi %s
|
||||||
|
+application/x-movie; movieplayer %s; compose=moviemaker %s; \
|
||||||
|
+ description="Movie"; \
|
||||||
|
+ x11-bitmap="/usr/lib/Zmail/bitmaps/movie.xbm"
|
||||||
|
+application/*; echo "This is \"%t\" but \
|
||||||
|
+ is 50 \% Greek to me" \; cat %s; copiousoutput
|
||||||
|
+
|
||||||
|
+audio/basic; showaudio %s; compose=audiocompose %s; edit=audiocompose %s;\
|
||||||
|
+description="An audio fragment"
|
||||||
|
+audio/* ; /usr/local/bin/showaudio %t
|
||||||
|
+
|
||||||
|
+image/rgb; display %s
|
||||||
|
+#image/gif; display %s
|
||||||
|
+image/x-xwindowdump; display %s
|
||||||
|
+
|
||||||
|
+# The continuation char shouldn't \
|
||||||
|
+# make a difference in a comment.
|
||||||
|
+
|
||||||
|
+message/external-body; showexternal %s %{access-type} %{name} %{site} \
|
||||||
|
+ %{directory} %{mode} %{server}; needsterminal; composetyped = extcompose %s; \
|
||||||
|
+ description="A reference to data stored in an external location"
|
||||||
|
+
|
||||||
|
+text/richtext; shownonascii iso-8859-8 -e richtext -p %s; test=test "`echo \
|
||||||
|
+ %{charset} | tr '[A-Z]' '[a-z]'`" = iso-8859-8; copiousoutput
|
||||||
|
+
|
||||||
|
+video/*; animate %s
|
||||||
|
+video/mpeg; mpeg_play %s
|
||||||
|
\ No newline at end of file
|
||||||
|
diff --git a/Lib/test/test_mailcap.py b/Lib/test/test_mailcap.py
|
||||||
|
new file mode 100644
|
||||||
|
index 00000000000..35da7fb0741
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/Lib/test/test_mailcap.py
|
||||||
|
@@ -0,0 +1,259 @@
|
||||||
|
+import copy
|
||||||
|
+import os
|
||||||
|
+import sys
|
||||||
|
+import test.support
|
||||||
|
+import unittest
|
||||||
|
+from test import support as os_helper
|
||||||
|
+from test import support as warnings_helper
|
||||||
|
+from collections import OrderedDict
|
||||||
|
+
|
||||||
|
+import mailcap
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+# Location of mailcap file
|
||||||
|
+MAILCAPFILE = test.support.findfile("mailcap.txt")
|
||||||
|
+
|
||||||
|
+# Dict to act as mock mailcap entry for this test
|
||||||
|
+# The keys and values should match the contents of MAILCAPFILE
|
||||||
|
+
|
||||||
|
+MAILCAPDICT = {
|
||||||
|
+ 'application/x-movie':
|
||||||
|
+ [{'compose': 'moviemaker %s',
|
||||||
|
+ 'x11-bitmap': '"/usr/lib/Zmail/bitmaps/movie.xbm"',
|
||||||
|
+ 'description': '"Movie"',
|
||||||
|
+ 'view': 'movieplayer %s',
|
||||||
|
+ 'lineno': 4}],
|
||||||
|
+ 'application/*':
|
||||||
|
+ [{'copiousoutput': '',
|
||||||
|
+ 'view': 'echo "This is \\"%t\\" but is 50 \\% Greek to me" \\; cat %s',
|
||||||
|
+ 'lineno': 5}],
|
||||||
|
+ 'audio/basic':
|
||||||
|
+ [{'edit': 'audiocompose %s',
|
||||||
|
+ 'compose': 'audiocompose %s',
|
||||||
|
+ 'description': '"An audio fragment"',
|
||||||
|
+ 'view': 'showaudio %s',
|
||||||
|
+ 'lineno': 6}],
|
||||||
|
+ 'video/mpeg':
|
||||||
|
+ [{'view': 'mpeg_play %s', 'lineno': 13}],
|
||||||
|
+ 'application/postscript':
|
||||||
|
+ [{'needsterminal': '', 'view': 'ps-to-terminal %s', 'lineno': 1},
|
||||||
|
+ {'compose': 'idraw %s', 'view': 'ps-to-terminal %s', 'lineno': 2}],
|
||||||
|
+ 'application/x-dvi':
|
||||||
|
+ [{'view': 'xdvi %s', 'lineno': 3}],
|
||||||
|
+ 'message/external-body':
|
||||||
|
+ [{'composetyped': 'extcompose %s',
|
||||||
|
+ 'description': '"A reference to data stored in an external location"',
|
||||||
|
+ 'needsterminal': '',
|
||||||
|
+ 'view': 'showexternal %s %{access-type} %{name} %{site} %{directory} %{mode} %{server}',
|
||||||
|
+ 'lineno': 10}],
|
||||||
|
+ 'text/richtext':
|
||||||
|
+ [{'test': 'test "`echo %{charset} | tr \'[A-Z]\' \'[a-z]\'`" = iso-8859-8',
|
||||||
|
+ 'copiousoutput': '',
|
||||||
|
+ 'view': 'shownonascii iso-8859-8 -e richtext -p %s',
|
||||||
|
+ 'lineno': 11}],
|
||||||
|
+ 'image/x-xwindowdump':
|
||||||
|
+ [{'view': 'display %s', 'lineno': 9}],
|
||||||
|
+ 'audio/*':
|
||||||
|
+ [{'view': '/usr/local/bin/showaudio %t', 'lineno': 7}],
|
||||||
|
+ 'video/*':
|
||||||
|
+ [{'view': 'animate %s', 'lineno': 12}],
|
||||||
|
+ 'application/frame':
|
||||||
|
+ [{'print': '"cat %s | lp"', 'view': 'showframe %s', 'lineno': 0}],
|
||||||
|
+ 'image/rgb':
|
||||||
|
+ [{'view': 'display %s', 'lineno': 8}]
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
+# In Python 2, mailcap doesn't return line numbers.
|
||||||
|
+# This test suite is copied from Python 3.11; for easier backporting we keep
|
||||||
|
+# data from there and remove the lineno.
|
||||||
|
+# So, for Python 2, MAILCAPDICT_DEPRECATED is the same as MAILCAPDICT
|
||||||
|
+MAILCAPDICT_DEPRECATED = MAILCAPDICT
|
||||||
|
+for entry_list in MAILCAPDICT_DEPRECATED.values():
|
||||||
|
+ for entry in entry_list:
|
||||||
|
+ entry.pop('lineno')
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+class HelperFunctionTest(unittest.TestCase):
|
||||||
|
+
|
||||||
|
+ def test_listmailcapfiles(self):
|
||||||
|
+ # The return value for listmailcapfiles() will vary by system.
|
||||||
|
+ # So verify that listmailcapfiles() returns a list of strings that is of
|
||||||
|
+ # non-zero length.
|
||||||
|
+ mcfiles = mailcap.listmailcapfiles()
|
||||||
|
+ self.assertIsInstance(mcfiles, list)
|
||||||
|
+ for m in mcfiles:
|
||||||
|
+ self.assertIsInstance(m, str)
|
||||||
|
+ with os_helper.EnvironmentVarGuard() as env:
|
||||||
|
+ # According to RFC 1524, if MAILCAPS env variable exists, use that
|
||||||
|
+ # and only that.
|
||||||
|
+ if "MAILCAPS" in env:
|
||||||
|
+ env_mailcaps = env["MAILCAPS"].split(os.pathsep)
|
||||||
|
+ else:
|
||||||
|
+ env_mailcaps = ["/testdir1/.mailcap", "/testdir2/mailcap"]
|
||||||
|
+ env["MAILCAPS"] = os.pathsep.join(env_mailcaps)
|
||||||
|
+ mcfiles = mailcap.listmailcapfiles()
|
||||||
|
+ self.assertEqual(env_mailcaps, mcfiles)
|
||||||
|
+
|
||||||
|
+ def test_readmailcapfile(self):
|
||||||
|
+ # Test readmailcapfile() using test file. It should match MAILCAPDICT.
|
||||||
|
+ with open(MAILCAPFILE, 'r') as mcf:
|
||||||
|
+ d = mailcap.readmailcapfile(mcf)
|
||||||
|
+ self.assertDictEqual(d, MAILCAPDICT_DEPRECATED)
|
||||||
|
+
|
||||||
|
+ def test_lookup(self):
|
||||||
|
+ # Test without key
|
||||||
|
+
|
||||||
|
+ # In Python 2, 'video/mpeg' is tried before 'video/*'
|
||||||
|
+ # (unfixed bug: https://github.com/python/cpython/issues/59182 )
|
||||||
|
+ # So, these are in reverse order:
|
||||||
|
+ expected = [{'view': 'mpeg_play %s', },
|
||||||
|
+ {'view': 'animate %s', }]
|
||||||
|
+ actual = mailcap.lookup(MAILCAPDICT, 'video/mpeg')
|
||||||
|
+ self.assertListEqual(expected, actual)
|
||||||
|
+
|
||||||
|
+ # Test with key
|
||||||
|
+ key = 'compose'
|
||||||
|
+ expected = [{'edit': 'audiocompose %s',
|
||||||
|
+ 'compose': 'audiocompose %s',
|
||||||
|
+ 'description': '"An audio fragment"',
|
||||||
|
+ 'view': 'showaudio %s',
|
||||||
|
+ }]
|
||||||
|
+ actual = mailcap.lookup(MAILCAPDICT, 'audio/basic', key)
|
||||||
|
+ self.assertListEqual(expected, actual)
|
||||||
|
+
|
||||||
|
+ # Test on user-defined dicts without line numbers
|
||||||
|
+ expected = [{'view': 'mpeg_play %s'}, {'view': 'animate %s'}]
|
||||||
|
+ actual = mailcap.lookup(MAILCAPDICT_DEPRECATED, 'video/mpeg')
|
||||||
|
+ self.assertListEqual(expected, actual)
|
||||||
|
+
|
||||||
|
+ def test_subst(self):
|
||||||
|
+ plist = ['id=1', 'number=2', 'total=3']
|
||||||
|
+ # test case: ([field, MIMEtype, filename, plist=[]], <expected string>)
|
||||||
|
+ test_cases = [
|
||||||
|
+ (["", "audio/*", "foo.txt"], ""),
|
||||||
|
+ (["echo foo", "audio/*", "foo.txt"], "echo foo"),
|
||||||
|
+ (["echo %s", "audio/*", "foo.txt"], "echo foo.txt"),
|
||||||
|
+ (["echo %t", "audio/*", "foo.txt"], None),
|
||||||
|
+ (["echo %t", "audio/wav", "foo.txt"], "echo audio/wav"),
|
||||||
|
+ (["echo \\%t", "audio/*", "foo.txt"], "echo %t"),
|
||||||
|
+ (["echo foo", "audio/*", "foo.txt", plist], "echo foo"),
|
||||||
|
+ (["echo %{total}", "audio/*", "foo.txt", plist], "echo 3")
|
||||||
|
+ ]
|
||||||
|
+ for tc in test_cases:
|
||||||
|
+ self.assertEqual(mailcap.subst(*tc[0]), tc[1])
|
||||||
|
+
|
||||||
|
+class GetcapsTest(unittest.TestCase):
|
||||||
|
+
|
||||||
|
+ def test_mock_getcaps(self):
|
||||||
|
+ # Test mailcap.getcaps() using mock mailcap file in this dir.
|
||||||
|
+ # Temporarily override any existing system mailcap file by pointing the
|
||||||
|
+ # MAILCAPS environment variable to our mock file.
|
||||||
|
+ with os_helper.EnvironmentVarGuard() as env:
|
||||||
|
+ env["MAILCAPS"] = MAILCAPFILE
|
||||||
|
+ caps = mailcap.getcaps()
|
||||||
|
+ self.assertDictEqual(caps, MAILCAPDICT)
|
||||||
|
+
|
||||||
|
+ def test_system_mailcap(self):
|
||||||
|
+ # Test mailcap.getcaps() with mailcap file(s) on system, if any.
|
||||||
|
+ caps = mailcap.getcaps()
|
||||||
|
+ self.assertIsInstance(caps, dict)
|
||||||
|
+ mailcapfiles = mailcap.listmailcapfiles()
|
||||||
|
+ existingmcfiles = [mcf for mcf in mailcapfiles if os.path.exists(mcf)]
|
||||||
|
+ if existingmcfiles:
|
||||||
|
+ # At least 1 mailcap file exists, so test that.
|
||||||
|
+ for (k, v) in caps.items():
|
||||||
|
+ self.assertIsInstance(k, str)
|
||||||
|
+ self.assertIsInstance(v, list)
|
||||||
|
+ for e in v:
|
||||||
|
+ self.assertIsInstance(e, dict)
|
||||||
|
+ else:
|
||||||
|
+ # No mailcap files on system. getcaps() should return empty dict.
|
||||||
|
+ self.assertEqual({}, caps)
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+class FindmatchTest(unittest.TestCase):
|
||||||
|
+
|
||||||
|
+ def test_findmatch(self):
|
||||||
|
+
|
||||||
|
+ # default findmatch arguments
|
||||||
|
+ c = MAILCAPDICT
|
||||||
|
+ fname = "foo.txt"
|
||||||
|
+ plist = ["access-type=default", "name=john", "site=python.org",
|
||||||
|
+ "directory=/tmp", "mode=foo", "server=bar"]
|
||||||
|
+ audio_basic_entry = {
|
||||||
|
+ 'edit': 'audiocompose %s',
|
||||||
|
+ 'compose': 'audiocompose %s',
|
||||||
|
+ 'description': '"An audio fragment"',
|
||||||
|
+ 'view': 'showaudio %s',
|
||||||
|
+ }
|
||||||
|
+ audio_entry = {"view": "/usr/local/bin/showaudio %t", }
|
||||||
|
+ video_entry = {'view': 'animate %s', }
|
||||||
|
+ mpeg_entry = {'view': 'mpeg_play %s', }
|
||||||
|
+ message_entry = {
|
||||||
|
+ 'composetyped': 'extcompose %s',
|
||||||
|
+ 'description': '"A reference to data stored in an external location"', 'needsterminal': '',
|
||||||
|
+ 'view': 'showexternal %s %{access-type} %{name} %{site} %{directory} %{mode} %{server}',
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
+ # test case: (findmatch args, findmatch keyword args, expected output)
|
||||||
|
+ # positional args: caps, MIMEtype
|
||||||
|
+ # keyword args: key="view", filename="/dev/null", plist=[]
|
||||||
|
+ # output: (command line, mailcap entry)
|
||||||
|
+ cases = [
|
||||||
|
+ ([{}, "video/mpeg"], {}, (None, None)),
|
||||||
|
+ ([c, "foo/bar"], {}, (None, None)),
|
||||||
|
+
|
||||||
|
+ # In Python 2, 'video/mpeg' is tried before 'video/*'
|
||||||
|
+ # (unfixed bug: https://github.com/python/cpython/issues/59182 )
|
||||||
|
+ #([c, "video/mpeg"], {}, ('animate /dev/null', video_entry)),
|
||||||
|
+ ([c, "video/mpeg"], {}, ('mpeg_play /dev/null', mpeg_entry)),
|
||||||
|
+
|
||||||
|
+ ([c, "audio/basic", "edit"], {}, ("audiocompose /dev/null", audio_basic_entry)),
|
||||||
|
+ ([c, "audio/basic", "compose"], {}, ("audiocompose /dev/null", audio_basic_entry)),
|
||||||
|
+ ([c, "audio/basic", "description"], {}, ('"An audio fragment"', audio_basic_entry)),
|
||||||
|
+ ([c, "audio/basic", "foobar"], {}, (None, None)),
|
||||||
|
+ ([c, "video/*"], {"filename": fname}, ("animate %s" % fname, video_entry)),
|
||||||
|
+ ([c, "audio/basic", "compose"],
|
||||||
|
+ {"filename": fname},
|
||||||
|
+ ("audiocompose %s" % fname, audio_basic_entry)),
|
||||||
|
+ ([c, "audio/basic"],
|
||||||
|
+ {"key": "description", "filename": fname},
|
||||||
|
+ ('"An audio fragment"', audio_basic_entry)),
|
||||||
|
+ ([c, "audio/*"],
|
||||||
|
+ {"filename": fname},
|
||||||
|
+ (None, None)),
|
||||||
|
+ ([c, "audio/wav"],
|
||||||
|
+ {"filename": fname},
|
||||||
|
+ ("/usr/local/bin/showaudio audio/wav", audio_entry)),
|
||||||
|
+ ([c, "message/external-body"],
|
||||||
|
+ {"plist": plist},
|
||||||
|
+ ("showexternal /dev/null default john python.org /tmp foo bar", message_entry))
|
||||||
|
+ ]
|
||||||
|
+ self._run_cases(cases)
|
||||||
|
+
|
||||||
|
+ @unittest.skipUnless(os.name == "posix", "Requires 'test' command on system")
|
||||||
|
+ @unittest.skipIf(sys.platform == "vxworks", "'test' command is not supported on VxWorks")
|
||||||
|
+ def test_test(self):
|
||||||
|
+ # findmatch() will automatically check any "test" conditions and skip
|
||||||
|
+ # the entry if the check fails.
|
||||||
|
+ caps = {"test/pass": [{"test": "test 1 -eq 1"}],
|
||||||
|
+ "test/fail": [{"test": "test 1 -eq 0"}]}
|
||||||
|
+ # test case: (findmatch args, findmatch keyword args, expected output)
|
||||||
|
+ # positional args: caps, MIMEtype, key ("test")
|
||||||
|
+ # keyword args: N/A
|
||||||
|
+ # output: (command line, mailcap entry)
|
||||||
|
+ cases = [
|
||||||
|
+ # findmatch will return the mailcap entry for test/pass because it evaluates to true
|
||||||
|
+ ([caps, "test/pass", "test"], {}, ("test 1 -eq 1", {"test": "test 1 -eq 1"})),
|
||||||
|
+ # findmatch will return None because test/fail evaluates to false
|
||||||
|
+ ([caps, "test/fail", "test"], {}, (None, None))
|
||||||
|
+ ]
|
||||||
|
+ self._run_cases(cases)
|
||||||
|
+
|
||||||
|
+ def _run_cases(self, cases):
|
||||||
|
+ for c in cases:
|
||||||
|
+ self.assertEqual(mailcap.findmatch(*c[0], **c[1]), c[2])
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+def test_main():
|
||||||
|
+ test.support.run_unittest(HelperFunctionTest, GetcapsTest, FindmatchTest)
|
||||||
|
diff --git a/Misc/NEWS.d/next/Security/2022-04-27-18-25-30.gh-issue-68966.gjS8zs.rst b/Misc/NEWS.d/next/Security/2022-04-27-18-25-30.gh-issue-68966.gjS8zs.rst
|
||||||
|
new file mode 100644
|
||||||
|
index 00000000000..da81a1f6993
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/Misc/NEWS.d/next/Security/2022-04-27-18-25-30.gh-issue-68966.gjS8zs.rst
|
||||||
|
@@ -0,0 +1,4 @@
|
||||||
|
+The deprecated mailcap module now refuses to inject unsafe text (filenames,
|
||||||
|
+MIME types, parameters) into shell commands. Instead of using such text, it
|
||||||
|
+will warn and act as if a match was not found (or for test commands, as if
|
||||||
|
+the test failed).
|
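To make the new refusal behaviour concrete, here is a hedged sketch (not part of the patch) of calling findmatch with a shell-unsafe filename:

# Illustrative sketch: a filename with shell metacharacters is now refused.
import warnings
import mailcap

caps = mailcap.getcaps()
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    cmd, entry = mailcap.findmatch(caps, "video/mpeg",
                                   filename="; rm -rf ~ ;")
print(cmd, entry)    # -> (None, None) instead of a shell command
print(len(caught))   # -> 1; an UnsafeMailcapInput warning was recorded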
207
05000-autotool-intermediates.patch
Normal file
@ -0,0 +1,207 @@
|
|||||||
|
diff -up ./configure.autotool-intermediates ./configure
|
||||||
|
--- ./configure.autotool-intermediates 2013-04-09 11:24:01.024185796 +0200
|
||||||
|
+++ ./configure 2013-04-09 11:24:01.780183954 +0200
|
||||||
|
@@ -639,6 +639,8 @@ TRUE
|
||||||
|
MACHDEP_OBJS
|
||||||
|
DYNLOADFILE
|
||||||
|
DLINCLDIR
|
||||||
|
+DTRACEHDRS
|
||||||
|
+DTRACEOBJS
|
||||||
|
THREADOBJ
|
||||||
|
LDLAST
|
||||||
|
USE_THREAD_MODULE
|
||||||
|
@@ -659,6 +661,8 @@ OTHER_LIBTOOL_OPT
|
||||||
|
UNIVERSAL_ARCH_FLAGS
|
||||||
|
BASECFLAGS
|
||||||
|
OPT
|
||||||
|
+DEBUG_SUFFIX
|
||||||
|
+DEBUG_EXT
|
||||||
|
LN
|
||||||
|
MKDIR_P
|
||||||
|
INSTALL_DATA
|
||||||
|
@@ -795,8 +799,11 @@ with_pth
|
||||||
|
enable_ipv6
|
||||||
|
with_doc_strings
|
||||||
|
with_tsc
|
||||||
|
+with_count_allocs
|
||||||
|
+with_call_profile
|
||||||
|
with_pymalloc
|
||||||
|
with_valgrind
|
||||||
|
+with_dtrace
|
||||||
|
with_wctype_functions
|
||||||
|
with_fpectl
|
||||||
|
with_libm
|
||||||
|
@@ -1472,8 +1479,11 @@ Optional Packages:
|
||||||
|
--with-pth use GNU pth threading libraries
|
||||||
|
--with(out)-doc-strings disable/enable documentation strings
|
||||||
|
--with(out)-tsc enable/disable timestamp counter profile
|
||||||
|
+ --with(out)count-allocs enable/disable per-type instance accounting
|
||||||
|
+ --with(out)-call-profile enable/disable statistics on function call invocation
|
||||||
|
--with(out)-pymalloc disable/enable specialized mallocs
|
||||||
|
--with-valgrind Enable Valgrind support
|
||||||
|
+ --with(out)-dtrace disable/enable dtrace support
|
||||||
|
--with-wctype-functions use wctype.h functions
|
||||||
|
--with-fpectl enable SIGFPE catching
|
||||||
|
--with-libm=STRING math library
|
||||||
|
@@ -5343,8 +5353,8 @@ $as_echo "#define Py_ENABLE_SHARED 1" >>
|
||||||
|
INSTSONAME="$LDLIBRARY".$SOVERSION
|
||||||
|
;;
|
||||||
|
Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*)
|
||||||
|
- LDLIBRARY='libpython$(VERSION).so'
|
||||||
|
- BLDLIBRARY='-L. -lpython$(VERSION)'
|
||||||
|
+ LDLIBRARY='libpython$(VERSION)$(DEBUG_EXT).so'
|
||||||
|
+ BLDLIBRARY='-L. -lpython$(VERSION)$(DEBUG_EXT)'
|
||||||
|
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
|
||||||
|
case $ac_sys_system in
|
||||||
|
FreeBSD*)
|
||||||
|
@@ -5367,7 +5377,7 @@ $as_echo "#define Py_ENABLE_SHARED 1" >>
|
||||||
|
;;
|
||||||
|
OSF*)
|
||||||
|
LDLIBRARY='libpython$(VERSION).so'
|
||||||
|
- BLDLIBRARY='-rpath $(LIBDIR) -L. -lpython$(VERSION)'
|
||||||
|
+ BLDLIBRARY='-L. -lpython$(VERSION)'
|
||||||
|
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
|
||||||
|
;;
|
||||||
|
atheos*)
|
||||||
|
@@ -5894,6 +5904,14 @@ $as_echo "no" >&6; }
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
+if test "$Py_DEBUG" = 'true'
|
||||||
|
+then
|
||||||
|
+ DEBUG_EXT=_d
|
||||||
|
+ DEBUG_SUFFIX=-debug
|
||||||
|
+fi
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
# XXX Shouldn't the code above that fiddles with BASECFLAGS and OPT be
|
||||||
|
# merged with this chunk of code?
|
||||||
|
|
||||||
|
@@ -9958,6 +9976,50 @@ $as_echo "no" >&6; }
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-count-allocs" >&5
|
||||||
|
+$as_echo_n "checking for --with-count-allocs... " >&6; }
|
||||||
|
+
|
||||||
|
+# Check whether --with-count-allocs was given.
|
||||||
|
+if test "${with_count_allocs+set}" = set; then :
|
||||||
|
+ withval=$with_count_allocs;
|
||||||
|
+if test "$withval" != no
|
||||||
|
+then
|
||||||
|
+
|
||||||
|
+$as_echo "#define COUNT_ALLOCS 1" >>confdefs.h
|
||||||
|
+
|
||||||
|
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
|
||||||
|
+$as_echo "yes" >&6; }
|
||||||
|
+else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
|
||||||
|
+$as_echo "no" >&6; }
|
||||||
|
+fi
|
||||||
|
+else
|
||||||
|
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
|
||||||
|
+$as_echo "no" >&6; }
|
||||||
|
+fi
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-call-profile" >&5
|
||||||
|
+$as_echo_n "checking for --with-call-profile... " >&6; }
|
||||||
|
+
|
||||||
|
+# Check whether --with-call-profile was given.
|
||||||
|
+if test "${with_call_profile+set}" = set; then :
|
||||||
|
+ withval=$with_call_profile;
|
||||||
|
+if test "$withval" != no
|
||||||
|
+then
|
||||||
|
+
|
||||||
|
+$as_echo "#define CALL_PROFILE 1" >>confdefs.h
|
||||||
|
+
|
||||||
|
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
|
||||||
|
+$as_echo "yes" >&6; }
|
||||||
|
+else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
|
||||||
|
+$as_echo "no" >&6; }
|
||||||
|
+fi
|
||||||
|
+else
|
||||||
|
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
|
||||||
|
+$as_echo "no" >&6; }
|
||||||
|
+fi
|
||||||
|
+
|
||||||
|
+
|
||||||
|
# Check for Python-specific malloc support
|
||||||
|
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-pymalloc" >&5
|
||||||
|
$as_echo_n "checking for --with-pymalloc... " >&6; }
|
||||||
|
@@ -10007,6 +10069,46 @@ fi
|
||||||
|
|
||||||
|
fi
|
||||||
|
|
||||||
|
+# Check for dtrace support
|
||||||
|
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-dtrace" >&5
|
||||||
|
+$as_echo_n "checking for --with-dtrace... " >&6; }
|
||||||
|
+
|
||||||
|
+# Check whether --with-dtrace was given.
|
||||||
|
+if test "${with_dtrace+set}" = set; then :
|
||||||
|
+ withval=$with_dtrace;
|
||||||
|
+fi
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+if test ! -z "$with_dtrace"
|
||||||
|
+then
|
||||||
|
+ if dtrace -G -o /dev/null -s $srcdir/Include/pydtrace.d 2>/dev/null
|
||||||
|
+ then
|
||||||
|
+
|
||||||
|
+$as_echo "#define WITH_DTRACE 1" >>confdefs.h
|
||||||
|
+
|
||||||
|
+ with_dtrace="Sun"
|
||||||
|
+ DTRACEOBJS="Python/dtrace.o"
|
||||||
|
+ DTRADEHDRS=""
|
||||||
|
+ elif dtrace -h -o /dev/null -s $srcdir/Include/pydtrace.d
|
||||||
|
+ then
|
||||||
|
+
|
||||||
|
+$as_echo "#define WITH_DTRACE 1" >>confdefs.h
|
||||||
|
+
|
||||||
|
+ with_dtrace="Apple"
|
||||||
|
+ DTRACEOBJS=""
|
||||||
|
+ DTRADEHDRS="pydtrace.h"
|
||||||
|
+ else
|
||||||
|
+ with_dtrace="no"
|
||||||
|
+ fi
|
||||||
|
+else
|
||||||
|
+ with_dtrace="no"
|
||||||
|
+fi
|
||||||
|
+
|
||||||
|
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_dtrace" >&5
|
||||||
|
+$as_echo "$with_dtrace" >&6; }
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
# Check for --with-wctype-functions
|
||||||
|
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-wctype-functions" >&5
|
||||||
|
$as_echo_n "checking for --with-wctype-functions... " >&6; }
|
||||||
|
diff -up ./pyconfig.h.in.autotool-intermediates ./pyconfig.h.in
|
||||||
|
--- ./pyconfig.h.in.autotool-intermediates 2013-04-09 11:24:01.020185806 +0200
|
||||||
|
+++ ./pyconfig.h.in 2013-04-09 11:24:02.088183204 +0200
|
||||||
|
@@ -18,6 +18,12 @@
|
||||||
|
/* Define this if you have BeOS threads. */
|
||||||
|
#undef BEOS_THREADS
|
||||||
|
|
||||||
|
+/* Define to keep records on function call invocation */
|
||||||
|
+#undef CALL_PROFILE
|
||||||
|
+
|
||||||
|
+/* Define to keep records of the number of instances of each type */
|
||||||
|
+#undef COUNT_ALLOCS
|
||||||
|
+
|
||||||
|
/* Define if you have the Mach cthreads package */
|
||||||
|
#undef C_THREADS
|
||||||
|
|
||||||
|
@@ -1119,12 +1125,6 @@
|
||||||
|
/* Define to profile with the Pentium timestamp counter */
|
||||||
|
#undef WITH_TSC
|
||||||
|
|
||||||
|
-/* Define to keep records of the number of instances of each type */
|
||||||
|
-#undef COUNT_ALLOCS
|
||||||
|
-
|
||||||
|
-/* Define to keep records on function call invocation */
|
||||||
|
-#undef CALL_PROFILE
|
||||||
|
-
|
||||||
|
/* Define if you want pymalloc to be disabled when running under valgrind */
|
||||||
|
#undef WITH_VALGRIND
|
||||||
|
|
28
get-source.sh
Executable file
@ -0,0 +1,28 @@
|
|||||||
|
#! /bin/bash -ex
|
||||||
|
|
||||||
|
# Download a release of Python (if missing) and remove .exe files from it
|
||||||
|
|
||||||
|
version=$1
|
||||||
|
|
||||||
|
if [ -z "${version}" ]; then
|
||||||
|
echo "Usage: $0 VERSION" >& 2
|
||||||
|
echo "" >& 2
|
||||||
|
echo "example: $0 2.7.15" >& 2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
versionedname=Python-${version}
|
||||||
|
orig_archive=${versionedname}.tar.xz
|
||||||
|
new_archive=${versionedname}-noexe.tar.xz
|
||||||
|
|
||||||
|
if [ ! -e ${orig_archive} ]; then
|
||||||
|
wget -N https://www.python.org/ftp/python/${version}/${orig_archive}
|
||||||
|
fi
|
||||||
|
|
||||||
|
deleted_names=$(tar --list -Jf ${orig_archive} | grep '\.exe$')
|
||||||
|
|
||||||
|
# tar --delete does not operate on compressed archives, so do
|
||||||
|
# xz compression/decompression explicitly
|
||||||
|
xz --decompress --stdout ${orig_archive} | \
|
||||||
|
tar --delete -v ${deleted_names} | \
|
||||||
|
xz --compress --stdout -3 -T0 > ${new_archive}
|
17
libpython.stp
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
/* Systemtap tapset to make it easier to trace Python */
|
||||||
|
|
||||||
|
/*
|
||||||
|
Define python.function.entry/return:
|
||||||
|
*/
|
||||||
|
probe python.function.entry = process("python").library("LIBRARY_PATH").mark("function__entry")
|
||||||
|
{
|
||||||
|
filename = user_string($arg1);
|
||||||
|
funcname = user_string($arg2);
|
||||||
|
lineno = $arg3;
|
||||||
|
}
|
||||||
|
probe python.function.return = process("python").library("LIBRARY_PATH").mark("function__return")
|
||||||
|
{
|
||||||
|
filename = user_string($arg1);
|
||||||
|
funcname = user_string($arg2);
|
||||||
|
lineno = $arg3;
|
||||||
|
}
|
21
pyfuntop.stp
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
#!/usr/bin/stap
|
||||||
|
|
||||||
|
global fn_calls;
|
||||||
|
|
||||||
|
probe python.function.entry
|
||||||
|
{
|
||||||
|
fn_calls[pid(), filename, funcname, lineno] += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
probe timer.ms(1000) {
|
||||||
|
printf("\033[2J\033[1;1H") /* clear screen */
|
||||||
|
printf("%6s %80s %6s %30s %6s\n",
|
||||||
|
"PID", "FILENAME", "LINE", "FUNCTION", "CALLS")
|
||||||
|
foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) {
|
||||||
|
printf("%6d %80s %6d %30s %6d\n",
|
||||||
|
pid, filename, lineno, funcname,
|
||||||
|
fn_calls[pid, filename, funcname, lineno]);
|
||||||
|
}
|
||||||
|
|
||||||
|
delete fn_calls;
|
||||||
|
}
|
2
pynche
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
exec `python2 -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific = True))"`/pynche/pynche
|
11
python-2.5-cflags.patch
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
--- Python-2.5c1/Makefile.pre.in.cflags 2006-08-18 11:05:40.000000000 -0400
|
||||||
|
+++ Python-2.5c1/Makefile.pre.in 2006-08-18 11:09:26.000000000 -0400
|
||||||
|
@@ -334,7 +334,7 @@
|
||||||
|
|
||||||
|
# Build the interpreter
|
||||||
|
$(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY)
|
||||||
|
- $(LINKCC) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
|
||||||
|
+ $(LINKCC) $(CFLAGS) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
|
||||||
|
Modules/python.o \
|
||||||
|
$(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
|
||||||
|
|
12
python-2.5.1-plural-fix.patch
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
diff -up Python-2.5.1/Lib/gettext.py.plural Python-2.5.1/Lib/gettext.py
|
||||||
|
--- Python-2.5.1/Lib/gettext.py.plural 2007-09-10 11:38:57.000000000 -0400
|
||||||
|
+++ Python-2.5.1/Lib/gettext.py 2007-09-10 11:39:00.000000000 -0400
|
||||||
|
@@ -299,6 +299,8 @@ class GNUTranslations(NullTranslations):
|
||||||
|
item = item.strip()
|
||||||
|
if not item:
|
||||||
|
continue
|
||||||
|
+ if item.startswith("#"):
|
||||||
|
+ continue
|
||||||
|
k = v = None
|
||||||
|
if ':' in item:
|
||||||
|
k, v = item.split(':', 1)
|
24
python-2.5.1-sqlite-encoding.patch
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
diff -up Python-2.5.1/Lib/sqlite3/dbapi2.py.encoding Python-2.5.1/Lib/sqlite3/dbapi2.py
|
||||||
|
--- Python-2.5.1/Lib/sqlite3/dbapi2.py.encoding 2007-09-14 10:41:50.000000000 -0400
|
||||||
|
+++ Python-2.5.1/Lib/sqlite3/dbapi2.py 2007-09-14 10:42:00.000000000 -0400
|
||||||
|
@@ -1,7 +1,6 @@
|
||||||
|
-# -*- coding: iso-8859-1 -*-
|
||||||
|
# pysqlite2/dbapi2.py: the DB-API 2.0 interface
|
||||||
|
#
|
||||||
|
-# Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
|
||||||
|
+# Copyright (C) 2004-2005 Gerhard Haering <gh@ghaering.de>
|
||||||
|
#
|
||||||
|
# This file is part of pysqlite.
|
||||||
|
#
|
||||||
|
diff -up Python-2.5.1/Lib/sqlite3/__init__.py.encoding Python-2.5.1/Lib/sqlite3/__init__.py
|
||||||
|
--- Python-2.5.1/Lib/sqlite3/__init__.py.encoding 2007-09-14 10:41:47.000000000 -0400
|
||||||
|
+++ Python-2.5.1/Lib/sqlite3/__init__.py 2007-09-14 10:42:06.000000000 -0400
|
||||||
|
@@ -1,7 +1,6 @@
|
||||||
|
-#-*- coding: ISO-8859-1 -*-
|
||||||
|
# pysqlite2/__init__.py: the pysqlite2 package.
|
||||||
|
#
|
||||||
|
-# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
|
||||||
|
+# Copyright (C) 2005 Gerhard Haering <gh@ghaering.de>
|
||||||
|
#
|
||||||
|
# This file is part of pysqlite.
|
||||||
|
#
|
12
python-2.6-rpath.patch
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
diff -up Python-2.6/configure.ac.rpath Python-2.6/configure.ac
|
||||||
|
--- Python-2.6/configure.ac.rpath 2008-11-24 02:51:06.000000000 -0500
|
||||||
|
+++ Python-2.6/configure.ac 2008-11-24 02:51:21.000000000 -0500
|
||||||
|
@@ -729,7 +729,7 @@ if test $enable_shared = "yes"; then
|
||||||
|
;;
|
||||||
|
OSF*)
|
||||||
|
LDLIBRARY='libpython$(VERSION).so'
|
||||||
|
- BLDLIBRARY='-rpath $(LIBDIR) -L. -lpython$(VERSION)'
|
||||||
|
+ BLDLIBRARY='-L. -lpython$(VERSION)'
|
||||||
|
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
|
||||||
|
;;
|
||||||
|
atheos*)
|
20
python-2.6.4-distutils-rpath.patch
Normal file
@ -0,0 +1,20 @@
diff -up Python-2.6.4/Lib/distutils/unixccompiler.py.distutils-rpath Python-2.6.4/Lib/distutils/unixccompiler.py
--- Python-2.6.4/Lib/distutils/unixccompiler.py.distutils-rpath 2009-09-09 04:34:06.000000000 -0400
+++ Python-2.6.4/Lib/distutils/unixccompiler.py 2010-03-15 21:33:25.000000000 -0400
@@ -142,6 +142,16 @@ class UnixCCompiler(CCompiler):
if sys.platform == "cygwin":
exe_extension = ".exe"

+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+ """Remove standard library path from rpath"""
+ libraries, library_dirs, runtime_library_dirs = \
+ CCompiler._fix_lib_args(self, libraries, library_dirs,
+ runtime_library_dirs)
+ libdir = sysconfig.get_config_var('LIBDIR')
+ if runtime_library_dirs and (libdir in runtime_library_dirs):
+ runtime_library_dirs.remove(libdir)
+ return libraries, library_dirs, runtime_library_dirs
+
def preprocess(self, source,
output_file=None, macros=None, include_dirs=None,
extra_preargs=None, extra_postargs=None):
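In effect the new method drops the system library directory from any rpath a setup script requests, so distutils-built extensions do not hard-code it. A rough standalone sketch of that filtering, assuming /usr/lib64 as the libdir and made-up argument lists (the real method defers to CCompiler._fix_lib_args and reads LIBDIR from sysconfig):

libdir = '/usr/lib64'   # stand-in for sysconfig.get_config_var('LIBDIR')

def fix_lib_args(libraries, library_dirs, runtime_library_dirs):
    # Drop the standard library path from the requested rpath entries.
    if runtime_library_dirs and libdir in runtime_library_dirs:
        runtime_library_dirs.remove(libdir)
    return libraries, library_dirs, runtime_library_dirs

print(fix_lib_args(['foo'], ['/opt/foo/lib'], ['/opt/foo/lib', '/usr/lib64']))
# (['foo'], ['/opt/foo/lib'], ['/opt/foo/lib'])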
44
python-2.7-lib64-sysconfig.patch
Normal file
@ -0,0 +1,44 @@
diff -up Python-2.7/Lib/sysconfig.py.lib64-sysconfig Python-2.7/Lib/sysconfig.py
--- Python-2.7/Lib/sysconfig.py.lib64-sysconfig 2010-07-08 14:18:41.386898476 -0400
+++ Python-2.7/Lib/sysconfig.py 2010-07-08 14:22:02.837896461 -0400
@@ -7,20 +7,20 @@ from os.path import pardir, realpath

_INSTALL_SCHEMES = {
'posix_prefix': {
- 'stdlib': '{base}/lib/python{py_version_short}',
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
+ 'stdlib': '{base}/lib64/python{py_version_short}',
+ 'platstdlib': '{platbase}/lib64/python{py_version_short}',
'purelib': '{base}/lib/python{py_version_short}/site-packages',
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
+ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages',
'include': '{base}/include/python{py_version_short}',
'platinclude': '{platbase}/include/python{py_version_short}',
'scripts': '{base}/bin',
'data': '{base}',
},
'posix_home': {
- 'stdlib': '{base}/lib/python',
- 'platstdlib': '{base}/lib/python',
+ 'stdlib': '{base}/lib64/python',
+ 'platstdlib': '{base}/lib64/python',
'purelib': '{base}/lib/python',
- 'platlib': '{base}/lib/python',
+ 'platlib': '{base}/lib64/python',
'include': '{base}/include/python',
'platinclude': '{base}/include/python',
'scripts': '{base}/bin',
@@ -65,10 +65,10 @@ _INSTALL_SCHEMES = {
'data' : '{userbase}',
},
'posix_user': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
+ 'stdlib': '{userbase}/lib64/python{py_version_short}',
+ 'platstdlib': '{userbase}/lib64/python{py_version_short}',
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
+ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages',
'include': '{userbase}/include/python{py_version_short}',
'scripts': '{userbase}/bin',
'data' : '{userbase}',
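One way to see the effect of the scheme change is to ask sysconfig for the install paths: on a build carrying this patch the platform-specific entries point under lib64 while the pure-Python entries stay under lib. A small check that is runnable anywhere (the values shown in the comment are only what a Fedora-style layout would typically report; an unpatched interpreter prints something else):

import sysconfig

for key in ("stdlib", "platstdlib", "purelib", "platlib"):
    print("%s: %s" % (key, sysconfig.get_path(key)))
# e.g. platstdlib: /usr/lib64/python2.7
#      purelib:    /usr/lib/python2.7/site-packages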
284
python-2.7.1-config.patch
Normal file
@ -0,0 +1,284 @@
diff --git a/Modules/Setup.dist b/Modules/Setup.dist
index bbc9222..2cf35a9 100644
--- a/Modules/Setup.dist
+++ b/Modules/Setup.dist
@@ -153,7 +153,7 @@ GLHACK=-Dclear=__GLclear
# modules are to be built as shared libraries (see above for more
# detail; also note that *static* reverses this effect):

-#*shared*
+*shared*

# GNU readline. Unlike previous Python incarnations, GNU readline is
# now incorporated in an optional module, configured in the Setup file
@@ -163,33 +163,33 @@ GLHACK=-Dclear=__GLclear
# it, depending on your system -- see the GNU readline instructions.
# It's okay for this to be a shared library, too.

-#readline readline.c -lreadline -ltermcap
+readline readline.c -lreadline -ltermcap


# Modules that should always be present (non UNIX dependent):

-#array arraymodule.c # array objects
-#cmath cmathmodule.c _math.c # -lm # complex math library functions
-#math mathmodule.c _math.c # -lm # math library functions, e.g. sin()
-#_struct _struct.c # binary structure packing/unpacking
-#time timemodule.c # -lm # time operations and variables
-#operator operator.c # operator.add() and similar goodies
-#_testcapi _testcapimodule.c    # Python C API test module
-#_random _randommodule.c # Random number generator
-#_collections _collectionsmodule.c # Container types
+array arraymodule.c # array objects
+cmath cmathmodule.c _math.c # -lm # complex math library functions
+math mathmodule.c _math.c # -lm # math library functions, e.g. sin()
+_struct _struct.c # binary structure packing/unpacking
+time timemodule.c # -lm # time operations and variables
+operator operator.c # operator.add() and similar goodies
+_testcapi _testcapimodule.c    # Python C API test module
+_random _randommodule.c # Random number generator
+_collections _collectionsmodule.c # Container types
#_heapq _heapqmodule.c # Heapq type
-#itertools itertoolsmodule.c # Functions creating iterators for efficient looping
-#strop stropmodule.c # String manipulations
-#_functools _functoolsmodule.c # Tools for working with functions and callable objects
+itertools itertoolsmodule.c # Functions creating iterators for efficient looping
+strop stropmodule.c # String manipulations
+_functools _functoolsmodule.c # Tools for working with functions and callable objects
#_elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator
#_pickle _pickle.c # pickle accelerator
#datetime datetimemodule.c # date/time type
-#_bisect _bisectmodule.c # Bisection algorithms
+_bisect _bisectmodule.c # Bisection algorithms

-#unicodedata unicodedata.c    # static Unicode character database
+unicodedata unicodedata.c    # static Unicode character database

# access to ISO C locale support
-#_locale _localemodule.c  # -lintl
+_locale _localemodule.c  # -lintl

# Standard I/O baseline
#_io -I$(srcdir)/Modules/_io _io/bufferedio.c _io/bytesio.c _io/fileio.c _io/iobase.c _io/_iomodule.c _io/stringio.c _io/textio.c
@@ -199,41 +199,41 @@ GLHACK=-Dclear=__GLclear
# (If you have a really backward UNIX, select and socket may not be
# supported...)

-#fcntl fcntlmodule.c # fcntl(2) and ioctl(2)
-#spwd spwdmodule.c # spwd(3)
-#grp grpmodule.c # grp(3)
-#select selectmodule.c # select(2); not on ancient System V
+fcntl fcntlmodule.c # fcntl(2) and ioctl(2)
+spwd spwdmodule.c # spwd(3)
+grp grpmodule.c # grp(3)
+select selectmodule.c # select(2); not on ancient System V

# Memory-mapped files (also works on Win32).
-#mmap mmapmodule.c
+mmap mmapmodule.c

# CSV file helper
-#_csv _csv.c
+_csv _csv.c

# Socket module helper for socket(2)
-#_socket socketmodule.c timemodule.c
+_socket socketmodule.c timemodule.c

# Socket module helper for SSL support; you must comment out the other
# socket line above, and possibly edit the SSL variable:
#SSL=/usr/local/ssl
-#_ssl _ssl.c \
-# -DUSE_SSL -I$(SSL)/include -I$(SSL)/include/openssl \
-# -L$(SSL)/lib -lssl -lcrypto
+_ssl _ssl.c \
+ -DUSE_SSL -I$(SSL)/include -I$(SSL)/include/openssl \
+ -L$(SSL)/lib -lssl -lcrypto

# The crypt module is now disabled by default because it breaks builds
# on many systems (where -lcrypt is needed), e.g. Linux (I believe).
#
# First, look at Setup.config; configure may have set this for you.

-#crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems
+crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems


# Some more UNIX dependent modules -- off by default, since these
# are not supported by all UNIX systems:

-#nis nismodule.c -lnsl # Sun yellow pages -- not everywhere
-#termios termios.c # Steen Lumholt's termios module
-#resource resource.c # Jeremy Hylton's rlimit interface
+nis nismodule.c -lnsl -ltirpc -I/usr/include/tirpc -I/usr/include/nsl -L/usr/lib/nsl
+termios termios.c # Steen Lumholt's termios module
+resource resource.c # Jeremy Hylton's rlimit interface


# Multimedia modules -- off by default.
@@ -241,8 +241,8 @@ GLHACK=-Dclear=__GLclear
# #993173 says audioop works on 64-bit platforms, though.
# These represent audio samples or images as strings:

-#audioop audioop.c # Operations on audio samples
-#imageop imageop.c # Operations on images
+audioop audioop.c # Operations on audio samples
+imageop imageop.c # Operations on images


# Note that the _md5 and _sha modules are normally only built if the
@@ -252,14 +252,14 @@ GLHACK=-Dclear=__GLclear
# Message-Digest Algorithm, described in RFC 1321. The necessary files
# md5.c and md5.h are included here.

-#_md5 md5module.c md5.c
+_md5 md5module.c md5.c


# The _sha module implements the SHA checksum algorithms.
# (NIST's Secure Hash Algorithms.)
-#_sha shamodule.c
-#_sha256 sha256module.c
-#_sha512 sha512module.c
+_sha shamodule.c
+_sha256 sha256module.c
+_sha512 sha512module.c


# SGI IRIX specific modules -- off by default.
@@ -306,12 +306,12 @@ GLHACK=-Dclear=__GLclear
# A Linux specific module -- off by default; this may also work on
# some *BSDs.

-#linuxaudiodev linuxaudiodev.c
+linuxaudiodev linuxaudiodev.c


# George Neville-Neil's timing module:

-#timing timingmodule.c
+timing timingmodule.c


# The _tkinter module.
@@ -326,7 +326,7 @@ GLHACK=-Dclear=__GLclear
# every system.

# *** Always uncomment this (leave the leading underscore in!):
-# _tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \
+_tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \
# *** Uncomment and edit to reflect where your Tcl/Tk libraries are:
# -L/usr/local/lib \
# *** Uncomment and edit to reflect where your Tcl/Tk headers are:
@@ -336,7 +336,7 @@ GLHACK=-Dclear=__GLclear
# *** Or uncomment this for Solaris:
# -I/usr/openwin/include \
# *** Uncomment and edit for Tix extension only:
-# -DWITH_TIX -ltix8.1.8.2 \
+ -DWITH_TIX -ltix \
# *** Uncomment and edit for BLT extension only:
# -DWITH_BLT -I/usr/local/blt/blt8.0-unoff/include -lBLT8.0 \
# *** Uncomment and edit for PIL (TkImaging) extension only:
@@ -345,7 +345,7 @@ GLHACK=-Dclear=__GLclear
# *** Uncomment and edit for TOGL extension only:
# -DWITH_TOGL togl.c \
# *** Uncomment and edit to reflect your Tcl/Tk versions:
-# -ltk8.2 -ltcl8.2 \
+ -ltk -ltcl \
# *** Uncomment and edit to reflect where your X11 libraries are:
# -L/usr/X11R6/lib \
# *** Or uncomment this for Solaris:
@@ -355,7 +355,7 @@ GLHACK=-Dclear=__GLclear
# *** Uncomment for AIX:
# -lld \
# *** Always uncomment this; X11 libraries to link with:
-# -lX11
+ -lX11

# Lance Ellinghaus's syslog module
#syslog syslogmodule.c # syslog daemon interface
@@ -377,7 +377,7 @@ GLHACK=-Dclear=__GLclear
# it is a highly experimental and dangerous device for calling
# *arbitrary* C functions in *arbitrary* shared libraries:

-#dl dlmodule.c
+dl dlmodule.c


# Modules that provide persistent dictionary-like semantics. You will
@@ -400,7 +400,7 @@ GLHACK=-Dclear=__GLclear
#
# First, look at Setup.config; configure may have set this for you.

-#gdbm gdbmmodule.c -I/usr/local/include -L/usr/local/lib -lgdbm
+gdbm gdbmmodule.c -lgdbm


# Sleepycat Berkeley DB interface.
@@ -415,11 +415,9 @@ GLHACK=-Dclear=__GLclear
#
# Edit the variables DB and DBLIBVERto point to the db top directory
# and the subdirectory of PORT where you built it.
-#DB=/usr/local/BerkeleyDB.4.0
-#DBLIBVER=4.0
-#DBINC=$(DB)/include
-#DBLIB=$(DB)/lib
-#_bsddb _bsddb.c -I$(DBINC) -L$(DBLIB) -ldb-$(DBLIBVER)
+DBINC=/usr/include/libdb
+DBLIB=/usr/lib
+_bsddb _bsddb.c -I$(DBINC) -L$(DBLIB) -ldb

# Historical Berkeley DB 1.85
#
@@ -434,14 +432,14 @@ GLHACK=-Dclear=__GLclear


# Helper module for various ascii-encoders
-#binascii binascii.c
+binascii binascii.c

# Fred Drake's interface to the Python parser
-#parser parsermodule.c
+parser parsermodule.c

# cStringIO and cPickle
-#cStringIO cStringIO.c
-#cPickle cPickle.c
+cStringIO cStringIO.c
+cPickle cPickle.c


# Lee Busby's SIGFPE modules.
@@ -464,7 +462,7 @@ GLHACK=-Dclear=__GLclear
# Andrew Kuchling's zlib module.
# This require zlib 1.1.3 (or later).
# See http://www.gzip.org/zlib/
-#zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz
+zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz

# Interface to the Expat XML parser
# More information on Expat can be found at www.libexpat.org.
@@ -475,14 +473,14 @@ GLHACK=-Dclear=__GLclear
# Hye-Shik Chang's CJKCodecs

# multibytecodec is required for all the other CJK codec modules
-#_multibytecodec cjkcodecs/multibytecodec.c
-
-#_codecs_cn cjkcodecs/_codecs_cn.c
-#_codecs_hk cjkcodecs/_codecs_hk.c
-#_codecs_iso2022 cjkcodecs/_codecs_iso2022.c
-#_codecs_jp cjkcodecs/_codecs_jp.c
-#_codecs_kr cjkcodecs/_codecs_kr.c
-#_codecs_tw cjkcodecs/_codecs_tw.c
+_multibytecodec cjkcodecs/multibytecodec.c
+
+_codecs_cn cjkcodecs/_codecs_cn.c
+_codecs_hk cjkcodecs/_codecs_hk.c
+_codecs_iso2022 cjkcodecs/_codecs_iso2022.c
+_codecs_jp cjkcodecs/_codecs_jp.c
+_codecs_kr cjkcodecs/_codecs_kr.c
+_codecs_tw cjkcodecs/_codecs_tw.c

# Example -- included for reference only:
# xx xxmodule.c
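With *shared* enabled and these entries uncommented, the listed modules are built as separate shared objects rather than compiled into the interpreter. A quick, hedged way to check how a given module ended up being built (the module choice is just an example; a statically linked or built-in module has no __file__):

import _bisect

# Extension modules built as shared objects report a filesystem path;
# built-in ones fall back to the default string here.
print(getattr(_bisect, "__file__", "built into the interpreter"))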
27
python-2.7.1-fix_test_abc_with_COUNT_ALLOCS.patch
Normal file
@ -0,0 +1,27 @@
diff -up Python-2.7.1/Lib/test/test_abc.py.cache_leak Python-2.7.1/Lib/test/test_abc.py
--- Python-2.7.1/Lib/test/test_abc.py.cache_leak 2010-12-28 18:06:35.551938356 -0500
+++ Python-2.7.1/Lib/test/test_abc.py 2010-12-28 18:09:09.021059202 -0500
@@ -3,6 +3,8 @@

"""Unit tests for abc.py."""

+import sys
+
import unittest, weakref
from test import test_support

@@ -229,8 +231,12 @@ class TestABC(unittest.TestCase):
# Trigger cache.
C().f()
del C
- test_support.gc_collect()
- self.assertEqual(r(), None)
+ # This doesn't work in our debug build, presumably due to its use
+ # of COUNT_ALLOCS, which makes heap-allocated types immortal (once
+ # they've ever had an instance):
+ if not hasattr(sys, 'getcounts'):
+ test_support.gc_collect()
+ self.assertEqual(r(), None)

def test_main():
test_support.run_unittest(TestABC)
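The guard works because sys.getcounts() only exists on interpreters compiled with COUNT_ALLOCS, so the weakref assertion is skipped exactly on those debug builds. A tiny illustration of the same feature test, assuming nothing beyond the standard library:

import sys

if not hasattr(sys, 'getcounts'):
    print("regular build: heap types can be collected, run the weakref check")
else:
    print("COUNT_ALLOCS build: heap types stay immortal, skip the check")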
18
python-2.7.2-add-extension-suffix-to-python-config.patch
Normal file
@ -0,0 +1,18 @@
diff -up Python-2.7.2/Misc/python-config.in.add-extension-suffix-to-python-config Python-2.7.2/Misc/python-config.in
--- Python-2.7.2/Misc/python-config.in.add-extension-suffix-to-python-config 2011-08-23 18:15:41.832497124 -0400
+++ Python-2.7.2/Misc/python-config.in 2011-08-23 18:17:25.854490011 -0400
@@ -6,7 +6,7 @@ import getopt
from distutils import sysconfig

valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
- 'ldflags', 'help']
+ 'ldflags', 'extension-suffix', 'help']

def exit_with_usage(code=1):
print >>sys.stderr, "Usage: %s [%s]" % (sys.argv[0],
@@ -54,3 +54,5 @@ for opt in opt_flags:
libs.extend(getvar('LINKFORSHARED').split())
print ' '.join(libs)

+ elif opt == '--extension-suffix':
+ print (sys.pydebug and "_d" or "") + sysconfig.get_config_var('SO')
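The new option just prints the platform's extension-module suffix, with a "_d" prefix on the downstream debug build. A rough equivalent that stays runnable on a stock interpreter (sys.pydebug is a downstream-only attribute, hence the getattr; 'SO' is the Python 2 name of the config variable, newer interpreters call it 'EXT_SUFFIX'):

import sys
import sysconfig

so = sysconfig.get_config_var("SO") or sysconfig.get_config_var("EXT_SUFFIX")
print(("_d" if getattr(sys, "pydebug", False) else "") + so)
# typically ".so", or "_d.so" on the debug build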
64
python-2.7rc1-socketmodule-constants.patch
Normal file
@ -0,0 +1,64 @@
--- Python-2.7rc1/Modules/socketmodule.c.socketmodule 2010-05-09 10:46:46.000000000 -0400
+++ Python-2.7rc1/Modules/socketmodule.c 2010-06-07 23:04:19.374234780 -0400
@@ -4783,6 +4783,61 @@ init_socket(void)
PyModule_AddIntConstant(m, "SO_SETFIB", SO_SETFIB);
#endif

+#ifdef SO_SNDBUFFORCE
+ PyModule_AddIntConstant(m, "SO_SNDBUFFORCE", SO_SNDBUFFORCE);
+#endif
+#ifdef SO_RCVBUFFORCE
+ PyModule_AddIntConstant(m, "SO_RCVBUFFORCE", SO_RCVBUFFORCE);
+#endif
+#ifdef SO_NO_CHECK
+ PyModule_AddIntConstant(m, "SO_NO_CHECK", SO_NO_CHECK);
+#endif
+#ifdef SO_PRIORITY
+ PyModule_AddIntConstant(m, "SO_PRIORITY", SO_PRIORITY);
+#endif
+#ifdef SO_BSDCOMPAT
+ PyModule_AddIntConstant(m, "SO_BSDCOMPAT", SO_BSDCOMPAT);
+#endif
+#ifdef SO_PASSCRED
+ PyModule_AddIntConstant(m, "SO_PASSCRED", SO_PASSCRED);
+#endif
+#ifdef SO_PEERCRED
+ PyModule_AddIntConstant(m, "SO_PEERCRED", SO_PEERCRED);
+#endif
+#ifdef SO_SECURITY_AUTHENTICATION
+ PyModule_AddIntConstant(m, "SO_SECURITY_AUTHENTICATION", SO_SECURITY_AUTHENTICATION);
+#endif
+#ifdef SO_SECURITY_ENCRYPTION_TRANSPORT
+ PyModule_AddIntConstant(m, "SO_SECURITY_ENCRYPTION_TRANSPORT", SO_SECURITY_ENCRYPTION_TRANSPORT);
+#endif
+#ifdef SO_SECURITY_ENCRYPTION_NETWORK
+ PyModule_AddIntConstant(m, "SO_SECURITY_ENCRYPTION_NETWORK", SO_SECURITY_ENCRYPTION_NETWORK);
+#endif
+#ifdef SO_BINDTODEVICE
+ PyModule_AddIntConstant(m, "SO_BINDTODEVICE", SO_BINDTODEVICE);
+#endif
+#ifdef SO_ATTACH_FILTER
+ PyModule_AddIntConstant(m, "SO_ATTACH_FILTER", SO_ATTACH_FILTER);
+#endif
+#ifdef SO_DETACH_FILTER
+ PyModule_AddIntConstant(m, "SO_DETACH_FILTER", SO_DETACH_FILTER);
+#endif
+#ifdef SO_PEERNAME
+ PyModule_AddIntConstant(m, "SO_PEERNAME", SO_PEERNAME);
+#endif
+#ifdef SO_TIMESTAMP
+ PyModule_AddIntConstant(m, "SO_TIMESTAMP", SO_TIMESTAMP);
+#endif
+#ifdef SO_PEERSEC
+ PyModule_AddIntConstant(m, "SO_PEERSEC", SO_PEERSEC);
+#endif
+#ifdef SO_PASSSEC
+ PyModule_AddIntConstant(m, "SO_PASSSEC", SO_PASSSEC);
+#endif
+#ifdef SO_TIMESTAMPNS
+ PyModule_AddIntConstant(m, "SO_TIMESTAMPNS", SO_TIMESTAMPNS);
+#endif
+
/* Maximum number of connections for "listen" */
#ifdef SOMAXCONN
PyModule_AddIntConstant(m, "SOMAXCONN", SOMAXCONN);
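Each constant is exported only when the platform headers define it, so Python code should treat these names as optional attributes of the socket module. A small, hedged availability check (SO_PEERCRED is just an example; it is typically present on Linux and absent elsewhere):

import socket

peercred = getattr(socket, "SO_PEERCRED", None)
if peercred is None:
    print("SO_PEERCRED not exposed on this platform/build")
else:
    print("SO_PEERCRED = %d" % peercred)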
19
python-2.7rc1-socketmodule-constants2.patch
Normal file
@ -0,0 +1,19 @@
diff -up Python-2.7rc1/Modules/socketmodule.c.socketmodule2 Python-2.7rc1/Modules/socketmodule.c
--- Python-2.7rc1/Modules/socketmodule.c.socketmodule2 2010-06-07 23:06:59.133498087 -0400
+++ Python-2.7rc1/Modules/socketmodule.c 2010-06-07 23:11:51.249520087 -0400
@@ -5253,6 +5253,15 @@ init_socket(void)
#ifdef TCP_QUICKACK
PyModule_AddIntConstant(m, "TCP_QUICKACK", TCP_QUICKACK);
#endif
+#ifdef TCP_CONGESTION
+ PyModule_AddIntConstant(m, "TCP_CONGESTION", TCP_CONGESTION);
+#endif
+#ifdef TCP_MD5SIG
+ PyModule_AddIntConstant(m, "TCP_MD5SIG", TCP_MD5SIG);
+#endif
+#ifdef TCP_MD5SIG_MAXKEYLEN
+ PyModule_AddIntConstant(m, "TCP_MD5SIG_MAXKEYLEN", TCP_MD5SIG_MAXKEYLEN);
+#endif


/* IPX options */
3989
python2.spec
Normal file
File diff suppressed because it is too large
32
pythondeps.sh
Executable file
@ -0,0 +1,32 @@
#!/bin/bash

[ $# -ge 1 ] || {
cat > /dev/null
exit 0
}

case $1 in
-P|--provides)
shift
# Match buildroot/payload paths of the form
# /PATH/OF/BUILDROOT/usr/bin/pythonMAJOR.MINOR
# generating a line of the form
# python(abi) = MAJOR.MINOR
# (Don't match against -config tools e.g. /usr/bin/python2.6-config)
grep "/usr/bin/python.\..$" \
| sed -e "s|.*/usr/bin/python\(.\..\)|python(abi) = \1|"
;;
-R|--requires)
shift
# Match buildroot paths of the form
# /PATH/OF/BUILDROOT/usr/lib/pythonMAJOR.MINOR/ and
# /PATH/OF/BUILDROOT/usr/lib64/pythonMAJOR.MINOR/
# generating (uniqely) lines of the form:
# python(abi) = MAJOR.MINOR
grep "/usr/lib[^/]*/python.\../.*" \
| sed -e "s|.*/usr/lib[^/]*/python\(.\..\)/.*|python(abi) = \1|g" \
| sort | uniq
;;
esac

exit 0
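The script reads file names on stdin and turns interpreter and library paths into python(abi) provides/requires for RPM's dependency generator. A rough Python rendering of the two grep/sed pipelines, using made-up buildroot paths purely for illustration:

import re

paths = [
    "/builddir/BUILDROOT/pkg-1.0/usr/bin/python2.7",
    "/builddir/BUILDROOT/pkg-1.0/usr/lib64/python2.7/site-packages/foo.py",
]

provides = sorted(set(
    re.sub(r".*/usr/bin/python(.\..)$", r"python(abi) = \1", p)
    for p in paths if re.search(r"/usr/bin/python.\..$", p)))
requires = sorted(set(
    re.sub(r".*/usr/lib[^/]*/python(.\..)/.*", r"python(abi) = \1", p)
    for p in paths if re.search(r"/usr/lib[^/]*/python.\../", p)))

print(provides)   # ['python(abi) = 2.7']
print(requires)   # ['python(abi) = 2.7']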
1
sources
Normal file
@ -0,0 +1 @@
SHA1 (Python-2.7.18-noexe.tar.xz) = ce5e27d588d635469bdec487c4b1def2ffa84ba2
19
systemtap-example.stp
Normal file
@ -0,0 +1,19 @@
/*
Example usage of the Python systemtap tapset to show a nested view of all
Python function calls (and returns) across the whole system.

Run this using
stap systemtap-example.stp
to instrument all Python processes on the system, or (for example) using
stap systemtap-example.stp -c COMMAND
to instrument a specific program (implemented in Python)
*/
probe python.function.entry
{
printf("%s => %s in %s:%d\n", thread_indent(1), funcname, filename, lineno);
}

probe python.function.return
{
printf("%s <= %s in %s:%d\n", thread_indent(-1), funcname, filename, lineno);
}