import python2-2.7.17-1.el8

CentOS Sources authored 2020-06-18 20:28:32 +00:00, committed by Andrew Lukoshko
commit b5fe3c635f
62 changed files with 8695 additions and 0 deletions

.gitignore vendored Normal file

@@ -0,0 +1 @@
SOURCES/Python-2.7.17-noexe.tar.xz

.python2.metadata Normal file

@@ -0,0 +1 @@
e63124a9a86b4b52c09384915a0842adf00b9d45 SOURCES/Python-2.7.17-noexe.tar.xz


@@ -0,0 +1,28 @@
diff -up Python-2.7.3/Lib/pydoc.py.no_gui Python-2.7.3/Lib/pydoc.py
--- Python-2.7.3/Lib/pydoc.py.no_gui 2012-04-09 19:07:31.000000000 -0400
+++ Python-2.7.3/Lib/pydoc.py 2013-02-19 13:48:44.480054515 -0500
@@ -19,9 +19,6 @@ of all available modules.
local machine to generate documentation web pages. Port number 0 can be
used to get an arbitrary unused port.
-For platforms without a command line, "pydoc -g" starts the HTTP server
-and also pops up a little window for controlling it.
-
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
@@ -2346,13 +2340,10 @@ def cli():
Start an HTTP server on the given port on the local machine. Port
number 0 can be used to get an arbitrary unused port.
-%s -g
- Pop up a graphical interface for finding and serving documentation.
-
%s -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '%s', it is treated as a filename; if
it names a directory, documentation is written for all the contents.
-""" % (cmd, os.sep, cmd, cmd, cmd, cmd, os.sep)
+""" % (cmd, os.sep, cmd, cmd, cmd, os.sep)
if __name__ == '__main__': cli()


@@ -0,0 +1,21 @@
diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py
index ab10ec5..923d1b7 100644
--- a/Lib/ctypes/util.py
+++ b/Lib/ctypes/util.py
@@ -140,11 +140,15 @@ elif os.name == "posix":
# assuming GNU binutils / ELF
if not f:
return None
- cmd = 'if ! type objdump >/dev/null 2>&1; then exit; fi;' \
+ cmd = 'if ! type objdump >/dev/null 2>&1; then exit 10; fi;' \
'objdump -p -j .dynamic 2>/dev/null "$1"'
proc = subprocess.Popen((cmd, '_get_soname', f), shell=True,
stdout=subprocess.PIPE)
[dump, _] = proc.communicate()
+ if proc.returncode == 10:
+ return os.path.basename(f) # This is good for GLibc, I think,
+ # and a dep on binutils is big (for
+ # live CDs).
res = re.search(br'\sSONAME\s+([^\s]+)', dump)
if not res:
return None
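The hunk above hardens the objdump-based fallback inside ctypes.util.find_library(): when objdump is missing, the helper command exits with status 10 and the basename of the library file is returned instead of None, which matters on minimal systems such as live CDs without binutils. An illustrative call (the exact resolution path and return value depend on the local toolchain):

    from ctypes.util import find_library
    # find_library() tries ldconfig first and only falls back to the
    # gcc + objdump path patched above when that fails; with the patch the
    # fallback no longer requires objdump to be installed.
    print(find_library("c"))   # e.g. "libc.so.6"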


@@ -0,0 +1,198 @@
diff -up Python-2.7rc1/configure.ac.systemtap Python-2.7rc1/configure.ac
--- Python-2.7rc1/configure.ac.systemtap 2010-06-06 10:53:15.514975012 -0400
+++ Python-2.7rc1/configure.ac 2010-06-06 10:53:15.520974361 -0400
@@ -2616,6 +2616,38 @@ if test "$with_valgrind" != no; then
)
fi
+# Check for dtrace support
+AC_MSG_CHECKING(for --with-dtrace)
+AC_ARG_WITH(dtrace,
+ AC_HELP_STRING(--with(out)-dtrace, disable/enable dtrace support))
+
+if test ! -z "$with_dtrace"
+then
+ if dtrace -G -o /dev/null -s $srcdir/Include/pydtrace.d 2>/dev/null
+ then
+ AC_DEFINE(WITH_DTRACE, 1,
+ [Define if you want to compile in Dtrace support])
+ with_dtrace="Sun"
+ DTRACEOBJS="Python/dtrace.o"
+ DTRADEHDRS=""
+ elif dtrace -h -o /dev/null -s $srcdir/Include/pydtrace.d
+ then
+ AC_DEFINE(WITH_DTRACE, 1,
+ [Define if you want to compile in Dtrace support])
+ with_dtrace="Apple"
+ DTRACEOBJS=""
+ DTRADEHDRS="pydtrace.h"
+ else
+ with_dtrace="no"
+ fi
+else
+ with_dtrace="no"
+fi
+
+AC_MSG_RESULT($with_dtrace)
+AC_SUBST(DTRACEOBJS)
+AC_SUBST(DTRACEHDRS)
+
# Check for --with-wctype-functions
AC_MSG_CHECKING(for --with-wctype-functions)
AC_ARG_WITH(wctype-functions,
diff -up Python-2.7rc1/Include/pydtrace.d.systemtap Python-2.7rc1/Include/pydtrace.d
--- Python-2.7rc1/Include/pydtrace.d.systemtap 2010-06-06 10:53:15.520974361 -0400
+++ Python-2.7rc1/Include/pydtrace.d 2010-06-06 10:53:15.520974361 -0400
@@ -0,0 +1,10 @@
+provider python {
+ probe function__entry(const char *, const char *, int);
+ probe function__return(const char *, const char *, int);
+};
+
+#pragma D attributes Evolving/Evolving/Common provider python provider
+#pragma D attributes Private/Private/Common provider python module
+#pragma D attributes Private/Private/Common provider python function
+#pragma D attributes Evolving/Evolving/Common provider python name
+#pragma D attributes Evolving/Evolving/Common provider python args
diff -up Python-2.7rc1/Makefile.pre.in.systemtap Python-2.7rc1/Makefile.pre.in
--- Python-2.7rc1/Makefile.pre.in.systemtap 2010-06-06 10:53:15.488978775 -0400
+++ Python-2.7rc1/Makefile.pre.in 2010-06-06 11:05:30.411100568 -0400
@@ -298,6 +298,7 @@ PYTHON_OBJS= \
Python/formatter_unicode.o \
Python/formatter_string.o \
Python/$(DYNLOADFILE) \
+ @DTRACEOBJS@ \
$(LIBOBJS) \
$(MACHDEP_OBJS) \
$(THREADOBJ)
@@ -599,6 +600,18 @@ Python/formatter_unicode.o: $(srcdir)/Py
Python/formatter_string.o: $(srcdir)/Python/formatter_string.c \
$(STRINGLIB_HEADERS)
+# Only needed with --with-dtrace
+buildinclude:
+ mkdir -p Include
+
+Include/pydtrace.h: buildinclude $(srcdir)/Include/pydtrace.d
+ dtrace -o $@ $(DFLAGS) -C -h -s $(srcdir)/Include/pydtrace.d
+
+Python/ceval.o: Include/pydtrace.h
+
+Python/dtrace.o: buildinclude $(srcdir)/Include/pydtrace.d Python/ceval.o
+ dtrace -o $@ $(DFLAGS) -C -G -s $(srcdir)/Include/pydtrace.d Python/ceval.o
+
############################################################################
# Header files
@@ -1251,7 +1264,7 @@ Python/thread.o: @THREADHEADERS@
.PHONY: frameworkinstall frameworkinstallframework frameworkinstallstructure
.PHONY: frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools
.PHONY: frameworkaltinstallunixtools recheck clean clobber distclean
-.PHONY: smelly funny patchcheck altmaninstall commoninstall
+.PHONY: smelly funny patchcheck altmaninstall commoninstall buildinclude
.PHONY: gdbhooks
# IF YOU PUT ANYTHING HERE IT WILL GO AWAY
diff -up Python-2.7rc1/pyconfig.h.in.systemtap Python-2.7rc1/pyconfig.h.in
--- Python-2.7rc1/pyconfig.h.in.systemtap 2010-05-08 07:04:18.000000000 -0400
+++ Python-2.7rc1/pyconfig.h.in 2010-06-06 10:53:15.521974070 -0400
@@ -1074,6 +1074,9 @@
/* Define if you want documentation strings in extension modules */
#undef WITH_DOC_STRINGS
+/* Define if you want to compile in Dtrace support */
+#undef WITH_DTRACE
+
/* Define if you want to use the new-style (Openstep, Rhapsody, MacOS) dynamic
linker (dyld) instead of the old-style (NextStep) dynamic linker (rld).
Dyld is necessary to support frameworks. */
diff -up Python-2.7rc1/Python/ceval.c.systemtap Python-2.7rc1/Python/ceval.c
--- Python-2.7rc1/Python/ceval.c.systemtap 2010-05-09 10:46:46.000000000 -0400
+++ Python-2.7rc1/Python/ceval.c 2010-06-06 11:08:40.683100500 -0400
@@ -19,6 +19,10 @@
#include <ctype.h>
+#ifdef WITH_DTRACE
+#include "pydtrace.h"
+#endif
+
#ifndef WITH_TSC
#define READ_TIMESTAMP(var)
@@ -671,6 +675,55 @@ PyEval_EvalCode(PyCodeObject *co, PyObje
NULL);
}
+#ifdef WITH_DTRACE
+static void
+dtrace_entry(PyFrameObject *f)
+{
+ const char *filename;
+ const char *fname;
+ int lineno;
+
+ filename = PyString_AsString(f->f_code->co_filename);
+ fname = PyString_AsString(f->f_code->co_name);
+ lineno = PyCode_Addr2Line(f->f_code, f->f_lasti);
+
+ PYTHON_FUNCTION_ENTRY((char *)filename, (char *)fname, lineno);
+
+ /*
+ * Currently a USDT tail-call will not receive the correct arguments.
+ * Disable the tail call here.
+ */
+#if defined(__sparc)
+ asm("nop");
+#endif
+}
+
+static void
+dtrace_return(PyFrameObject *f)
+{
+ const char *filename;
+ const char *fname;
+ int lineno;
+
+ filename = PyString_AsString(f->f_code->co_filename);
+ fname = PyString_AsString(f->f_code->co_name);
+ lineno = PyCode_Addr2Line(f->f_code, f->f_lasti);
+ PYTHON_FUNCTION_RETURN((char *)filename, (char *)fname, lineno);
+
+ /*
+ * Currently a USDT tail-call will not receive the correct arguments.
+ * Disable the tail call here.
+ */
+#if defined(__sparc)
+ asm("nop");
+#endif
+}
+#else
+#define PYTHON_FUNCTION_ENTRY_ENABLED() 0
+#define PYTHON_FUNCTION_RETURN_ENABLED() 0
+#define dtrace_entry(f)
+#define dtrace_return(f)
+#endif
/* Interpreter main loop */
@@ -909,6 +962,9 @@ PyEval_EvalFrameEx(PyFrameObject *f, int
}
}
+ if (PYTHON_FUNCTION_ENTRY_ENABLED())
+ dtrace_entry(f);
+
co = f->f_code;
names = co->co_names;
consts = co->co_consts;
@@ -3000,6 +3056,9 @@ fast_yield:
/* pop frame */
exit_eval_frame:
+ if (PYTHON_FUNCTION_RETURN_ENABLED())
+ dtrace_return(f);
+
Py_LeaveRecursiveCall();
tstate->frame = f->f_back;


@@ -0,0 +1,193 @@
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index b9f1c6c..7b23714 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -42,14 +42,14 @@ else:
INSTALL_SCHEMES = {
'unix_prefix': {
'purelib': '$base/lib/python$py_version_short/site-packages',
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
+ 'platlib': '$platbase/lib64/python$py_version_short/site-packages',
'headers': '$base/include/python$py_version_short/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
},
'unix_home': {
'purelib': '$base/lib/python',
- 'platlib': '$base/lib/python',
+ 'platlib': '$base/lib64/python',
'headers': '$base/include/python/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 031f809..ec5d584 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -120,8 +120,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
prefix = plat_specific and EXEC_PREFIX or PREFIX
if os.name == "posix":
+ if plat_specific or standard_lib:
+ lib = "lib64"
+ else:
+ lib = "lib"
libpython = os.path.join(prefix,
- "lib", "python" + get_python_version())
+ lib, "python" + get_python_version())
if standard_lib:
return libpython
else:
diff --git a/Lib/site.py b/Lib/site.py
index c360802..868b7cb 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -288,12 +288,16 @@ def getsitepackages():
if sys.platform in ('os2emx', 'riscos'):
sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
elif os.sep == '/':
+ sitepackages.append(os.path.join(prefix, "lib64",
+ "python" + sys.version[:3],
+ "site-packages"))
sitepackages.append(os.path.join(prefix, "lib",
"python" + sys.version[:3],
"site-packages"))
sitepackages.append(os.path.join(prefix, "lib", "site-python"))
else:
sitepackages.append(prefix)
+ sitepackages.append(os.path.join(prefix, "lib64", "site-packages"))
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
return sitepackages
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
index b4384ee..349f688 100644
--- a/Lib/test/test_site.py
+++ b/Lib/test/test_site.py
@@ -254,17 +254,20 @@ class HelperFunctionsTests(unittest.TestCase):
self.assertEqual(dirs[0], wanted)
elif os.sep == '/':
# OS X, Linux, FreeBSD, etc
- self.assertEqual(len(dirs), 2)
- wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3],
+ self.assertEqual(len(dirs), 3)
+ wanted = os.path.join('xoxo', 'lib64', 'python' + sys.version[:3],
'site-packages')
self.assertEqual(dirs[0], wanted)
- wanted = os.path.join('xoxo', 'lib', 'site-python')
+ wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3],
+ 'site-packages')
self.assertEqual(dirs[1], wanted)
+ wanted = os.path.join('xoxo', 'lib', 'site-python')
+ self.assertEqual(dirs[2], wanted)
else:
# other platforms
self.assertEqual(len(dirs), 2)
self.assertEqual(dirs[0], 'xoxo')
- wanted = os.path.join('xoxo', 'lib', 'site-packages')
+ wanted = os.path.join('xoxo', 'lib64', 'site-packages')
self.assertEqual(dirs[1], wanted)
def test_no_home_directory(self):
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 4f59dd3..877698c 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -110,7 +110,7 @@ LIBDIR= @libdir@
MANDIR= @mandir@
INCLUDEDIR= @includedir@
CONFINCLUDEDIR= $(exec_prefix)/include
-SCRIPTDIR= $(prefix)/lib
+SCRIPTDIR= $(prefix)/lib64
# Detailed destination directories
BINLIBDEST= $(LIBDIR)/python$(VERSION)
diff --git a/Modules/Setup.dist b/Modules/Setup.dist
index 2cf35a9..c4c88cb 100644
--- a/Modules/Setup.dist
+++ b/Modules/Setup.dist
@@ -231,7 +231,7 @@ crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems
# Some more UNIX dependent modules -- off by default, since these
# are not supported by all UNIX systems:
-nis nismodule.c -lnsl -ltirpc -I/usr/include/tirpc -I/usr/include/nsl -L/usr/lib/nsl
+nis nismodule.c -lnsl -ltirpc -I/usr/include/tirpc -I/usr/include/nsl -L/usr/lib64/nsl
termios termios.c # Steen Lumholt's termios module
resource resource.c # Jeremy Hylton's rlimit interface
@@ -416,7 +416,7 @@ gdbm gdbmmodule.c -lgdbm
# Edit the variables DB and DBLIBVERto point to the db top directory
# and the subdirectory of PORT where you built it.
DBINC=/usr/include/libdb
-DBLIB=/usr/lib
+DBLIB=/usr/lib64
_bsddb _bsddb.c -I$(DBINC) -L$(DBLIB) -ldb
# Historical Berkeley DB 1.85
@@ -462,7 +462,7 @@ cPickle cPickle.c
# Andrew Kuchling's zlib module.
# This require zlib 1.1.3 (or later).
# See http://www.gzip.org/zlib/
-zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz
+zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib64 -lz
# Interface to the Expat XML parser
# More information on Expat can be found at www.libexpat.org.
diff --git a/Modules/getpath.c b/Modules/getpath.c
index fd33a01..c5c86fd 100644
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -108,7 +108,7 @@ static char prefix[MAXPATHLEN+1];
static char exec_prefix[MAXPATHLEN+1];
static char progpath[MAXPATHLEN+1];
static char *module_search_path = NULL;
-static char lib_python[] = "lib/python" VERSION;
+static char lib_python[] = "lib64/python" VERSION;
static void
reduce(char *dir)
@@ -548,7 +548,7 @@ calculate_path(void)
fprintf(stderr,
"Could not find platform dependent libraries <exec_prefix>\n");
strncpy(exec_prefix, EXEC_PREFIX, MAXPATHLEN);
- joinpath(exec_prefix, "lib/lib-dynload");
+ joinpath(exec_prefix, "lib64/lib-dynload");
}
/* If we found EXEC_PREFIX do *not* reduce it! (Yet.) */
diff --git a/setup.py b/setup.py
index 0288a6b..7905f6f 100644
--- a/setup.py
+++ b/setup.py
@@ -456,7 +456,7 @@ class PyBuildExt(build_ext):
def detect_modules(self):
# Ensure that /usr/local is always used
if not cross_compiling:
- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
+ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64')
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
if cross_compiling:
self.add_gcc_paths()
@@ -782,11 +782,11 @@ class PyBuildExt(build_ext):
elif curses_library:
readline_libs.append(curses_library)
elif self.compiler.find_library_file(lib_dirs +
- ['/usr/lib/termcap'],
+ ['/usr/lib64/termcap'],
'termcap'):
readline_libs.append('termcap')
exts.append( Extension('readline', ['readline.c'],
- library_dirs=['/usr/lib/termcap'],
+ library_dirs=['/usr/lib64/termcap'],
extra_link_args=readline_extra_link_args,
libraries=readline_libs) )
else:
@@ -821,8 +821,8 @@ class PyBuildExt(build_ext):
if krb5_h:
ssl_incs += krb5_h
ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
- ['/usr/local/ssl/lib',
- '/usr/contrib/ssl/lib/'
+ ['/usr/local/ssl/lib64',
+ '/usr/contrib/ssl/lib64/'
] )
if (ssl_incs is not None and
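Taken together, the lib64 changes above point platform-specific installs, search paths, and linker defaults at lib64 on 64-bit builds. A quick interactive check of the distutils side of it, assuming an x86_64 build of this package (paths are illustrative):

    from distutils import sysconfig
    # Compiled (platform-specific) modules land under lib64 with this patch,
    # pure-Python packages stay under lib:
    print(sysconfig.get_python_lib(plat_specific=1))  # .../lib64/python2.7/site-packages
    print(sysconfig.get_python_lib(plat_specific=0))  # .../lib/python2.7/site-packages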


@@ -0,0 +1,13 @@
--- Python-2.7.2/Lib/distutils/tests/test_install.py.lib64 2011-09-08 17:51:57.851405376 -0400
+++ Python-2.7.2/Lib/distutils/tests/test_install.py 2011-09-08 18:40:46.754205096 -0400
@@ -41,8 +41,9 @@ class InstallTestCase(support.TempdirMan
self.assertEqual(got, expected)
libdir = os.path.join(destination, "lib", "python")
+ platlibdir = os.path.join(destination, "lib64", "python")
check_path(cmd.install_lib, libdir)
- check_path(cmd.install_platlib, libdir)
+ check_path(cmd.install_platlib, platlibdir)
check_path(cmd.install_purelib, libdir)
check_path(cmd.install_headers,
os.path.join(destination, "include", "python", "foopkg"))


@@ -0,0 +1,50 @@
diff -up Python-2.7.6/Makefile.pre.in.no-static-lib Python-2.7.6/Makefile.pre.in
--- Python-2.7.6/Makefile.pre.in.no-static-lib 2014-01-29 13:58:32.933226720 +0100
+++ Python-2.7.6/Makefile.pre.in 2014-01-29 14:10:25.002247272 +0100
@@ -437,7 +437,7 @@ coverage:
# Build the interpreter
-$(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY)
+$(BUILDPYTHON): Modules/python.o $(LDLIBRARY)
$(LINKCC) $(CFLAGS) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
Modules/python.o \
$(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
@@ -464,18 +464,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.tx
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
-# Build static library
-# avoid long command lines, same as LIBRARY_OBJS
-$(LIBRARY): $(LIBRARY_OBJS)
- -rm -f $@
- $(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o
- $(AR) $(ARFLAGS) $@ $(PARSER_OBJS)
- $(AR) $(ARFLAGS) $@ $(OBJECT_OBJS)
- $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS)
- $(AR) $(ARFLAGS) $@ $(MODULE_OBJS) $(SIGNAL_OBJS)
- $(AR) $(ARFLAGS) $@ $(MODOBJS)
- $(RANLIB) $@
-
libpython$(VERSION).so: $(LIBRARY_OBJS)
if test $(INSTSONAME) != $(LDLIBRARY); then \
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
@@ -1097,18 +1085,6 @@ libainstall: all python-config
else true; \
fi; \
done
- @if test -d $(LIBRARY); then :; else \
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
- if test "$(SO)" = .dll; then \
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
- else \
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- $(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- fi; \
- else \
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \
- fi; \
- fi
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
$(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in


@@ -0,0 +1,324 @@
From 898f93aa206e577dfe854c59bc62d0cea09cd5ed Mon Sep 17 00:00:00 2001
From: Tomas Orsava <torsava@redhat.com>
Date: Tue, 10 Jan 2017 16:19:50 +0100
Subject: [PATCH] Patch to support building both optimized vs debug stacks DSO
ABIs,
sharing the same .py and .pyc files, using "_d.so" to signify a debug build of
an extension module.
---
Lib/distutils/command/build_ext.py | 7 ++++-
Lib/distutils/sysconfig.py | 5 ++--
Lib/distutils/tests/test_install.py | 3 +-
Makefile.pre.in | 56 ++++++++++++++++++++-----------------
Misc/python-config.in | 2 +-
Modules/makesetup | 2 +-
Python/dynload_shlib.c | 11 ++++++--
Python/sysmodule.c | 6 ++++
configure.ac | 14 ++++++++--
9 files changed, 69 insertions(+), 37 deletions(-)
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index 2c68be3..029d144 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -677,7 +677,10 @@ class build_ext (Command):
so_ext = get_config_var('SO')
if os.name == 'nt' and self.debug:
return os.path.join(*ext_path) + '_d' + so_ext
- return os.path.join(*ext_path) + so_ext
+
+ # Similarly, extensions in debug mode are named 'module_d.so', to
+ # avoid adding the _d to the SO config variable:
+ return os.path.join(*ext_path) + (sys.pydebug and "_d" or "") + so_ext
def get_export_symbols (self, ext):
"""Return the list of symbols that a shared extension has to
@@ -762,6 +765,8 @@ class build_ext (Command):
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ if sys.pydebug:
+ pythonlib += '_d'
return ext.libraries + [pythonlib]
else:
return ext.libraries
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 3e7f077..ec5d584 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -90,7 +90,8 @@ def get_python_inc(plat_specific=0, prefix=None):
# Include is located in the srcdir
inc_dir = os.path.join(srcdir, "Include")
return inc_dir
- return os.path.join(prefix, "include", "python" + get_python_version())
+ return os.path.join(prefix, "include",
+ "python" + get_python_version() + (sys.pydebug and '-debug' or ''))
elif os.name == "nt":
return os.path.join(prefix, "include")
elif os.name == "os2":
@@ -248,7 +249,7 @@ def get_makefile_filename():
if python_build:
return os.path.join(project_base, "Makefile")
lib_dir = get_python_lib(plat_specific=1, standard_lib=1)
- return os.path.join(lib_dir, "config", "Makefile")
+ return os.path.join(lib_dir, "config" + (sys.pydebug and "-debug" or ""), "Makefile")
def parse_config_h(fp, g=None):
diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
index 78fac46..d1d0931 100644
--- a/Lib/distutils/tests/test_install.py
+++ b/Lib/distutils/tests/test_install.py
@@ -20,8 +20,9 @@ from distutils.tests import support
def _make_ext_name(modname):
- if os.name == 'nt' and sys.executable.endswith('_d.exe'):
+ if sys.pydebug:
modname += '_d'
+
return modname + sysconfig.get_config_var('SO')
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 997a2fc..467e782 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -116,8 +116,8 @@ SCRIPTDIR= $(prefix)/lib64
# Detailed destination directories
BINLIBDEST= $(LIBDIR)/python$(VERSION)
LIBDEST= $(SCRIPTDIR)/python$(VERSION)
-INCLUDEPY= $(INCLUDEDIR)/python$(VERSION)
-CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(VERSION)
+INCLUDEPY= $(INCLUDEDIR)/python$(VERSION)$(DEBUG_SUFFIX)
+CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(VERSION)$(DEBUG_SUFFIX)
LIBP= $(LIBDIR)/python$(VERSION)
# Symbols used for using shared libraries
@@ -131,6 +131,12 @@ DESTSHARED= $(BINLIBDEST)/lib-dynload
EXE= @EXEEXT@
BUILDEXE= @BUILDEXEEXT@
+# DEBUG_EXT is used by ELF files (names and SONAMEs); it will be "_d" for a debug build
+# DEBUG_SUFFIX is used by filesystem paths; it will be "-debug" for a debug build
+# Both will be empty in an optimized build
+DEBUG_EXT= @DEBUG_EXT@
+DEBUG_SUFFIX= @DEBUG_SUFFIX@
+
# Short name and location for Mac OS X Python framework
UNIVERSALSDK=@UNIVERSALSDK@
PYTHONFRAMEWORK= @PYTHONFRAMEWORK@
@@ -197,8 +203,8 @@ LIBOBJDIR= Python/
LIBOBJS= @LIBOBJS@
UNICODE_OBJS= @UNICODE_OBJS@
-PYTHON= python$(EXE)
-BUILDPYTHON= python$(BUILDEXE)
+PYTHON= python$(DEBUG_SUFFIX)$(EXE)
+BUILDPYTHON= python$(DEBUG_SUFFIX)$(BUILDEXE)
PYTHON_FOR_REGEN=@PYTHON_FOR_REGEN@
PYTHON_FOR_BUILD=@PYTHON_FOR_BUILD@
@@ -547,7 +553,7 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
-libpython$(VERSION).so: $(LIBRARY_OBJS)
+libpython$(VERSION)$(DEBUG_EXT).so: $(LIBRARY_OBJS)
if test $(INSTSONAME) != $(LDLIBRARY); then \
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
$(LN) -f $(INSTSONAME) $@; \
@@ -954,18 +960,18 @@ bininstall: altbininstall
then rm -f $(DESTDIR)$(BINDIR)/$(PYTHON); \
else true; \
fi
- (cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(EXE) $(PYTHON))
- -rm -f $(DESTDIR)$(BINDIR)/python2$(EXE)
- (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(EXE) python2$(EXE))
- -rm -f $(DESTDIR)$(BINDIR)/python2-config
- (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)-config python2-config)
- -rm -f $(DESTDIR)$(BINDIR)/python-config
- (cd $(DESTDIR)$(BINDIR); $(LN) -s python2-config python-config)
+ (cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(DEBUG_SUFFIX)$(EXE) $(PYTHON))
+ -rm -f $(DESTDIR)$(BINDIR)/python2$(DEBUG_SUFFIX)$(EXE)
+ (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(DEBUG_SUFFIX)$(EXE) python2$(DEBUG_SUFFIX)$(EXE))
+ -rm -f $(DESTDIR)$(BINDIR)/python2$(DEBUG_SUFFIX)-config
+ (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(DEBUG_SUFFIX)-config python2$(DEBUG_SUFFIX)-config)
+ -rm -f $(DESTDIR)$(BINDIR)/python$(DEBUG_SUFFIX)-config
+ (cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(DEBUG_SUFFIX)-config python$(DEBUG_SUFFIX)-config)
-test -d $(DESTDIR)$(LIBPC) || $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(LIBPC)
- -rm -f $(DESTDIR)$(LIBPC)/python2.pc
- (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION).pc python2.pc)
- -rm -f $(DESTDIR)$(LIBPC)/python.pc
- (cd $(DESTDIR)$(LIBPC); $(LN) -s python2.pc python.pc)
+ -rm -f $(DESTDIR)$(LIBPC)/python2$(DEBUG_SUFFIX).pc
+ (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION)$(DEBUG_SUFFIX).pc python2$(DEBUG_SUFFIX).pc)
+ -rm -f $(DESTDIR)$(LIBPC)/python$(DEBUG_SUFFIX).pc
+ (cd $(DESTDIR)$(LIBPC); $(LN) -s python2$(DEBUG_SUFFIX).pc python$(DEBUG_SUFFIX).pc)
# Install the interpreter with $(VERSION) affixed
# This goes into $(exec_prefix)
@@ -978,7 +984,7 @@ altbininstall: $(BUILDPYTHON)
else true; \
fi; \
done
- $(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE)
+ $(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(VERSION)$(DEBUG_SUFFIX)$(EXE)
if test -f $(LDLIBRARY); then \
if test -n "$(DLLLIBRARY)" ; then \
$(INSTALL_SHARED) $(DLLLIBRARY) $(DESTDIR)$(BINDIR); \
@@ -1148,10 +1154,11 @@ $(srcdir)/Lib/$(PLATDIR):
fi; \
cd $(srcdir)/Lib/$(PLATDIR); $(RUNSHARED) ./regen
-python-config: $(srcdir)/Misc/python-config.in
+python$(DEBUG_SUFFIX)-config: $(srcdir)/Misc/python-config.in
# Substitution happens here, as the completely-expanded BINDIR
# is not available in configure
- sed -e "s,@EXENAME@,$(BINDIR)/python$(VERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config
+ sed -e "s,@EXENAME@,$(BINDIR)/python$(VERSION)$(DEBUG_SUFFIX)$(EXE)," < $(srcdir)/Misc/python-config.in >python$(DEBUG_SUFFIX)-config
+
# Install the include files
INCLDIRSTOMAKE=$(INCLUDEDIR) $(CONFINCLUDEDIR) $(INCLUDEPY) $(CONFINCLUDEPY)
@@ -1172,13 +1179,13 @@ inclinstall:
$(INSTALL_DATA) pyconfig.h $(DESTDIR)$(CONFINCLUDEPY)/pyconfig.h
# Install the library and miscellaneous stuff needed for extending/embedding
-# This goes into $(exec_prefix)
-LIBPL= $(LIBP)/config
+# This goes into $(exec_prefix)$(DEBUG_SUFFIX)
+LIBPL= $(LIBP)/config$(DEBUG_SUFFIX)
# pkgconfig directory
LIBPC= $(LIBDIR)/pkgconfig
-libainstall: @DEF_MAKE_RULE@ python-config
+libainstall: @DEF_MAKE_RULE@ python$(DEBUG_SUFFIX)-config
@for i in $(LIBDIR) $(LIBP) $(LIBPL) $(LIBPC); \
do \
if test ! -d $(DESTDIR)$$i; then \
@@ -1194,11 +1201,10 @@ libainstall: all python-config
$(INSTALL_DATA) Modules/Setup $(DESTDIR)$(LIBPL)/Setup
$(INSTALL_DATA) Modules/Setup.local $(DESTDIR)$(LIBPL)/Setup.local
$(INSTALL_DATA) Modules/Setup.config $(DESTDIR)$(LIBPL)/Setup.config
- $(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION).pc
+ $(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION)$(DEBUG_SUFFIX).pc
$(INSTALL_SCRIPT) $(srcdir)/Modules/makesetup $(DESTDIR)$(LIBPL)/makesetup
$(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh
- $(INSTALL_SCRIPT) python-config $(DESTDIR)$(BINDIR)/python$(VERSION)-config
- rm python-config
+ $(INSTALL_SCRIPT) python$(DEBUG_SUFFIX)-config $(DESTDIR)$(BINDIR)/python$(VERSION)$(DEBUG_SUFFIX)-config
@if [ -s Modules/python.exp -a \
"`echo $(MACHDEP) | sed 's/^\(...\).*/\1/'`" = "aix" ]; then \
echo; echo "Installing support files for building shared extension modules on AIX:"; \
diff --git a/Misc/python-config.in b/Misc/python-config.in
index a09e07c..c1691ef 100644
--- a/Misc/python-config.in
+++ b/Misc/python-config.in
@@ -44,7 +44,7 @@ for opt in opt_flags:
print ' '.join(flags)
elif opt in ('--libs', '--ldflags'):
- libs = ['-lpython' + pyver]
+ libs = ['-lpython' + pyver + (sys.pydebug and "_d" or "")]
libs += getvar('LIBS').split()
libs += getvar('SYSLIBS').split()
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
diff --git a/Modules/makesetup b/Modules/makesetup
index 1bffcbf..f0bc743 100755
--- a/Modules/makesetup
+++ b/Modules/makesetup
@@ -233,7 +233,7 @@ sed -e 's/[ ]*#.*//' -e '/^[ ]*$/d' |
*$mod.o*) base=$mod;;
*) base=${mod}module;;
esac
- file="$srcdir/$base\$(SO)"
+ file="$srcdir/$base\$(DEBUG_EXT)\$(SO)"
case $doconfig in
no) SHAREDMODS="$SHAREDMODS $file";;
esac
diff --git a/Python/dynload_shlib.c b/Python/dynload_shlib.c
index 17ebab1..02a94aa 100644
--- a/Python/dynload_shlib.c
+++ b/Python/dynload_shlib.c
@@ -46,11 +46,16 @@ const struct filedescr _PyImport_DynLoadFiletab[] = {
{"module.exe", "rb", C_EXTENSION},
{"MODULE.EXE", "rb", C_EXTENSION},
#else
+#ifdef Py_DEBUG
+ {"_d.so", "rb", C_EXTENSION},
+ {"module_d.so", "rb", C_EXTENSION},
+#else
{".so", "rb", C_EXTENSION},
{"module.so", "rb", C_EXTENSION},
-#endif
-#endif
-#endif
+#endif /* Py_DEBUG */
+#endif /* __VMS */
+#endif /* defined(PYOS_OS2) && defined(PYCC_GCC) */
+#endif /* __CYGWIN__ */
{0, 0}
};
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index aeff38a..183e3cc 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -1524,6 +1524,12 @@ _PySys_Init(void)
PyString_FromString("legacy"));
#endif
+#ifdef Py_DEBUG
+ PyDict_SetItemString(sysdict, "pydebug", Py_True);
+#else
+ PyDict_SetItemString(sysdict, "pydebug", Py_False);
+#endif
+
#undef SET_SYS_FROM_STRING
if (PyErr_Occurred())
return NULL;
diff --git a/configure.ac b/configure.ac
index 0a902c7..5caedb7 100644
--- a/configure.ac
+++ b/configure.ac
@@ -764,7 +764,7 @@ AC_SUBST(LIBRARY)
AC_MSG_CHECKING(LIBRARY)
if test -z "$LIBRARY"
then
- LIBRARY='libpython$(VERSION).a'
+ LIBRARY='libpython$(VERSION)$(DEBUG_EXT).a'
fi
AC_MSG_RESULT($LIBRARY)
@@ -910,8 +910,8 @@ if test $enable_shared = "yes"; then
INSTSONAME="$LDLIBRARY".$SOVERSION
;;
Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*)
- LDLIBRARY='libpython$(VERSION).so'
- BLDLIBRARY='-L. -lpython$(VERSION)'
+ LDLIBRARY='libpython$(VERSION)$(DEBUG_EXT).so'
+ BLDLIBRARY='-L. -lpython$(VERSION)$(DEBUG_EXT)'
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
case $ac_sys_system in
FreeBSD*)
@@ -1040,6 +1040,14 @@ else AC_MSG_RESULT(no); Py_DEBUG='false'
fi],
[AC_MSG_RESULT(no)])
+if test "$Py_DEBUG" = 'true'
+then
+ DEBUG_EXT=_d
+ DEBUG_SUFFIX=-debug
+fi
+AC_SUBST(DEBUG_EXT)
+AC_SUBST(DEBUG_SUFFIX)
+
# XXX Shouldn't the code above that fiddles with BASECFLAGS and OPT be
# merged with this chunk of code?
--
2.11.0
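The patch above also exposes the chosen ABI to Python code through a downstream-only sys.pydebug flag, alongside the "_d.so" suffix for debug extension modules. A small sketch of how code might branch on it; getattr keeps the check harmless on interpreters without this patch:

    import sys
    # sys.pydebug is downstream-only: False on the optimized interpreter,
    # True on the -debug build, where C extensions load as "<name>_d.so".
    if getattr(sys, "pydebug", False):
        print("debug ABI: extension modules use the _d.so suffix")
    else:
        print("optimized ABI")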


@@ -0,0 +1,50 @@
diff -up Python-2.6.5/configure.ac.more-configuration-flags Python-2.6.5/configure.ac
--- Python-2.6.5/configure.ac.more-configuration-flags 2010-05-24 18:51:25.410111792 -0400
+++ Python-2.6.5/configure.ac 2010-05-24 18:59:23.954986388 -0400
@@ -2515,6 +2515,30 @@ else AC_MSG_RESULT(no)
fi],
[AC_MSG_RESULT(no)])
+AC_MSG_CHECKING(for --with-count-allocs)
+AC_ARG_WITH(count-allocs,
+[ --with(out)count-allocs enable/disable per-type instance accounting], [
+if test "$withval" != no
+then
+ AC_DEFINE(COUNT_ALLOCS, 1,
+ [Define to keep records of the number of instances of each type])
+ AC_MSG_RESULT(yes)
+else AC_MSG_RESULT(no)
+fi],
+[AC_MSG_RESULT(no)])
+
+AC_MSG_CHECKING(for --with-call-profile)
+AC_ARG_WITH(call-profile,
+[ --with(out)-call-profile enable/disable statistics on function call invocation], [
+if test "$withval" != no
+then
+ AC_DEFINE(CALL_PROFILE, 1,
+ [Define to keep records on function call invocation])
+ AC_MSG_RESULT(yes)
+else AC_MSG_RESULT(no)
+fi],
+[AC_MSG_RESULT(no)])
+
# Check for Python-specific malloc support
AC_MSG_CHECKING(for --with-pymalloc)
AC_ARG_WITH(pymalloc,
diff -up Python-2.6.5/pyconfig.h.in.more-configuration-flags Python-2.6.5/pyconfig.h.in
--- Python-2.6.5/pyconfig.h.in.more-configuration-flags 2010-05-24 18:51:45.677988086 -0400
+++ Python-2.6.5/pyconfig.h.in 2010-05-24 19:00:44.163987730 -0400
@@ -1019,6 +1019,12 @@
/* Define to profile with the Pentium timestamp counter */
#undef WITH_TSC
+/* Define to keep records of the number of instances of each type */
+#undef COUNT_ALLOCS
+
+/* Define to keep records on function call invocation */
+#undef CALL_PROFILE
+
/* Define if you want pymalloc to be disabled when running under valgrind */
#undef WITH_VALGRIND
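With the --with-count-allocs switch added above, CPython's COUNT_ALLOCS bookkeeping is compiled in and surfaces as sys.getcounts(). A hedged sketch, guarded so it also runs on a regular build (the tuple layout is the upstream COUNT_ALLOCS one: type name, allocations, frees, maximum simultaneously alive):

    import sys
    # sys.getcounts() only exists when the interpreter was configured
    # --with-count-allocs; fall back gracefully otherwise.
    getcounts = getattr(sys, "getcounts", None)
    if getcounts is not None:
        for name, allocs, frees, maxalloc in getcounts()[:5]:
            print("%s allocs=%d frees=%d max=%d" % (name, allocs, frees, maxalloc))
    else:
        print("interpreter built without --with-count-allocs")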


@@ -0,0 +1,47 @@
diff -up Python-2.7rc1/Modules/posixmodule.c.statvfs-f-flag-constants Python-2.7rc1/Modules/posixmodule.c
--- Python-2.7rc1/Modules/posixmodule.c.statvfs-f-flag-constants 2010-05-15 17:45:30.000000000 -0400
+++ Python-2.7rc1/Modules/posixmodule.c 2010-06-07 22:54:16.162068624 -0400
@@ -9174,6 +9174,43 @@ all_ins(PyObject *d)
#endif
#endif
+ /* These came from statvfs.h */
+#ifdef ST_RDONLY
+ if (ins(d, "ST_RDONLY", (long)ST_RDONLY)) return -1;
+#endif /* ST_RDONLY */
+#ifdef ST_NOSUID
+ if (ins(d, "ST_NOSUID", (long)ST_NOSUID)) return -1;
+#endif /* ST_NOSUID */
+
+ /* GNU extensions */
+#ifdef ST_NODEV
+ if (ins(d, "ST_NODEV", (long)ST_NODEV)) return -1;
+#endif /* ST_NODEV */
+#ifdef ST_NOEXEC
+ if (ins(d, "ST_NOEXEC", (long)ST_NOEXEC)) return -1;
+#endif /* ST_NOEXEC */
+#ifdef ST_SYNCHRONOUS
+ if (ins(d, "ST_SYNCHRONOUS", (long)ST_SYNCHRONOUS)) return -1;
+#endif /* ST_SYNCHRONOUS */
+#ifdef ST_MANDLOCK
+ if (ins(d, "ST_MANDLOCK", (long)ST_MANDLOCK)) return -1;
+#endif /* ST_MANDLOCK */
+#ifdef ST_WRITE
+ if (ins(d, "ST_WRITE", (long)ST_WRITE)) return -1;
+#endif /* ST_WRITE */
+#ifdef ST_APPEND
+ if (ins(d, "ST_APPEND", (long)ST_APPEND)) return -1;
+#endif /* ST_APPEND */
+#ifdef ST_NOATIME
+ if (ins(d, "ST_NOATIME", (long)ST_NOATIME)) return -1;
+#endif /* ST_NOATIME */
+#ifdef ST_NODIRATIME
+ if (ins(d, "ST_NODIRATIME", (long)ST_NODIRATIME)) return -1;
+#endif /* ST_NODIRATIME */
+#ifdef ST_RELATIME
+ if (ins(d, "ST_RELATIME", (long)ST_RELATIME)) return -1;
+#endif /* ST_RELATIME */
+
#if defined(PYOS_OS2)
if (insertvalues(d)) return -1;
#endif
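The ST_* constants exported above make mount flags from os.statvfs() testable by name. A brief sketch, assuming the patched os module (stock 2.7 does not define os.ST_RDONLY):

    import os
    # f_flag carries the mount flags; ST_RDONLY and ST_NOSUID are POSIX,
    # the remaining constants are the GNU extensions guarded by #ifdef above.
    st = os.statvfs("/")
    print("read-only: %s" % bool(st.f_flag & os.ST_RDONLY))
    print("nosuid: %s" % bool(st.f_flag & os.ST_NOSUID))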


@@ -0,0 +1,13 @@
--- Python-2.7.5/Lib/site.py.orig 2013-05-16 12:47:55.000000000 +0200
+++ Python-2.7.5/Lib/site.py 2013-05-16 12:56:20.089058109 +0200
@@ -529,6 +529,10 @@ def main():
abs__file__()
known_paths = removeduppaths()
+ from sysconfig import is_python_build
+ if is_python_build():
+ from _sysconfigdata import build_time_vars
+ sys.path.append(os.path.join(build_time_vars['abs_builddir'], 'Modules'))
if ENABLE_USER_SITE is None:
ENABLE_USER_SITE = check_enableusersite()
known_paths = addusersitepackages(known_paths)


@@ -0,0 +1,11 @@
diff -up Python-2.7.2/Lib/test/test_io.py.disable-tests-in-test_io Python-2.7.2/Lib/test/test_io.py
--- Python-2.7.2/Lib/test/test_io.py.disable-tests-in-test_io 2011-09-01 14:18:45.963304089 -0400
+++ Python-2.7.2/Lib/test/test_io.py 2011-09-01 15:08:53.796098413 -0400
@@ -2669,6 +2669,7 @@ class SignalsTest(unittest.TestCase):
self.check_interrupted_read_retry(lambda x: x,
mode="r")
+ @unittest.skip('rhbz#732998')
@unittest.skipUnless(threading, 'Threading required for this test.')
def check_interrupted_write_retry(self, item, **fdopen_kwargs):
"""Check that a buffered write, when it gets interrupted (either


@@ -0,0 +1,68 @@
diff -up Python-2.7.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-2.7.2/Lib/unittest/case.py
--- Python-2.7.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-08 14:45:47.677169191 -0400
+++ Python-2.7.2/Lib/unittest/case.py 2011-09-08 16:01:36.287858159 -0400
@@ -1,6 +1,7 @@
"""Test case implementation"""
import collections
+import os
import sys
import functools
import difflib
@@ -94,6 +95,43 @@ def expectedFailure(func):
return wrapper
+# Non-standard/downstream-only hooks for handling issues with specific test
+# cases:
+
+def _skipInRpmBuild(reason):
+ """
+ Non-standard/downstream-only decorator for marking a specific unit test
+ to be skipped when run within the %check of an rpmbuild.
+
+ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within
+ the environment, and has no effect otherwise.
+ """
+ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
+ return skip(reason)
+ else:
+ return _id
+
+def _expectedFailureInRpmBuild(func):
+ """
+ Non-standard/downstream-only decorator for marking a specific unit test
+ as expected to fail within the %check of an rpmbuild.
+
+ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within
+ the environment, and has no effect otherwise.
+ """
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
+ try:
+ func(*args, **kwargs)
+ except Exception:
+ raise _ExpectedFailure(sys.exc_info())
+ raise _UnexpectedSuccess
+ else:
+ # Call directly:
+ func(*args, **kwargs)
+ return wrapper
+
class _AssertRaisesContext(object):
"""A context manager used to implement TestCase.assertRaises* methods."""
diff -up Python-2.7.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-2.7.2/Lib/unittest/__init__.py
--- Python-2.7.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-08 14:59:39.534112310 -0400
+++ Python-2.7.2/Lib/unittest/__init__.py 2011-09-08 15:07:09.191081562 -0400
@@ -57,7 +57,8 @@ __unittest = True
from .result import TestResult
from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf,
- skipUnless, expectedFailure)
+ skipUnless, expectedFailure,
+ _skipInRpmBuild, _expectedFailureInRpmBuild)
from .suite import BaseTestSuite, TestSuite
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
findTestCases)


@@ -0,0 +1,13 @@
diff -up Python-2.7.2/Lib/test/test_dl.py.skip-test_dl Python-2.7.2/Lib/test/test_dl.py
--- Python-2.7.2/Lib/test/test_dl.py.skip-test_dl 2011-09-08 15:18:40.529034289 -0400
+++ Python-2.7.2/Lib/test/test_dl.py 2011-09-08 16:29:45.184742670 -0400
@@ -13,6 +13,9 @@ sharedlibs = [
('/usr/lib/libc.dylib', 'getpid'),
]
+# (also, "dl" is deprecated in favor of ctypes)
+@unittest._skipInRpmBuild('fails on 64-bit builds: '
+ 'module dl requires sizeof(int) == sizeof(long) == sizeof(char*)')
def test_main():
for s, func in sharedlibs:
try:


@@ -0,0 +1,11 @@
diff -up Python-2.7.6/Lib/test/test_file2k.py.stdin-test Python-2.7.6/Lib/test/test_file2k.py
--- Python-2.7.6/Lib/test/test_file2k.py.stdin-test 2013-11-10 08:36:40.000000000 +0100
+++ Python-2.7.6/Lib/test/test_file2k.py 2014-01-29 14:28:01.029488055 +0100
@@ -223,6 +223,7 @@ class OtherFileTests(unittest.TestCase):
else:
f.close()
+ @unittest._skipInRpmBuild('seems not to raise the exception when run in Koji')
def testStdinSeek(self):
if sys.platform == 'osf1V5':
# This causes the interpreter to exit on OSF1 v5.1.


@@ -0,0 +1,12 @@
diff -up Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py.mark-tests-that-fail-in-rpmbuild Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py
--- Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py.mark-tests-that-fail-in-rpmbuild 2012-04-09 19:07:29.000000000 -0400
+++ Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py 2012-04-13 00:20:08.223819263 -0400
@@ -24,6 +24,7 @@ setup(name='foo', version='0.1', py_modu
"""
+@unittest._skipInRpmBuild("don't try to nest one rpm build inside another rpm build")
class BuildRpmTestCase(support.TempdirManager,
support.EnvironGuard,
support.LoggingSilencer,
diff -up Python-2.7.3/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild Python-2.7.3/Lib/distutils/tests/test_build_ext.py


@@ -0,0 +1,68 @@
diff -up Python-2.7.2/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild Python-2.7.2/Lib/distutils/tests/test_build_ext.py
--- Python-2.7.2/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild 2011-09-08 16:07:25.033834312 -0400
+++ Python-2.7.2/Lib/distutils/tests/test_build_ext.py 2011-09-08 17:43:15.656441082 -0400
@@ -330,6 +332,7 @@ class BuildExtTestCase(support.TempdirMa
self.assertEqual(lastdir, 'bar')
def test_ext_fullpath(self):
+ debug_ext = sysconfig.get_config_var("DEBUG_EXT")
ext = sysconfig.get_config_vars()['SO']
dist = Distribution()
cmd = build_ext(dist)
@@ -337,14 +340,14 @@ class BuildExtTestCase(support.TempdirMa
cmd.distribution.package_dir = {'': 'src'}
cmd.distribution.packages = ['lxml', 'lxml.html']
curdir = os.getcwd()
- wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + debug_ext + ext)
path = cmd.get_ext_fullpath('lxml.etree')
self.assertEqual(wanted, path)
# building lxml.etree not inplace
cmd.inplace = 0
cmd.build_lib = os.path.join(curdir, 'tmpdir')
- wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
+ wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + debug_ext + ext)
path = cmd.get_ext_fullpath('lxml.etree')
self.assertEqual(wanted, path)
@@ -354,13 +357,13 @@ class BuildExtTestCase(support.TempdirMa
cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
path = cmd.get_ext_fullpath('twisted.runner.portmap')
wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
- 'portmap' + ext)
+ 'portmap' + debug_ext + ext)
self.assertEqual(wanted, path)
# building twisted.runner.portmap inplace
cmd.inplace = 1
path = cmd.get_ext_fullpath('twisted.runner.portmap')
- wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
+ wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + debug_ext + ext)
self.assertEqual(wanted, path)
def test_build_ext_inplace(self):
@@ -373,8 +376,9 @@ class BuildExtTestCase(support.TempdirMa
cmd.distribution.package_dir = {'': 'src'}
cmd.distribution.packages = ['lxml', 'lxml.html']
curdir = os.getcwd()
+ debug_ext = sysconfig.get_config_var("DEBUG_EXT")
ext = sysconfig.get_config_var("SO")
- wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + debug_ext + ext)
path = cmd.get_ext_fullpath('lxml.etree')
self.assertEqual(wanted, path)
@@ -412,10 +416,11 @@ class BuildExtTestCase(support.TempdirMa
dist = Distribution({'name': 'UpdateManager'})
cmd = build_ext(dist)
cmd.ensure_finalized()
+ debug_ext = sysconfig.get_config_var("DEBUG_EXT")
ext = sysconfig.get_config_var("SO")
ext_name = os.path.join('UpdateManager', 'fdsend')
ext_path = cmd.get_ext_fullpath(ext_name)
- wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + ext)
+ wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + debug_ext + ext)
self.assertEqual(ext_path, wanted)
@unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows')


@@ -0,0 +1,11 @@
diff -up Python-2.7.2/Lib/test/test_float.py.skip-test_float-known-failure-on-arm Python-2.7.2/Lib/test/test_float.py
--- Python-2.7.2/Lib/test/test_float.py.skip-test_float-known-failure-on-arm 2011-09-08 19:34:09.000986128 -0400
+++ Python-2.7.2/Lib/test/test_float.py 2011-09-08 19:34:57.969982779 -0400
@@ -1072,6 +1072,7 @@ class HexFloatTestCase(unittest.TestCase
self.identical(got, expected)
+ @unittest.skip('Known failure on ARM: http://bugs.python.org/issue8265')
def test_from_hex(self):
MIN = self.MIN;
MAX = self.MAX;


@@ -0,0 +1,11 @@
diff -up Python-2.7.2/Lib/ctypes/test/test_callbacks.py.skip-test_ctypes-known-failure-on-sparc Python-2.7.2/Lib/ctypes/test/test_callbacks.py
--- Python-2.7.2/Lib/ctypes/test/test_callbacks.py.skip-test_ctypes-known-failure-on-sparc 2011-09-08 19:42:35.541951490 -0400
+++ Python-2.7.2/Lib/ctypes/test/test_callbacks.py 2011-09-08 19:43:40.676947036 -0400
@@ -67,6 +67,7 @@ class Callbacks(unittest.TestCase):
self.check_type(c_longlong, 42)
self.check_type(c_longlong, -42)
+ @unittest.skip('Known failure on Sparc: http://bugs.python.org/issue8314')
def test_ulonglong(self):
# test some 64-bit values, with and without msb set.
self.check_type(c_ulonglong, 10955412242170339782)


@@ -0,0 +1,22 @@
diff -up Python-2.7.6/Lib/test/test_openpty.py.tty-fail Python-2.7.6/Lib/test/test_openpty.py
--- Python-2.7.6/Lib/test/test_openpty.py.tty-fail 2014-01-29 14:31:43.761343267 +0100
+++ Python-2.7.6/Lib/test/test_openpty.py 2014-01-29 14:32:19.284090165 +0100
@@ -8,6 +8,7 @@ if not hasattr(os, "openpty"):
class OpenptyTest(unittest.TestCase):
+ @unittest._skipInRpmBuild('sometimes fails in Koji, possibly due to a mock issue (rhbz#714627)')
def test(self):
master, slave = os.openpty()
self.addCleanup(os.close, master)
diff -up Python-2.7.6/Lib/test/test_pty.py.tty-fail Python-2.7.6/Lib/test/test_pty.py
--- Python-2.7.6/Lib/test/test_pty.py.tty-fail 2013-11-10 08:36:40.000000000 +0100
+++ Python-2.7.6/Lib/test/test_pty.py 2014-01-29 14:31:43.761343267 +0100
@@ -111,6 +111,7 @@ class PtyTest(unittest.TestCase):
os.close(master_fd)
+ @unittest._skipInRpmBuild('sometimes fails in Koji, possibly due to a mock issue (rhbz#714627)')
def test_fork(self):
debug("calling pty.fork()")
pid, master_fd = pty.fork()


@@ -0,0 +1,58 @@
diff -up Python-2.7.2/Python/ceval.c.tsc-on-ppc Python-2.7.2/Python/ceval.c
--- Python-2.7.2/Python/ceval.c.tsc-on-ppc 2011-08-23 14:59:48.051300849 -0400
+++ Python-2.7.2/Python/ceval.c 2011-08-23 15:33:25.412162902 -0400
@@ -37,24 +37,42 @@ typedef unsigned long long uint64;
*/
#if defined(__ppc__) || defined (__powerpc__)
-#define READ_TIMESTAMP(var) ppc_getcounter(&var)
+#if defined( __powerpc64__) || defined(__LP64__)
+/* 64-bit PowerPC */
+#define READ_TIMESTAMP(var) ppc64_getcounter(&var)
+static void
+ppc64_getcounter(uint64 *v)
+{
+ /* On 64-bit PowerPC we can read the 64-bit timebase directly into a
+ 64-bit register */
+ uint64 timebase;
+#ifdef _ARCH_PWR4
+ asm volatile ("mfspr %0,268" : "=r" (timebase));
+#else
+ asm volatile ("mftb %0" : "=r" (timebase));
+#endif
+ *v = timebase;
+}
+
+#else
+/* 32-bit PowerPC */
+#define READ_TIMESTAMP(var) ppc32_getcounter(&var)
static void
-ppc_getcounter(uint64 *v)
+ppc32_getcounter(uint64 *v)
{
- register unsigned long tbu, tb, tbu2;
+ union { long long ll; long ii[2]; } u;
+ long tmp;
loop:
- asm volatile ("mftbu %0" : "=r" (tbu) );
- asm volatile ("mftb %0" : "=r" (tb) );
- asm volatile ("mftbu %0" : "=r" (tbu2));
- if (__builtin_expect(tbu != tbu2, 0)) goto loop;
-
- /* The slightly peculiar way of writing the next lines is
- compiled better by GCC than any other way I tried. */
- ((long*)(v))[0] = tbu;
- ((long*)(v))[1] = tb;
+ asm volatile ("mftbu %0" : "=r" (u.ii[0]) );
+ asm volatile ("mftb %0" : "=r" (u.ii[1]) );
+ asm volatile ("mftbu %0" : "=r" (tmp));
+ if (__builtin_expect(u.ii[0] != tmp, 0)) goto loop;
+
+ *v = u.ll;
}
+#endif /* powerpc 32/64 bit */
#elif defined(__i386__)


@@ -0,0 +1,12 @@
diff -up Python-2.7.2/Modules/Setup.dist.no-gdbm Python-2.7.2/Modules/Setup.dist
--- Python-2.7.2/Modules/Setup.dist.no-gdbm 2011-09-13 14:25:43.496095926 -0400
+++ Python-2.7.2/Modules/Setup.dist 2011-09-13 14:25:46.491095724 -0400
@@ -396,7 +396,7 @@ dl dlmodule.c
#
# First, look at Setup.config; configure may have set this for you.
-gdbm gdbmmodule.c -lgdbm
+# gdbm gdbmmodule.c -lgdbm
# Sleepycat Berkeley DB interface.


@@ -0,0 +1,732 @@
diff -up Python-2.7.2/Lib/hashlib.py.hashlib-fips Python-2.7.2/Lib/hashlib.py
--- Python-2.7.2/Lib/hashlib.py.hashlib-fips 2011-06-11 11:46:24.000000000 -0400
+++ Python-2.7.2/Lib/hashlib.py 2011-09-14 00:21:26.194252001 -0400
@@ -6,9 +6,12 @@
__doc__ = """hashlib module - A common interface to many hash functions.
-new(name, string='') - returns a new hash object implementing the
- given hash function; initializing the hash
- using the given string data.
+new(name, string='', usedforsecurity=True)
+ - returns a new hash object implementing the given hash function;
+ initializing the hash using the given string data.
+
+ "usedforsecurity" is a non-standard extension for better supporting
+ FIPS-compliant environments (see below)
Named constructor functions are also available, these are much faster
than using new():
@@ -24,6 +27,20 @@ the zlib module.
Choose your hash function wisely. Some have known collision weaknesses.
sha384 and sha512 will be slow on 32 bit platforms.
+Our implementation of hashlib uses OpenSSL.
+
+OpenSSL has a "FIPS mode", which, if enabled, may restrict the available hashes
+to only those that are compliant with FIPS regulations. For example, it may
+deny the use of MD5, on the grounds that this is not secure for uses such as
+authentication, system integrity checking, or digital signatures.
+
+If you need to use such a hash for non-security purposes (such as indexing into
+a data structure for speed), you can override the keyword argument
+"usedforsecurity" from True to False to signify that your code is not relying
+on the hash for security purposes, and this will allow the hash to be usable
+even in FIPS mode. This is not a standard feature of Python 2.7's hashlib, and
+is included here to better support FIPS mode.
+
Hash objects have these methods:
- update(arg): Update the hash object with the string arg. Repeated calls
are equivalent to a single call with the concatenation of all
@@ -63,76 +80,41 @@ algorithms = __always_supported
'pbkdf2_hmac')
-def __get_builtin_constructor(name):
- try:
- if name in ('SHA1', 'sha1'):
- import _sha
- return _sha.new
- elif name in ('MD5', 'md5'):
- import _md5
- return _md5.new
- elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'):
- import _sha256
- bs = name[3:]
- if bs == '256':
- return _sha256.sha256
- elif bs == '224':
- return _sha256.sha224
- elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'):
- import _sha512
- bs = name[3:]
- if bs == '512':
- return _sha512.sha512
- elif bs == '384':
- return _sha512.sha384
- except ImportError:
- pass # no extension module, this hash is unsupported.
-
- raise ValueError('unsupported hash type ' + name)
-
-
def __get_openssl_constructor(name):
try:
f = getattr(_hashlib, 'openssl_' + name)
# Allow the C module to raise ValueError. The function will be
# defined but the hash not actually available thanks to OpenSSL.
- f()
+ #
+ # We pass "usedforsecurity=False" to disable FIPS-based restrictions:
+ # at this stage we're merely seeing if the function is callable,
+ # rather than using it for actual work.
+ f(usedforsecurity=False)
# Use the C function directly (very fast)
return f
except (AttributeError, ValueError):
- return __get_builtin_constructor(name)
+ raise
-
-def __py_new(name, string=''):
- """new(name, string='') - Return a new hashing object using the named algorithm;
- optionally initialized with a string.
- """
- return __get_builtin_constructor(name)(string)
-
-
-def __hash_new(name, string=''):
+def __hash_new(name, string='', usedforsecurity=True):
"""new(name, string='') - Return a new hashing object using the named algorithm;
optionally initialized with a string.
+ Override 'usedforsecurity' to False when using for non-security purposes in
+ a FIPS environment
"""
try:
- return _hashlib.new(name, string)
+ return _hashlib.new(name, string, usedforsecurity)
except ValueError:
- # If the _hashlib module (OpenSSL) doesn't support the named
- # hash, try using our builtin implementations.
- # This allows for SHA224/256 and SHA384/512 support even though
- # the OpenSSL library prior to 0.9.8 doesn't provide them.
- return __get_builtin_constructor(name)(string)
-
+ raise
try:
import _hashlib
new = __hash_new
__get_hash = __get_openssl_constructor
algorithms_available = algorithms_available.union(
_hashlib.openssl_md_meth_names)
except ImportError:
- new = __py_new
- __get_hash = __get_builtin_constructor
+ # We don't build the legacy modules
+ raise
for __func_name in __always_supported:
# try them all, some may not work due to the OpenSSL
@@ -143,4 +125,4 @@ for __func_name in __always_supported:
# Cleanup locals()
del __always_supported, __func_name, __get_hash
-del __py_new, __hash_new, __get_openssl_constructor
+del __hash_new, __get_openssl_constructor
diff -up Python-2.7.2/Lib/test/test_hashlib.py.hashlib-fips Python-2.7.2/Lib/test/test_hashlib.py
--- Python-2.7.2/Lib/test/test_hashlib.py.hashlib-fips 2011-06-11 11:46:25.000000000 -0400
+++ Python-2.7.2/Lib/test/test_hashlib.py 2011-09-14 01:08:55.525254195 -0400
@@ -32,6 +32,19 @@ def hexstr(s):
r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
return r
+def openssl_enforces_fips():
+ # Use the "openssl" command (if present) to try to determine if the local
+ # OpenSSL is configured to enforce FIPS
+ from subprocess import Popen, PIPE
+ try:
+ p = Popen(['openssl', 'md5'],
+ stdin=PIPE, stdout=PIPE, stderr=PIPE)
+ except OSError:
+ # "openssl" command not found
+ return False
+ stdout, stderr = p.communicate(input=b'abc')
+ return b'unknown cipher' in stderr
+OPENSSL_ENFORCES_FIPS = openssl_enforces_fips()
class HashLibTestCase(unittest.TestCase):
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
@@ -61,10 +74,10 @@ class HashLibTestCase(unittest.TestCase)
# of hashlib.new given the algorithm name.
for algorithm, constructors in self.constructors_to_test.items():
constructors.add(getattr(hashlib, algorithm))
- def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
+ def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, usedforsecurity=True):
if data is None:
- return hashlib.new(_alg)
- return hashlib.new(_alg, data)
+ return hashlib.new(_alg, usedforsecurity=usedforsecurity)
+ return hashlib.new(_alg, data, usedforsecurity=usedforsecurity)
constructors.add(_test_algorithm_via_hashlib_new)
_hashlib = self._conditional_import_module('_hashlib')
@@ -78,28 +91,13 @@ class HashLibTestCase(unittest.TestCase)
if constructor:
constructors.add(constructor)
- _md5 = self._conditional_import_module('_md5')
- if _md5:
- self.constructors_to_test['md5'].add(_md5.new)
- _sha = self._conditional_import_module('_sha')
- if _sha:
- self.constructors_to_test['sha1'].add(_sha.new)
- _sha256 = self._conditional_import_module('_sha256')
- if _sha256:
- self.constructors_to_test['sha224'].add(_sha256.sha224)
- self.constructors_to_test['sha256'].add(_sha256.sha256)
- _sha512 = self._conditional_import_module('_sha512')
- if _sha512:
- self.constructors_to_test['sha384'].add(_sha512.sha384)
- self.constructors_to_test['sha512'].add(_sha512.sha512)
-
super(HashLibTestCase, self).__init__(*args, **kwargs)
def test_hash_array(self):
a = array.array("b", range(10))
constructors = self.constructors_to_test.itervalues()
for cons in itertools.chain.from_iterable(constructors):
- c = cons(a)
+ c = cons(a, usedforsecurity=False)
c.hexdigest()
def test_algorithms_attribute(self):
@@ -115,28 +113,9 @@ class HashLibTestCase(unittest.TestCase)
self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
self.assertRaises(TypeError, hashlib.new, 1)
- def test_get_builtin_constructor(self):
- get_builtin_constructor = hashlib.__dict__[
- '__get_builtin_constructor']
- self.assertRaises(ValueError, get_builtin_constructor, 'test')
- try:
- import _md5
- except ImportError:
- pass
- # This forces an ImportError for "import _md5" statements
- sys.modules['_md5'] = None
- try:
- self.assertRaises(ValueError, get_builtin_constructor, 'md5')
- finally:
- if '_md5' in locals():
- sys.modules['_md5'] = _md5
- else:
- del sys.modules['_md5']
- self.assertRaises(TypeError, get_builtin_constructor, 3)
-
def test_hexdigest(self):
for name in self.supported_hash_names:
- h = hashlib.new(name)
+ h = hashlib.new(name, usedforsecurity=False)
self.assertTrue(hexstr(h.digest()) == h.hexdigest())
def test_large_update(self):
@@ -145,16 +125,16 @@ class HashLibTestCase(unittest.TestCase)
abcs = aas + bees + cees
for name in self.supported_hash_names:
- m1 = hashlib.new(name)
+ m1 = hashlib.new(name, usedforsecurity=False)
m1.update(aas)
m1.update(bees)
m1.update(cees)
- m2 = hashlib.new(name)
+ m2 = hashlib.new(name, usedforsecurity=False)
m2.update(abcs)
self.assertEqual(m1.digest(), m2.digest(), name+' update problem.')
- m3 = hashlib.new(name, abcs)
+ m3 = hashlib.new(name, abcs, usedforsecurity=False)
self.assertEqual(m1.digest(), m3.digest(), name+' new problem.')
def check(self, name, data, digest):
@@ -162,7 +142,7 @@ class HashLibTestCase(unittest.TestCase)
# 2 is for hashlib.name(...) and hashlib.new(name, ...)
self.assertGreaterEqual(len(constructors), 2)
for hash_object_constructor in constructors:
- computed = hash_object_constructor(data).hexdigest()
+ computed = hash_object_constructor(data, usedforsecurity=False).hexdigest()
self.assertEqual(
computed, digest,
"Hash algorithm %s constructed using %s returned hexdigest"
@@ -172,7 +152,8 @@ class HashLibTestCase(unittest.TestCase)
def check_unicode(self, algorithm_name):
# Unicode objects are not allowed as input.
- expected = hashlib.new(algorithm_name, str(u'spam')).hexdigest()
+ expected = hashlib.new(algorithm_name, str(u'spam'),
+ usedforsecurity=False).hexdigest()
self.check(algorithm_name, u'spam', expected)
def test_unicode(self):
@@ -354,6 +335,70 @@ class HashLibTestCase(unittest.TestCase)
self.assertEqual(expected_hash, hasher.hexdigest())
+ def test_issue9146(self):
+ # Ensure that various ways to use "MD5" from "hashlib" don't segfault:
+ m = hashlib.md5(usedforsecurity=False)
+ m.update(b'abc\n')
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = hashlib.new('md5', usedforsecurity=False)
+ m.update(b'abc\n')
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = hashlib.md5(b'abc\n', usedforsecurity=False)
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = hashlib.new('md5', b'abc\n', usedforsecurity=False)
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ def assertRaisesUnknownCipher(self, callable_obj=None, *args, **kwargs):
+ try:
+ callable_obj(*args, **kwargs)
+ except ValueError, e:
+ if not e.args[0].endswith('unknown cipher'):
+ self.fail('Incorrect exception raised')
+ else:
+ self.fail('Exception was not raised')
+
+ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS,
+ 'FIPS enforcement required for this test.')
+ def test_hashlib_fips_mode(self):
+ # Ensure that we raise a ValueError on vanilla attempts to use MD5
+ # in hashlib in a FIPS-enforced setting:
+ self.assertRaisesUnknownCipher(hashlib.md5)
+ self.assertRaisesUnknownCipher(hashlib.new, 'md5')
+
+ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS,
+ 'FIPS enforcement required for this test.')
+ def test_hashopenssl_fips_mode(self):
+ # Verify the _hashlib module's handling of md5:
+ import _hashlib
+
+ assert hasattr(_hashlib, 'openssl_md5')
+
+ # Ensure that _hashlib raises a ValueError on vanilla attempts to
+ # use MD5 in a FIPS-enforced setting:
+ self.assertRaisesUnknownCipher(_hashlib.openssl_md5)
+ self.assertRaisesUnknownCipher(_hashlib.new, 'md5')
+
+ # Ensure that in such a setting we can whitelist a callsite with
+ # usedforsecurity=False and have it succeed:
+ m = _hashlib.openssl_md5(usedforsecurity=False)
+ m.update('abc\n')
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = _hashlib.new('md5', usedforsecurity=False)
+ m.update('abc\n')
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = _hashlib.openssl_md5('abc\n', usedforsecurity=False)
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = _hashlib.new('md5', 'abc\n', usedforsecurity=False)
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+
+
class KDFTests(unittest.TestCase):
pbkdf2_test_vectors = [
(b'password', b'salt', 1, None),
diff -up Python-2.7.2/Modules/Setup.dist.hashlib-fips Python-2.7.2/Modules/Setup.dist
--- Python-2.7.2/Modules/Setup.dist.hashlib-fips 2011-09-14 00:21:26.163252001 -0400
+++ Python-2.7.2/Modules/Setup.dist 2011-09-14 00:21:26.201252001 -0400
@@ -248,14 +248,14 @@ imageop imageop.c # Operations on images
# Message-Digest Algorithm, described in RFC 1321. The necessary files
# md5.c and md5.h are included here.
-_md5 md5module.c md5.c
+#_md5 md5module.c md5.c
# The _sha module implements the SHA checksum algorithms.
# (NIST's Secure Hash Algorithms.)
-_sha shamodule.c
-_sha256 sha256module.c
-_sha512 sha512module.c
+#_sha shamodule.c
+#_sha256 sha256module.c
+#_sha512 sha512module.c
# SGI IRIX specific modules -- off by default.
diff -up Python-2.7.2/setup.py.hashlib-fips Python-2.7.2/setup.py
--- Python-2.7.2/setup.py.hashlib-fips 2011-09-14 00:21:25.722252001 -0400
+++ Python-2.7.2/setup.py 2011-09-14 00:21:26.203252001 -0400
@@ -768,21 +768,6 @@ class PyBuildExt(build_ext):
print ("warning: openssl 0x%08x is too old for _hashlib" %
openssl_ver)
missing.append('_hashlib')
- if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
- # The _sha module implements the SHA1 hash algorithm.
- exts.append( Extension('_sha', ['shamodule.c']) )
- # The _md5 module implements the RSA Data Security, Inc. MD5
- # Message-Digest Algorithm, described in RFC 1321. The
- # necessary files md5.c and md5.h are included here.
- exts.append( Extension('_md5',
- sources = ['md5module.c', 'md5.c'],
- depends = ['md5.h']) )
-
- min_sha2_openssl_ver = 0x00908000
- if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
- # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
- exts.append( Extension('_sha256', ['sha256module.c']) )
- exts.append( Extension('_sha512', ['sha512module.c']) )
# Modules that provide persistent dictionary-like semantics. You will
# probably want to arrange for at least one of them to be available on
--- Python-2.7.8/Modules/_hashopenssl.c.orig 2014-06-30 04:05:41.000000000 +0200
+++ Python-2.7.8/Modules/_hashopenssl.c 2014-07-14 14:21:59.546386572 +0200
@@ -36,6 +36,8 @@
#endif
/* EVP is the preferred interface to hashing in OpenSSL */
+#include <openssl/ssl.h>
+#include <openssl/err.h>
#include <openssl/evp.h>
#include <openssl/hmac.h>
#include <openssl/err.h>
@@ -67,11 +69,19 @@
static PyTypeObject EVPtype;
+/* Struct to hold all the cached information we need on a specific algorithm.
+ We have one of these per algorithm */
+typedef struct {
+ PyObject *name_obj;
+ EVP_MD_CTX ctxs[2];
+ /* ctx_ptrs will point to ctxs unless an error occurred, when it will
+ be NULL: */
+ EVP_MD_CTX *ctx_ptrs[2];
+ PyObject *error_msgs[2];
+} EVPCachedInfo;
-#define DEFINE_CONSTS_FOR_NEW(Name) \
- static PyObject *CONST_ ## Name ## _name_obj = NULL; \
- static EVP_MD_CTX CONST_new_ ## Name ## _ctx; \
- static EVP_MD_CTX *CONST_new_ ## Name ## _ctx_p = NULL;
+#define DEFINE_CONSTS_FOR_NEW(Name) \
+ static EVPCachedInfo cached_info_ ##Name;
DEFINE_CONSTS_FOR_NEW(md5)
DEFINE_CONSTS_FOR_NEW(sha1)
@@ -117,6 +127,48 @@
}
}
+static void
+mc_ctx_init(EVP_MD_CTX *ctx, int usedforsecurity)
+{
+ EVP_MD_CTX_init(ctx);
+
+ /*
+ If the user has declared that this digest is being used in a
+ non-security role (e.g. indexing into a data structure), set
+ the exception flag for openssl to allow it
+ */
+ if (!usedforsecurity) {
+#ifdef EVP_MD_CTX_FLAG_NON_FIPS_ALLOW
+ EVP_MD_CTX_set_flags(ctx,
+ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW);
+#endif
+ }
+}
+
+/* Get an error msg for the last error as a PyObject */
+static PyObject *
+error_msg_for_last_error(void)
+{
+ char *errstr;
+
+ errstr = ERR_error_string(ERR_peek_last_error(), NULL);
+ ERR_clear_error();
+
+ return PyString_FromString(errstr); /* Can be NULL */
+}
+
+static void
+set_evp_exception(void)
+{
+ char *errstr;
+
+ errstr = ERR_error_string(ERR_peek_last_error(), NULL);
+ ERR_clear_error();
+
+ PyErr_SetString(PyExc_ValueError, errstr);
+}
+
+
/* Internal methods for a hash object */
static void
@@ -315,14 +367,15 @@
static int
EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"name", "string", NULL};
+ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL};
PyObject *name_obj = NULL;
+ int usedforsecurity = 1;
Py_buffer view = { 0 };
char *nameStr;
const EVP_MD *digest;
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|s*:HASH", kwlist,
- &name_obj, &view)) {
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|s*i:HASH", kwlist,
+ &name_obj, &view, &usedforsecurity)) {
return -1;
}
@@ -338,7 +391,12 @@
PyBuffer_Release(&view);
return -1;
}
- EVP_DigestInit(&self->ctx, digest);
+ mc_ctx_init(&self->ctx, usedforsecurity);
+ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) {
+ set_evp_exception();
+ PyBuffer_Release(&view);
+ return -1;
+ }
self->name = name_obj;
Py_INCREF(self->name);
@@ -422,7 +480,8 @@
static PyObject *
EVPnew(PyObject *name_obj,
const EVP_MD *digest, const EVP_MD_CTX *initial_ctx,
- const unsigned char *cp, Py_ssize_t len)
+ const unsigned char *cp, Py_ssize_t len,
+ int usedforsecurity)
{
EVPobject *self;
@@ -437,7 +496,12 @@
if (initial_ctx) {
EVP_MD_CTX_copy(&self->ctx, initial_ctx);
} else {
- EVP_DigestInit(&self->ctx, digest);
+ mc_ctx_init(&self->ctx, usedforsecurity);
+ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) {
+ set_evp_exception();
+ Py_DECREF(self);
+ return NULL;
+ }
}
if (cp && len) {
@@ -461,20 +525,28 @@
An optional string argument may be provided and will be\n\
automatically hashed.\n\
\n\
-The MD5 and SHA1 algorithms are always supported.\n");
+The MD5 and SHA1 algorithms are always supported.\n\
+\n\
+An optional \"usedforsecurity=True\" keyword argument is provided for use in\n\
+environments that enforce FIPS-based restrictions. Some implementations of\n\
+OpenSSL can be configured to prevent the usage of non-secure algorithms (such\n\
+as MD5). If you have a non-security use for these algorithms (e.g. a hash\n\
+table), you can override this argument by marking the callsite as\n\
+\"usedforsecurity=False\".");
static PyObject *
EVP_new(PyObject *self, PyObject *args, PyObject *kwdict)
{
- static char *kwlist[] = {"name", "string", NULL};
+ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL};
PyObject *name_obj = NULL;
Py_buffer view = { 0 };
PyObject *ret_obj;
char *name;
const EVP_MD *digest;
+ int usedforsecurity = 1;
- if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|s*:new", kwlist,
- &name_obj, &view)) {
+ if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|s*i:new", kwlist,
+ &name_obj, &view, &usedforsecurity)) {
return NULL;
}
@@ -487,7 +559,7 @@
digest = EVP_get_digestbyname(name);
ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf,
- view.len);
+ view.len, usedforsecurity);
PyBuffer_Release(&view);
return ret_obj;
@@ -713,51 +785,111 @@
/*
- * This macro generates constructor function definitions for specific
- * hash algorithms. These constructors are much faster than calling
- * the generic one passing it a python string and are noticably
- * faster than calling a python new() wrapper. Thats important for
+ * This macro and function generates a family of constructor function
+ * definitions for specific hash algorithms. These constructors are much
+ * faster than calling the generic one passing it a python string and are
+ * noticeably faster than calling a python new() wrapper. That's important for
* code that wants to make hashes of a bunch of small strings.
*/
#define GEN_CONSTRUCTOR(NAME) \
static PyObject * \
- EVP_new_ ## NAME (PyObject *self, PyObject *args) \
+ EVP_new_ ## NAME (PyObject *self, PyObject *args, PyObject *kwdict) \
{ \
- Py_buffer view = { 0 }; \
- PyObject *ret_obj; \
- \
- if (!PyArg_ParseTuple(args, "|s*:" #NAME , &view)) { \
- return NULL; \
- } \
- \
- ret_obj = EVPnew( \
- CONST_ ## NAME ## _name_obj, \
- NULL, \
- CONST_new_ ## NAME ## _ctx_p, \
- (unsigned char*)view.buf, view.len); \
- PyBuffer_Release(&view); \
- return ret_obj; \
+ return implement_specific_EVP_new(self, args, kwdict, \
+ "|s*i:" #NAME, \
+ &cached_info_ ## NAME ); \
}
+static PyObject *
+implement_specific_EVP_new(PyObject *self, PyObject *args, PyObject *kwdict,
+ const char *format,
+ EVPCachedInfo *cached_info)
+{
+ static char *kwlist[] = {"string", "usedforsecurity", NULL};
+ Py_buffer view = { 0 };
+ int usedforsecurity = 1;
+ int idx;
+ PyObject *ret_obj = NULL;
+
+ assert(cached_info);
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwdict, format, kwlist,
+ &view, &usedforsecurity)) {
+ return NULL;
+ }
+
+ idx = usedforsecurity ? 1 : 0;
+
+ /*
+     * If an error occurred during creation of the global context, the ctx_ptr
+ * will be NULL, and the error_msg will hopefully be non-NULL:
+ */
+ if (cached_info->ctx_ptrs[idx]) {
+ /* We successfully initialized this context; copy it: */
+ ret_obj = EVPnew(cached_info->name_obj,
+ NULL,
+ cached_info->ctx_ptrs[idx],
+ (unsigned char*)view.buf, view.len,
+ usedforsecurity);
+ } else {
+ /* Some kind of error happened initializing the global context for
+ this (digest, usedforsecurity) pair.
+ Raise an exception with the saved error message: */
+ if (cached_info->error_msgs[idx]) {
+ PyErr_SetObject(PyExc_ValueError, cached_info->error_msgs[idx]);
+ } else {
+ PyErr_SetString(PyExc_ValueError, "Error initializing hash");
+ }
+ }
+
+ PyBuffer_Release(&view);
+
+ return ret_obj;
+}
+
/* a PyMethodDef structure for the constructor */
#define CONSTRUCTOR_METH_DEF(NAME) \
- {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \
+ {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, \
+ METH_VARARGS |METH_KEYWORDS, \
PyDoc_STR("Returns a " #NAME \
" hash object; optionally initialized with a string") \
}
-/* used in the init function to setup a constructor: initialize OpenSSL
- constructor constants if they haven't been initialized already. */
-#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \
- if (CONST_ ## NAME ## _name_obj == NULL) { \
- CONST_ ## NAME ## _name_obj = PyString_FromString(#NAME); \
- if (EVP_get_digestbyname(#NAME)) { \
- CONST_new_ ## NAME ## _ctx_p = &CONST_new_ ## NAME ## _ctx; \
- EVP_DigestInit(CONST_new_ ## NAME ## _ctx_p, EVP_get_digestbyname(#NAME)); \
- } \
- } \
+/*
+ Macro/function pair to set up the constructors.
+
+ Try to initialize a context for each hash twice, once with
+ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW and once without.
+
+  Any that have errors during initialization will end up with a NULL ctx_ptrs
+  entry, and error_msgs will be set (unless we're very low on memory).
+*/
+#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \
+ init_constructor_constant(&cached_info_ ## NAME, #NAME); \
} while (0);
+static void
+init_constructor_constant(EVPCachedInfo *cached_info, const char *name)
+{
+ assert(cached_info);
+ cached_info->name_obj = PyString_FromString(name);
+ if (EVP_get_digestbyname(name)) {
+ int i;
+ for (i=0; i<2; i++) {
+ mc_ctx_init(&cached_info->ctxs[i], i);
+ if (EVP_DigestInit_ex(&cached_info->ctxs[i],
+ EVP_get_digestbyname(name), NULL)) {
+ /* Success: */
+ cached_info->ctx_ptrs[i] = &cached_info->ctxs[i];
+ } else {
+ /* Failure: */
+ cached_info->ctx_ptrs[i] = NULL;
+ cached_info->error_msgs[i] = error_msg_for_last_error();
+ }
+ }
+ }
+}
+
GEN_CONSTRUCTOR(md5)
GEN_CONSTRUCTOR(sha1)
#ifdef _OPENSSL_SUPPORTS_SHA2
@@ -794,14 +926,11 @@
{
PyObject *m, *openssl_md_meth_names;
+ SSL_load_error_strings();
+ SSL_library_init();
OpenSSL_add_all_digests();
ERR_load_crypto_strings();
- /* TODO build EVP_functions openssl_* entries dynamically based
- * on what hashes are supported rather than listing many
- * but having some be unsupported. Only init appropriate
- * constants. */
-
Py_TYPE(&EVPtype) = &PyType_Type;
if (PyType_Ready(&EVPtype) < 0)
return;
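
For reference, a minimal usage sketch of the keyword this patch introduces (a downstream-only extension; upstream Python 2.7 has no "usedforsecurity" argument). A caller that only needs a stable fingerprint, not a cryptographic guarantee, can whitelist MD5 so the call keeps working when OpenSSL enforces FIPS mode:

    import hashlib

    def cache_key(payload):
        # Not a security use: we only need a stable fingerprint for indexing,
        # so the call is allowed even when OpenSSL is in FIPS mode.
        return hashlib.md5(payload, usedforsecurity=False).hexdigest()

    print cache_key(b'GET /index.html')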


@@ -0,0 +1,761 @@
diff --git a/Include/dictobject.h b/Include/dictobject.h
index 5a1e9fe..da89cec 100644
--- a/Include/dictobject.h
+++ b/Include/dictobject.h
@@ -154,6 +154,8 @@ PyAPI_FUNC(PyObject *) PyDict_GetItemString(PyObject *dp, const char *key);
PyAPI_FUNC(int) PyDict_SetItemString(PyObject *dp, const char *key, PyObject *item);
PyAPI_FUNC(int) PyDict_DelItemString(PyObject *dp, const char *key);
+PyAPI_FUNC(void) _PyDict_DebugMallocStats(FILE *out);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/floatobject.h b/Include/floatobject.h
index 54e8825..33c6ac0 100644
--- a/Include/floatobject.h
+++ b/Include/floatobject.h
@@ -132,6 +132,7 @@ PyAPI_FUNC(PyObject *) _PyFloat_FormatAdvanced(PyObject *obj,
failure. Used in builtin_round in bltinmodule.c. */
PyAPI_FUNC(PyObject *) _Py_double_round(double x, int ndigits);
+PyAPI_FUNC(void) _PyFloat_DebugMallocStats(FILE* out);
#ifdef __cplusplus
diff --git a/Include/frameobject.h b/Include/frameobject.h
index 3460379..db89a4a 100644
--- a/Include/frameobject.h
+++ b/Include/frameobject.h
@@ -80,6 +80,8 @@ PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *);
PyAPI_FUNC(int) PyFrame_ClearFreeList(void);
+PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out);
+
/* Return the line of code the frame is currently executing. */
PyAPI_FUNC(int) PyFrame_GetLineNumber(PyFrameObject *);
diff --git a/Include/intobject.h b/Include/intobject.h
index d198574..60cb9e0 100644
--- a/Include/intobject.h
+++ b/Include/intobject.h
@@ -78,6 +78,8 @@ PyAPI_FUNC(PyObject *) _PyInt_FormatAdvanced(PyObject *obj,
char *format_spec,
Py_ssize_t format_spec_len);
+PyAPI_FUNC(void) _PyInt_DebugMallocStats(FILE *out);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/listobject.h b/Include/listobject.h
index f19b1c5..7fccb47 100644
--- a/Include/listobject.h
+++ b/Include/listobject.h
@@ -62,6 +62,8 @@ PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *);
#define PyList_SET_ITEM(op, i, v) (((PyListObject *)(op))->ob_item[i] = (v))
#define PyList_GET_SIZE(op) Py_SIZE(op)
+PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/methodobject.h b/Include/methodobject.h
index 6e160b6..1944517 100644
--- a/Include/methodobject.h
+++ b/Include/methodobject.h
@@ -87,6 +87,10 @@ typedef struct {
PyAPI_FUNC(int) PyCFunction_ClearFreeList(void);
+PyAPI_FUNC(void) _PyCFunction_DebugMallocStats(FILE *out);
+PyAPI_FUNC(void) _PyMethod_DebugMallocStats(FILE *out);
+
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/object.h b/Include/object.h
index 807b241..a9d2079 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -1040,6 +1040,13 @@ PyAPI_FUNC(void) _PyTrash_thread_destroy_chain(void);
_PyTrash_thread_deposit_object((PyObject*)op); \
} while (0);
+PyAPI_FUNC(void)
+_PyDebugAllocatorStats(FILE *out, const char *block_name, int num_blocks,
+ size_t sizeof_block);
+
+PyAPI_FUNC(void)
+_PyObject_DebugTypeStats(FILE *out);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/objimpl.h b/Include/objimpl.h
index cbf6bc3..8c14ab8 100644
--- a/Include/objimpl.h
+++ b/Include/objimpl.h
@@ -101,13 +101,13 @@ PyAPI_FUNC(void) PyObject_Free(void *);
/* Macros */
#ifdef WITH_PYMALLOC
+PyAPI_FUNC(void) _PyObject_DebugMallocStats(FILE *out);
#ifdef PYMALLOC_DEBUG /* WITH_PYMALLOC && PYMALLOC_DEBUG */
PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes);
PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes);
PyAPI_FUNC(void) _PyObject_DebugFree(void *p);
PyAPI_FUNC(void) _PyObject_DebugDumpAddress(const void *p);
PyAPI_FUNC(void) _PyObject_DebugCheckAddress(const void *p);
-PyAPI_FUNC(void) _PyObject_DebugMallocStats(void);
PyAPI_FUNC(void *) _PyObject_DebugMallocApi(char api, size_t nbytes);
PyAPI_FUNC(void *) _PyObject_DebugReallocApi(char api, void *p, size_t nbytes);
PyAPI_FUNC(void) _PyObject_DebugFreeApi(char api, void *p);
diff --git a/Include/setobject.h b/Include/setobject.h
index 52b07d5..73a37b6 100644
--- a/Include/setobject.h
+++ b/Include/setobject.h
@@ -92,6 +92,7 @@ PyAPI_FUNC(int) _PySet_Next(PyObject *set, Py_ssize_t *pos, PyObject **key);
PyAPI_FUNC(int) _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key, long *hash);
PyAPI_FUNC(PyObject *) PySet_Pop(PyObject *set);
PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable);
+PyAPI_FUNC(void) _PySet_DebugMallocStats(FILE *out);
#ifdef __cplusplus
}
diff --git a/Include/stringobject.h b/Include/stringobject.h
index 12cc093..0a5fbd1 100644
--- a/Include/stringobject.h
+++ b/Include/stringobject.h
@@ -204,6 +204,8 @@ PyAPI_FUNC(PyObject *) _PyBytes_FormatAdvanced(PyObject *obj,
char *format_spec,
Py_ssize_t format_spec_len);
+PyAPI_FUNC(void) _PyString_DebugMallocStats(FILE *out);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/tupleobject.h b/Include/tupleobject.h
index a5ab733..27e6ca6 100644
--- a/Include/tupleobject.h
+++ b/Include/tupleobject.h
@@ -54,6 +54,7 @@ PyAPI_FUNC(void) _PyTuple_MaybeUntrack(PyObject *);
#define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v)
PyAPI_FUNC(int) PyTuple_ClearFreeList(void);
+PyAPI_FUNC(void) _PyTuple_DebugMallocStats(FILE *out);
#ifdef __cplusplus
}
diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h
index 7781f96..321bd20 100644
--- a/Include/unicodeobject.h
+++ b/Include/unicodeobject.h
@@ -1406,6 +1406,8 @@ PyAPI_FUNC(int) _PyUnicode_IsAlpha(
Py_UNICODE ch /* Unicode character */
);
+PyAPI_FUNC(void) _PyUnicode_DebugMallocStats(FILE *out);
+
#ifdef __cplusplus
}
#endif
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 9342716..8eeb5ab 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -487,6 +487,32 @@ class SysModuleTest(unittest.TestCase):
p.wait()
self.assertIn(executable, ["''", repr(sys.executable)])
+ def test_debugmallocstats(self):
+ # Test sys._debugmallocstats()
+
+ import subprocess
+
+ # Verify the default of writing to stderr:
+ p = subprocess.Popen([sys.executable,
+ '-c', 'import sys; sys._debugmallocstats()'],
+ stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ p.wait()
+ self.assertIn("arenas allocated current", err)
+
+ # Verify that we can redirect the output to a file (not a file-like
+ # object, though):
+ with open('mallocstats.txt', 'w') as out:
+ sys._debugmallocstats(out)
+ result = open('mallocstats.txt').read()
+ self.assertIn("arenas allocated current", result)
+ os.unlink('mallocstats.txt')
+
+ # Verify that the destination must be a file:
+ with self.assertRaises(TypeError):
+ sys._debugmallocstats(42)
+
+
@test.test_support.cpython_only
class SizeofTest(unittest.TestCase):
diff --git a/Objects/classobject.c b/Objects/classobject.c
index 02d7cfd..1c44a47 100644
--- a/Objects/classobject.c
+++ b/Objects/classobject.c
@@ -2691,3 +2691,12 @@ PyMethod_Fini(void)
{
(void)PyMethod_ClearFreeList();
}
+
+/* Print summary info about the state of the optimized allocator */
+void
+_PyMethod_DebugMallocStats(FILE *out)
+{
+ _PyDebugAllocatorStats(out,
+ "free PyMethodObject",
+ numfree, sizeof(PyMethodObject));
+}
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index c544ecd..89ca39c 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -225,6 +225,15 @@ show_track(void)
static PyDictObject *free_list[PyDict_MAXFREELIST];
static int numfree = 0;
+/* Print summary info about the state of the optimized allocator */
+void
+_PyDict_DebugMallocStats(FILE *out)
+{
+ _PyDebugAllocatorStats(out,
+ "free PyDictObject", numfree, sizeof(PyDictObject));
+}
+
+
void
PyDict_Fini(void)
{
diff --git a/Objects/floatobject.c b/Objects/floatobject.c
index 5954d39..02acc8c 100644
--- a/Objects/floatobject.c
+++ b/Objects/floatobject.c
@@ -34,6 +34,22 @@ typedef struct _floatblock PyFloatBlock;
static PyFloatBlock *block_list = NULL;
static PyFloatObject *free_list = NULL;
+/* Print summary info about the state of the optimized allocator */
+void
+_PyFloat_DebugMallocStats(FILE *out)
+{
+ int num_blocks = 0;
+ PyFloatBlock *block;
+
+ /* Walk the block list, counting */
+ for (block = block_list; block ; block = block->next) {
+ num_blocks++;
+ }
+
+ _PyDebugAllocatorStats(out,
+ "PyFloatBlock", num_blocks, sizeof(PyFloatBlock));
+}
+
static PyFloatObject *
fill_free_list(void)
{
diff --git a/Objects/frameobject.c b/Objects/frameobject.c
index 4c91dd0..03a66dc 100644
--- a/Objects/frameobject.c
+++ b/Objects/frameobject.c
@@ -1019,3 +1019,13 @@ PyFrame_Fini(void)
Py_XDECREF(builtin_object);
builtin_object = NULL;
}
+
+/* Print summary info about the state of the optimized allocator */
+void
+_PyFrame_DebugMallocStats(FILE *out)
+{
+ _PyDebugAllocatorStats(out,
+ "free PyFrameObject",
+ numfree, sizeof(PyFrameObject));
+}
+
diff --git a/Objects/intobject.c b/Objects/intobject.c
index 9b27c35..703fa5a 100644
--- a/Objects/intobject.c
+++ b/Objects/intobject.c
@@ -44,6 +44,23 @@ typedef struct _intblock PyIntBlock;
static PyIntBlock *block_list = NULL;
static PyIntObject *free_list = NULL;
+
+/* Print summary info about the state of the optimized allocator */
+void
+_PyInt_DebugMallocStats(FILE *out)
+{
+ int num_blocks = 0;
+ PyIntBlock *block;
+
+ /* Walk the block list, counting */
+ for (block = block_list; block ; block = block->next) {
+ num_blocks++;
+ }
+
+ _PyDebugAllocatorStats(out,
+ "PyIntBlock", num_blocks, sizeof(PyIntBlock));
+}
+
static PyIntObject *
fill_free_list(void)
{
diff --git a/Objects/listobject.c b/Objects/listobject.c
index 24eff76..38848bd 100644
--- a/Objects/listobject.c
+++ b/Objects/listobject.c
@@ -109,6 +109,15 @@ PyList_Fini(void)
}
}
+/* Print summary info about the state of the optimized allocator */
+void
+_PyList_DebugMallocStats(FILE *out)
+{
+ _PyDebugAllocatorStats(out,
+ "free PyListObject",
+ numfree, sizeof(PyListObject));
+}
+
PyObject *
PyList_New(Py_ssize_t size)
{
diff --git a/Objects/methodobject.c b/Objects/methodobject.c
index c1a99ab..ea5df77 100644
--- a/Objects/methodobject.c
+++ b/Objects/methodobject.c
@@ -412,6 +412,15 @@ PyCFunction_Fini(void)
(void)PyCFunction_ClearFreeList();
}
+/* Print summary info about the state of the optimized allocator */
+void
+_PyCFunction_DebugMallocStats(FILE *out)
+{
+ _PyDebugAllocatorStats(out,
+ "free PyCFunction",
+ numfree, sizeof(PyCFunction));
+}
+
/* PyCFunction_New() is now just a macro that calls PyCFunction_NewEx(),
but it's part of the API so we need to keep a function around that
existing C extensions can call.
diff --git a/Objects/object.c b/Objects/object.c
index 65366b0..acef3ce 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -2360,6 +2360,23 @@ PyMem_Free(void *p)
PyMem_FREE(p);
}
+void
+_PyObject_DebugTypeStats(FILE *out)
+{
+ _PyString_DebugMallocStats(out);
+ _PyCFunction_DebugMallocStats(out);
+ _PyDict_DebugMallocStats(out);
+ _PyFloat_DebugMallocStats(out);
+ _PyFrame_DebugMallocStats(out);
+ _PyInt_DebugMallocStats(out);
+ _PyList_DebugMallocStats(out);
+ _PyMethod_DebugMallocStats(out);
+ _PySet_DebugMallocStats(out);
+ _PyTuple_DebugMallocStats(out);
+#if Py_USING_UNICODE
+ _PyUnicode_DebugMallocStats(out);
+#endif
+}
/* These methods are used to control infinite recursion in repr, str, print,
etc. Container objects that may recursively contain themselves,
diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c
index 0778c85..f049f5c 100644
--- a/Objects/obmalloc.c
+++ b/Objects/obmalloc.c
@@ -541,12 +541,10 @@ static struct arena_object* usable_arenas = NULL;
/* Number of arenas allocated that haven't been free()'d. */
static size_t narenas_currently_allocated = 0;
-#ifdef PYMALLOC_DEBUG
/* Total number of times malloc() called to allocate an arena. */
static size_t ntimes_arena_allocated = 0;
/* High water mark (max value ever seen) for narenas_currently_allocated. */
static size_t narenas_highwater = 0;
-#endif
/* Allocate a new arena. If we run out of memory, return NULL. Else
* allocate a new arena, and return the address of an arena_object
@@ -563,7 +561,7 @@ new_arena(void)
#ifdef PYMALLOC_DEBUG
if (Py_GETENV("PYTHONMALLOCSTATS"))
- _PyObject_DebugMallocStats();
+ _PyObject_DebugMallocStats(stderr);
#endif
if (unused_arena_objects == NULL) {
uint i;
@@ -631,11 +629,9 @@ new_arena(void)
arenaobj->address = (uptr)address;
++narenas_currently_allocated;
-#ifdef PYMALLOC_DEBUG
++ntimes_arena_allocated;
if (narenas_currently_allocated > narenas_highwater)
narenas_highwater = narenas_currently_allocated;
-#endif
arenaobj->freepools = NULL;
/* pool_address <- first pool-aligned address in the arena
nfreepools <- number of whole pools that fit after alignment */
@@ -1796,17 +1792,19 @@ _PyObject_DebugDumpAddress(const void *p)
}
}
+#endif /* PYMALLOC_DEBUG */
+
static size_t
-printone(const char* msg, size_t value)
+printone(FILE *out, const char* msg, size_t value)
{
int i, k;
char buf[100];
size_t origvalue = value;
- fputs(msg, stderr);
+ fputs(msg, out);
for (i = (int)strlen(msg); i < 35; ++i)
- fputc(' ', stderr);
- fputc('=', stderr);
+ fputc(' ', out);
+ fputc('=', out);
/* Write the value with commas. */
i = 22;
@@ -1827,17 +1825,32 @@ printone(const char* msg, size_t value)
while (i >= 0)
buf[i--] = ' ';
- fputs(buf, stderr);
+ fputs(buf, out);
return origvalue;
}
-/* Print summary info to stderr about the state of pymalloc's structures.
+void
+_PyDebugAllocatorStats(FILE *out,
+ const char *block_name, int num_blocks, size_t sizeof_block)
+{
+ char buf1[128];
+ char buf2[128];
+ PyOS_snprintf(buf1, sizeof(buf1),
+ "%d %ss * %zd bytes each",
+ num_blocks, block_name, sizeof_block);
+ PyOS_snprintf(buf2, sizeof(buf2),
+ "%48s ", buf1);
+ (void)printone(out, buf2, num_blocks * sizeof_block);
+}
+
+
+/* Print summary info to "out" about the state of pymalloc's structures.
* In Py_DEBUG mode, also perform some expensive internal consistency
* checks.
*/
void
-_PyObject_DebugMallocStats(void)
+_PyObject_DebugMallocStats(FILE *out)
{
uint i;
const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT;
@@ -1866,7 +1879,7 @@ _PyObject_DebugMallocStats(void)
size_t total;
char buf[128];
- fprintf(stderr, "Small block threshold = %d, in %u size classes.\n",
+ fprintf(out, "Small block threshold = %d, in %u size classes.\n",
SMALL_REQUEST_THRESHOLD, numclasses);
for (i = 0; i < numclasses; ++i)
@@ -1920,10 +1933,10 @@ _PyObject_DebugMallocStats(void)
}
assert(narenas == narenas_currently_allocated);
- fputc('\n', stderr);
+ fputc('\n', out);
fputs("class size num pools blocks in use avail blocks\n"
"----- ---- --------- ------------- ------------\n",
- stderr);
+ out);
for (i = 0; i < numclasses; ++i) {
size_t p = numpools[i];
@@ -1934,7 +1947,7 @@ _PyObject_DebugMallocStats(void)
assert(b == 0 && f == 0);
continue;
}
- fprintf(stderr, "%5u %6u "
+ fprintf(out, "%5u %6u "
"%11" PY_FORMAT_SIZE_T "u "
"%15" PY_FORMAT_SIZE_T "u "
"%13" PY_FORMAT_SIZE_T "u\n",
@@ -1944,36 +1957,35 @@ _PyObject_DebugMallocStats(void)
pool_header_bytes += p * POOL_OVERHEAD;
quantization += p * ((POOL_SIZE - POOL_OVERHEAD) % size);
}
- fputc('\n', stderr);
- (void)printone("# times object malloc called", serialno);
-
- (void)printone("# arenas allocated total", ntimes_arena_allocated);
- (void)printone("# arenas reclaimed", ntimes_arena_allocated - narenas);
- (void)printone("# arenas highwater mark", narenas_highwater);
- (void)printone("# arenas allocated current", narenas);
+ fputc('\n', out);
+#ifdef PYMALLOC_DEBUG
+ (void)printone(out, "# times object malloc called", serialno);
+#endif
+ (void)printone(out, "# arenas allocated total", ntimes_arena_allocated);
+ (void)printone(out, "# arenas reclaimed", ntimes_arena_allocated - narenas);
+ (void)printone(out, "# arenas highwater mark", narenas_highwater);
+ (void)printone(out, "# arenas allocated current", narenas);
PyOS_snprintf(buf, sizeof(buf),
"%" PY_FORMAT_SIZE_T "u arenas * %d bytes/arena",
narenas, ARENA_SIZE);
- (void)printone(buf, narenas * ARENA_SIZE);
+ (void)printone(out, buf, narenas * ARENA_SIZE);
- fputc('\n', stderr);
+ fputc('\n', out);
- total = printone("# bytes in allocated blocks", allocated_bytes);
- total += printone("# bytes in available blocks", available_bytes);
+ total = printone(out, "# bytes in allocated blocks", allocated_bytes);
+ total += printone(out, "# bytes in available blocks", available_bytes);
PyOS_snprintf(buf, sizeof(buf),
"%u unused pools * %d bytes", numfreepools, POOL_SIZE);
- total += printone(buf, (size_t)numfreepools * POOL_SIZE);
+ total += printone(out, buf, (size_t)numfreepools * POOL_SIZE);
- total += printone("# bytes lost to pool headers", pool_header_bytes);
- total += printone("# bytes lost to quantization", quantization);
- total += printone("# bytes lost to arena alignment", arena_alignment);
- (void)printone("Total", total);
+ total += printone(out, "# bytes lost to pool headers", pool_header_bytes);
+ total += printone(out, "# bytes lost to quantization", quantization);
+ total += printone(out, "# bytes lost to arena alignment", arena_alignment);
+ (void)printone(out, "Total", total);
}
-#endif /* PYMALLOC_DEBUG */
-
#ifdef Py_USING_MEMORY_DEBUGGER
/* Make this function last so gcc won't inline it since the definition is
* after the reference.
diff --git a/Objects/setobject.c b/Objects/setobject.c
index 31da3db..da086ab 100644
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -1087,6 +1087,16 @@ PySet_Fini(void)
Py_CLEAR(emptyfrozenset);
}
+/* Print summary info about the state of the optimized allocator */
+void
+_PySet_DebugMallocStats(FILE *out)
+{
+ _PyDebugAllocatorStats(out,
+ "free PySetObject",
+ numfree, sizeof(PySetObject));
+}
+
+
static PyObject *
set_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
diff --git a/Objects/stringobject.c b/Objects/stringobject.c
index c47d32f..b1ffa24 100644
--- a/Objects/stringobject.c
+++ b/Objects/stringobject.c
@@ -4880,3 +4880,43 @@ void _Py_ReleaseInternedStrings(void)
PyDict_Clear(interned);
Py_CLEAR(interned);
}
+
+void _PyString_DebugMallocStats(FILE *out)
+{
+ ssize_t i;
+ int num_immortal = 0, num_mortal = 0;
+ ssize_t immortal_size = 0, mortal_size = 0;
+
+ if (interned == NULL || !PyDict_Check(interned))
+ return;
+
+ for (i = 0; i <= ((PyDictObject*)interned)->ma_mask; i++) {
+ PyDictEntry *ep = ((PyDictObject*)interned)->ma_table + i;
+ PyObject *pvalue = ep->me_value;
+ if (pvalue != NULL) {
+ PyStringObject *s = (PyStringObject *)ep->me_key;
+
+ switch (s->ob_sstate) {
+ case SSTATE_NOT_INTERNED:
+ /* XXX Shouldn't happen */
+ break;
+ case SSTATE_INTERNED_IMMORTAL:
+ num_immortal ++;
+ immortal_size += s->ob_size;
+ break;
+ case SSTATE_INTERNED_MORTAL:
+ num_mortal ++;
+ mortal_size += s->ob_size;
+ break;
+ default:
+ Py_FatalError("Inconsistent interned string state.");
+ }
+ }
+ }
+
+ fprintf(out, "%d mortal interned strings\n", num_mortal);
+ fprintf(out, "%d immortal interned strings\n", num_immortal);
+ fprintf(out, "total size of all interned strings: "
+ "%zi/%zi "
+ "mortal/immortal\n", mortal_size, immortal_size);
+}
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c
index 6f4b18c..e8e4490 100644
--- a/Objects/tupleobject.c
+++ b/Objects/tupleobject.c
@@ -44,6 +44,22 @@ show_track(void)
}
#endif
+/* Print summary info about the state of the optimized allocator */
+void
+_PyTuple_DebugMallocStats(FILE *out)
+{
+#if PyTuple_MAXSAVESIZE > 0
+ int i;
+ char buf[128];
+ for (i = 1; i < PyTuple_MAXSAVESIZE; i++) {
+ PyOS_snprintf(buf, sizeof(buf),
+ "free %d-sized PyTupleObject", i);
+ _PyDebugAllocatorStats(out,
+ buf,
+ numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i));
+ }
+#endif
+}
PyObject *
PyTuple_New(register Py_ssize_t size)
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index a859fa0..b6ff83e 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -9018,6 +9018,12 @@ _PyUnicode_Fini(void)
(void)PyUnicode_ClearFreeList();
}
+void _PyUnicode_DebugMallocStats(FILE *out)
+{
+ _PyDebugAllocatorStats(out, "free PyUnicodeObject", numfree,
+ sizeof(PyUnicodeObject));
+}
+
#ifdef __cplusplus
}
#endif
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
index b686317..c3b3e17 100644
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -605,7 +605,7 @@ Py_Finalize(void)
#endif /* Py_TRACE_REFS */
#ifdef PYMALLOC_DEBUG
if (Py_GETENV("PYTHONMALLOCSTATS"))
- _PyObject_DebugMallocStats();
+ _PyObject_DebugMallocStats(stderr);
#endif
call_ll_exitfuncs();
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 22238ba..60624f2 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -890,6 +890,57 @@ a 11-tuple where the entries in the tuple are counts of:\n\
extern "C" {
#endif
+static PyObject *
+sys_debugmallocstats(PyObject *self, PyObject *args)
+{
+ PyObject *file = NULL;
+ FILE *fp;
+
+ if (!PyArg_ParseTuple(args, "|O!",
+ &PyFile_Type, &file)) {
+ return NULL;
+ }
+ if (!file) {
+ /* Default to sys.stderr: */
+ file = PySys_GetObject("stderr");
+ if (!file) {
+ PyErr_SetString(PyExc_ValueError, "sys.stderr not set");
+ return NULL;
+ }
+ if (!PyFile_Check(file)) {
+ PyErr_SetString(PyExc_TypeError, "sys.stderr is not a file");
+ return NULL;
+ }
+ }
+
+ Py_INCREF(file);
+ /* OK, we now own a ref on non-NULL "file" */
+
+ fp = PyFile_AsFile(file);
+ if (!fp) {
+ PyErr_SetString(PyExc_ValueError, "file is closed");
+ Py_DECREF(file);
+ return NULL;
+ }
+
+ _PyObject_DebugMallocStats(fp);
+ fputc('\n', fp);
+ _PyObject_DebugTypeStats(fp);
+
+ Py_DECREF(file);
+
+ Py_RETURN_NONE;
+}
+PyDoc_STRVAR(debugmallocstats_doc,
+"_debugmallocstats([file])\n\
+\n\
+Print summary info to the given file (or sys.stderr) about the state of\n\
+pymalloc's structures.\n\
+\n\
+In Py_DEBUG mode, also perform some expensive internal consistency\n\
+checks.\n\
+");
+
#ifdef Py_TRACE_REFS
/* Defined in objects.c because it uses static globals if that file */
extern PyObject *_Py_GetObjects(PyObject *, PyObject *);
@@ -988,6 +1039,8 @@ static PyMethodDef sys_methods[] = {
{"settrace", sys_settrace, METH_O, settrace_doc},
{"gettrace", sys_gettrace, METH_NOARGS, gettrace_doc},
{"call_tracing", sys_call_tracing, METH_VARARGS, call_tracing_doc},
+ {"_debugmallocstats", sys_debugmallocstats, METH_VARARGS,
+ debugmallocstats_doc},
{NULL, NULL} /* sentinel */
};
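
For reference, a minimal sketch of calling the helper added above, matching the behaviour exercised by test_debugmallocstats(): the report goes to sys.stderr by default, and the optional destination must be a real file object rather than an arbitrary file-like object:

    import sys

    # Default: write the allocator report to sys.stderr.
    sys._debugmallocstats()

    # Redirect the report to a real file object instead:
    with open('mallocstats.txt', 'w') as out:
        sys._debugmallocstats(out)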


@@ -0,0 +1,15 @@
diff -up Python-2.7.3/Lib/ctypes/__init__.py.rhbz814391 Python-2.7.3/Lib/ctypes/__init__.py
--- Python-2.7.3/Lib/ctypes/__init__.py.rhbz814391 2012-04-20 14:51:19.390990244 -0400
+++ Python-2.7.3/Lib/ctypes/__init__.py 2012-04-20 14:51:45.141668316 -0400
@@ -272,11 +272,6 @@ def _reset_cache():
# _SimpleCData.c_char_p_from_param
POINTER(c_char).from_param = c_char_p.from_param
_pointer_type_cache[None] = c_void_p
- # XXX for whatever reasons, creating the first instance of a callback
- # function is needed for the unittests on Win64 to succeed. This MAY
- # be a compiler bug, since the problem occurs only when _ctypes is
- # compiled with the MS SDK compiler. Or an uninitialized variable?
- CFUNCTYPE(c_int)(lambda: None)
try:
from _ctypes import set_conversion_mode


@@ -0,0 +1,57 @@
diff -up Python-2.7.3/Lib/test/test_gdb.py.gdb-autoload-safepath Python-2.7.3/Lib/test/test_gdb.py
--- Python-2.7.3/Lib/test/test_gdb.py.gdb-autoload-safepath 2012-04-30 15:53:57.254045220 -0400
+++ Python-2.7.3/Lib/test/test_gdb.py 2012-04-30 16:19:19.569941124 -0400
@@ -54,6 +54,19 @@ def gdb_has_frame_select():
HAS_PYUP_PYDOWN = gdb_has_frame_select()
+def gdb_has_autoload_safepath():
+ # Recent GDBs will only auto-load scripts from certain safe
+ # locations, so we will need to turn off this protection.
+ # However, if the GDB doesn't have it, then the following
+ # command will generate noise on stderr (rhbz#817072):
+ cmd = "--eval-command=set auto-load safe-path /"
+ p = subprocess.Popen(["gdb", "--batch", cmd],
+ stderr=subprocess.PIPE)
+ _, stderr = p.communicate()
+ return '"on" or "off" expected.' not in stderr
+
+HAS_AUTOLOAD_SAFEPATH = gdb_has_autoload_safepath()
+
class DebuggerTests(unittest.TestCase):
"""Test that the debugger can debug Python."""
diff -up Python-2.7.10/Lib/test/test_gdb.py.ms Python-2.7.10/Lib/test/test_gdb.py
--- Python-2.7.10/Lib/test/test_gdb.py.ms 2015-05-25 17:00:25.028462615 +0200
+++ Python-2.7.10/Lib/test/test_gdb.py 2015-05-25 17:01:53.166359822 +0200
@@ -153,6 +153,17 @@ class DebuggerTests(unittest.TestCase):
'run']
+ if HAS_AUTOLOAD_SAFEPATH:
+ # Recent GDBs will only auto-load scripts from certain safe
+ # locations.
+ # Where necessary, turn off this protection to ensure that
+ # our -gdb.py script can be loaded - but not on earlier gdb builds
+ # as this would generate noise on stderr (rhbz#817072):
+ init_commands = ['set auto-load safe-path /']
+ else:
+ init_commands = []
+
+
# GDB as of 7.4 onwards can distinguish between the
# value of a variable at entry vs current value:
# http://sourceware.org/gdb/onlinedocs/gdb/Variables.html
@@ -167,10 +178,11 @@ class DebuggerTests(unittest.TestCase):
else:
commands += ['backtrace']
- # print commands
+ # print init_commands
# Use "commands" to generate the arguments with which to invoke "gdb":
args = ["gdb", "--batch", "-nx"]
+ args += ['--init-eval-command=%s' % cmd for cmd in init_commands]
args += ['--eval-command=%s' % cmd for cmd in commands]
args += ["--args",
sys.executable]


@@ -0,0 +1,292 @@
diff --git a/Doc/library/crypt.rst b/Doc/library/crypt.rst
index 91464ef..6ee64d6 100644
--- a/Doc/library/crypt.rst
+++ b/Doc/library/crypt.rst
@@ -16,9 +16,9 @@
This module implements an interface to the :manpage:`crypt(3)` routine, which is
a one-way hash function based upon a modified DES algorithm; see the Unix man
-page for further details. Possible uses include allowing Python scripts to
-accept typed passwords from the user, or attempting to crack Unix passwords with
-a dictionary.
+page for further details. Possible uses include storing hashed passwords
+so you can check passwords without storing the actual password, or attempting
+to crack Unix passwords with a dictionary.
.. index:: single: crypt(3)
@@ -27,15 +27,81 @@ the :manpage:`crypt(3)` routine in the running system. Therefore, any
extensions available on the current implementation will also be available on
this module.
+Hashing Methods
+---------------
-.. function:: crypt(word, salt)
+The :mod:`crypt` module defines the list of hashing methods (not all methods
+are available on all platforms):
+
+.. data:: METHOD_SHA512
+
+ A Modular Crypt Format method with 16 character salt and 86 character
+ hash. This is the strongest method.
+
+.. versionadded:: 3.3
+
+.. data:: METHOD_SHA256
+
+ Another Modular Crypt Format method with 16 character salt and 43
+ character hash.
+
+.. versionadded:: 3.3
+
+.. data:: METHOD_MD5
+
+ Another Modular Crypt Format method with 8 character salt and 22
+ character hash.
+
+.. versionadded:: 3.3
+
+.. data:: METHOD_CRYPT
+
+ The traditional method with a 2 character salt and 13 characters of
+ hash. This is the weakest method.
+
+.. versionadded:: 3.3
+
+
+Module Attributes
+-----------------
+
+
+.. attribute:: methods
+
+ A list of available password hashing algorithms, as
+ ``crypt.METHOD_*`` objects. This list is sorted from strongest to
+ weakest, and is guaranteed to have at least ``crypt.METHOD_CRYPT``.
+
+.. versionadded:: 3.3
+
+
+Module Functions
+----------------
+
+The :mod:`crypt` module defines the following functions:
+
+.. function:: crypt(word, salt=None)
*word* will usually be a user's password as typed at a prompt or in a graphical
- interface. *salt* is usually a random two-character string which will be used
- to perturb the DES algorithm in one of 4096 ways. The characters in *salt* must
- be in the set ``[./a-zA-Z0-9]``. Returns the hashed password as a string, which
- will be composed of characters from the same alphabet as the salt (the first two
- characters represent the salt itself).
+ interface. The optional *salt* is either a string as returned from
+ :func:`mksalt`, one of the ``crypt.METHOD_*`` values (though not all
+ may be available on all platforms), or a full encrypted password
+ including salt, as returned by this function. If *salt* is not
+ provided, the strongest method will be used (as returned by
+   :func:`methods`).
+
+ Checking a password is usually done by passing the plain-text password
+ as *word* and the full results of a previous :func:`crypt` call,
+ which should be the same as the results of this call.
+
+ *salt* (either a random 2 or 16 character string, possibly prefixed with
+   ``$digit$`` to indicate the method) will be used to perturb the
+ encryption algorithm. The characters in *salt* must be in the set
+ ``[./a-zA-Z0-9]``, with the exception of Modular Crypt Format which
+ prefixes a ``$digit$``.
+
+ Returns the hashed password as a string, which will be composed of
+ characters from the same alphabet as the salt.
.. index:: single: crypt(3)
@@ -43,6 +109,27 @@ this module.
different sizes in the *salt*, it is recommended to use the full crypted
password as salt when checking for a password.
+.. versionchanged:: 3.3
+ Before version 3.3, *salt* must be specified as a string and cannot
+ accept ``crypt.METHOD_*`` values (which don't exist anyway).
+
+
+.. function:: mksalt(method=None)
+
+ Return a randomly generated salt of the specified method. If no
+ *method* is given, the strongest method available as returned by
+ :func:`methods` is used.
+
+ The return value is a string either of 2 characters in length for
+ ``crypt.METHOD_CRYPT``, or 19 characters starting with ``$digit$`` and
+ 16 random characters from the set ``[./a-zA-Z0-9]``, suitable for
+ passing as the *salt* argument to :func:`crypt`.
+
+.. versionadded:: 3.3
+
+Examples
+--------
+
A simple example illustrating typical use::
import crypt, getpass, pwd
@@ -59,3 +146,11 @@ A simple example illustrating typical use::
else:
return 1
+To generate a hash of a password using the strongest available method and
+check it against the original::
+
+ import crypt
+
+ hashed = crypt.crypt(plaintext)
+ if hashed != crypt.crypt(plaintext, hashed):
+        raise ValueError("Hashed version doesn't validate against original")
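
For reference, a small sketch of the salt shapes described above, runnable once the backported Lib/crypt.py from this patch is installed; the exact set of methods printed depends on what the platform's crypt(3) supports:

    import crypt

    # 2 characters for METHOD_CRYPT, otherwise '$digit$' plus 16 characters
    # drawn from [./a-zA-Z0-9].
    for method in crypt.methods:
        print method, repr(crypt.mksalt(method))
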
diff --git a/Lib/crypt.py b/Lib/crypt.py
new file mode 100644
index 0000000..bf0a416
--- /dev/null
+++ b/Lib/crypt.py
@@ -0,0 +1,71 @@
+"""Wrapper to the POSIX crypt library call and associated functionality.
+
+Note that the ``methods`` and ``METHOD_*`` attributes are non-standard
+extensions to Python 2.7, backported from 3.3"""
+
+import _crypt
+import string as _string
+from random import SystemRandom as _SystemRandom
+from collections import namedtuple as _namedtuple
+
+
+_saltchars = _string.ascii_letters + _string.digits + './'
+_sr = _SystemRandom()
+
+
+class _Method(_namedtuple('_Method', 'name ident salt_chars total_size')):
+
+ """Class representing a salt method per the Modular Crypt Format or the
+ legacy 2-character crypt method."""
+
+ def __repr__(self):
+ return '<crypt.METHOD_%s>' % self.name
+
+
+def mksalt(method=None):
+ """Generate a salt for the specified method.
+
+ If not specified, the strongest available method will be used.
+
+ This is a non-standard extension to Python 2.7, backported from 3.3
+ """
+ if method is None:
+ method = methods[0]
+ s = '$%s$' % method.ident if method.ident else ''
+ s += ''.join(_sr.sample(_saltchars, method.salt_chars))
+ return s
+
+
+def crypt(word, salt=None):
+ """Return a string representing the one-way hash of a password, with a salt
+ prepended.
+
+ If ``salt`` is not specified or is ``None``, the strongest
+ available method will be selected and a salt generated. Otherwise,
+ ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as
+ returned by ``crypt.mksalt()``.
+
+ Note that these are non-standard extensions to Python 2.7's crypt.crypt()
+ entrypoint, backported from 3.3: the standard Python 2.7 crypt.crypt()
+ entrypoint requires two strings as the parameters, and does not support
+ keyword arguments.
+ """
+ if salt is None or isinstance(salt, _Method):
+ salt = mksalt(salt)
+ return _crypt.crypt(word, salt)
+
+
+# available salting/crypto methods
+METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
+METHOD_MD5 = _Method('MD5', '1', 8, 34)
+METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
+METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
+
+methods = []
+for _method in (METHOD_SHA512, METHOD_SHA256, METHOD_MD5):
+ _result = crypt('', _method)
+ if _result and len(_result) == _method.total_size:
+ methods.append(_method)
+methods.append(METHOD_CRYPT)
+del _result, _method
+
diff --git a/Lib/test/test_crypt.py b/Lib/test/test_crypt.py
index 7cd9c71..b061a55 100644
--- a/Lib/test/test_crypt.py
+++ b/Lib/test/test_crypt.py
@@ -16,6 +16,25 @@ class CryptTestCase(unittest.TestCase):
self.assertEqual(cr2, cr)
+ def test_salt(self):
+ self.assertEqual(len(crypt._saltchars), 64)
+ for method in crypt.methods:
+ salt = crypt.mksalt(method)
+ self.assertEqual(len(salt),
+ method.salt_chars + (3 if method.ident else 0))
+
+ def test_saltedcrypt(self):
+ for method in crypt.methods:
+ pw = crypt.crypt('assword', method)
+ self.assertEqual(len(pw), method.total_size)
+ pw = crypt.crypt('assword', crypt.mksalt(method))
+ self.assertEqual(len(pw), method.total_size)
+
+ def test_methods(self):
+        # Guarantee that METHOD_CRYPT is the last method in crypt.methods.
+ self.assertTrue(len(crypt.methods) >= 1)
+ self.assertEqual(crypt.METHOD_CRYPT, crypt.methods[-1])
+
def test_main():
test_support.run_unittest(CryptTestCase)
diff --git a/Modules/Setup.dist b/Modules/Setup.dist
index 2712f06..3ea4f0c 100644
--- a/Modules/Setup.dist
+++ b/Modules/Setup.dist
@@ -225,7 +225,7 @@ _ssl _ssl.c \
#
# First, look at Setup.config; configure may have set this for you.
-crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems
+_crypt _cryptmodule.c -lcrypt # crypt(3); needs -lcrypt on some systems
# Some more UNIX dependent modules -- off by default, since these
diff --git a/Modules/cryptmodule.c b/Modules/cryptmodule.c
index 76de54f..7c69ca6 100644
--- a/Modules/cryptmodule.c
+++ b/Modules/cryptmodule.c
@@ -43,7 +43,7 @@ static PyMethodDef crypt_methods[] = {
};
PyMODINIT_FUNC
-initcrypt(void)
+init_crypt(void)
{
- Py_InitModule("crypt", crypt_methods);
+ Py_InitModule("_crypt", crypt_methods);
}
diff --git a/setup.py b/setup.py
index b787487..c60ac35 100644
--- a/setup.py
+++ b/setup.py
@@ -798,7 +798,7 @@ class PyBuildExt(build_ext):
libs = ['crypt']
else:
libs = []
- exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
+ exts.append( Extension('_crypt', ['_cryptmodule.c'], libraries=libs) )
# CSV files
exts.append( Extension('_csv', ['_csv.c']) )
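
For reference, a minimal sketch of the backported hashing/verification flow documented above, assuming the _crypt extension built by this patch is available on the platform:

    import crypt

    # With no salt given, the strongest available method is selected and a
    # salt is generated automatically (mksalt() can also be called directly).
    stored = crypt.crypt('correct horse')

    def check(candidate, stored_hash):
        # Re-hashing with the full stored hash as the salt must reproduce it.
        return crypt.crypt(candidate, stored_hash) == stored_hash

    print check('correct horse', stored)    # True
    print check('battery staple', stored)   # False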

View File

@ -0,0 +1,47 @@
diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py
index 3354b34..10ba0e5 100644
--- a/Lib/test/test_gdb.py
+++ b/Lib/test/test_gdb.py
@@ -725,11 +725,10 @@ class PyListTests(DebuggerTests):
' 2 \n'
' 3 def foo(a, b, c):\n',
bt)
-
+@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
+@unittest.skipIf(python_is_optimized(),
+ "Python was compiled with optimizations")
class StackNavigationTests(DebuggerTests):
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
- @unittest.skipIf(python_is_optimized(),
- "Python was compiled with optimizations")
def test_pyup_command(self):
'Verify that the "py-up" command works'
bt = self.get_stack_trace(script=self.get_sample_script(),
@@ -740,7 +739,6 @@ class StackNavigationTests(DebuggerTests):
baz\(a, b, c\)
$''')
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
def test_down_at_bottom(self):
'Verify handling of "py-down" at the bottom of the stack'
bt = self.get_stack_trace(script=self.get_sample_script(),
@@ -748,9 +746,6 @@ $''')
self.assertEndsWith(bt,
'Unable to find a newer python frame\n')
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
- @unittest.skipIf(python_is_optimized(),
- "Python was compiled with optimizations")
def test_up_at_top(self):
'Verify handling of "py-up" at the top of the stack'
bt = self.get_stack_trace(script=self.get_sample_script(),
@@ -758,9 +753,6 @@ $''')
self.assertEndsWith(bt,
'Unable to find an older python frame\n')
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
- @unittest.skipIf(python_is_optimized(),
- "Python was compiled with optimizations")
def test_up_then_down(self):
'Verify "py-up" followed by "py-down"'
bt = self.get_stack_trace(script=self.get_sample_script(),


@@ -0,0 +1,41 @@
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -41,6 +41,10 @@
# A very generous timeout when it comes to local connections...
CONNECTION_TIMEOUT = 20.
+# The hmac module implicitly defaults to using MD5.
+# Support using a stronger algorithm for the challenge/response code:
+HMAC_DIGEST_NAME='sha256'
+
_mmap_counter = itertools.count()
default_family = 'AF_INET'
@@ -700,12 +704,16 @@
WELCOME = b'#WELCOME#'
FAILURE = b'#FAILURE#'
+def get_digestmod_for_hmac():
+ import hashlib
+ return getattr(hashlib, HMAC_DIGEST_NAME)
+
def deliver_challenge(connection, authkey):
import hmac
assert isinstance(authkey, bytes)
message = os.urandom(MESSAGE_LENGTH)
connection.send_bytes(CHALLENGE + message)
- digest = hmac.new(authkey, message).digest()
+ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest()
response = connection.recv_bytes(256) # reject large message
if response == digest:
connection.send_bytes(WELCOME)
@@ -719,7 +727,7 @@
message = connection.recv_bytes(256) # reject large message
assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message
message = message[len(CHALLENGE):]
- digest = hmac.new(authkey, message).digest()
+ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest()
connection.send_bytes(digest)
response = connection.recv_bytes(256) # reject large message
if response != WELCOME:
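
For reference, a standalone sketch of the digest both ends compute after this change: hmac.new() gets an explicit SHA-256 digestmod instead of silently defaulting to MD5. The key and challenge values below are illustrative only:

    import hashlib
    import hmac
    import os

    authkey = b'shared secret'
    challenge = os.urandom(20)   # a random nonce, like the one deliver_challenge() sends

    server_side = hmac.new(authkey, challenge, hashlib.sha256).digest()
    client_side = hmac.new(authkey, challenge, hashlib.sha256).digest()
    assert server_side == client_side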


@@ -0,0 +1,279 @@
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index 7e47b2d..12a210d 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -1,7 +1,8 @@
import unittest
from test.support import (verbose, run_unittest, start_threads,
- requires_type_collecting)
+ requires_type_collecting, import_module)
import sys
+import sysconfig
import time
import gc
import weakref
@@ -39,6 +40,8 @@ class GC_Detector(object):
self.wr = weakref.ref(C1055820(666), it_happened)
+BUILT_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS'])
+
### Tests
###############################################################################
@@ -537,6 +540,49 @@ class GCTests(unittest.TestCase):
# would be damaged, with an empty __dict__.
self.assertEqual(x, None)
+ @unittest.skipIf(BUILT_WITH_NDEBUG,
+                     'built with -DNDEBUG')
+ def test_refcount_errors(self):
+ # Verify the "handling" of objects with broken refcounts
+
+ import_module("ctypes") #skip if not supported
+
+ import subprocess
+ code = '''if 1:
+ a = []
+ b = [a]
+
+ # Simulate the refcount of "a" being too low (compared to the
+ # references held on it by live data), but keeping it above zero
+ # (to avoid deallocating it):
+ import ctypes
+ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a))
+
+ # The garbage collector should now have a fatal error when it reaches
+ # the broken object:
+ import gc
+ gc.collect()
+ '''
+ p = subprocess.Popen([sys.executable, "-c", code],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ p.stdout.close()
+ p.stderr.close()
+ # Verify that stderr has a useful error message:
+ self.assertRegexpMatches(stderr,
+ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "gc->gc.gc_refs != 0" failed.')
+ self.assertRegexpMatches(stderr,
+ b'refcount was too small')
+ self.assertRegexpMatches(stderr,
+ b'object : \[\]')
+ self.assertRegexpMatches(stderr,
+ b'type : list')
+ self.assertRegexpMatches(stderr,
+ b'refcount: 1')
+ self.assertRegexpMatches(stderr,
+ b'address : 0x[0-9a-f]+')
+
class GCTogglingTests(unittest.TestCase):
def setUp(self):
gc.enable()
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
index 916e481..0233ce2 100644
--- a/Modules/gcmodule.c
+++ b/Modules/gcmodule.c
@@ -21,6 +21,73 @@
#include "Python.h"
#include "frameobject.h" /* for PyFrame_ClearFreeList */
+/*
+ Define a pair of assertion macros.
+
+ These work like the regular C assert(), in that they will abort the
+ process with a message on stderr if the given condition fails to hold,
+ but compile away to nothing if NDEBUG is defined.
+
+ However, before aborting, Python will also try to call _PyObject_Dump() on
+ the given object. This may be of use when investigating bugs in which a
+  particular object is corrupt (e.g. a buggy tp_visit method in an extension
+ module breaking the garbage collector), to help locate the broken objects.
+
+ The WITH_MSG variant allows you to supply an additional message that Python
+ will attempt to print to stderr, after the object dump.
+*/
+#ifdef NDEBUG
+/* No debugging: compile away the assertions: */
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0)
+#else
+/* With debugging: generate checks: */
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \
+ ((expr) \
+ ? (void)(0) \
+ : _PyObject_AssertFailed((obj), \
+ (msg), \
+ (__STRING(expr)), \
+ (__FILE__), \
+ (__LINE__), \
+ (__PRETTY_FUNCTION__)))
+#endif
+
+#define PyObject_ASSERT(obj, expr) \
+ PyObject_ASSERT_WITH_MSG(obj, expr, NULL)
+
+static void _PyObject_AssertFailed(PyObject *, const char *,
+ const char *, const char *, int,
+ const char *);
+
+static void
+_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr,
+ const char *file, int line, const char *function)
+{
+ fprintf(stderr,
+ "%s:%d: %s: Assertion \"%s\" failed.\n",
+ file, line, function, expr);
+ if (msg) {
+ fprintf(stderr, "%s\n", msg);
+ }
+
+ fflush(stderr);
+
+ if (obj) {
+ /* This might succeed or fail, but we're about to abort, so at least
+ try to provide any extra info we can: */
+ _PyObject_Dump(obj);
+ }
+ else {
+ fprintf(stderr, "NULL object\n");
+ }
+
+ fflush(stdout);
+ fflush(stderr);
+
+ /* Terminate the process: */
+ abort();
+}
+
/* Get an object's GC head */
#define AS_GC(o) ((PyGC_Head *)(o)-1)
@@ -328,7 +395,8 @@ update_refs(PyGC_Head *containers)
{
PyGC_Head *gc = containers->gc.gc_next;
for (; gc != containers; gc = gc->gc.gc_next) {
- assert(gc->gc.gc_refs == GC_REACHABLE);
+ PyObject_ASSERT(FROM_GC(gc),
+ gc->gc.gc_refs == GC_REACHABLE);
gc->gc.gc_refs = Py_REFCNT(FROM_GC(gc));
/* Python's cyclic gc should never see an incoming refcount
* of 0: if something decref'ed to 0, it should have been
@@ -348,7 +416,8 @@ update_refs(PyGC_Head *containers)
* so serious that maybe this should be a release-build
* check instead of an assert?
*/
- assert(gc->gc.gc_refs != 0);
+ PyObject_ASSERT(FROM_GC(gc),
+ gc->gc.gc_refs != 0);
}
}
@@ -363,7 +432,9 @@ visit_decref(PyObject *op, void *data)
* generation being collected, which can be recognized
* because only they have positive gc_refs.
*/
- assert(gc->gc.gc_refs != 0); /* else refcount was too small */
+ PyObject_ASSERT_WITH_MSG(FROM_GC(gc),
+ gc->gc.gc_refs != 0,
+ "refcount was too small");
if (gc->gc.gc_refs > 0)
gc->gc.gc_refs--;
}
@@ -423,9 +494,10 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
* If gc_refs == GC_UNTRACKED, it must be ignored.
*/
else {
- assert(gc_refs > 0
- || gc_refs == GC_REACHABLE
- || gc_refs == GC_UNTRACKED);
+ PyObject_ASSERT(FROM_GC(gc),
+ gc_refs > 0
+ || gc_refs == GC_REACHABLE
+ || gc_refs == GC_UNTRACKED);
}
}
return 0;
@@ -467,7 +539,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
*/
PyObject *op = FROM_GC(gc);
traverseproc traverse = Py_TYPE(op)->tp_traverse;
- assert(gc->gc.gc_refs > 0);
+ PyObject_ASSERT(op, gc->gc.gc_refs > 0);
gc->gc.gc_refs = GC_REACHABLE;
(void) traverse(op,
(visitproc)visit_reachable,
@@ -545,7 +617,8 @@ move_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)
for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) {
PyObject *op = FROM_GC(gc);
- assert(IS_TENTATIVELY_UNREACHABLE(op));
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
+
next = gc->gc.gc_next;
if (has_finalizer(op)) {
@@ -621,7 +694,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
PyWeakReference **wrlist;
op = FROM_GC(gc);
- assert(IS_TENTATIVELY_UNREACHABLE(op));
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
next = gc->gc.gc_next;
if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op)))
@@ -642,9 +715,9 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
* the callback pointer intact. Obscure: it also
* changes *wrlist.
*/
- assert(wr->wr_object == op);
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == op);
_PyWeakref_ClearRef(wr);
- assert(wr->wr_object == Py_None);
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None);
if (wr->wr_callback == NULL)
continue; /* no callback */
@@ -678,7 +751,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
*/
if (IS_TENTATIVELY_UNREACHABLE(wr))
continue;
- assert(IS_REACHABLE(wr));
+ PyObject_ASSERT(op, IS_REACHABLE(wr));
/* Create a new reference so that wr can't go away
* before we can process it again.
@@ -687,7 +760,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
/* Move wr to wrcb_to_call, for the next pass. */
wrasgc = AS_GC(wr);
- assert(wrasgc != next); /* wrasgc is reachable, but
+ PyObject_ASSERT(op, wrasgc != next);
+ /* wrasgc is reachable, but
next isn't, so they can't
be the same */
gc_list_move(wrasgc, &wrcb_to_call);
@@ -703,11 +777,11 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
gc = wrcb_to_call.gc.gc_next;
op = FROM_GC(gc);
- assert(IS_REACHABLE(op));
- assert(PyWeakref_Check(op));
+ PyObject_ASSERT(op, IS_REACHABLE(op));
+ PyObject_ASSERT(op, PyWeakref_Check(op));
wr = (PyWeakReference *)op;
callback = wr->wr_callback;
- assert(callback != NULL);
+ PyObject_ASSERT(op, callback != NULL);
/* copy-paste of weakrefobject.c's handle_callback() */
temp = PyObject_CallFunctionObjArgs(callback, wr, NULL);
@@ -810,7 +884,7 @@ delete_garbage(PyGC_Head *collectable, PyGC_Head *old)
PyGC_Head *gc = collectable->gc.gc_next;
PyObject *op = FROM_GC(gc);
- assert(IS_TENTATIVELY_UNREACHABLE(op));
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
if (debug & DEBUG_SAVEALL) {
PyList_Append(garbage, op);
}

View File

@ -0,0 +1,28 @@
diff -up Python-2.7.3/Modules/getpath.c.fix-for-usr-move Python-2.7.3/Modules/getpath.c
--- Python-2.7.3/Modules/getpath.c.fix-for-usr-move 2013-03-06 14:25:32.801828698 -0500
+++ Python-2.7.3/Modules/getpath.c 2013-03-06 15:59:30.872443168 -0500
@@ -510,6 +510,24 @@ calculate_path(void)
MAXPATHLEN bytes long.
*/
+ /*
+ Workaround for rhbz#817554, where an empty argv0_path erroneously
+ locates "prefix" as "/lib[64]/python2.7" due to it finding
+ "/lib[64]/python2.7/os.py" via the /lib -> /usr/lib symlink for
+ https://fedoraproject.org/wiki/Features/UsrMove
+ */
+ if (argv0_path[0] == '\0' && 0 == strcmp(prog, "cmpi_swig")) {
+ /*
+ We have an empty argv0_path, presumably because prog aka
+ Py_GetProgramName() was not found on $PATH.
+
+ Set argv0_path to "/usr/" so that search_for_prefix() and
+ search_for_exec_prefix() don't erroneously pick up
+ on /lib/ via the UsrMove symlink:
+ */
+ strcpy(argv0_path, "/usr/");
+ }
+
if (!(pfound = search_for_prefix(argv0_path, home))) {
if (!Py_FrozenFlag)
fprintf(stderr,

View File

@ -0,0 +1,13 @@
diff --git a/config.sub b/config.sub
index 3478c1f..e422173 100755
--- a/config.sub
+++ b/config.sub
@@ -1040,7 +1040,7 @@ case $basic_machine in
;;
ppc64) basic_machine=powerpc64-unknown
;;
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
;;
ppc64le | powerpc64little)
basic_machine=powerpc64le-unknown

View File

@ -0,0 +1,70 @@
diff --git a/Lib/threading.py b/Lib/threading.py
index cb49c4a..c9795a5 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -305,7 +305,7 @@ class _Condition(_Verbose):
else:
return True
- def wait(self, timeout=None):
+ def wait(self, timeout=None, balancing=True):
"""Wait until notified or until a timeout occurs.
If the calling thread has not acquired the lock when this method is
@@ -354,7 +354,10 @@ class _Condition(_Verbose):
remaining = endtime - _time()
if remaining <= 0:
break
- delay = min(delay * 2, remaining, .05)
+ if balancing:
+ delay = min(delay * 2, remaining, 0.05)
+ else:
+ delay = remaining
_sleep(delay)
if not gotit:
if __debug__:
@@ -599,7 +602,7 @@ class _Event(_Verbose):
with self.__cond:
self.__flag = False
- def wait(self, timeout=None):
+ def wait(self, timeout=None, balancing=True):
"""Block until the internal flag is true.
If the internal flag is true on entry, return immediately. Otherwise,
@@ -617,7 +620,7 @@ class _Event(_Verbose):
"""
with self.__cond:
if not self.__flag:
- self.__cond.wait(timeout)
+ self.__cond.wait(timeout, balancing)
return self.__flag
# Helper to generate new thread names
@@ -908,7 +911,7 @@ class Thread(_Verbose):
if 'dummy_threading' not in _sys.modules:
raise
- def join(self, timeout=None):
+ def join(self, timeout=None, balancing=True):
"""Wait until the thread terminates.
This blocks the calling thread until the thread whose join() method is
@@ -957,7 +960,7 @@ class Thread(_Verbose):
if __debug__:
self._note("%s.join(): timed out", self)
break
- self.__block.wait(delay)
+ self.__block.wait(delay, balancing)
else:
if __debug__:
self._note("%s.join(): thread stopped", self)
@@ -1143,7 +1146,7 @@ class _DummyThread(Thread):
def _set_daemon(self):
return True
- def join(self, timeout=None):
+ def join(self, timeout=None, balancing=True):
assert False, "cannot join a dummy thread"

View File

@ -0,0 +1,12 @@
diff -up Python-2.7.5/Lib/urllib2.py.orig Python-2.7.5/Lib/urllib2.py
--- Python-2.7.5/Lib/urllib2.py.orig 2013-07-17 12:22:58.595525622 +0200
+++ Python-2.7.5/Lib/urllib2.py 2013-07-17 12:19:59.875898030 +0200
@@ -728,6 +728,8 @@ class ProxyHandler(BaseHandler):
if proxy_type is None:
proxy_type = orig_type
+ req.get_host()
+
if req.host and proxy_bypass(req.host):
return None

View File

@ -0,0 +1,25 @@
diff -r e8b8279ca118 setup.py
--- a/setup.py Sun Jul 21 21:57:52 2013 -0400
+++ b/setup.py Tue Aug 20 09:45:31 2013 +0200
@@ -1480,12 +1480,21 @@
'expat/xmltok_impl.h'
]
+ # Add an explicit RPATH to pyexpat.so pointing at the directory
+ # containing the system expat (which has the extra XML_SetHashSalt
+ # symbol), to avoid an ImportError with a link error if there's an
+ # LD_LIBRARY_PATH containing a "vanilla" build of expat (without the
+ # symbol) (rhbz#833271):
+ EXPAT_RPATH = '/usr/lib64' if sys.maxint == 0x7fffffffffffffff else '/usr/lib'
+
+
exts.append(Extension('pyexpat',
define_macros = define_macros,
include_dirs = expat_inc,
libraries = expat_lib,
sources = ['pyexpat.c'] + expat_sources,
depends = expat_depends,
+ extra_link_args = ['-Wl,-rpath,%s' % EXPAT_RPATH]
))
# Fredrik Lundh's cElementTree module. Note that this also
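A quick way to check the effect of the RPATH above on an installed build (a sketch; the reported version depends on the system expat): pyexpat should import and report the system expat it was linked against, even with an LD_LIBRARY_PATH pointing at an older expat build.

# Sanity check (sketch): pyexpat imports and reports the linked expat version.
import pyexpat

print(pyexpat.EXPAT_VERSION)   # e.g. "expat_2.1.0" -- value depends on system
print(pyexpat.version_info)    # e.g. (2, 1, 0)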

View File

@ -0,0 +1,12 @@
diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py
index 1bb6690..28ed25d 100644
--- a/Lib/test/test_smtplib.py
+++ b/Lib/test/test_smtplib.py
@@ -182,6 +182,7 @@ class DebuggingServerTests(unittest.TestCase):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.quit()
+ @unittest._skipInRpmBuild("Does not work in network-free environment")
def testNOOP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
expected = (250, 'Ok')
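The _skipInRpmBuild decorator used above is added by a separate downstream patch that is not part of this file. Purely as an illustration, a decorator of that shape could look like the sketch below; the environment variable name is an assumption, not taken from this commit.

# Hypothetical sketch of a _skipInRpmBuild-style decorator; the real one is
# provided by another downstream patch, and WITHIN_PYTHON_RPM_BUILD is assumed.
import os
import unittest

def _skipInRpmBuild(reason):
    if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
        return unittest.skip(reason)
    return lambda test_item: test_item   # outside an rpm build: no-op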

View File

@ -0,0 +1,11 @@
--- Python-2.7.5/setup.py.orig 2013-05-11 20:32:54.000000000 -0700
+++ Python-2.7.5/setup.py 2014-02-18 14:16:07.999004901 -0800
@@ -1168,7 +1168,7 @@ class PyBuildExt(build_ext):
sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
# Comment this out if you want the sqlite3 module to be able to load extensions.
- sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
+ #sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
if host_platform == 'darwin':
# In every directory on the search path search for a dynamic
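With SQLITE_OMIT_LOAD_EXTENSION no longer defined, sqlite3.Connection exposes enable_load_extension() and load_extension(). A hedged example; the extension path is a placeholder, not something shipped by this package.

# Sketch: loadable-extension support enabled by the change above.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.enable_load_extension(True)
# conn.load_extension('/usr/lib64/sqlite3/pcre.so')  # hypothetical extension
conn.enable_load_extension(False)   # re-disable once loading is done
conn.close()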

View File

@ -0,0 +1,249 @@
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 5021ebf..63c763a 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -7,6 +7,7 @@ import pkgutil
import shutil
import sys
import tempfile
+from ensurepip import rewheel
__all__ = ["version", "bootstrap"]
@@ -29,6 +30,8 @@ def _run_pip(args, additional_paths=None):
# Install the bundled software
import pip._internal
+ if args[0] in ["install", "list", "wheel"]:
+ args.append('--pre')
return pip._internal.main(args)
@@ -93,21 +96,40 @@ def _bootstrap(root=None, upgrade=False, user=False,
# omit pip and easy_install
os.environ["ENSUREPIP_OPTIONS"] = "install"
+ whls = []
+ rewheel_dir = None
+ # try to see if we have system-wide versions of _PROJECTS
+ dep_records = rewheel.find_system_records([p[0] for p in _PROJECTS])
+ # TODO: check if system-wide versions are the newest ones
+ # if --upgrade is used?
+ if all(dep_records):
+ # if we have all _PROJECTS installed system-wide, we'll recreate
+ # wheels from them and install those
+ rewheel_dir = tempfile.mkdtemp()
+ for dr in dep_records:
+ new_whl = rewheel.rewheel_from_record(dr, rewheel_dir)
+ whls.append(os.path.join(rewheel_dir, new_whl))
+ else:
+ # if we don't have all the _PROJECTS installed system-wide,
+ # let's just fall back to bundled wheels
+ for project, version in _PROJECTS:
+ whl = os.path.join(
+ os.path.dirname(__file__),
+ "_bundled",
+ "{}-{}-py2.py3-none-any.whl".format(project, version)
+ )
+ whls.append(whl)
+
tmpdir = tempfile.mkdtemp()
try:
# Put our bundled wheels into a temporary directory and construct the
# additional paths that need added to sys.path
additional_paths = []
- for project, version in _PROJECTS:
- wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
- whl = pkgutil.get_data(
- "ensurepip",
- "_bundled/{}".format(wheel_name),
- )
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
- fp.write(whl)
-
- additional_paths.append(os.path.join(tmpdir, wheel_name))
+ for whl in whls:
+ shutil.copy(whl, tmpdir)
+ additional_paths.append(os.path.join(tmpdir, os.path.basename(whl)))
+ if rewheel_dir:
+ shutil.rmtree(rewheel_dir)
# Construct the arguments to be passed to the pip command
args = ["install", "--no-index", "--find-links", tmpdir]
diff --git a/Lib/ensurepip/rewheel/__init__.py b/Lib/ensurepip/rewheel/__init__.py
new file mode 100644
index 0000000..75c2094
--- /dev/null
+++ b/Lib/ensurepip/rewheel/__init__.py
@@ -0,0 +1,158 @@
+import argparse
+import codecs
+import csv
+import email.parser
+import os
+import io
+import re
+import site
+import subprocess
+import sys
+import zipfile
+
+def run():
+ parser = argparse.ArgumentParser(description='Recreate wheel of package with given RECORD.')
+ parser.add_argument('record_path',
+ help='Path to RECORD file')
+ parser.add_argument('-o', '--output-dir',
+ help='Dir where to place the wheel, defaults to current working dir.',
+ dest='outdir',
+ default=os.path.curdir)
+
+ ns = parser.parse_args()
+ retcode = 0
+ try:
+ print(rewheel_from_record(**vars(ns)))
+ except BaseException as e:
+ print('Failed: {}'.format(e))
+ retcode = 1
+ sys.exit(retcode)
+
+def find_system_records(projects):
+ """Return list of paths to RECORD files for system-installed projects.
+
+ If a project is not installed, the resulting list contains None instead
+ of a path to its RECORD
+ """
+ records = []
+ # get system site-packages dirs
+ if hasattr(sys, 'real_prefix'):
+ #we are in python2 virtualenv and sys.real_prefix is the original sys.prefix
+ _orig_prefixes = site.PREFIXES
+ setattr(site, 'PREFIXES', [sys.real_prefix]*2)
+ sys_sitepack = site.getsitepackages()
+ setattr(site, 'PREFIXES', _orig_prefixes)
+ elif hasattr(sys, 'base_prefix'): # python3 venv doesn't inject real_prefix to sys
+ # we are on python3 and base(_exec)_prefix is unchanged in venv
+ sys_sitepack = site.getsitepackages([sys.base_prefix, sys.base_exec_prefix])
+ else:
+ # we are in python2 without virtualenv
+ sys_sitepack = site.getsitepackages()
+
+ sys_sitepack = [sp for sp in sys_sitepack if os.path.exists(sp)]
+ # try to find all projects in all system site-packages
+ for project in projects:
+ path = None
+ for sp in sys_sitepack:
+ dist_info_re = os.path.join(sp, project) + '-[^\{0}]+\.dist-info'.format(os.sep)
+ candidates = [os.path.join(sp, p) for p in os.listdir(sp)]
+ # filter out candidate dirs based on the above regexp
+ filtered = [c for c in candidates if re.match(dist_info_re, c)]
+ # if we have 0 or 2 or more dirs, something is wrong...
+ if len(filtered) == 1:
+ path = filtered[0]
+ if path is not None:
+ records.append(os.path.join(path, 'RECORD'))
+ else:
+ records.append(None)
+ return records
+
+def rewheel_from_record(record_path, outdir):
+ """Recreates a whee of package with given record_path and returns path
+ to the newly created wheel."""
+ site_dir = os.path.dirname(os.path.dirname(record_path))
+ record_relpath = record_path[len(site_dir):].strip(os.path.sep)
+ to_write, to_omit = get_records_to_pack(site_dir, record_relpath)
+ new_wheel_name = get_wheel_name(record_path)
+ new_wheel_path = os.path.join(outdir, new_wheel_name + '.whl')
+
+ new_wheel = zipfile.ZipFile(new_wheel_path, mode='w', compression=zipfile.ZIP_DEFLATED)
+ # we need to write a new record with just the files that we will write,
+ # e.g. not binaries and *.pyc/*.pyo files
+ if sys.version_info[0] < 3:
+ new_record = io.BytesIO()
+ else:
+ new_record = io.StringIO()
+ writer = csv.writer(new_record)
+
+ # handle files that we can write straight away
+ for f, sha_hash, size in to_write:
+ new_wheel.write(os.path.join(site_dir, f), arcname=f)
+ writer.writerow([f, sha_hash,size])
+
+ # rewrite the old wheel file with a new computed one
+ writer.writerow([record_relpath, '', ''])
+ new_wheel.writestr(record_relpath, new_record.getvalue())
+
+ new_wheel.close()
+
+ return new_wheel.filename
+
+def get_wheel_name(record_path):
+ """Return proper name of the wheel, without .whl."""
+
+ wheel_info_path = os.path.join(os.path.dirname(record_path), 'WHEEL')
+ with codecs.open(wheel_info_path, encoding='utf-8') as wheel_info_file:
+ wheel_info = email.parser.Parser().parsestr(wheel_info_file.read().encode('utf-8'))
+
+ metadata_path = os.path.join(os.path.dirname(record_path), 'METADATA')
+ with codecs.open(metadata_path, encoding='utf-8') as metadata_file:
+ metadata = email.parser.Parser().parsestr(metadata_file.read().encode('utf-8'))
+
+ # construct name parts according to wheel spec
+ distribution = metadata.get('Name')
+ version = metadata.get('Version')
+ build_tag = '' # nothing for now
+ lang_tag = []
+ for t in wheel_info.get_all('Tag'):
+ lang_tag.append(t.split('-')[0])
+ lang_tag = '.'.join(lang_tag)
+ abi_tag, plat_tag = wheel_info.get('Tag').split('-')[1:3]
+ # leave out build tag, if it is empty
+ to_join = filter(None, [distribution, version, build_tag, lang_tag, abi_tag, plat_tag])
+ return '-'.join(list(to_join))
+
+def get_records_to_pack(site_dir, record_relpath):
+ """Accepts path of sitedir and path of RECORD file relative to it.
+ Returns two lists:
+ - list of files that can be written to new RECORD straight away
+ - list of files that shouldn't be written or need some processing
+ (pyc and pyo files, scripts)
+ """
+ record_file_path = os.path.join(site_dir, record_relpath)
+ with codecs.open(record_file_path, encoding='utf-8') as record_file:
+ record_contents = record_file.read()
+ # temporary fix for https://github.com/pypa/pip/issues/1376
+ # we need to ignore files under ".data" directory
+ data_dir = os.path.dirname(record_relpath).strip(os.path.sep)
+ data_dir = data_dir[:-len('dist-info')] + 'data'
+
+ to_write = []
+ to_omit = []
+ for l in record_contents.splitlines():
+ spl = l.split(',')
+ if len(spl) == 3:
+ # new record will omit (or write differently):
+ # - abs paths, paths with ".." (entry points),
+ # - pyc+pyo files
+ # - the old RECORD file
+ # TODO: is there any better way to recognize an entry point?
+ if os.path.isabs(spl[0]) or spl[0].startswith('..') or \
+ spl[0].endswith('.pyc') or spl[0].endswith('.pyo') or \
+ spl[0] == record_relpath or spl[0].startswith(data_dir):
+ to_omit.append(spl)
+ else:
+ to_write.append(spl)
+ else:
+ pass # bad RECORD or empty line
+ return to_write, to_omit
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 877698c..2c43611 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -1065,7 +1065,7 @@ LIBSUBDIRS= lib-tk lib-tk/test lib-tk/test/test_tkinter \
test/tracedmodules \
encodings compiler hotshot \
email email/mime email/test email/test/data \
- ensurepip ensurepip/_bundled \
+ ensurepip ensurepip/_bundled ensurepip/rewheel\
json json/tests \
sqlite3 sqlite3/test \
logging bsddb bsddb/test csv importlib wsgiref \
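Roughly what the new rewheel path does, runnable by hand on a patched interpreter (a sketch; it assumes ensurepip.rewheel is installed and that pip/setuptools have system-wide .dist-info RECORD files):

# Sketch of the rewheel workflow added above, outside of ensurepip._bootstrap.
import tempfile
from ensurepip import rewheel

records = rewheel.find_system_records(['pip', 'setuptools'])
if all(records):
    outdir = tempfile.mkdtemp()
    for record in records:
        # rebuild a wheel from the installed files listed in RECORD
        print(rewheel.rewheel_from_record(record, outdir))
else:
    print('not all projects are installed system-wide; '
          'ensurepip would fall back to the bundled wheels')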

View File

@ -0,0 +1,20 @@
diff --git a/Lib/threading.py b/Lib/threading.py
index e4c7f35..91b3849 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -351,13 +351,14 @@ class _Condition(_Verbose):
gotit = waiter.acquire(0)
if gotit:
break
- remaining = endtime - _time()
+ remaining = min(endtime - _time(), timeout)
if remaining <= 0:
break
if balancing:
delay = min(delay * 2, remaining, 0.05)
else:
delay = remaining
+ endtime = _time() + remaining
_sleep(delay)
if not gotit:
if __debug__:

View File

@ -0,0 +1,52 @@
--- Python-2.7.15-orig/Python/pythonrun.c
+++ Python-2.7.15/Python/pythonrun.c
@@ -180,6 +182,49 @@
char buf[128];
#endif
extern void _Py_ReadyTypes(void);
+ char *py2_allow_flag = getenv("RHEL_ALLOW_PYTHON2_FOR_BUILD");
+
+ // Fail unless a specific workaround is applied
+ if ((!py2_allow_flag || strcmp(py2_allow_flag, "1") != 0)
+ && (strstr(Py_GetProgramName(), "for-tests") == NULL)
+ ) {
+ fprintf(stderr,
+ "\n"
+ "ERROR: Python 2 is disabled in RHEL8.\n"
+ "\n"
+ "- For guidance on porting to Python 3, see the\n"
+ " Conservative Python3 Porting Guide:\n"
+ " http://portingguide.readthedocs.io/\n"
+ "\n"
+ "- If you need Python 2 at runtime:\n"
+ " - Use the python27 module\n"
+ "\n"
+ "- If you do not have access to BZ#1533919:\n"
+ " - Use the python27 module\n"
+ "\n"
+ "- If you need to use Python 2 only at RPM build time:\n"
+ " - File a bug blocking BZ#1533919:\n"
+ " https://bugzilla.redhat.com/show_bug.cgi?id=1533919\n"
+ " - Set the environment variable RHEL_ALLOW_PYTHON2_FOR_BUILD=1\n"
+ " (Note that if you do not file the bug as above,\n"
+ " this workaround will break without warning in the future.)\n"
+ "\n"
+ "- If you need to use Python 2 only for tests:\n"
+ " - File a bug blocking BZ#1533919:\n"
+ " https://bugzilla.redhat.com/show_bug.cgi?id=1533919\n"
+ " (If your test tool does not have a Bugzilla component,\n"
+ " feel free to use `python2`.)\n"
+ " - Use /usr/bin/python2-for-tests instead of python2 to run\n"
+ " your tests.\n"
+ " (Note that if you do not file the bug as above,\n"
+ " this workaround will break without warning in the future.)\n"
+ "\n"
+ "For details, see https://hurl.corp.redhat.com/rhel8-py2\n"
+ "\n"
+ );
+ fflush(stderr);
+ Py_FatalError("Python 2 is disabled");
+ }
if (initialized)
return;
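The build-time escape hatch described in the error message above, exercised from a small driver (a sketch; it only sets the environment variable before spawning python2):

# Sketch: run python2 with the build-time override described above.
import os
import subprocess

env = dict(os.environ, RHEL_ALLOW_PYTHON2_FOR_BUILD='1')
subprocess.check_call(['python2', '-c', 'print("python2 allowed for build")'],
                      env=env)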

View File

@ -0,0 +1,69 @@
diff --git a/setup.py b/setup.py
index 585e380..9993f11 100644
--- a/setup.py
+++ b/setup.py
@@ -1346,11 +1346,7 @@ class PyBuildExt(build_ext):
else:
missing.append('resource')
- nis = self._detect_nis(inc_dirs, lib_dirs)
- if nis is not None:
- exts.append(nis)
- else:
- missing.append('nis')
+ # nis (Sun yellow pages) is handled in Setup.dist
# Curses support, requiring the System V version of curses, often
# provided by the ncurses library.
@@ -2162,51 +2158,6 @@ class PyBuildExt(build_ext):
# for dlopen, see bpo-32647
ext.libraries.append('dl')
- def _detect_nis(self, inc_dirs, lib_dirs):
- if host_platform in {'win32', 'cygwin', 'qnx6'}:
- return None
-
- libs = []
- library_dirs = []
- includes_dirs = []
-
- # bpo-32521: glibc has deprecated Sun RPC for some time. Fedora 28
- # moved headers and libraries to libtirpc and libnsl. The headers
- # are in tircp and nsl sub directories.
- rpcsvc_inc = find_file(
- 'rpcsvc/yp_prot.h', inc_dirs,
- [os.path.join(inc_dir, 'nsl') for inc_dir in inc_dirs]
- )
- rpc_inc = find_file(
- 'rpc/rpc.h', inc_dirs,
- [os.path.join(inc_dir, 'tirpc') for inc_dir in inc_dirs]
- )
- if rpcsvc_inc is None or rpc_inc is None:
- # not found
- return None
- includes_dirs.extend(rpcsvc_inc)
- includes_dirs.extend(rpc_inc)
-
- if self.compiler.find_library_file(lib_dirs, 'nsl'):
- libs.append('nsl')
- else:
- # libnsl-devel: check for libnsl in nsl/ subdirectory
- nsl_dirs = [os.path.join(lib_dir, 'nsl') for lib_dir in lib_dirs]
- libnsl = self.compiler.find_library_file(nsl_dirs, 'nsl')
- if libnsl is not None:
- library_dirs.append(os.path.dirname(libnsl))
- libs.append('nsl')
-
- if self.compiler.find_library_file(lib_dirs, 'tirpc'):
- libs.append('tirpc')
-
- return Extension(
- 'nis', ['nismodule.c'],
- libraries=libs,
- library_dirs=library_dirs,
- include_dirs=includes_dirs
- )
-
class PyBuildInstall(install):
# Suppress the warning about installation into the lib_dynload

View File

@ -0,0 +1,21 @@
diff -urN Python-2.7.13/Modules/Setup.dist Python-2.7.13_modul/Modules/Setup.dist
--- Python-2.7.13/Modules/Setup.dist 2017-04-21 14:57:13.767444374 +0200
+++ Python-2.7.13_modul/Modules/Setup.dist 2017-04-21 14:56:49.658953833 +0200
@@ -326,7 +326,7 @@
# every system.
# *** Always uncomment this (leave the leading underscore in!):
-_tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \
+#_tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \
# *** Uncomment and edit to reflect where your Tcl/Tk libraries are:
# -L/usr/local/lib \
# *** Uncomment and edit to reflect where your Tcl/Tk headers are:
@@ -345,7 +345,7 @@
# *** Uncomment and edit for TOGL extension only:
# -DWITH_TOGL togl.c \
# *** Uncomment and edit to reflect your Tcl/Tk versions:
- -ltk -ltcl \
+# -ltk -ltcl \
# *** Uncomment and edit to reflect where your X11 libraries are:
# -L/usr/X11R6/lib \
# *** Or uncomment this for Solaris:

View File

@ -0,0 +1,207 @@
diff -up ./configure.autotool-intermediates ./configure
--- ./configure.autotool-intermediates 2013-04-09 11:24:01.024185796 +0200
+++ ./configure 2013-04-09 11:24:01.780183954 +0200
@@ -639,6 +639,8 @@ TRUE
MACHDEP_OBJS
DYNLOADFILE
DLINCLDIR
+DTRACEHDRS
+DTRACEOBJS
THREADOBJ
LDLAST
USE_THREAD_MODULE
@@ -659,6 +661,8 @@ OTHER_LIBTOOL_OPT
UNIVERSAL_ARCH_FLAGS
BASECFLAGS
OPT
+DEBUG_SUFFIX
+DEBUG_EXT
LN
MKDIR_P
INSTALL_DATA
@@ -795,8 +799,11 @@ with_pth
enable_ipv6
with_doc_strings
with_tsc
+with_count_allocs
+with_call_profile
with_pymalloc
with_valgrind
+with_dtrace
with_wctype_functions
with_fpectl
with_libm
@@ -1472,8 +1479,11 @@ Optional Packages:
--with-pth use GNU pth threading libraries
--with(out)-doc-strings disable/enable documentation strings
--with(out)-tsc enable/disable timestamp counter profile
+ --with(out)count-allocs enable/disable per-type instance accounting
+ --with(out)-call-profile enable/disable statistics on function call invocation
--with(out)-pymalloc disable/enable specialized mallocs
--with-valgrind Enable Valgrind support
+ --with(out)-dtrace disable/enable dtrace support
--with-wctype-functions use wctype.h functions
--with-fpectl enable SIGFPE catching
--with-libm=STRING math library
@@ -5343,8 +5353,8 @@ $as_echo "#define Py_ENABLE_SHARED 1" >>
INSTSONAME="$LDLIBRARY".$SOVERSION
;;
Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*)
- LDLIBRARY='libpython$(VERSION).so'
- BLDLIBRARY='-L. -lpython$(VERSION)'
+ LDLIBRARY='libpython$(VERSION)$(DEBUG_EXT).so'
+ BLDLIBRARY='-L. -lpython$(VERSION)$(DEBUG_EXT)'
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
case $ac_sys_system in
FreeBSD*)
@@ -5367,7 +5377,7 @@ $as_echo "#define Py_ENABLE_SHARED 1" >>
;;
OSF*)
LDLIBRARY='libpython$(VERSION).so'
- BLDLIBRARY='-rpath $(LIBDIR) -L. -lpython$(VERSION)'
+ BLDLIBRARY='-L. -lpython$(VERSION)'
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
;;
atheos*)
@@ -5894,6 +5904,14 @@ $as_echo "no" >&6; }
fi
+if test "$Py_DEBUG" = 'true'
+then
+ DEBUG_EXT=_d
+ DEBUG_SUFFIX=-debug
+fi
+
+
+
# XXX Shouldn't the code above that fiddles with BASECFLAGS and OPT be
# merged with this chunk of code?
@@ -9958,6 +9976,50 @@ $as_echo "no" >&6; }
fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-count-allocs" >&5
+$as_echo_n "checking for --with-count-allocs... " >&6; }
+
+# Check whether --with-count-allocs was given.
+if test "${with_count_allocs+set}" = set; then :
+ withval=$with_count_allocs;
+if test "$withval" != no
+then
+
+$as_echo "#define COUNT_ALLOCS 1" >>confdefs.h
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-call-profile" >&5
+$as_echo_n "checking for --with-call-profile... " >&6; }
+
+# Check whether --with-call-profile was given.
+if test "${with_call_profile+set}" = set; then :
+ withval=$with_call_profile;
+if test "$withval" != no
+then
+
+$as_echo "#define CALL_PROFILE 1" >>confdefs.h
+
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
# Check for Python-specific malloc support
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-pymalloc" >&5
$as_echo_n "checking for --with-pymalloc... " >&6; }
@@ -10007,6 +10069,46 @@ fi
fi
+# Check for dtrace support
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-dtrace" >&5
+$as_echo_n "checking for --with-dtrace... " >&6; }
+
+# Check whether --with-dtrace was given.
+if test "${with_dtrace+set}" = set; then :
+ withval=$with_dtrace;
+fi
+
+
+if test ! -z "$with_dtrace"
+then
+ if dtrace -G -o /dev/null -s $srcdir/Include/pydtrace.d 2>/dev/null
+ then
+
+$as_echo "#define WITH_DTRACE 1" >>confdefs.h
+
+ with_dtrace="Sun"
+ DTRACEOBJS="Python/dtrace.o"
+ DTRADEHDRS=""
+ elif dtrace -h -o /dev/null -s $srcdir/Include/pydtrace.d
+ then
+
+$as_echo "#define WITH_DTRACE 1" >>confdefs.h
+
+ with_dtrace="Apple"
+ DTRACEOBJS=""
+ DTRADEHDRS="pydtrace.h"
+ else
+ with_dtrace="no"
+ fi
+else
+ with_dtrace="no"
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_dtrace" >&5
+$as_echo "$with_dtrace" >&6; }
+
+
+
# Check for --with-wctype-functions
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-wctype-functions" >&5
$as_echo_n "checking for --with-wctype-functions... " >&6; }
diff -up ./pyconfig.h.in.autotool-intermediates ./pyconfig.h.in
--- ./pyconfig.h.in.autotool-intermediates 2013-04-09 11:24:01.020185806 +0200
+++ ./pyconfig.h.in 2013-04-09 11:24:02.088183204 +0200
@@ -18,6 +18,12 @@
/* Define this if you have BeOS threads. */
#undef BEOS_THREADS
+/* Define to keep records on function call invocation */
+#undef CALL_PROFILE
+
+/* Define to keep records of the number of instances of each type */
+#undef COUNT_ALLOCS
+
/* Define if you have the Mach cthreads package */
#undef C_THREADS
@@ -1119,12 +1125,6 @@
/* Define to profile with the Pentium timestamp counter */
#undef WITH_TSC
-/* Define to keep records of the number of instances of each type */
-#undef COUNT_ALLOCS
-
-/* Define to keep records on function call invocation */
-#undef CALL_PROFILE
-
/* Define if you want pymalloc to be disabled when running under valgrind */
#undef WITH_VALGRIND

28
SOURCES/get-source.sh Executable file
View File

@ -0,0 +1,28 @@
#! /bin/bash -ex
# Download a release of Python (if missing) and remove .exe files from it
version=$1
if [ -z "${version}" ]; then
echo "Usage: $0 VERSION" >& 2
echo "" >& 2
echo "example: $0 2.7.15" >& 2
exit 1
fi
versionedname=Python-${version}
orig_archive=${versionedname}.tar.xz
new_archive=${versionedname}-noexe.tar.xz
if [ ! -e ${orig_archive} ]; then
wget -N https://www.python.org/ftp/python/${version}/${orig_archive}
fi
deleted_names=$(tar --list -Jf ${orig_archive} | grep '\.exe$')
# tar --delete does not operate on compressed archives, so do
# xz compression/decompression explicitly
xz --decompress --stdout ${orig_archive} | \
tar --delete -v ${deleted_names} | \
xz --compress --stdout -3 -T0 > ${new_archive}

17
SOURCES/libpython.stp Normal file
View File

@ -0,0 +1,17 @@
/* Systemtap tapset to make it easier to trace Python */
/*
Define python.function.entry/return:
*/
probe python.function.entry = process("python").library("LIBRARY_PATH").mark("function__entry")
{
filename = user_string($arg1);
funcname = user_string($arg2);
lineno = $arg3;
}
probe python.function.return = process("python").library("LIBRARY_PATH").mark("function__return")
{
filename = user_string($arg1);
funcname = user_string($arg2);
lineno = $arg3;
}

21
SOURCES/pyfuntop.stp Normal file
View File

@ -0,0 +1,21 @@
#!/usr/bin/stap
global fn_calls;
probe python.function.entry
{
fn_calls[pid(), filename, funcname, lineno] += 1;
}
probe timer.ms(1000) {
printf("\033[2J\033[1;1H") /* clear screen */
printf("%6s %80s %6s %30s %6s\n",
"PID", "FILENAME", "LINE", "FUNCTION", "CALLS")
foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) {
printf("%6d %80s %6d %30s %6d\n",
pid, filename, lineno, funcname,
fn_calls[pid, filename, funcname, lineno]);
}
delete fn_calls;
}

2
SOURCES/pynche Normal file
View File

@ -0,0 +1,2 @@
#!/bin/bash
exec `python2 -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific = True))"`/pynche/pynche

View File

@ -0,0 +1,11 @@
--- Python-2.5c1/Makefile.pre.in.cflags 2006-08-18 11:05:40.000000000 -0400
+++ Python-2.5c1/Makefile.pre.in 2006-08-18 11:09:26.000000000 -0400
@@ -334,7 +334,7 @@
# Build the interpreter
$(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY)
- $(LINKCC) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
+ $(LINKCC) $(CFLAGS) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
Modules/python.o \
$(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)

View File

@ -0,0 +1,12 @@
diff -up Python-2.5.1/Lib/gettext.py.plural Python-2.5.1/Lib/gettext.py
--- Python-2.5.1/Lib/gettext.py.plural 2007-09-10 11:38:57.000000000 -0400
+++ Python-2.5.1/Lib/gettext.py 2007-09-10 11:39:00.000000000 -0400
@@ -299,6 +299,8 @@ class GNUTranslations(NullTranslations):
item = item.strip()
if not item:
continue
+ if item.startswith("#"):
+ continue
k = v = None
if ':' in item:
k, v = item.split(':', 1)

View File

@ -0,0 +1,24 @@
diff -up Python-2.5.1/Lib/sqlite3/dbapi2.py.encoding Python-2.5.1/Lib/sqlite3/dbapi2.py
--- Python-2.5.1/Lib/sqlite3/dbapi2.py.encoding 2007-09-14 10:41:50.000000000 -0400
+++ Python-2.5.1/Lib/sqlite3/dbapi2.py 2007-09-14 10:42:00.000000000 -0400
@@ -1,7 +1,6 @@
-# -*- coding: iso-8859-1 -*-
# pysqlite2/dbapi2.py: the DB-API 2.0 interface
#
-# Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
+# Copyright (C) 2004-2005 Gerhard Haering <gh@ghaering.de>
#
# This file is part of pysqlite.
#
diff -up Python-2.5.1/Lib/sqlite3/__init__.py.encoding Python-2.5.1/Lib/sqlite3/__init__.py
--- Python-2.5.1/Lib/sqlite3/__init__.py.encoding 2007-09-14 10:41:47.000000000 -0400
+++ Python-2.5.1/Lib/sqlite3/__init__.py 2007-09-14 10:42:06.000000000 -0400
@@ -1,7 +1,6 @@
-#-*- coding: ISO-8859-1 -*-
# pysqlite2/__init__.py: the pysqlite2 package.
#
-# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+# Copyright (C) 2005 Gerhard Haering <gh@ghaering.de>
#
# This file is part of pysqlite.
#

View File

@ -0,0 +1,12 @@
diff -up Python-2.6/configure.ac.rpath Python-2.6/configure.ac
--- Python-2.6/configure.ac.rpath 2008-11-24 02:51:06.000000000 -0500
+++ Python-2.6/configure.ac 2008-11-24 02:51:21.000000000 -0500
@@ -729,7 +729,7 @@ if test $enable_shared = "yes"; then
;;
OSF*)
LDLIBRARY='libpython$(VERSION).so'
- BLDLIBRARY='-rpath $(LIBDIR) -L. -lpython$(VERSION)'
+ BLDLIBRARY='-L. -lpython$(VERSION)'
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
;;
atheos*)

View File

@ -0,0 +1,20 @@
diff -up Python-2.6.4/Lib/distutils/unixccompiler.py.distutils-rpath Python-2.6.4/Lib/distutils/unixccompiler.py
--- Python-2.6.4/Lib/distutils/unixccompiler.py.distutils-rpath 2009-09-09 04:34:06.000000000 -0400
+++ Python-2.6.4/Lib/distutils/unixccompiler.py 2010-03-15 21:33:25.000000000 -0400
@@ -142,6 +142,16 @@ class UnixCCompiler(CCompiler):
if sys.platform == "cygwin":
exe_extension = ".exe"
+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+ """Remove standard library path from rpath"""
+ libraries, library_dirs, runtime_library_dirs = \
+ CCompiler._fix_lib_args(self, libraries, library_dirs,
+ runtime_library_dirs)
+ libdir = sysconfig.get_config_var('LIBDIR')
+ if runtime_library_dirs and (libdir in runtime_library_dirs):
+ runtime_library_dirs.remove(libdir)
+ return libraries, library_dirs, runtime_library_dirs
+
def preprocess(self, source,
output_file=None, macros=None, include_dirs=None,
extra_preargs=None, extra_postargs=None):

View File

@ -0,0 +1,44 @@
diff -up Python-2.7/Lib/sysconfig.py.lib64-sysconfig Python-2.7/Lib/sysconfig.py
--- Python-2.7/Lib/sysconfig.py.lib64-sysconfig 2010-07-08 14:18:41.386898476 -0400
+++ Python-2.7/Lib/sysconfig.py 2010-07-08 14:22:02.837896461 -0400
@@ -7,20 +7,20 @@ from os.path import pardir, realpath
_INSTALL_SCHEMES = {
'posix_prefix': {
- 'stdlib': '{base}/lib/python{py_version_short}',
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
+ 'stdlib': '{base}/lib64/python{py_version_short}',
+ 'platstdlib': '{platbase}/lib64/python{py_version_short}',
'purelib': '{base}/lib/python{py_version_short}/site-packages',
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
+ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages',
'include': '{base}/include/python{py_version_short}',
'platinclude': '{platbase}/include/python{py_version_short}',
'scripts': '{base}/bin',
'data': '{base}',
},
'posix_home': {
- 'stdlib': '{base}/lib/python',
- 'platstdlib': '{base}/lib/python',
+ 'stdlib': '{base}/lib64/python',
+ 'platstdlib': '{base}/lib64/python',
'purelib': '{base}/lib/python',
- 'platlib': '{base}/lib/python',
+ 'platlib': '{base}/lib64/python',
'include': '{base}/include/python',
'platinclude': '{base}/include/python',
'scripts': '{base}/bin',
@@ -65,10 +65,10 @@ _INSTALL_SCHEMES = {
'data' : '{userbase}',
},
'posix_user': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
+ 'stdlib': '{userbase}/lib64/python{py_version_short}',
+ 'platstdlib': '{userbase}/lib64/python{py_version_short}',
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
+ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages',
'include': '{userbase}/include/python{py_version_short}',
'scripts': '{userbase}/bin',
'data' : '{userbase}',
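Effect of the lib64 scheme changes above on a 64-bit install, shown via sysconfig (a sketch; exact paths depend on the build and assume prefix /usr with Python 2.7):

# Sketch: paths reported by the patched posix_prefix scheme.
import sysconfig

print(sysconfig.get_path('platstdlib'))  # /usr/lib64/python2.7
print(sysconfig.get_path('platlib'))     # /usr/lib64/python2.7/site-packages
print(sysconfig.get_path('purelib'))     # /usr/lib/python2.7/site-packages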

View File

@ -0,0 +1,284 @@
diff --git a/Modules/Setup.dist b/Modules/Setup.dist
index bbc9222..2cf35a9 100644
--- a/Modules/Setup.dist
+++ b/Modules/Setup.dist
@@ -153,7 +153,7 @@ GLHACK=-Dclear=__GLclear
# modules are to be built as shared libraries (see above for more
# detail; also note that *static* reverses this effect):
-#*shared*
+*shared*
# GNU readline. Unlike previous Python incarnations, GNU readline is
# now incorporated in an optional module, configured in the Setup file
@@ -163,33 +163,33 @@ GLHACK=-Dclear=__GLclear
# it, depending on your system -- see the GNU readline instructions.
# It's okay for this to be a shared library, too.
-#readline readline.c -lreadline -ltermcap
+readline readline.c -lreadline -ltermcap
# Modules that should always be present (non UNIX dependent):
-#array arraymodule.c # array objects
-#cmath cmathmodule.c _math.c # -lm # complex math library functions
-#math mathmodule.c _math.c # -lm # math library functions, e.g. sin()
-#_struct _struct.c # binary structure packing/unpacking
-#time timemodule.c # -lm # time operations and variables
-#operator operator.c # operator.add() and similar goodies
-#_testcapi _testcapimodule.c # Python C API test module
-#_random _randommodule.c # Random number generator
-#_collections _collectionsmodule.c # Container types
+array arraymodule.c # array objects
+cmath cmathmodule.c _math.c # -lm # complex math library functions
+math mathmodule.c _math.c # -lm # math library functions, e.g. sin()
+_struct _struct.c # binary structure packing/unpacking
+time timemodule.c # -lm # time operations and variables
+operator operator.c # operator.add() and similar goodies
+_testcapi _testcapimodule.c # Python C API test module
+_random _randommodule.c # Random number generator
+_collections _collectionsmodule.c # Container types
#_heapq _heapqmodule.c # Heapq type
-#itertools itertoolsmodule.c # Functions creating iterators for efficient looping
-#strop stropmodule.c # String manipulations
-#_functools _functoolsmodule.c # Tools for working with functions and callable objects
+itertools itertoolsmodule.c # Functions creating iterators for efficient looping
+strop stropmodule.c # String manipulations
+_functools _functoolsmodule.c # Tools for working with functions and callable objects
#_elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator
#_pickle _pickle.c # pickle accelerator
#datetime datetimemodule.c # date/time type
-#_bisect _bisectmodule.c # Bisection algorithms
+_bisect _bisectmodule.c # Bisection algorithms
-#unicodedata unicodedata.c # static Unicode character database
+unicodedata unicodedata.c # static Unicode character database
# access to ISO C locale support
-#_locale _localemodule.c # -lintl
+_locale _localemodule.c # -lintl
# Standard I/O baseline
#_io -I$(srcdir)/Modules/_io _io/bufferedio.c _io/bytesio.c _io/fileio.c _io/iobase.c _io/_iomodule.c _io/stringio.c _io/textio.c
@@ -199,41 +199,41 @@ GLHACK=-Dclear=__GLclear
# (If you have a really backward UNIX, select and socket may not be
# supported...)
-#fcntl fcntlmodule.c # fcntl(2) and ioctl(2)
-#spwd spwdmodule.c # spwd(3)
-#grp grpmodule.c # grp(3)
-#select selectmodule.c # select(2); not on ancient System V
+fcntl fcntlmodule.c # fcntl(2) and ioctl(2)
+spwd spwdmodule.c # spwd(3)
+grp grpmodule.c # grp(3)
+select selectmodule.c # select(2); not on ancient System V
# Memory-mapped files (also works on Win32).
-#mmap mmapmodule.c
+mmap mmapmodule.c
# CSV file helper
-#_csv _csv.c
+_csv _csv.c
# Socket module helper for socket(2)
-#_socket socketmodule.c timemodule.c
+_socket socketmodule.c timemodule.c
# Socket module helper for SSL support; you must comment out the other
# socket line above, and possibly edit the SSL variable:
#SSL=/usr/local/ssl
-#_ssl _ssl.c \
-# -DUSE_SSL -I$(SSL)/include -I$(SSL)/include/openssl \
-# -L$(SSL)/lib -lssl -lcrypto
+_ssl _ssl.c \
+ -DUSE_SSL -I$(SSL)/include -I$(SSL)/include/openssl \
+ -L$(SSL)/lib -lssl -lcrypto
# The crypt module is now disabled by default because it breaks builds
# on many systems (where -lcrypt is needed), e.g. Linux (I believe).
#
# First, look at Setup.config; configure may have set this for you.
-#crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems
+crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems
# Some more UNIX dependent modules -- off by default, since these
# are not supported by all UNIX systems:
-#nis nismodule.c -lnsl # Sun yellow pages -- not everywhere
-#termios termios.c # Steen Lumholt's termios module
-#resource resource.c # Jeremy Hylton's rlimit interface
+nis nismodule.c -lnsl -ltirpc -I/usr/include/tirpc -I/usr/include/nsl -L/usr/lib/nsl
+termios termios.c # Steen Lumholt's termios module
+resource resource.c # Jeremy Hylton's rlimit interface
# Multimedia modules -- off by default.
@@ -241,8 +241,8 @@ GLHACK=-Dclear=__GLclear
# #993173 says audioop works on 64-bit platforms, though.
# These represent audio samples or images as strings:
-#audioop audioop.c # Operations on audio samples
-#imageop imageop.c # Operations on images
+audioop audioop.c # Operations on audio samples
+imageop imageop.c # Operations on images
# Note that the _md5 and _sha modules are normally only built if the
@@ -252,14 +252,14 @@ GLHACK=-Dclear=__GLclear
# Message-Digest Algorithm, described in RFC 1321. The necessary files
# md5.c and md5.h are included here.
-#_md5 md5module.c md5.c
+_md5 md5module.c md5.c
# The _sha module implements the SHA checksum algorithms.
# (NIST's Secure Hash Algorithms.)
-#_sha shamodule.c
-#_sha256 sha256module.c
-#_sha512 sha512module.c
+_sha shamodule.c
+_sha256 sha256module.c
+_sha512 sha512module.c
# SGI IRIX specific modules -- off by default.
@@ -306,12 +306,12 @@ GLHACK=-Dclear=__GLclear
# A Linux specific module -- off by default; this may also work on
# some *BSDs.
-#linuxaudiodev linuxaudiodev.c
+linuxaudiodev linuxaudiodev.c
# George Neville-Neil's timing module:
-#timing timingmodule.c
+timing timingmodule.c
# The _tkinter module.
@@ -326,7 +326,7 @@ GLHACK=-Dclear=__GLclear
# every system.
# *** Always uncomment this (leave the leading underscore in!):
-# _tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \
+_tkinter _tkinter.c tkappinit.c -DWITH_APPINIT \
# *** Uncomment and edit to reflect where your Tcl/Tk libraries are:
# -L/usr/local/lib \
# *** Uncomment and edit to reflect where your Tcl/Tk headers are:
@@ -336,7 +336,7 @@ GLHACK=-Dclear=__GLclear
# *** Or uncomment this for Solaris:
# -I/usr/openwin/include \
# *** Uncomment and edit for Tix extension only:
-# -DWITH_TIX -ltix8.1.8.2 \
+ -DWITH_TIX -ltix \
# *** Uncomment and edit for BLT extension only:
# -DWITH_BLT -I/usr/local/blt/blt8.0-unoff/include -lBLT8.0 \
# *** Uncomment and edit for PIL (TkImaging) extension only:
@@ -345,7 +345,7 @@ GLHACK=-Dclear=__GLclear
# *** Uncomment and edit for TOGL extension only:
# -DWITH_TOGL togl.c \
# *** Uncomment and edit to reflect your Tcl/Tk versions:
-# -ltk8.2 -ltcl8.2 \
+ -ltk -ltcl \
# *** Uncomment and edit to reflect where your X11 libraries are:
# -L/usr/X11R6/lib \
# *** Or uncomment this for Solaris:
@@ -355,7 +355,7 @@ GLHACK=-Dclear=__GLclear
# *** Uncomment for AIX:
# -lld \
# *** Always uncomment this; X11 libraries to link with:
-# -lX11
+ -lX11
# Lance Ellinghaus's syslog module
#syslog syslogmodule.c # syslog daemon interface
@@ -377,7 +377,7 @@ GLHACK=-Dclear=__GLclear
# it is a highly experimental and dangerous device for calling
# *arbitrary* C functions in *arbitrary* shared libraries:
-#dl dlmodule.c
+dl dlmodule.c
# Modules that provide persistent dictionary-like semantics. You will
@@ -400,7 +400,7 @@ GLHACK=-Dclear=__GLclear
#
# First, look at Setup.config; configure may have set this for you.
-#gdbm gdbmmodule.c -I/usr/local/include -L/usr/local/lib -lgdbm
+gdbm gdbmmodule.c -lgdbm
# Sleepycat Berkeley DB interface.
@@ -415,11 +415,9 @@ GLHACK=-Dclear=__GLclear
#
# Edit the variables DB and DBLIBVERto point to the db top directory
# and the subdirectory of PORT where you built it.
-#DB=/usr/local/BerkeleyDB.4.0
-#DBLIBVER=4.0
-#DBINC=$(DB)/include
-#DBLIB=$(DB)/lib
-#_bsddb _bsddb.c -I$(DBINC) -L$(DBLIB) -ldb-$(DBLIBVER)
+DBINC=/usr/include/libdb
+DBLIB=/usr/lib
+_bsddb _bsddb.c -I$(DBINC) -L$(DBLIB) -ldb
# Historical Berkeley DB 1.85
#
@@ -434,14 +432,14 @@ GLHACK=-Dclear=__GLclear
# Helper module for various ascii-encoders
-#binascii binascii.c
+binascii binascii.c
# Fred Drake's interface to the Python parser
-#parser parsermodule.c
+parser parsermodule.c
# cStringIO and cPickle
-#cStringIO cStringIO.c
-#cPickle cPickle.c
+cStringIO cStringIO.c
+cPickle cPickle.c
# Lee Busby's SIGFPE modules.
@@ -464,7 +462,7 @@ GLHACK=-Dclear=__GLclear
# Andrew Kuchling's zlib module.
# This require zlib 1.1.3 (or later).
# See http://www.gzip.org/zlib/
-#zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz
+zlib zlibmodule.c -I$(prefix)/include -L$(exec_prefix)/lib -lz
# Interface to the Expat XML parser
# More information on Expat can be found at www.libexpat.org.
@@ -475,14 +473,14 @@ GLHACK=-Dclear=__GLclear
# Hye-Shik Chang's CJKCodecs
# multibytecodec is required for all the other CJK codec modules
-#_multibytecodec cjkcodecs/multibytecodec.c
-
-#_codecs_cn cjkcodecs/_codecs_cn.c
-#_codecs_hk cjkcodecs/_codecs_hk.c
-#_codecs_iso2022 cjkcodecs/_codecs_iso2022.c
-#_codecs_jp cjkcodecs/_codecs_jp.c
-#_codecs_kr cjkcodecs/_codecs_kr.c
-#_codecs_tw cjkcodecs/_codecs_tw.c
+_multibytecodec cjkcodecs/multibytecodec.c
+
+_codecs_cn cjkcodecs/_codecs_cn.c
+_codecs_hk cjkcodecs/_codecs_hk.c
+_codecs_iso2022 cjkcodecs/_codecs_iso2022.c
+_codecs_jp cjkcodecs/_codecs_jp.c
+_codecs_kr cjkcodecs/_codecs_kr.c
+_codecs_tw cjkcodecs/_codecs_tw.c
# Example -- included for reference only:
# xx xxmodule.c

View File

@ -0,0 +1,27 @@
diff -up Python-2.7.1/Lib/test/test_abc.py.cache_leak Python-2.7.1/Lib/test/test_abc.py
--- Python-2.7.1/Lib/test/test_abc.py.cache_leak 2010-12-28 18:06:35.551938356 -0500
+++ Python-2.7.1/Lib/test/test_abc.py 2010-12-28 18:09:09.021059202 -0500
@@ -3,6 +3,8 @@
"""Unit tests for abc.py."""
+import sys
+
import unittest, weakref
from test import test_support
@@ -229,8 +231,12 @@ class TestABC(unittest.TestCase):
# Trigger cache.
C().f()
del C
- test_support.gc_collect()
- self.assertEqual(r(), None)
+ # This doesn't work in our debug build, presumably due to its use
+ # of COUNT_ALLOCS, which makes heap-allocated types immortal (once
+ # they've ever had an instance):
+ if not hasattr(sys, 'getcounts'):
+ test_support.gc_collect()
+ self.assertEqual(r(), None)
def test_main():
test_support.run_unittest(TestABC)

View File

@ -0,0 +1,18 @@
diff -up Python-2.7.2/Misc/python-config.in.add-extension-suffix-to-python-config Python-2.7.2/Misc/python-config.in
--- Python-2.7.2/Misc/python-config.in.add-extension-suffix-to-python-config 2011-08-23 18:15:41.832497124 -0400
+++ Python-2.7.2/Misc/python-config.in 2011-08-23 18:17:25.854490011 -0400
@@ -6,7 +6,7 @@ import getopt
from distutils import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
- 'ldflags', 'help']
+ 'ldflags', 'extension-suffix', 'help']
def exit_with_usage(code=1):
print >>sys.stderr, "Usage: %s [%s]" % (sys.argv[0],
@@ -54,3 +54,5 @@ for opt in opt_flags:
libs.extend(getvar('LINKFORSHARED').split())
print ' '.join(libs)
+ elif opt == '--extension-suffix':
+ print (sys.pydebug and "_d" or "") + sysconfig.get_config_var('SO')
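What the new --extension-suffix option prints, computed directly in Python (a sketch; sys.pydebug is itself a downstream addition, so getattr() is used here to stay runnable on stock builds):

# Sketch: compute the extension suffix the same way the patch above does.
import sys
from distutils import sysconfig

suffix = (getattr(sys, 'pydebug', False) and '_d' or '') + \
         sysconfig.get_config_var('SO')
print(suffix)   # typically ".so" on Linux, "_d.so" for the downstream debug build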

View File

@ -0,0 +1,64 @@
--- Python-2.7rc1/Modules/socketmodule.c.socketmodule 2010-05-09 10:46:46.000000000 -0400
+++ Python-2.7rc1/Modules/socketmodule.c 2010-06-07 23:04:19.374234780 -0400
@@ -4783,6 +4783,61 @@ init_socket(void)
PyModule_AddIntConstant(m, "SO_SETFIB", SO_SETFIB);
#endif
+#ifdef SO_SNDBUFFORCE
+ PyModule_AddIntConstant(m, "SO_SNDBUFFORCE", SO_SNDBUFFORCE);
+#endif
+#ifdef SO_RCVBUFFORCE
+ PyModule_AddIntConstant(m, "SO_RCVBUFFORCE", SO_RCVBUFFORCE);
+#endif
+#ifdef SO_NO_CHECK
+ PyModule_AddIntConstant(m, "SO_NO_CHECK", SO_NO_CHECK);
+#endif
+#ifdef SO_PRIORITY
+ PyModule_AddIntConstant(m, "SO_PRIORITY", SO_PRIORITY);
+#endif
+#ifdef SO_BSDCOMPAT
+ PyModule_AddIntConstant(m, "SO_BSDCOMPAT", SO_BSDCOMPAT);
+#endif
+#ifdef SO_PASSCRED
+ PyModule_AddIntConstant(m, "SO_PASSCRED", SO_PASSCRED);
+#endif
+#ifdef SO_PEERCRED
+ PyModule_AddIntConstant(m, "SO_PEERCRED", SO_PEERCRED);
+#endif
+#ifdef SO_SECURITY_AUTHENTICATION
+ PyModule_AddIntConstant(m, "SO_SECURITY_AUTHENTICATION", SO_SECURITY_AUTHENTICATION);
+#endif
+#ifdef SO_SECURITY_ENCRYPTION_TRANSPORT
+ PyModule_AddIntConstant(m, "SO_SECURITY_ENCRYPTION_TRANSPORT", SO_SECURITY_ENCRYPTION_TRANSPORT);
+#endif
+#ifdef SO_SECURITY_ENCRYPTION_NETWORK
+ PyModule_AddIntConstant(m, "SO_SECURITY_ENCRYPTION_NETWORK", SO_SECURITY_ENCRYPTION_NETWORK);
+#endif
+#ifdef SO_BINDTODEVICE
+ PyModule_AddIntConstant(m, "SO_BINDTODEVICE", SO_BINDTODEVICE);
+#endif
+#ifdef SO_ATTACH_FILTER
+ PyModule_AddIntConstant(m, "SO_ATTACH_FILTER", SO_ATTACH_FILTER);
+#endif
+#ifdef SO_DETACH_FILTER
+ PyModule_AddIntConstant(m, "SO_DETACH_FILTER", SO_DETACH_FILTER);
+#endif
+#ifdef SO_PEERNAME
+ PyModule_AddIntConstant(m, "SO_PEERNAME", SO_PEERNAME);
+#endif
+#ifdef SO_TIMESTAMP
+ PyModule_AddIntConstant(m, "SO_TIMESTAMP", SO_TIMESTAMP);
+#endif
+#ifdef SO_PEERSEC
+ PyModule_AddIntConstant(m, "SO_PEERSEC", SO_PEERSEC);
+#endif
+#ifdef SO_PASSSEC
+ PyModule_AddIntConstant(m, "SO_PASSSEC", SO_PASSSEC);
+#endif
+#ifdef SO_TIMESTAMPNS
+ PyModule_AddIntConstant(m, "SO_TIMESTAMPNS", SO_TIMESTAMPNS);
+#endif
+
/* Maximum number of connections for "listen" */
#ifdef SOMAXCONN
PyModule_AddIntConstant(m, "SOMAXCONN", SOMAXCONN);

View File

@ -0,0 +1,19 @@
diff -up Python-2.7rc1/Modules/socketmodule.c.socketmodule2 Python-2.7rc1/Modules/socketmodule.c
--- Python-2.7rc1/Modules/socketmodule.c.socketmodule2 2010-06-07 23:06:59.133498087 -0400
+++ Python-2.7rc1/Modules/socketmodule.c 2010-06-07 23:11:51.249520087 -0400
@@ -5253,6 +5253,15 @@ init_socket(void)
#ifdef TCP_QUICKACK
PyModule_AddIntConstant(m, "TCP_QUICKACK", TCP_QUICKACK);
#endif
+#ifdef TCP_CONGESTION
+ PyModule_AddIntConstant(m, "TCP_CONGESTION", TCP_CONGESTION);
+#endif
+#ifdef TCP_MD5SIG
+ PyModule_AddIntConstant(m, "TCP_MD5SIG", TCP_MD5SIG);
+#endif
+#ifdef TCP_MD5SIG_MAXKEYLEN
+ PyModule_AddIntConstant(m, "TCP_MD5SIG_MAXKEYLEN", TCP_MD5SIG_MAXKEYLEN);
+#endif
/* IPX options */
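And one of the TCP-level constants: TCP_CONGESTION selects the kernel congestion-control algorithm for a socket (Linux-specific; a sketch, the chosen algorithm must be available in the running kernel).

# Sketch: selecting and reading back the congestion-control algorithm.
import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_CONGESTION, 'cubic')
print(s.getsockopt(socket.IPPROTO_TCP, socket.TCP_CONGESTION, 16))
s.close()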

32
SOURCES/pythondeps.sh Executable file
View File

@ -0,0 +1,32 @@
#!/bin/bash
[ $# -ge 1 ] || {
cat > /dev/null
exit 0
}
case $1 in
-P|--provides)
shift
# Match buildroot/payload paths of the form
# /PATH/OF/BUILDROOT/usr/bin/pythonMAJOR.MINOR
# generating a line of the form
# python(abi) = MAJOR.MINOR
# (Don't match against -config tools e.g. /usr/bin/python2.6-config)
grep "/usr/bin/python.\..$" \
| sed -e "s|.*/usr/bin/python\(.\..\)|python(abi) = \1|"
;;
-R|--requires)
shift
# Match buildroot paths of the form
# /PATH/OF/BUILDROOT/usr/lib/pythonMAJOR.MINOR/ and
# /PATH/OF/BUILDROOT/usr/lib64/pythonMAJOR.MINOR/
# generating (uniquely) lines of the form:
# python(abi) = MAJOR.MINOR
grep "/usr/lib[^/]*/python.\../.*" \
| sed -e "s|.*/usr/lib[^/]*/python\(.\..\)/.*|python(abi) = \1|g" \
| sort | uniq
;;
esac
exit 0
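The same path-to-capability mapping as the sed expressions above, shown in Python for clarity (a sketch; the buildroot paths are illustrative only):

# Sketch: how pythondeps.sh maps buildroot paths to python(abi) capabilities.
import re

paths = [
    '/builddir/BUILDROOT/pkg-1.0/usr/bin/python2.7',
    '/builddir/BUILDROOT/pkg-1.0/usr/lib64/python2.7/site-packages/foo.py',
]
for p in paths:
    m = re.search(r'/usr/bin/python(.\..)$', p)
    if m:
        print('provides: python(abi) = %s' % m.group(1))
    m = re.search(r'/usr/lib[^/]*/python(.\..)/', p)
    if m:
        print('requires: python(abi) = %s' % m.group(1))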

View File

@ -0,0 +1,19 @@
/*
Example usage of the Python systemtap tapset to show a nested view of all
Python function calls (and returns) across the whole system.
Run this using
stap systemtap-example.stp
to instrument all Python processes on the system, or (for example) using
stap systemtap-example.stp -c COMMAND
to instrument a specific program (implemented in Python)
*/
probe python.function.entry
{
printf("%s => %s in %s:%d\n", thread_indent(1), funcname, filename, lineno);
}
probe python.function.return
{
printf("%s <= %s in %s:%d\n", thread_indent(-1), funcname, filename, lineno);
}

3792
SPECS/python2.spec Normal file

File diff suppressed because it is too large Load Diff