Delete content. This branch was pushed by mistake.

Petr Viktorin 2017-08-24 17:39:37 +02:00
parent fdb68ed66b
commit eb1f07d918
35 changed files with 2 additions and 7007 deletions


@@ -1,188 +0,0 @@
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index 9474e9c..c0ce4c6 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -30,14 +30,14 @@ WINDOWS_SCHEME = {
INSTALL_SCHEMES = {
'unix_prefix': {
'purelib': '$base/lib/platform-python$py_version_short/site-packages',
- 'platlib': '$platbase/lib/platform-python$py_version_short/site-packages',
+ 'platlib': '$platbase/lib64/platform-python$py_version_short/site-packages',
'headers': '$base/include/platform-python$py_version_short$abiflags/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
},
'unix_home': {
'purelib': '$base/lib/python',
- 'platlib': '$base/lib/python',
+ 'platlib': '$base/lib64/python',
'headers': '$base/include/python/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 026cca7..6d3e077 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -132,8 +132,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
prefix = plat_specific and EXEC_PREFIX or PREFIX
if os.name == "posix":
+ if plat_specific or standard_lib:
+ lib = "lib64"
+ else:
+ lib = "lib"
libpython = os.path.join(prefix,
- "lib", "platform-python" + get_python_version())
+ lib, "platform-python" + get_python_version())
if standard_lib:
return libpython
else:
diff --git a/Lib/site.py b/Lib/site.py
index a84e3bb..ba0d3ea 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -303,11 +303,15 @@ def getsitepackages(prefixes=None):
seen.add(prefix)
if os.sep == '/':
+ sitepackages.append(os.path.join(prefix, "lib64",
+ "platform-python" + sys.version[:3],
+ "site-packages"))
sitepackages.append(os.path.join(prefix, "lib",
"platform-python%d.%d" % sys.version_info[:2],
"site-packages"))
else:
sitepackages.append(prefix)
+ sitepackages.append(os.path.join(prefix, "lib64", "site-packages"))
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
if sys.platform == "darwin":
# for framework builds *only* we add the standard Apple
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index b9bbfe5..2a5f29c 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -20,10 +20,10 @@ __all__ = [
_INSTALL_SCHEMES = {
'posix_prefix': {
- 'stdlib': '{installed_base}/lib/platform-python{py_version_short}',
- 'platstdlib': '{platbase}/lib/platform-python{py_version_short}',
+ 'stdlib': '{installed_base}/lib64/platform-python{py_version_short}',
+ 'platstdlib': '{platbase}/lib64/platform-python{py_version_short}',
'purelib': '{base}/lib/platform-python{py_version_short}/site-packages',
- 'platlib': '{platbase}/lib/platform-python{py_version_short}/site-packages',
+ 'platlib': '{platbase}/lib64/platform-python{py_version_short}/site-packages',
'include':
'{installed_base}/include/platform-python{py_version_short}{abiflags}',
'platinclude':
@@ -61,10 +61,10 @@ _INSTALL_SCHEMES = {
'data': '{userbase}',
},
'posix_user': {
- 'stdlib': '{installed_base}/lib/platform-python{py_version_short}',
- 'platstdlib': '{platbase}/lib/platform-python{py_version_short}',
+ 'stdlib': '{installed_base}/lib64/platform-python{py_version_short}',
+ 'platstdlib': '{platbase}/lib64/platform-python{py_version_short}',
'purelib': '{base}/lib/platform-python{py_version_short}/site-packages',
- 'platlib': '{platbase}/lib/platform-python{py_version_short}/site-packages',
+ 'platlib': '{platbase}/lib64/platform-python{py_version_short}/site-packages',
'include': '{installed_base}/include/platform-python{py_version_short}{abiflags}',
'scripts': '{base}/bin',
'data': '{base}',
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
index f698927..bc977b5 100644
--- a/Lib/test/test_site.py
+++ b/Lib/test/test_site.py
@@ -248,8 +248,8 @@ class HelperFunctionsTests(unittest.TestCase):
self.assertEqual(dirs[1], wanted)
elif os.sep == '/':
# OS X non-framwework builds, Linux, FreeBSD, etc
- self.assertEqual(len(dirs), 1)
- wanted = os.path.join('xoxo', 'lib',
+ self.assertEqual(len(dirs), 2)
+ wanted = os.path.join('xoxo', 'lib64',
'python%d.%d' % sys.version_info[:2],
'site-packages')
self.assertEqual(dirs[0], wanted)
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 8fa7934..a693917 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -126,7 +126,7 @@ LIBDIR= @libdir@
MANDIR= @mandir@
INCLUDEDIR= @includedir@
CONFINCLUDEDIR= $(exec_prefix)/include
-SCRIPTDIR= $(prefix)/lib
+SCRIPTDIR= $(prefix)/lib64
ABIFLAGS= @ABIFLAGS@
# Detailed destination directories
diff --git a/Modules/getpath.c b/Modules/getpath.c
index 65b47a3..eaa756c 100644
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -494,7 +494,7 @@ calculate_path(void)
_pythonpath = Py_DecodeLocale(PYTHONPATH, NULL);
_prefix = Py_DecodeLocale(PREFIX, NULL);
_exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL);
- lib_python = Py_DecodeLocale("lib/platform-python" VERSION, NULL);
+ lib_python = Py_DecodeLocale("lib64/platform-python" VERSION, NULL);
if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) {
Py_FatalError(
@@ -683,7 +683,7 @@ calculate_path(void)
}
else
wcsncpy(zip_path, _prefix, MAXPATHLEN);
- joinpath(zip_path, L"lib/python00.zip");
+ joinpath(zip_path, L"lib64/python00.zip");
bufsz = wcslen(zip_path); /* Replace "00" with version */
zip_path[bufsz - 6] = VERSION[0];
zip_path[bufsz - 5] = VERSION[2];
@@ -695,7 +695,7 @@ calculate_path(void)
fprintf(stderr,
"Could not find platform dependent libraries <exec_prefix>\n");
wcsncpy(exec_prefix, _exec_prefix, MAXPATHLEN);
- joinpath(exec_prefix, L"lib/lib-dynload");
+ joinpath(exec_prefix, L"lib64/lib-dynload");
}
/* If we found EXEC_PREFIX do *not* reduce it! (Yet.) */
diff --git a/setup.py b/setup.py
index 0f2dfc4..da37896 100644
--- a/setup.py
+++ b/setup.py
@@ -492,7 +492,7 @@ class PyBuildExt(build_ext):
# directories (i.e. '.' and 'Include') must be first. See issue
# 10520.
if not cross_compiling:
- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
+ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64')
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
# only change this for cross builds for 3.3, issues on Mageia
if cross_compiling:
@@ -780,11 +780,11 @@ class PyBuildExt(build_ext):
elif curses_library:
readline_libs.append(curses_library)
elif self.compiler.find_library_file(lib_dirs +
- ['/usr/lib/termcap'],
+ ['/usr/lib64/termcap'],
'termcap'):
readline_libs.append('termcap')
exts.append( Extension('readline', ['readline.c'],
- library_dirs=['/usr/lib/termcap'],
+ library_dirs=['/usr/lib64/termcap'],
extra_link_args=readline_extra_link_args,
libraries=readline_libs) )
else:
@@ -821,8 +821,8 @@ class PyBuildExt(build_ext):
if krb5_h:
ssl_incs += krb5_h
ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
- ['/usr/local/ssl/lib',
- '/usr/contrib/ssl/lib/'
+ ['/usr/local/ssl/lib64',
+ '/usr/contrib/ssl/lib64/'
] )
if (ssl_incs is not None and
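The combined effect of the lib64 scheme changes above can be checked from a built interpreter with plain stdlib calls. The sketch below is illustrative only (nothing in it is downstream-specific); it prints where pure-Python and platform-specific packages would be installed, which is where the lib vs. lib64 split shows up on a patched build.

# Minimal sketch: print the install locations the patched schemes resolve to.
import sysconfig
from distutils import sysconfig as du_sysconfig

# sysconfig drives the 'posix_prefix' scheme edited above.
print("purelib:", sysconfig.get_path("purelib"))   # .../lib/... on a patched build
print("platlib:", sysconfig.get_path("platlib"))   # .../lib64/... on a patched build

# distutils keeps its own copy of the same logic in get_python_lib().
print("distutils purelib:", du_sysconfig.get_python_lib(plat_specific=0))
print("distutils platlib:", du_sysconfig.get_python_lib(plat_specific=1))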


@@ -1,13 +0,0 @@
--- Python-2.7.2/Lib/distutils/tests/test_install.py.lib64 2011-09-08 17:51:57.851405376 -0400
+++ Python-2.7.2/Lib/distutils/tests/test_install.py 2011-09-08 18:40:46.754205096 -0400
@@ -41,8 +41,9 @@ class InstallTestCase(support.TempdirMan
self.assertEqual(got, expected)
libdir = os.path.join(destination, "lib", "python")
+ platlibdir = os.path.join(destination, "lib64", "platform-python")
check_path(cmd.install_lib, libdir)
- check_path(cmd.install_platlib, libdir)
+ check_path(cmd.install_platlib, platlibdir)
check_path(cmd.install_purelib, libdir)
check_path(cmd.install_headers,
os.path.join(destination, "include", "python", "foopkg"))


@@ -1,60 +0,0 @@
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 70e5927..04c8e3d 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -556,7 +556,7 @@ clinic: $(BUILDPYTHON) $(srcdir)/Modules/_blake2/blake2s_impl.c
$(RUNSHARED) $(PYTHON_FOR_BUILD) ./Tools/clinic/clinic.py --make
# Build the interpreter
-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
+$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY)
$(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
platform: $(BUILDPYTHON) pybuilddir.txt
@@ -601,18 +601,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
-# Build static library
-# avoid long command lines, same as LIBRARY_OBJS
-$(LIBRARY): $(LIBRARY_OBJS)
- -rm -f $@
- $(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o
- $(AR) $(ARFLAGS) $@ $(PARSER_OBJS)
- $(AR) $(ARFLAGS) $@ $(OBJECT_OBJS)
- $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS) Python/frozen.o
- $(AR) $(ARFLAGS) $@ $(MODULE_OBJS)
- $(AR) $(ARFLAGS) $@ $(MODOBJS)
- $(RANLIB) $@
-
libplatform-python$(LDVERSION).so: $(LIBRARY_OBJS)
if test $(INSTSONAME) != $(LDLIBRARY); then \
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
@@ -702,7 +690,7 @@ Modules/Setup: $(srcdir)/Modules/Setup.dist
echo "-----------------------------------------------"; \
fi
-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
+Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY)
$(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
############################################################################
@@ -1382,18 +1370,6 @@ libainstall: all python-config
else true; \
fi; \
done
- @if test -d $(LIBRARY); then :; else \
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
- if test "$(SHLIB_SUFFIX)" = .dll; then \
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
- else \
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- $(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- fi; \
- else \
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \
- fi; \
- fi
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
$(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in


@@ -1,46 +0,0 @@
diff -up Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/case.py
--- Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400
+++ Python-3.2.2/Lib/unittest/case.py 2011-09-09 06:35:16.365568382 -0400
@@ -3,6 +3,7 @@
import sys
import functools
import difflib
+import os
import logging
import pprint
import re
@@ -101,5 +102,21 @@ def expectedFailure(func):
raise self.test_case.failureException(msg)
+# Non-standard/downstream-only hooks for handling issues with specific test
+# cases:
+
+def _skipInRpmBuild(reason):
+ """
+ Non-standard/downstream-only decorator for marking a specific unit test
+ to be skipped when run within the %check of an rpmbuild.
+
+ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within
+ the environment, and has no effect otherwise.
+ """
+ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
+ return skip(reason)
+ else:
+ return _id
+
class _AssertRaisesBaseContext(_BaseTestCaseContext):
def __init__(self, expected, test_case, expected_regex=None):
diff -up Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/__init__.py
--- Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400
+++ Python-3.2.2/Lib/unittest/__init__.py 2011-09-09 06:35:16.366568382 -0400
@@ -57,7 +57,8 @@ __unittest = True
from .result import TestResult
from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf,
- skipUnless, expectedFailure)
+ skipUnless, expectedFailure,
+ _skipInRpmBuild)
from .suite import BaseTestSuite, TestSuite
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
findTestCases)
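As an illustration of how the hook is meant to be consumed, a hypothetical downstream test module would look roughly like this. The class and test names below are invented; only unittest._skipInRpmBuild comes from the patch above, and it exists only on interpreters carrying that patch.

# Hypothetical example: skip a test only inside the %check phase of an
# rpmbuild, i.e. when WITHIN_PYTHON_RPM_BUILD is set in the environment.
import unittest

class ChrootSensitiveTests(unittest.TestCase):

    @unittest._skipInRpmBuild("no network access inside the build chroot")
    def test_needs_network(self):
        self.assertTrue(True)  # placeholder body

if __name__ == "__main__":
    unittest.main()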


@@ -1,12 +0,0 @@
diff -up Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py.skip-distutils-tests-that-fail-in-rpmbuild Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py
--- Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py.skip-distutils-tests-that-fail-in-rpmbuild 2011-09-03 12:16:40.000000000 -0400
+++ Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py 2011-09-10 05:04:56.328852558 -0400
@@ -23,6 +23,7 @@ setup(name='foo', version='0.1', py_modu
"""
+@unittest._skipInRpmBuild("don't try to nest one rpm build inside another rpm build")
class BuildRpmTestCase(support.TempdirManager,
support.EnvironGuard,
support.LoggingSilencer,
diff -up Python-3.2.2/Lib/distutils/tests/test_build_ext.py.skip-distutils-tests-that-fail-in-rpmbuild Python-3.2.2/Lib/distutils/tests/test_build_ext.py


@@ -1,640 +0,0 @@
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index 316cece..b7ad879 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -23,6 +23,16 @@ the zlib module.
Choose your hash function wisely. Some have known collision weaknesses.
sha384 and sha512 will be slow on 32 bit platforms.
+If the underlying implementation supports "FIPS mode", and this is enabled, it
+may restrict the available hashes to only those that are compliant with FIPS
+regulations. For example, it may deny the use of MD5, on the grounds that this
+is not secure for uses such as authentication, system integrity checking, or
+digital signatures. If you need to use such a hash for non-security purposes
+(such as indexing into a data structure for speed), you can override the keyword
+argument "usedforsecurity" from True to False to signify that your code is not
+relying on the hash for security purposes, and this will allow the hash to be
+usable even in FIPS mode.
+
Hash objects have these methods:
- update(arg): Update the hash object with the bytes in arg. Repeated calls
are equivalent to a single call with the concatenation of all
@@ -62,6 +72,18 @@ algorithms_available = set(__always_supported)
__all__ = __always_supported + ('new', 'algorithms_guaranteed',
'algorithms_available', 'pbkdf2_hmac')
+import functools
+def __ignore_usedforsecurity(func):
+ """Used for sha3_* functions. Until OpenSSL implements them, we want
+ to use them from Python _sha3 module, but we want them to accept
+ usedforsecurity argument too."""
+ # TODO: remove this function when OpenSSL implements sha3
+ @functools.wraps(func)
+ def inner(*args, **kwargs):
+ if 'usedforsecurity' in kwargs:
+ kwargs.pop('usedforsecurity')
+ return func(*args, **kwargs)
+ return inner
__builtin_constructor_cache = {}
@@ -100,31 +122,39 @@ def __get_openssl_constructor(name):
f = getattr(_hashlib, 'openssl_' + name)
# Allow the C module to raise ValueError. The function will be
# defined but the hash not actually available thanks to OpenSSL.
- f()
+ # We pass "usedforsecurity=False" to disable FIPS-based restrictions:
+ # at this stage we're merely seeing if the function is callable,
+ # rather than using it for actual work.
+ f(usedforsecurity=False)
# Use the C function directly (very fast)
return f
except (AttributeError, ValueError):
+ # TODO: We want to just raise here when OpenSSL implements sha3
+ # because we want to make sure that Fedora uses everything from OpenSSL
return __get_builtin_constructor(name)
-def __py_new(name, data=b''):
- """new(name, data=b'') - Return a new hashing object using the named algorithm;
- optionally initialized with data (which must be bytes).
+def __py_new(name, data=b'', usedforsecurity=True):
+ """new(name, data=b'', usedforsecurity=True) - Return a new hashing object using
+ the named algorithm; optionally initialized with data (which must be bytes).
+ The 'usedforsecurity' keyword argument does nothing, and is for compatibility
+ with the OpenSSL implementation
"""
return __get_builtin_constructor(name)(data)
-def __hash_new(name, data=b''):
- """new(name, data=b'') - Return a new hashing object using the named algorithm;
- optionally initialized with data (which must be bytes).
+def __hash_new(name, data=b'', usedforsecurity=True):
+ """new(name, data=b'', usedforsecurity=True) - Return a new hashing object using
+ the named algorithm; optionally initialized with data (which must be bytes).
+
+ Override 'usedforsecurity' to False when using for non-security purposes in
+ a FIPS environment
"""
try:
- return _hashlib.new(name, data)
+ return _hashlib.new(name, data, usedforsecurity)
except ValueError:
- # If the _hashlib module (OpenSSL) doesn't support the named
- # hash, try using our builtin implementations.
- # This allows for SHA224/256 and SHA384/512 support even though
- # the OpenSSL library prior to 0.9.8 doesn't provide them.
+ # TODO: We want to just raise here when OpenSSL implements sha3
+ # because we want to make sure that Fedora uses everything from OpenSSL
return __get_builtin_constructor(name)(data)
@@ -207,7 +237,10 @@ for __func_name in __always_supported:
# try them all, some may not work due to the OpenSSL
# version not supporting that algorithm.
try:
- globals()[__func_name] = __get_hash(__func_name)
+ func = __get_hash(__func_name)
+ if 'sha3_' in __func_name:
+ func = __ignore_usedforsecurity(func)
+ globals()[__func_name] = func
except ValueError:
import logging
logging.exception('code for hash %s was not found.', __func_name)
@@ -215,3 +248,4 @@ for __func_name in __always_supported:
# Cleanup locals()
del __always_supported, __func_name, __get_hash
del __py_new, __hash_new, __get_openssl_constructor
+del __ignore_usedforsecurity
\ No newline at end of file
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index c9b113e..60e2392 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -24,7 +24,22 @@ from test.support import _4G, bigmemtest, import_fresh_module
COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib'])
-py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib'])
+# skipped on Fedora, since we always use OpenSSL implementation
+# py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib'])
+
+def openssl_enforces_fips():
+ # Use the "openssl" command (if present) to try to determine if the local
+ # OpenSSL is configured to enforce FIPS
+ from subprocess import Popen, PIPE
+ try:
+ p = Popen(['openssl', 'md5'],
+ stdin=PIPE, stdout=PIPE, stderr=PIPE)
+ except OSError:
+ # "openssl" command not found
+ return False
+ stdout, stderr = p.communicate(input=b'abc')
+ return b'unknown cipher' in stderr
+OPENSSL_ENFORCES_FIPS = openssl_enforces_fips()
def hexstr(s):
assert isinstance(s, bytes), repr(s)
@@ -34,6 +49,16 @@ def hexstr(s):
r += h[(i >> 4) & 0xF] + h[i & 0xF]
return r
+# hashlib and _hashlib-based functions support a "usedforsecurity" keyword
+# argument, and FIPS mode requires that it be used overridden with a False
+# value for these selftests to work. Other cryptographic code within Python
+# doesn't support this keyword.
+# Modify a function to one in which "usedforsecurity=False" is added to the
+# keyword arguments:
+def suppress_fips(f):
+ def g(*args, **kwargs):
+ return f(*args, usedforsecurity=False, **kwargs)
+ return g
class HashLibTestCase(unittest.TestCase):
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
@@ -63,11 +88,11 @@ class HashLibTestCase(unittest.TestCase):
# For each algorithm, test the direct constructor and the use
# of hashlib.new given the algorithm name.
for algorithm, constructors in self.constructors_to_test.items():
- constructors.add(getattr(hashlib, algorithm))
+ constructors.add(suppress_fips(getattr(hashlib, algorithm)))
def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
if data is None:
- return hashlib.new(_alg)
- return hashlib.new(_alg, data)
+ return suppress_fips(hashlib.new)(_alg)
+ return suppress_fips(hashlib.new)(_alg, data)
constructors.add(_test_algorithm_via_hashlib_new)
_hashlib = self._conditional_import_module('_hashlib')
@@ -79,27 +104,12 @@ class HashLibTestCase(unittest.TestCase):
for algorithm, constructors in self.constructors_to_test.items():
constructor = getattr(_hashlib, 'openssl_'+algorithm, None)
if constructor:
- constructors.add(constructor)
+ constructors.add(suppress_fips(constructor))
def add_builtin_constructor(name):
constructor = getattr(hashlib, "__get_builtin_constructor")(name)
self.constructors_to_test[name].add(constructor)
- _md5 = self._conditional_import_module('_md5')
- if _md5:
- add_builtin_constructor('md5')
- _sha1 = self._conditional_import_module('_sha1')
- if _sha1:
- add_builtin_constructor('sha1')
- _sha256 = self._conditional_import_module('_sha256')
- if _sha256:
- add_builtin_constructor('sha224')
- add_builtin_constructor('sha256')
- _sha512 = self._conditional_import_module('_sha512')
- if _sha512:
- add_builtin_constructor('sha384')
- add_builtin_constructor('sha512')
-
super(HashLibTestCase, self).__init__(*args, **kwargs)
@property
@@ -148,9 +158,6 @@ class HashLibTestCase(unittest.TestCase):
else:
del sys.modules['_md5']
self.assertRaises(TypeError, get_builtin_constructor, 3)
- constructor = get_builtin_constructor('md5')
- self.assertIs(constructor, _md5.md5)
- self.assertEqual(sorted(builtin_constructor_cache), ['MD5', 'md5'])
def test_hexdigest(self):
for cons in self.hash_constructors:
@@ -433,6 +440,64 @@ class HashLibTestCase(unittest.TestCase):
self.assertEqual(expected_hash, hasher.hexdigest())
+ def test_issue9146(self):
+ # Ensure that various ways to use "MD5" from "hashlib" don't segfault:
+ m = hashlib.md5(usedforsecurity=False)
+ m.update(b'abc\n')
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = hashlib.new('md5', usedforsecurity=False)
+ m.update(b'abc\n')
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = hashlib.md5(b'abc\n', usedforsecurity=False)
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = hashlib.new('md5', b'abc\n', usedforsecurity=False)
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS,
+ 'FIPS enforcement required for this test.')
+ def test_hashlib_fips_mode(self):
+ # Ensure that we raise a ValueError on vanilla attempts to use MD5
+ # in hashlib in a FIPS-enforced setting:
+ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'):
+ m = hashlib.md5()
+
+ if not self._conditional_import_module('_md5'):
+ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'):
+ m = hashlib.new('md5')
+
+ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS,
+ 'FIPS enforcement required for this test.')
+ def test_hashopenssl_fips_mode(self):
+ # Verify the _hashlib module's handling of md5:
+ _hashlib = self._conditional_import_module('_hashlib')
+ if _hashlib:
+ assert hasattr(_hashlib, 'openssl_md5')
+
+ # Ensure that _hashlib raises a ValueError on vanilla attempts to
+ # use MD5 in a FIPS-enforced setting:
+ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'):
+ m = _hashlib.openssl_md5()
+ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'):
+ m = _hashlib.new('md5')
+
+ # Ensure that in such a setting we can whitelist a callsite with
+ # usedforsecurity=False and have it succeed:
+ m = _hashlib.openssl_md5(usedforsecurity=False)
+ m.update(b'abc\n')
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = _hashlib.new('md5', usedforsecurity=False)
+ m.update(b'abc\n')
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = _hashlib.openssl_md5(b'abc\n', usedforsecurity=False)
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
+
+ m = _hashlib.new('md5', b'abc\n', usedforsecurity=False)
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
class KDFTests(unittest.TestCase):
@@ -516,7 +581,7 @@ class KDFTests(unittest.TestCase):
out = pbkdf2(hash_name='sha1', password=b'password', salt=b'salt',
iterations=1, dklen=None)
self.assertEqual(out, self.pbkdf2_results['sha1'][0][0])
-
+ @unittest.skip('skipped on Fedora, as we always use OpenSSL pbkdf2_hmac')
def test_pbkdf2_hmac_py(self):
self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac)
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
index 44765ac..b8cf490 100644
--- a/Modules/_hashopenssl.c
+++ b/Modules/_hashopenssl.c
@@ -20,6 +20,8 @@
/* EVP is the preferred interface to hashing in OpenSSL */
+#include <openssl/ssl.h>
+#include <openssl/err.h>
#include <openssl/evp.h>
#include <openssl/hmac.h>
/* We use the object interface to discover what hashes OpenSSL supports. */
@@ -45,11 +47,19 @@ typedef struct {
static PyTypeObject EVPtype;
+/* Struct to hold all the cached information we need on a specific algorithm.
+ We have one of these per algorithm */
+typedef struct {
+ PyObject *name_obj;
+ EVP_MD_CTX ctxs[2];
+ /* ctx_ptrs will point to ctxs unless an error occurred, when it will
+ be NULL: */
+ EVP_MD_CTX *ctx_ptrs[2];
+ PyObject *error_msgs[2];
+} EVPCachedInfo;
-#define DEFINE_CONSTS_FOR_NEW(Name) \
- static PyObject *CONST_ ## Name ## _name_obj = NULL; \
- static EVP_MD_CTX CONST_new_ ## Name ## _ctx; \
- static EVP_MD_CTX *CONST_new_ ## Name ## _ctx_p = NULL;
+#define DEFINE_CONSTS_FOR_NEW(Name) \
+ static EVPCachedInfo cached_info_ ##Name;
DEFINE_CONSTS_FOR_NEW(md5)
DEFINE_CONSTS_FOR_NEW(sha1)
@@ -92,6 +102,48 @@ EVP_hash(EVPobject *self, const void *vp, Py_ssize_t len)
}
}
+static void
+mc_ctx_init(EVP_MD_CTX *ctx, int usedforsecurity)
+{
+ EVP_MD_CTX_init(ctx);
+
+ /*
+ If the user has declared that this digest is being used in a
+ non-security role (e.g. indexing into a data structure), set
+ the exception flag for openssl to allow it
+ */
+ if (!usedforsecurity) {
+#ifdef EVP_MD_CTX_FLAG_NON_FIPS_ALLOW
+ EVP_MD_CTX_set_flags(ctx,
+ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW);
+#endif
+ }
+}
+
+/* Get an error msg for the last error as a PyObject */
+static PyObject *
+error_msg_for_last_error(void)
+{
+ char *errstr;
+
+ errstr = ERR_error_string(ERR_peek_last_error(), NULL);
+ ERR_clear_error();
+
+ return PyUnicode_FromString(errstr); /* Can be NULL */
+}
+
+static void
+set_evp_exception(void)
+{
+ char *errstr;
+
+ errstr = ERR_error_string(ERR_peek_last_error(), NULL);
+ ERR_clear_error();
+
+ PyErr_SetString(PyExc_ValueError, errstr);
+}
+
+
/* Internal methods for a hash object */
static void
@@ -259,15 +311,16 @@ EVP_repr(EVPobject *self)
static int
EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"name", "string", NULL};
+ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL};
PyObject *name_obj = NULL;
PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
Py_buffer view;
char *nameStr;
const EVP_MD *digest;
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:HASH", kwlist,
- &name_obj, &data_obj)) {
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|Oi:HASH", kwlist,
+ &name_obj, &data_obj, &usedforsecurity)) {
return -1;
}
@@ -288,7 +341,12 @@ EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds)
PyBuffer_Release(&view);
return -1;
}
- EVP_DigestInit(&self->ctx, digest);
+ mc_ctx_init(&self->ctx, usedforsecurity);
+ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) {
+ set_evp_exception();
+ PyBuffer_Release(&view);
+ return -1;
+ }
self->name = name_obj;
Py_INCREF(self->name);
@@ -372,7 +430,8 @@ static PyTypeObject EVPtype = {
static PyObject *
EVPnew(PyObject *name_obj,
const EVP_MD *digest, const EVP_MD_CTX *initial_ctx,
- const unsigned char *cp, Py_ssize_t len)
+ const unsigned char *cp, Py_ssize_t len,
+ int usedforsecurity)
{
EVPobject *self;
@@ -387,7 +446,12 @@ EVPnew(PyObject *name_obj,
if (initial_ctx) {
EVP_MD_CTX_copy(&self->ctx, initial_ctx);
} else {
- EVP_DigestInit(&self->ctx, digest);
+ mc_ctx_init(&self->ctx, usedforsecurity);
+ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) {
+ set_evp_exception();
+ Py_DECREF(self);
+ return NULL;
+ }
}
if (cp && len) {
@@ -411,21 +475,29 @@ PyDoc_STRVAR(EVP_new__doc__,
An optional string argument may be provided and will be\n\
automatically hashed.\n\
\n\
-The MD5 and SHA1 algorithms are always supported.\n");
+The MD5 and SHA1 algorithms are always supported.\n\
+\n\
+An optional \"usedforsecurity=True\" keyword argument is provided for use in\n\
+environments that enforce FIPS-based restrictions. Some implementations of\n\
+OpenSSL can be configured to prevent the usage of non-secure algorithms (such\n\
+as MD5). If you have a non-security use for these algorithms (e.g. a hash\n\
+table), you can override this argument by marking the callsite as\n\
+\"usedforsecurity=False\".");
static PyObject *
EVP_new(PyObject *self, PyObject *args, PyObject *kwdict)
{
- static char *kwlist[] = {"name", "string", NULL};
+ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL};
PyObject *name_obj = NULL;
PyObject *data_obj = NULL;
+ int usedforsecurity = 1;
Py_buffer view = { 0 };
PyObject *ret_obj;
char *name;
const EVP_MD *digest;
- if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|O:new", kwlist,
- &name_obj, &data_obj)) {
+ if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|Oi:new", kwlist,
+ &name_obj, &data_obj, &usedforsecurity)) {
return NULL;
}
@@ -439,7 +511,8 @@ EVP_new(PyObject *self, PyObject *args, PyObject *kwdict)
digest = EVP_get_digestbyname(name);
- ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len);
+ ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len,
+ usedforsecurity);
if (data_obj)
PyBuffer_Release(&view);
@@ -722,57 +795,114 @@ generate_hash_name_list(void)
/*
- * This macro generates constructor function definitions for specific
- * hash algorithms. These constructors are much faster than calling
- * the generic one passing it a python string and are noticably
- * faster than calling a python new() wrapper. Thats important for
+ * This macro and function generates a family of constructor function
+ * definitions for specific hash algorithms. These constructors are much
+ * faster than calling the generic one passing it a python string and are
+ * noticably faster than calling a python new() wrapper. That's important for
* code that wants to make hashes of a bunch of small strings.
*/
#define GEN_CONSTRUCTOR(NAME) \
static PyObject * \
- EVP_new_ ## NAME (PyObject *self, PyObject *args) \
+ EVP_new_ ## NAME (PyObject *self, PyObject *args, PyObject *kwdict) \
{ \
- PyObject *data_obj = NULL; \
- Py_buffer view = { 0 }; \
- PyObject *ret_obj; \
- \
- if (!PyArg_ParseTuple(args, "|O:" #NAME , &data_obj)) { \
- return NULL; \
- } \
- \
- if (data_obj) \
- GET_BUFFER_VIEW_OR_ERROUT(data_obj, &view); \
- \
- ret_obj = EVPnew( \
- CONST_ ## NAME ## _name_obj, \
- NULL, \
- CONST_new_ ## NAME ## _ctx_p, \
- (unsigned char*)view.buf, \
- view.len); \
- \
- if (data_obj) \
- PyBuffer_Release(&view); \
- return ret_obj; \
+ return implement_specific_EVP_new(self, args, kwdict, \
+ "|Oi:" #NAME, \
+ &cached_info_ ## NAME ); \
}
+static PyObject *
+implement_specific_EVP_new(PyObject *self, PyObject *args, PyObject *kwdict,
+ const char *format,
+ EVPCachedInfo *cached_info)
+{
+ static char *kwlist[] = {"string", "usedforsecurity", NULL};
+ PyObject *data_obj = NULL;
+ Py_buffer view = { 0 };
+ int usedforsecurity = 1;
+ int idx;
+ PyObject *ret_obj = NULL;
+
+ assert(cached_info);
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwdict, format, kwlist,
+ &data_obj, &usedforsecurity)) {
+ return NULL;
+ }
+
+ if (data_obj)
+ GET_BUFFER_VIEW_OR_ERROUT(data_obj, &view);
+
+ idx = usedforsecurity ? 1 : 0;
+
+ /*
+ * If an error occurred during creation of the global content, the ctx_ptr
+ * will be NULL, and the error_msg will hopefully be non-NULL:
+ */
+ if (cached_info->ctx_ptrs[idx]) {
+ /* We successfully initialized this context; copy it: */
+ ret_obj = EVPnew(cached_info->name_obj,
+ NULL,
+ cached_info->ctx_ptrs[idx],
+ (unsigned char*)view.buf, view.len,
+ usedforsecurity);
+ } else {
+ /* Some kind of error happened initializing the global context for
+ this (digest, usedforsecurity) pair.
+ Raise an exception with the saved error message: */
+ if (cached_info->error_msgs[idx]) {
+ PyErr_SetObject(PyExc_ValueError, cached_info->error_msgs[idx]);
+ } else {
+ PyErr_SetString(PyExc_ValueError, "Error initializing hash");
+ }
+ }
+
+ if (data_obj)
+ PyBuffer_Release(&view);
+
+ return ret_obj;
+}
+
/* a PyMethodDef structure for the constructor */
#define CONSTRUCTOR_METH_DEF(NAME) \
- {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \
+ {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, \
+ METH_VARARGS|METH_KEYWORDS, \
PyDoc_STR("Returns a " #NAME \
" hash object; optionally initialized with a string") \
}
-/* used in the init function to setup a constructor: initialize OpenSSL
- constructor constants if they haven't been initialized already. */
-#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \
- if (CONST_ ## NAME ## _name_obj == NULL) { \
- CONST_ ## NAME ## _name_obj = PyUnicode_FromString(#NAME); \
- if (EVP_get_digestbyname(#NAME)) { \
- CONST_new_ ## NAME ## _ctx_p = &CONST_new_ ## NAME ## _ctx; \
- EVP_DigestInit(CONST_new_ ## NAME ## _ctx_p, EVP_get_digestbyname(#NAME)); \
- } \
- } \
+/*
+ Macro/function pair to set up the constructors.
+
+ Try to initialize a context for each hash twice, once with
+ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW and once without.
+
+ Any that have errors during initialization will end up with a NULL ctx_ptrs
+ entry, and err_msgs will be set (unless we're very low on memory)
+*/
+#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \
+ init_constructor_constant(&cached_info_ ## NAME, #NAME); \
} while (0);
+static void
+init_constructor_constant(EVPCachedInfo *cached_info, const char *name)
+{
+ assert(cached_info);
+ cached_info->name_obj = PyUnicode_FromString(name);
+ if (EVP_get_digestbyname(name)) {
+ int i;
+ for (i=0; i<2; i++) {
+ mc_ctx_init(&cached_info->ctxs[i], i);
+ if (EVP_DigestInit_ex(&cached_info->ctxs[i],
+ EVP_get_digestbyname(name), NULL)) {
+ /* Success: */
+ cached_info->ctx_ptrs[i] = &cached_info->ctxs[i];
+ } else {
+ /* Failure: */
+ cached_info->ctx_ptrs[i] = NULL;
+ cached_info->error_msgs[i] = error_msg_for_last_error();
+ }
+ }
+ }
+}
GEN_CONSTRUCTOR(md5)
GEN_CONSTRUCTOR(sha1)
@@ -819,13 +949,10 @@ PyInit__hashlib(void)
{
PyObject *m, *openssl_md_meth_names;
- OpenSSL_add_all_digests();
- ERR_load_crypto_strings();
+ SSL_load_error_strings();
+ SSL_library_init();
- /* TODO build EVP_functions openssl_* entries dynamically based
- * on what hashes are supported rather than listing many
- * but having some be unsupported. Only init appropriate
- * constants. */
+ OpenSSL_add_all_digests();
Py_TYPE(&EVPtype) = &PyType_Type;
if (PyType_Ready(&EVPtype) < 0)
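From the caller's side, the keyword added by this patch is used as below. Note that usedforsecurity is a downstream-only extension here (upstream Python of this vintage does not accept it), so this is only a sketch of how a consumer might guard for both cases.

import hashlib

# Non-security use of MD5 (e.g. keying a cache) on a FIPS-enforcing build:
# the patched interpreter accepts usedforsecurity=False and allows the hash.
try:
    h = hashlib.md5(b"cache-key-material", usedforsecurity=False)
except TypeError:
    # Unpatched interpreter: the keyword does not exist, fall back to the
    # plain constructor (which may raise ValueError under FIPS enforcement).
    h = hashlib.md5(b"cache-key-material")

print(h.hexdigest())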

View File

@@ -1,15 +0,0 @@
diff -up Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 Python-3.2.3/Lib/ctypes/__init__.py
--- Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 2012-04-20 15:12:49.017867692 -0400
+++ Python-3.2.3/Lib/ctypes/__init__.py 2012-04-20 15:15:09.501111408 -0400
@@ -275,11 +275,6 @@ def _reset_cache():
# _SimpleCData.c_char_p_from_param
POINTER(c_char).from_param = c_char_p.from_param
_pointer_type_cache[None] = c_void_p
- # XXX for whatever reasons, creating the first instance of a callback
- # function is needed for the unittests on Win64 to succeed. This MAY
- # be a compiler bug, since the problem occurs only when _ctypes is
- # compiled with the MS SDK compiler. Or an uninitialized variable?
- CFUNCTYPE(c_int)(lambda: None)
def create_unicode_buffer(init, size=None):
"""create_unicode_buffer(aString) -> character array


@@ -1,68 +0,0 @@
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index e9fdb07..ea60e6e 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -1723,30 +1723,36 @@ class PosixUidGidTests(unittest.TestCase):
def test_setuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.setuid, 0)
+ self.assertRaises(TypeError, os.setuid, 'not an int')
self.assertRaises(OverflowError, os.setuid, 1<<32)
@unittest.skipUnless(hasattr(os, 'setgid'), 'test needs os.setgid()')
def test_setgid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setgid, 0)
+ self.assertRaises(TypeError, os.setgid, 'not an int')
self.assertRaises(OverflowError, os.setgid, 1<<32)
@unittest.skipUnless(hasattr(os, 'seteuid'), 'test needs os.seteuid()')
def test_seteuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.seteuid, 0)
+ self.assertRaises(TypeError, os.seteuid, 'not an int')
self.assertRaises(OverflowError, os.seteuid, 1<<32)
@unittest.skipUnless(hasattr(os, 'setegid'), 'test needs os.setegid()')
def test_setegid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setegid, 0)
+ self.assertRaises(TypeError, os.setegid, 'not an int')
self.assertRaises(OverflowError, os.setegid, 1<<32)
@unittest.skipUnless(hasattr(os, 'setreuid'), 'test needs os.setreuid()')
def test_setreuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.setreuid, 0, 0)
+ self.assertRaises(TypeError, os.setreuid, 'not an int', 0)
+ self.assertRaises(TypeError, os.setreuid, 0, 'not an int')
self.assertRaises(OverflowError, os.setreuid, 1<<32, 0)
self.assertRaises(OverflowError, os.setreuid, 0, 1<<32)
@@ -1762,6 +1768,8 @@ class PosixUidGidTests(unittest.TestCase):
def test_setregid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setregid, 0, 0)
+ self.assertRaises(TypeError, os.setregid, 'not an int', 0)
+ self.assertRaises(TypeError, os.setregid, 0, 'not an int')
self.assertRaises(OverflowError, os.setregid, 1<<32, 0)
self.assertRaises(OverflowError, os.setregid, 0, 1<<32)
diff --git a/Lib/test/test_pwd.py b/Lib/test/test_pwd.py
index ac9cff7..db98159 100644
--- a/Lib/test/test_pwd.py
+++ b/Lib/test/test_pwd.py
@@ -104,11 +104,11 @@ class PwdTest(unittest.TestCase):
# In some cases, byuids isn't a complete list of all users in the
# system, so if we try to pick a value not in byuids (via a perturbing
# loop, say), pwd.getpwuid() might still be able to find data for that
- # uid. Using sys.maxint may provoke the same problems, but hopefully
+ # uid. Using 2**32 - 2 may provoke the same problems, but hopefully
# it will be a more repeatable failure.
# Android accepts a very large span of uids including sys.maxsize and
# -1; it raises KeyError with 1 or 2 for example.
- fakeuid = sys.maxsize
+ fakeuid = 2**32 - 2
self.assertNotIn(fakeuid, byuids)
if not support.is_android:
self.assertRaises(KeyError, pwd.getpwuid, fakeuid)


@@ -1,11 +0,0 @@
diff -up cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build cpython-59223da36dec/Lib/test/test_posix.py
--- cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build 2012-08-07 17:15:59.000000000 -0400
+++ cpython-59223da36dec/Lib/test/test_posix.py 2012-08-07 17:16:53.528330330 -0400
@@ -973,6 +973,7 @@ class PosixTester(unittest.TestCase):
posix.RTLD_GLOBAL
posix.RTLD_LOCAL
+ @unittest._skipInRpmBuild('running kernel may not match kernel in chroot')
@unittest.skipUnless(hasattr(os, 'SEEK_HOLE'),
"test needs an OS that reports file holes")
def test_fs_holes(self):


@@ -1,11 +0,0 @@
diff -up Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds Python-3.3.0b1/Lib/test/test_socket.py
--- Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds 2012-07-24 15:02:30.823355067 -0400
+++ Python-3.3.0b1/Lib/test/test_socket.py 2012-07-24 15:08:13.021354999 -0400
@@ -2188,6 +2188,7 @@ class RecvmsgGenericStreamTests(RecvmsgG
# Tests which require a stream socket and can use either recvmsg()
# or recvmsg_into().
+ @unittest._skipInRpmBuild('fails intermittently when run within Koji')
def testRecvmsgEOF(self):
# Receive end-of-stream indicator (b"", peer socket closed).
msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, 1024)


@@ -1,310 +0,0 @@
diff --git a/Include/object.h b/Include/object.h
index 0c88603..e3413e8 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -1059,6 +1059,49 @@ PyAPI_FUNC(void)
_PyObject_DebugTypeStats(FILE *out);
#endif /* ifndef Py_LIMITED_API */
+/*
+ Define a pair of assertion macros.
+
+ These work like the regular C assert(), in that they will abort the
+ process with a message on stderr if the given condition fails to hold,
+ but compile away to nothing if NDEBUG is defined.
+
+ However, before aborting, Python will also try to call _PyObject_Dump() on
+ the given object. This may be of use when investigating bugs in which a
+ particular object is corrupt (e.g. buggy a tp_visit method in an extension
+ module breaking the garbage collector), to help locate the broken objects.
+
+ The WITH_MSG variant allows you to supply an additional message that Python
+ will attempt to print to stderr, after the object dump.
+*/
+#ifdef NDEBUG
+/* No debugging: compile away the assertions: */
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0)
+#else
+/* With debugging: generate checks: */
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \
+ ((expr) \
+ ? (void)(0) \
+ : _PyObject_AssertFailed((obj), \
+ (msg), \
+ (__STRING(expr)), \
+ (__FILE__), \
+ (__LINE__), \
+ (__PRETTY_FUNCTION__)))
+#endif
+
+#define PyObject_ASSERT(obj, expr) \
+ PyObject_ASSERT_WITH_MSG(obj, expr, NULL)
+
+/*
+ Declare and define the entrypoint even when NDEBUG is defined, to avoid
+ causing compiler/linker errors when building extensions without NDEBUG
+ against a Python built with NDEBUG defined
+*/
+PyAPI_FUNC(void) _PyObject_AssertFailed(PyObject *, const char *,
+ const char *, const char *, int,
+ const char *);
+
#ifdef __cplusplus
}
#endif
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index e727499..6efcafb 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -1,10 +1,11 @@
import unittest
from test.support import (verbose, refcount_test, run_unittest,
strip_python_stderr, cpython_only, start_threads,
- temp_dir, requires_type_collecting)
+ temp_dir, import_module, requires_type_collecting)
from test.support.script_helper import assert_python_ok, make_script
import sys
+import sysconfig
import time
import gc
import weakref
@@ -50,6 +51,8 @@ class GC_Detector(object):
# gc collects it.
self.wr = weakref.ref(C1055820(666), it_happened)
+BUILD_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS'])
+
@with_tp_del
class Uncollectable(object):
"""Create a reference cycle with multiple __del__ methods.
@@ -862,6 +865,50 @@ class GCCallbackTests(unittest.TestCase):
self.assertEqual(len(gc.garbage), 0)
+ @unittest.skipIf(BUILD_WITH_NDEBUG,
+ 'built with -NDEBUG')
+ def test_refcount_errors(self):
+ self.preclean()
+ # Verify the "handling" of objects with broken refcounts
+ import_module("ctypes") #skip if not supported
+
+ import subprocess
+ code = '''if 1:
+ a = []
+ b = [a]
+
+ # Simulate the refcount of "a" being too low (compared to the
+ # references held on it by live data), but keeping it above zero
+ # (to avoid deallocating it):
+ import ctypes
+ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a))
+
+ # The garbage collector should now have a fatal error when it reaches
+ # the broken object:
+ import gc
+ gc.collect()
+ '''
+ p = subprocess.Popen([sys.executable, "-c", code],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ p.stdout.close()
+ p.stderr.close()
+ # Verify that stderr has a useful error message:
+ self.assertRegex(stderr,
+ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "\(\(gc\)->gc.gc_refs >> \(1\)\) != 0" failed.')
+ self.assertRegex(stderr,
+ b'refcount was too small')
+ self.assertRegex(stderr,
+ b'object : \[\]')
+ self.assertRegex(stderr,
+ b'type : list')
+ self.assertRegex(stderr,
+ b'refcount: 1')
+ self.assertRegex(stderr,
+ b'address : 0x[0-9a-f]+')
+
+
class GCTogglingTests(unittest.TestCase):
def setUp(self):
gc.enable()
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
index 0c6f444..87edd5a 100644
--- a/Modules/gcmodule.c
+++ b/Modules/gcmodule.c
@@ -341,7 +341,8 @@ update_refs(PyGC_Head *containers)
{
PyGC_Head *gc = containers->gc.gc_next;
for (; gc != containers; gc = gc->gc.gc_next) {
- assert(_PyGCHead_REFS(gc) == GC_REACHABLE);
+ PyObject_ASSERT(FROM_GC(gc),
+ _PyGCHead_REFS(gc) == GC_REACHABLE);
_PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc)));
/* Python's cyclic gc should never see an incoming refcount
* of 0: if something decref'ed to 0, it should have been
@@ -361,7 +362,8 @@ update_refs(PyGC_Head *containers)
* so serious that maybe this should be a release-build
* check instead of an assert?
*/
- assert(_PyGCHead_REFS(gc) != 0);
+ PyObject_ASSERT(FROM_GC(gc),
+ _PyGCHead_REFS(gc) != 0);
}
}
@@ -376,7 +378,9 @@ visit_decref(PyObject *op, void *data)
* generation being collected, which can be recognized
* because only they have positive gc_refs.
*/
- assert(_PyGCHead_REFS(gc) != 0); /* else refcount was too small */
+ PyObject_ASSERT_WITH_MSG(FROM_GC(gc),
+ _PyGCHead_REFS(gc) != 0,
+ "refcount was too small"); /* else refcount was too small */
if (_PyGCHead_REFS(gc) > 0)
_PyGCHead_DECREF(gc);
}
@@ -436,9 +440,10 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
* If gc_refs == GC_UNTRACKED, it must be ignored.
*/
else {
- assert(gc_refs > 0
- || gc_refs == GC_REACHABLE
- || gc_refs == GC_UNTRACKED);
+ PyObject_ASSERT(FROM_GC(gc),
+ gc_refs > 0
+ || gc_refs == GC_REACHABLE
+ || gc_refs == GC_UNTRACKED);
}
}
return 0;
@@ -480,7 +485,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
*/
PyObject *op = FROM_GC(gc);
traverseproc traverse = Py_TYPE(op)->tp_traverse;
- assert(_PyGCHead_REFS(gc) > 0);
+ PyObject_ASSERT(op, _PyGCHead_REFS(gc) > 0);
_PyGCHead_SET_REFS(gc, GC_REACHABLE);
(void) traverse(op,
(visitproc)visit_reachable,
@@ -543,7 +548,7 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)
for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) {
PyObject *op = FROM_GC(gc);
- assert(IS_TENTATIVELY_UNREACHABLE(op));
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
next = gc->gc.gc_next;
if (has_legacy_finalizer(op)) {
@@ -619,7 +624,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
PyWeakReference **wrlist;
op = FROM_GC(gc);
- assert(IS_TENTATIVELY_UNREACHABLE(op));
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
next = gc->gc.gc_next;
if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op)))
@@ -640,9 +645,9 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
* the callback pointer intact. Obscure: it also
* changes *wrlist.
*/
- assert(wr->wr_object == op);
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == op);
_PyWeakref_ClearRef(wr);
- assert(wr->wr_object == Py_None);
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None);
if (wr->wr_callback == NULL)
continue; /* no callback */
@@ -676,7 +681,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
*/
if (IS_TENTATIVELY_UNREACHABLE(wr))
continue;
- assert(IS_REACHABLE(wr));
+ PyObject_ASSERT(op, IS_REACHABLE(wr));
/* Create a new reference so that wr can't go away
* before we can process it again.
@@ -685,7 +690,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
/* Move wr to wrcb_to_call, for the next pass. */
wrasgc = AS_GC(wr);
- assert(wrasgc != next); /* wrasgc is reachable, but
+ PyObject_ASSERT(op, wrasgc != next);
+ /* wrasgc is reachable, but
next isn't, so they can't
be the same */
gc_list_move(wrasgc, &wrcb_to_call);
@@ -701,11 +707,11 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
gc = wrcb_to_call.gc.gc_next;
op = FROM_GC(gc);
- assert(IS_REACHABLE(op));
- assert(PyWeakref_Check(op));
+ PyObject_ASSERT(op, IS_REACHABLE(op));
+ PyObject_ASSERT(op, PyWeakref_Check(op));
wr = (PyWeakReference *)op;
callback = wr->wr_callback;
- assert(callback != NULL);
+ PyObject_ASSERT(op, callback != NULL);
/* copy-paste of weakrefobject.c's handle_callback() */
temp = PyObject_CallFunctionObjArgs(callback, wr, NULL);
@@ -822,12 +828,14 @@ check_garbage(PyGC_Head *collectable)
for (gc = collectable->gc.gc_next; gc != collectable;
gc = gc->gc.gc_next) {
_PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc)));
- assert(_PyGCHead_REFS(gc) != 0);
+ PyObject_ASSERT(FROM_GC(gc),
+ _PyGCHead_REFS(gc) != 0);
}
subtract_refs(collectable);
for (gc = collectable->gc.gc_next; gc != collectable;
gc = gc->gc.gc_next) {
- assert(_PyGCHead_REFS(gc) >= 0);
+ PyObject_ASSERT(FROM_GC(gc),
+ _PyGCHead_REFS(gc) >= 0);
if (_PyGCHead_REFS(gc) != 0)
return -1;
}
diff --git a/Objects/object.c b/Objects/object.c
index 559794f..a47d47f 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -2022,6 +2022,35 @@ _PyTrash_thread_destroy_chain(void)
}
}
+PyAPI_FUNC(void)
+_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr,
+ const char *file, int line, const char *function)
+{
+ fprintf(stderr,
+ "%s:%d: %s: Assertion \"%s\" failed.\n",
+ file, line, function, expr);
+ if (msg) {
+ fprintf(stderr, "%s\n", msg);
+ }
+
+ fflush(stderr);
+
+ if (obj) {
+ /* This might succeed or fail, but we're about to abort, so at least
+ try to provide any extra info we can: */
+ _PyObject_Dump(obj);
+ }
+ else {
+ fprintf(stderr, "NULL object\n");
+ }
+
+ fflush(stdout);
+ fflush(stderr);
+
+ /* Terminate the process: */
+ abort();
+}
+
#ifndef Py_TRACE_REFS
/* For Py_LIMITED_API, we need an out-of-line version of _Py_Dealloc.
Define this here, so we can undefine the macro. */


@@ -1,30 +0,0 @@
diff -r 39b9b05c3085 Lib/distutils/sysconfig.py
--- a/Lib/distutils/sysconfig.py Wed Apr 10 00:27:23 2013 +0200
+++ b/Lib/distutils/sysconfig.py Wed Apr 10 10:14:18 2013 +0200
@@ -362,7 +362,10 @@
done[n] = item = ""
if found:
after = value[m.end():]
- value = value[:m.start()] + item + after
+ value = value[:m.start()]
+ if item.strip() not in value:
+ value += item
+ value += after
if "$" in after:
notdone[name] = value
else:
diff -r 39b9b05c3085 Lib/sysconfig.py
--- a/Lib/sysconfig.py Wed Apr 10 00:27:23 2013 +0200
+++ b/Lib/sysconfig.py Wed Apr 10 10:14:18 2013 +0200
@@ -296,7 +296,10 @@
if found:
after = value[m.end():]
- value = value[:m.start()] + item + after
+ value = value[:m.start()]
+ if item.strip() not in value:
+ value += item
+ value += after
if "$" in after:
notdone[name] = value
else:
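The guarded append in both hunks above can be read in isolation: when a Makefile variable expansion would re-insert a fragment that is already present in the value being built, it is dropped instead of duplicated. Below is a small stand-alone sketch of that rule, simplified from the patch and using hypothetical flag strings.

# Sketch of the de-duplicating expansion: append the expanded item only if
# it is not already contained in the value assembled so far.
def merge_expansion(before, item, after):
    value = before
    if item.strip() not in value:
        value += item
    return value + after

print(merge_expansion("-O2 -g ", "-O2", "-Wall"))  # '-O2 -g -Wall' (no duplicate)
print(merge_expansion("-g ", "-O2", " -Wall"))     # '-g -O2 -Wall'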


@@ -1,13 +0,0 @@
diff --git a/config.sub b/config.sub
index 40ea5df..932128b 100755
--- a/config.sub
+++ b/config.sub
@@ -1045,7 +1045,7 @@ case $basic_machine in
;;
ppc64) basic_machine=powerpc64-unknown
;;
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
;;
ppc64le | powerpc64little)
basic_machine=powerpc64le-unknown


@@ -1,14 +0,0 @@
diff -r 7fa3e824a4ee Lib/test/test_py_compile.py
--- a/Lib/test/test_py_compile.py Tue Oct 29 22:25:06 2013 -0400
+++ b/Lib/test/test_py_compile.py Wed Oct 30 11:08:31 2013 +0100
@@ -54,6 +54,10 @@
self.assertTrue(os.path.exists(self.pyc_path))
self.assertFalse(os.path.exists(self.cache_path))
+ def test_bad_coding(self):
+ bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
+ self.assertIsNone(py_compile.compile(bad_coding, doraise=False))
+
def test_relative_path(self):
py_compile.compile(os.path.relpath(self.source_path),
os.path.relpath(self.pyc_path))


@@ -1,11 +0,0 @@
diff -r 28c04e954bb6 Lib/lib2to3/main.py
--- a/Lib/lib2to3/main.py Tue Oct 29 22:25:55 2013 -0400
+++ b/Lib/lib2to3/main.py Wed Nov 06 14:33:07 2013 +0100
@@ -213,6 +213,7 @@
# Set up logging handler
level = logging.DEBUG if options.verbose else logging.INFO
+ logging.root.handlers = []
logging.basicConfig(format='%(name)s: %(message)s', level=level)
logger = logging.getLogger('lib2to3.main')
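The one-line change is needed because logging.basicConfig() is a no-op once the root logger already has handlers, which happens when lib2to3 is driven from a larger program that configured logging first. A small sketch of that behaviour follows (ordinary stdlib logging, nothing patch-specific).

import logging

# basicConfig() silently does nothing if the root logger already has a
# handler, so the second format below would normally be ignored:
logging.basicConfig(format="first: %(message)s")
logging.basicConfig(format="second: %(message)s")
logging.warning("still rendered with the first format")

# Clearing the root handlers first (what the patch does) lets the new
# configuration take effect:
logging.root.handlers = []
logging.basicConfig(format="second: %(message)s")
logging.warning("now rendered with the second format")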


@@ -1,12 +0,0 @@
diff -up Python-3.5.0/Makefile.pre.in.lib Python-3.5.0/Makefile.pre.in
--- Python-3.5.0/Makefile.pre.in.lib 2015-09-21 15:39:47.928286620 +0200
+++ Python-3.5.0/Makefile.pre.in 2015-09-21 15:42:58.004042762 +0200
@@ -1340,7 +1340,7 @@ inclinstall:
# Install the library and miscellaneous stuff needed for extending/embedding
# This goes into $(exec_prefix)
-LIBPL= @LIBPL@
+LIBPL= $(LIBDEST)/config-$(LDVERSION)-$(MULTIARCH)
# pkgconfig directory
LIBPC= $(LIBDIR)/pkgconfig


@@ -1,15 +0,0 @@
diff -up Python-3.5.0/configure.ac.eabi Python-3.5.0/configure.ac
--- Python-3.5.0/configure.eabi 2015-09-23 13:52:20.756909744 +0200
+++ Python-3.5.0/configure 2015-09-23 13:52:46.859163629 +0200
@@ -762,9 +762,9 @@ cat >> conftest.c <<EOF
alpha-linux-gnu
# elif defined(__ARM_EABI__) && defined(__ARM_PCS_VFP)
# if defined(__ARMEL__)
- arm-linux-gnueabihf
+ arm-linux-gnueabi
# else
- armeb-linux-gnueabihf
+ armeb-linux-gnueabi
# endif
# elif defined(__ARM_EABI__) && !defined(__ARM_PCS_VFP)
# if defined(__ARMEL__)


@@ -1,42 +0,0 @@
diff -urp Python-3.5.0/configure p/configure
--- Python-3.5.0/configure 2016-02-25 16:12:12.615184011 +0000
+++ p/configure 2016-02-25 16:13:01.293412517 +0000
@@ -5133,7 +5133,7 @@ cat >> conftest.c <<EOF
# elif _MIPS_SIM == _ABIN32
mips64el-linux-gnuabin32
# elif _MIPS_SIM == _ABI64
- mips64el-linux-gnuabi64
+ mips64el-linux-gnu
# else
# error unknown platform triplet
# endif
@@ -5143,7 +5143,7 @@ cat >> conftest.c <<EOF
# elif _MIPS_SIM == _ABIN32
mips64-linux-gnuabin32
# elif _MIPS_SIM == _ABI64
- mips64-linux-gnuabi64
+ mips64-linux-gnu
# else
# error unknown platform triplet
# endif
diff -urp Python-3.5.0/configure.ac p/configure.ac
--- Python-3.5.0/configure.ac 2016-02-25 16:12:11.663159985 +0000
+++ p/configure.ac 2016-02-25 16:13:18.814854710 +0000
@@ -784,7 +784,7 @@ cat >> conftest.c <<EOF
# elif _MIPS_SIM == _ABIN32
mips64el-linux-gnuabin32
# elif _MIPS_SIM == _ABI64
- mips64el-linux-gnuabi64
+ mips64el-linux-gnu
# else
# error unknown platform triplet
# endif
@@ -794,7 +794,7 @@ cat >> conftest.c <<EOF
# elif _MIPS_SIM == _ABIN32
mips64-linux-gnuabin32
# elif _MIPS_SIM == _ABI64
- mips64-linux-gnuabi64
+ mips64-linux-gnu
# else
# error unknown platform triplet
# endif


@@ -1,46 +0,0 @@
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index 9d31d13..ed44a93 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -424,8 +424,18 @@ class install(Command):
raise DistutilsOptionError(
"must not supply exec-prefix without prefix")
- self.prefix = os.path.normpath(sys.prefix)
- self.exec_prefix = os.path.normpath(sys.exec_prefix)
+ # self.prefix is set to sys.prefix + /local/
+ # if the executable is /usr/bin/python* and RPM build
+ # is not detected to make pip and distutils install into
+ # the separate location.
+ if (sys.executable.startswith("/usr/bin/python")
+ and 'RPM_BUILD_ROOT' not in os.environ):
+ addition = "/local"
+ else:
+ addition = ""
+
+ self.prefix = os.path.normpath(sys.prefix) + addition
+ self.exec_prefix = os.path.normpath(sys.exec_prefix) + addition
else:
if self.exec_prefix is None:
diff --git a/Lib/site.py b/Lib/site.py
index 4744eb0..b5fe571 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -326,7 +326,15 @@ def getsitepackages(prefixes=None):
return sitepackages
def addsitepackages(known_paths, prefixes=None):
- """Add site-packages to sys.path"""
+ """Add site-packages to sys.path.
+
+ '/usr/local' is included in PREFIXES if the executable is /usr/bin/python*
+ and RPM build is not detected to make sudo pip installed packages visible.
+
+ """
+ if (ENABLE_USER_SITE and sys.executable.startswith("/usr/bin/python")
+ and 'RPM_BUILD_ROOT' not in os.environ):
+ PREFIXES.insert(0, "/usr/local")
for sitedir in getsitepackages(prefixes):
if os.path.isdir(sitedir):
addsitedir(sitedir, known_paths)
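The rule encoded by both hunks above can be restated as a tiny sketch: when the interpreter is the system /usr/bin/python* and no RPM build is in progress, installs are redirected under /usr/local. This mirrors the condition in the patch and is not a public API.

import os
import sys

# Mirror of the downstream condition: system interpreter, outside rpmbuild.
if (sys.executable.startswith("/usr/bin/python")
        and 'RPM_BUILD_ROOT' not in os.environ):
    effective_prefix = os.path.normpath(sys.prefix) + "/local"
else:
    effective_prefix = os.path.normpath(sys.prefix)

print("pip/distutils would install under:", effective_prefix)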

File diff suppressed because it is too large.


@@ -1,12 +0,0 @@
diff --git a/Lib/ctypes/test/test_structures.py b/Lib/ctypes/test/test_structures.py
index 3eded77..ad7859a 100644
--- a/Lib/ctypes/test/test_structures.py
+++ b/Lib/ctypes/test/test_structures.py
@@ -392,6 +392,7 @@ class StructureTestCase(unittest.TestCase):
(1, 0, 0, 0, 0, 0))
self.assertRaises(TypeError, lambda: Z(1, 2, 3, 4, 5, 6, 7))
+ @unittest.skip('Fails on aarch64: http://bugs.python.org/issue29804')
def test_pass_by_value(self):
# This should mirror the structure in Modules/_ctypes/_ctypes_test.c
class X(Structure):


@@ -1,16 +0,0 @@
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index d203cdd..c128dae 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -3256,8 +3256,9 @@ if _have_threads:
except ssl.SSLError as e:
stats = e
- if expected is None and IS_OPENSSL_1_1:
- # OpenSSL 1.1.0 raises handshake error
+ if (expected is None and IS_OPENSSL_1_1
+ and ssl.OPENSSL_VERSION_INFO < (1, 1, 0, 6)):
+ # OpenSSL 1.1.0 to 1.1.0e raises handshake error
self.assertIsInstance(stats, ssl.SSLError)
else:
msg = "failed trying %s (s) and %s (c).\n" \


@@ -1,99 +0,0 @@
diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py
index 492a84a2313..9746678607c 100644
--- a/Lib/test/test_asyncio/test_events.py
+++ b/Lib/test/test_asyncio/test_events.py
@@ -1980,19 +1980,26 @@ def test_subprocess_terminate(self):
@unittest.skipIf(sys.platform == 'win32', "Don't have SIGHUP")
def test_subprocess_send_signal(self):
- prog = os.path.join(os.path.dirname(__file__), 'echo.py')
-
- connect = self.loop.subprocess_exec(
- functools.partial(MySubprocessProtocol, self.loop),
- sys.executable, prog)
- transp, proto = self.loop.run_until_complete(connect)
- self.assertIsInstance(proto, MySubprocessProtocol)
- self.loop.run_until_complete(proto.connected)
-
- transp.send_signal(signal.SIGHUP)
- self.loop.run_until_complete(proto.completed)
- self.assertEqual(-signal.SIGHUP, proto.returncode)
- transp.close()
+ # bpo-31034: Make sure that we get the default signal handler (killing
+ # the process). The parent process may have decided to ignore SIGHUP,
+ # and signal handlers are inherited.
+ old_handler = signal.signal(signal.SIGHUP, signal.SIG_DFL)
+ try:
+ prog = os.path.join(os.path.dirname(__file__), 'echo.py')
+
+ connect = self.loop.subprocess_exec(
+ functools.partial(MySubprocessProtocol, self.loop),
+ sys.executable, prog)
+ transp, proto = self.loop.run_until_complete(connect)
+ self.assertIsInstance(proto, MySubprocessProtocol)
+ self.loop.run_until_complete(proto.connected)
+
+ transp.send_signal(signal.SIGHUP)
+ self.loop.run_until_complete(proto.completed)
+ self.assertEqual(-signal.SIGHUP, proto.returncode)
+ transp.close()
+ finally:
+ signal.signal(signal.SIGHUP, old_handler)
def test_subprocess_stderr(self):
prog = os.path.join(os.path.dirname(__file__), 'echo2.py')
diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py
index 2e14a8a9735..e8822c36698 100644
--- a/Lib/test/test_asyncio/test_subprocess.py
+++ b/Lib/test/test_asyncio/test_subprocess.py
@@ -166,25 +166,32 @@ def test_terminate(self):
@unittest.skipIf(sys.platform == 'win32', "Don't have SIGHUP")
def test_send_signal(self):
- code = 'import time; print("sleeping", flush=True); time.sleep(3600)'
- args = [sys.executable, '-c', code]
- create = asyncio.create_subprocess_exec(*args,
- stdout=subprocess.PIPE,
- loop=self.loop)
- proc = self.loop.run_until_complete(create)
-
- @asyncio.coroutine
- def send_signal(proc):
- # basic synchronization to wait until the program is sleeping
- line = yield from proc.stdout.readline()
- self.assertEqual(line, b'sleeping\n')
+ # bpo-31034: Make sure that we get the default signal handler (killing
+ # the process). The parent process may have decided to ignore SIGHUP,
+ # and signal handlers are inherited.
+ old_handler = signal.signal(signal.SIGHUP, signal.SIG_DFL)
+ try:
+ code = 'import time; print("sleeping", flush=True); time.sleep(3600)'
+ args = [sys.executable, '-c', code]
+ create = asyncio.create_subprocess_exec(*args,
+ stdout=subprocess.PIPE,
+ loop=self.loop)
+ proc = self.loop.run_until_complete(create)
- proc.send_signal(signal.SIGHUP)
- returncode = (yield from proc.wait())
- return returncode
-
- returncode = self.loop.run_until_complete(send_signal(proc))
- self.assertEqual(-signal.SIGHUP, returncode)
+ @asyncio.coroutine
+ def send_signal(proc):
+ # basic synchronization to wait until the program is sleeping
+ line = yield from proc.stdout.readline()
+ self.assertEqual(line, b'sleeping\n')
+
+ proc.send_signal(signal.SIGHUP)
+ returncode = (yield from proc.wait())
+ return returncode
+
+ returncode = self.loop.run_until_complete(send_signal(proc))
+ self.assertEqual(-signal.SIGHUP, returncode)
+ finally:
+ signal.signal(signal.SIGHUP, old_handler)
def prepare_broken_pipe_test(self):
# buffer large enough to feed the whole pipe buffer
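Both hunks wrap the existing test body in the same save/restore pattern; a minimal sketch of that pattern alone (POSIX-only, with the test body elided):

    import signal

    # Temporarily restore the default SIGHUP disposition so a spawned child
    # inherits it, then reinstall whatever handler the parent had before.
    old_handler = signal.signal(signal.SIGHUP, signal.SIG_DFL)
    try:
        pass  # run the subprocess and assert it exits with -signal.SIGHUP here
    finally:
        signal.signal(signal.SIGHUP, old_handler)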


@@ -1,58 +0,0 @@
From 8c2d4cf092c5f0335e7982392a33927579c4d512 Mon Sep 17 00:00:00 2001
From: Dong-hee Na <donghee.na92@gmail.com>
Date: Wed, 26 Jul 2017 21:11:25 +0900
Subject: [PATCH] [3.6] bpo-30119: fix ftplib.FTP.putline() to throw an error
for an illegal command (#1214) (#2886)
---
Lib/ftplib.py | 2 ++
Lib/test/test_ftplib.py | 6 +++++-
Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst | 2 ++
3 files changed, 9 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst
diff --git a/Lib/ftplib.py b/Lib/ftplib.py
index 8f36f537e8a..a02e595cb02 100644
--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -186,6 +186,8 @@ def sanitize(self, s):
# Internal: send one line to the server, appending CRLF
def putline(self, line):
+ if '\r' in line or '\n' in line:
+ raise ValueError('an illegal newline character should not be contained')
line = line + CRLF
if self.debugging > 1:
print('*put*', self.sanitize(line))
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
index 12fabc5e8be..a561e9efa03 100644
--- a/Lib/test/test_ftplib.py
+++ b/Lib/test/test_ftplib.py
@@ -484,6 +484,9 @@ def test_sanitize(self):
self.assertEqual(self.client.sanitize('PASS 12345'), repr('PASS *****'))
def test_exceptions(self):
+ self.assertRaises(ValueError, self.client.sendcmd, 'echo 40\r\n0')
+ self.assertRaises(ValueError, self.client.sendcmd, 'echo 40\n0')
+ self.assertRaises(ValueError, self.client.sendcmd, 'echo 40\r0')
self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 400')
self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 499')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 500')
@@ -492,7 +495,8 @@ def test_exceptions(self):
def test_all_errors(self):
exceptions = (ftplib.error_reply, ftplib.error_temp, ftplib.error_perm,
- ftplib.error_proto, ftplib.Error, OSError, EOFError)
+ ftplib.error_proto, ftplib.Error, OSError,
+ EOFError)
for x in exceptions:
try:
raise x('exception not included in all_errors set')
diff --git a/Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst b/Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst
new file mode 100644
index 00000000000..a37d3703842
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst
@@ -0,0 +1,2 @@
+ftplib.FTP.putline() now throws ValueError on commands that contain CR or
+LF. Patch by Dong-hee Na.
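A rough illustration of the new guard in putline() (the FTP object is never connected, so with the patch applied the ValueError is raised before any I/O happens):

    import ftplib

    ftp = ftplib.FTP()  # no connection is made
    try:
        ftp.putline('NOOP\r\nQUIT')  # embedded CR/LF is rejected up front
    except ValueError as exc:
        print('rejected:', exc)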


@@ -1,20 +0,0 @@
diff -up Python-3.1.1/Lib/distutils/unixccompiler.py.rpath Python-3.1.1/Lib/distutils/unixccompiler.py
--- Python-3.1.1/Lib/distutils/unixccompiler.py.rpath 2009-09-04 17:29:34.000000000 -0400
+++ Python-3.1.1/Lib/distutils/unixccompiler.py 2009-09-04 17:49:54.000000000 -0400
@@ -141,6 +141,16 @@ class UnixCCompiler(CCompiler):
if sys.platform == "cygwin":
exe_extension = ".exe"
+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+ """Remove standard library path from rpath"""
+ libraries, library_dirs, runtime_library_dirs = super(
+ self.__class__, self)._fix_lib_args(libraries, library_dirs,
+ runtime_library_dirs)
+ libdir = sysconfig.get_config_var('LIBDIR')
+ if runtime_library_dirs and (libdir in runtime_library_dirs):
+ runtime_library_dirs.remove(libdir)
+ return libraries, library_dirs, runtime_library_dirs
+
def preprocess(self, source, output_file=None, macros=None,
include_dirs=None, extra_preargs=None, extra_postargs=None):
fixed_args = self._fix_compile_args(None, macros, include_dirs)

README Symbolic link

@@ -0,0 +1 @@
dead.package


@@ -1,59 +0,0 @@
"""Checks if all *.pyc and *.pyo files have later mtime than their *.py files."""
import imp
import os
import sys
# list of test and other files that we expect not to have bytecode
not_compiled = [
'test/bad_coding.py',
'test/bad_coding2.py',
'test/badsyntax_3131.py',
'test/badsyntax_future3.py',
'test/badsyntax_future4.py',
'test/badsyntax_future5.py',
'test/badsyntax_future6.py',
'test/badsyntax_future7.py',
'test/badsyntax_future8.py',
'test/badsyntax_future9.py',
'test/badsyntax_future10.py',
'test/badsyntax_async1.py',
'test/badsyntax_async2.py',
'test/badsyntax_async3.py',
'test/badsyntax_async4.py',
'test/badsyntax_async5.py',
'test/badsyntax_async6.py',
'test/badsyntax_async7.py',
'test/badsyntax_async8.py',
'test/badsyntax_async9.py',
'test/badsyntax_pep3120.py',
'lib2to3/tests/data/bom.py',
'lib2to3/tests/data/crlf.py',
'lib2to3/tests/data/different_encoding.py',
'lib2to3/tests/data/false_encoding.py',
'lib2to3/tests/data/py2_test_grammar.py',
'.debug-gdb.py',
]
failed = 0
def bytecode_expected(source):
for f in not_compiled:
if source.endswith(f):
return False
return True
compiled = filter(lambda f: bytecode_expected(f), sys.argv[1:])
for f in compiled:
# check both pyo and pyc
to_check = map(lambda b: imp.cache_from_source(f, b), (True, False))
f_mtime = os.path.getmtime(f)
for c in to_check:
c_mtime = os.path.getmtime(c)
if c_mtime < f_mtime:
sys.stderr.write('Failed bytecompilation timestamps check: ')
sys.stderr.write('Bytecode file {} is older than source file {}.\n'.format(c, f))
failed += 1
if failed:
sys.stderr.write('\n{} files failed bytecompilation timestamps check.\n'.format(failed))
sys.exit(1)
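A hypothetical driver for the check above (the script filename and library path are placeholders; the spec file may invoke it differently):

    import glob
    import subprocess
    import sys

    # Collect every installed source file and hand the list to the checker,
    # which exits non-zero if any .pyc/.pyo is older than its .py.
    files = glob.glob('/usr/lib/python3.6/**/*.py', recursive=True)
    subprocess.run([sys.executable, 'check-pyc-and-pyo-timestamps.py'] + files,
                   check=True)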

dead.package Normal file

@@ -0,0 +1 @@
This branch was pushed by mistake. Please ignore it.


@@ -1,10 +0,0 @@
# Note that the path could itself be a python file, or a directory
# Python's compileall module only works on directories, and requires a max
# recursion depth
%py_byte_compile()\
python_binary="%1"\
bytecode_compilation_path="%2"\
find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("$RPM_BUILD_ROOT")[2], optimize=opt) for opt in range(2) for f in sys.argv[1:]]' || :\
%{nil}

View File

@@ -1,471 +0,0 @@
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index 74de782..8ca9d6d 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -747,7 +747,7 @@ class build_ext(Command):
else:
from distutils import sysconfig
if sysconfig.get_config_var('Py_ENABLE_SHARED'):
- pythonlib = 'python{}.{}{}'.format(
+ pythonlib = 'platform-python{}.{}{}'.format(
sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff,
sysconfig.get_config_var('ABIFLAGS'))
return ext.libraries + [pythonlib]
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index ec135d4..60c3a18 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -29,9 +29,9 @@ WINDOWS_SCHEME = {
INSTALL_SCHEMES = {
'unix_prefix': {
- 'purelib': '$base/lib/python$py_version_short/site-packages',
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
- 'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
+ 'purelib': '$base/lib/platform-python$py_version_short/site-packages',
+ 'platlib': '$platbase/lib/platform-python$py_version_short/site-packages',
+ 'headers': '$base/include/platform-python$py_version_short$abiflags/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
},
@@ -59,7 +59,7 @@ if HAS_USER_SITE:
'purelib': '$usersite',
'platlib': '$usersite',
'headers':
- '$userbase/include/python$py_version_short$abiflags/$dist_name',
+ '$userbase/include/platform-python$py_version_short$abiflags/$dist_name',
'scripts': '$userbase/bin',
'data' : '$userbase',
}
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 5c2670f..6e90b40 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -130,7 +130,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
if os.name == "posix":
libpython = os.path.join(prefix,
- "lib", "python" + get_python_version())
+ "lib", "platform-python" + get_python_version())
if standard_lib:
return libpython
else:
diff --git a/Lib/site.py b/Lib/site.py
index 386d2ad..cfef02b 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -305,7 +305,7 @@ def getsitepackages(prefixes=None):
if os.sep == '/':
sitepackages.append(os.path.join(prefix, "lib",
- "python%d.%d" % sys.version_info[:2],
+ "platform-python%d.%d" % sys.version_info[:2],
"site-packages"))
else:
sitepackages.append(prefix)
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index d35b5eb..25059e4 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -20,14 +20,14 @@ __all__ = [
_INSTALL_SCHEMES = {
'posix_prefix': {
- 'stdlib': '{installed_base}/lib/python{py_version_short}',
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
- 'purelib': '{base}/lib/python{py_version_short}/site-packages',
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
+ 'stdlib': '{installed_base}/lib/platform-python{py_version_short}',
+ 'platstdlib': '{platbase}/lib/platform-python{py_version_short}',
+ 'purelib': '{base}/lib/platform-python{py_version_short}/site-packages',
+ 'platlib': '{platbase}/lib/platform-python{py_version_short}/site-packages',
'include':
- '{installed_base}/include/python{py_version_short}{abiflags}',
+ '{installed_base}/include/platform-python{py_version_short}{abiflags}',
'platinclude':
- '{installed_platbase}/include/python{py_version_short}{abiflags}',
+ '{installed_platbase}/include/platform-python{py_version_short}{abiflags}',
'scripts': '{base}/bin',
'data': '{base}',
},
@@ -61,13 +61,13 @@ _INSTALL_SCHEMES = {
'data': '{userbase}',
},
'posix_user': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
- 'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'include': '{userbase}/include/python{py_version_short}',
- 'scripts': '{userbase}/bin',
- 'data': '{userbase}',
+ 'stdlib': '{installed_base}/lib/platform-python{py_version_short}',
+ 'platstdlib': '{platbase}/lib/platform-python{py_version_short}',
+ 'purelib': '{base}/lib/platform-python{py_version_short}/site-packages',
+ 'platlib': '{platbase}/lib/platform-python{py_version_short}/site-packages',
+ 'include': '{installed_base}/include/platform-python{py_version_short}{abiflags}',
+ 'scripts': '{base}/bin',
+ 'data': '{base}',
},
'osx_framework_user': {
'stdlib': '{userbase}/lib/python',
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
index a29ca96..fea10e3 100644
--- a/Lib/test/test_sysconfig.py
+++ b/Lib/test/test_sysconfig.py
@@ -260,7 +260,7 @@ class TestSysConfig(unittest.TestCase):
self.assertEqual(get(real), get(link))
finally:
unlink(link)
-
+ @unittest.skip('posix_prefix and posix_user schemes are the same so skipping the test')
def test_user_similar(self):
# Issue #8759: make sure the posix scheme for the users
# is similar to the global posix_prefix one
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 8b6454f..bf83e4f 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -137,10 +137,10 @@ SCRIPTDIR= $(prefix)/lib
ABIFLAGS= @ABIFLAGS@
# Detailed destination directories
-BINLIBDEST= $(LIBDIR)/python$(VERSION)
-LIBDEST= $(SCRIPTDIR)/python$(VERSION)
-INCLUDEPY= $(INCLUDEDIR)/python$(LDVERSION)
-CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(LDVERSION)
+BINLIBDEST= $(LIBDIR)/platform-python$(VERSION)
+LIBDEST= $(SCRIPTDIR)/platform-python$(VERSION)
+INCLUDEPY= $(INCLUDEDIR)/platform-python$(LDVERSION)
+CONFINCLUDEPY= $(CONFINCLUDEDIR)/platform-python$(LDVERSION)
# Symbols used for using shared libraries
SHLIB_SUFFIX= @SHLIB_SUFFIX@
@@ -590,7 +590,7 @@ $(LIBRARY): $(LIBRARY_OBJS)
$(AR) $(ARFLAGS) $@ $(MODOBJS)
$(RANLIB) $@
-libpython$(LDVERSION).so: $(LIBRARY_OBJS)
+libplatform-python$(LDVERSION).so: $(LIBRARY_OBJS)
if test $(INSTSONAME) != $(LDLIBRARY); then \
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
$(LN) -f $(INSTSONAME) $@; \
@@ -598,14 +598,14 @@ libpython$(LDVERSION).so: $(LIBRARY_OBJS)
$(BLDSHARED) -o $@ $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
fi
-libpython3.so: libpython$(LDVERSION).so
+libplatform-python3.so: libplatform-python$(LDVERSION).so
$(BLDSHARED) $(NO_AS_NEEDED) -o $@ -Wl,-h$@ $^
-libpython$(LDVERSION).dylib: $(LIBRARY_OBJS)
- $(CC) -dynamiclib -Wl,-single_module $(PY_LDFLAGS) -undefined dynamic_lookup -Wl,-install_name,$(prefix)/lib/libpython$(LDVERSION).dylib -Wl,-compatibility_version,$(VERSION) -Wl,-current_version,$(VERSION) -o $@ $(LIBRARY_OBJS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
+libplatform-python$(LDVERSION).dylib: $(LIBRARY_OBJS)
+ $(CC) -dynamiclib -Wl,-single_module $(PY_LDFLAGS) -undefined dynamic_lookup -Wl,-install_name,$(prefix)/lib/libplatform-python$(LDVERSION).dylib -Wl,-compatibility_version,$(VERSION) -Wl,-current_version,$(VERSION) -o $@ $(LIBRARY_OBJS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
-libpython$(VERSION).sl: $(LIBRARY_OBJS)
+libplatform-python$(VERSION).sl: $(LIBRARY_OBJS)
$(LDSHARED) -o $@ $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST)
# Copy up the gdb python hooks into a position where they can be automatically
@@ -643,7 +643,7 @@ $(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK): \
# This rule builds the Cygwin Python DLL and import library if configured
# for a shared core library; otherwise, this rule is a noop.
-$(DLLLIBRARY) libpython$(VERSION).dll.a: $(LIBRARY_OBJS)
+$(DLLLIBRARY) libplatform-python$(VERSION).dll.a: $(LIBRARY_OBJS)
if test -n "$(DLLLIBRARY)"; then \
$(LDSHARED) -Wl,--out-implib=$@ -o $(DLLLIBRARY) $^ \
$(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST); \
@@ -1346,7 +1346,7 @@
python-config: $(srcdir)/Misc/python-config.in Misc/python-config.sh
# Substitution happens here, as the completely-expanded BINDIR
# is not available in configure
- sed -e "s,@EXENAME@,$(BINDIR)/python$(LDVERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config.py
+ sed -e "s,@EXENAME@,/usr/libexec/platform-python$(LDVERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config.py
# Replace makefile compat. variable references with shell script compat. ones; $(VAR) -> ${VAR}
LC_ALL=C sed -e 's,\$$(\([A-Za-z0-9_]*\)),\$$\{\1\},g' < Misc/python-config.sh >python-config
# On Darwin, always use the python version of the script, the shell
@@ -1485,12 +1485,12 @@ frameworkinstallstructure: $(LDLIBRARY)
# Install a number of symlinks to keep software that expects a normal unix
# install (which includes python-config) happy.
frameworkinstallmaclib:
- $(LN) -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(LIBPL)/libpython$(LDVERSION).a"
- $(LN) -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(LIBPL)/libpython$(LDVERSION).dylib"
- $(LN) -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(LIBPL)/libpython$(VERSION).a"
- $(LN) -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(LIBPL)/libpython$(VERSION).dylib"
- $(LN) -fs "../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/libpython$(LDVERSION).dylib"
- $(LN) -fs "../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/libpython$(VERSION).dylib"
+ $(LN) -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(LIBPL)/libplatform-python$(LDVERSION).a"
+ $(LN) -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(LIBPL)/libplatform-python$(LDVERSION).dylib"
+ $(LN) -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(LIBPL)/libplatform-python$(VERSION).a"
+ $(LN) -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(LIBPL)/libplatform-python$(VERSION).dylib"
+ $(LN) -fs "../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/libplatform-python$(LDVERSION).dylib"
+ $(LN) -fs "../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/libplatform-python$(VERSION).dylib"
# This installs the IDE, the Launcher and other apps into /Applications
frameworkinstallapps:
diff --git a/Misc/python-config.in b/Misc/python-config.in
index e13da75..a72893c 100644
--- a/Misc/python-config.in
+++ b/Misc/python-config.in
@@ -47,7 +47,7 @@ for opt in opt_flags:
print(' '.join(flags))
elif opt in ('--libs', '--ldflags'):
- libs = ['-lpython' + pyver + sys.abiflags]
+ libs = ['-lplatform-python' + pyver + sys.abiflags]
libs += getvar('LIBS').split()
libs += getvar('SYSLIBS').split()
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
diff --git a/Misc/python-config.sh.in b/Misc/python-config.sh.in
index 30c6927..33ea82f 100644
--- a/Misc/python-config.sh.in
+++ b/Misc/python-config.sh.in
@@ -40,7 +40,7 @@ LIBM="@LIBM@"
LIBC="@LIBC@"
SYSLIBS="$LIBM $LIBC"
ABIFLAGS="@ABIFLAGS@"
-LIBS="-lpython${VERSION}${ABIFLAGS} @LIBS@ $SYSLIBS"
+LIBS="-lplatform-python${VERSION}${ABIFLAGS} @LIBS@ $SYSLIBS"
BASECFLAGS="@BASECFLAGS@"
LDLIBRARY="@LDLIBRARY@"
LINKFORSHARED="@LINKFORSHARED@"
@@ -51,8 +51,8 @@ LIBDEST=${prefix}/lib/python${VERSION}
LIBPL=$(echo "@LIBPL@" | sed "s#$prefix_build#$prefix_real#")
SO="@EXT_SUFFIX@"
PYTHONFRAMEWORK="@PYTHONFRAMEWORK@"
-INCDIR="-I$includedir/python${VERSION}${ABIFLAGS}"
-PLATINCDIR="-I$includedir/python${VERSION}${ABIFLAGS}"
+INCDIR="-I$includedir/platform-python${VERSION}${ABIFLAGS}"
+PLATINCDIR="-I$includedir/platform-python${VERSION}${ABIFLAGS}"
# Scan for --help or unknown argument.
for ARG in $*
diff --git a/Misc/python.pc.in b/Misc/python.pc.in
index ae69867..38405a1 100644
--- a/Misc/python.pc.in
+++ b/Misc/python.pc.in
@@ -9,5 +9,5 @@ Description: Python library
Requires:
Version: @VERSION@
Libs.private: @LIBS@
-Libs: -L${libdir} -lpython@VERSION@@ABIFLAGS@
-Cflags: -I${includedir}/python@VERSION@@ABIFLAGS@
+Libs: -L${libdir} -lplatform-python@VERSION@@ABIFLAGS@
+Cflags: -I${includedir}/platform-python@VERSION@@ABIFLAGS@
diff --git a/Modules/getpath.c b/Modules/getpath.c
index 0f91643..8d7b675 100644
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -494,7 +494,7 @@ calculate_path(void)
_pythonpath = Py_DecodeLocale(PYTHONPATH, NULL);
_prefix = Py_DecodeLocale(PREFIX, NULL);
_exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL);
- lib_python = Py_DecodeLocale("lib/python" VERSION, NULL);
+ lib_python = Py_DecodeLocale("lib/platform-python" VERSION, NULL);
if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) {
Py_FatalError(
diff --git a/Modules/makesetup b/Modules/makesetup
index 8db8de8..430e323 100755
--- a/Modules/makesetup
+++ b/Modules/makesetup
@@ -92,7 +92,7 @@ CYGWIN*) if test $libdir = .
else
ExtraLibDir='$(LIBPL)'
fi
- ExtraLibs="-L$ExtraLibDir -lpython\$(VERSION)";;
+ ExtraLibs="-L$ExtraLibDir -lplatform-python\$(VERSION)";;
esac
# Main loop
diff --git a/configure b/configure
index eea17a4..76b3d3a 100755
--- a/configure
+++ b/configure
@@ -5741,7 +5741,7 @@ esac
$as_echo_n "checking LIBRARY... " >&6; }
if test -z "$LIBRARY"
then
- LIBRARY='libpython$(VERSION)$(ABIFLAGS).a'
+ LIBRARY='libplatform-python$(VERSION)$(ABIFLAGS).a'
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBRARY" >&5
$as_echo "$LIBRARY" >&6; }
@@ -5957,48 +5957,48 @@ $as_echo "#define Py_ENABLE_SHARED 1" >>confdefs.h
case $ac_sys_system in
CYGWIN*)
- LDLIBRARY='libpython$(LDVERSION).dll.a'
- DLLLIBRARY='libpython$(LDVERSION).dll'
+ LDLIBRARY='libplatform-python$(LDVERSION).dll.a'
+ DLLLIBRARY='libplatform-python$(LDVERSION).dll'
;;
SunOS*)
- LDLIBRARY='libpython$(LDVERSION).so'
- BLDLIBRARY='-Wl,-R,$(LIBDIR) -L. -lpython$(LDVERSION)'
+ LDLIBRARY='libplatform-python$(LDVERSION).so'
+ BLDLIBRARY='-Wl,-R,$(LIBDIR) -L. -lplatform-python$(LDVERSION)'
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
INSTSONAME="$LDLIBRARY".$SOVERSION
if test "$with_pydebug" != yes
then
- PY3LIBRARY=libpython3.so
+ PY3LIBRARY=libplatform-python3.so
fi
;;
Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*)
- LDLIBRARY='libpython$(LDVERSION).so'
- BLDLIBRARY='-L. -lpython$(LDVERSION)'
+ LDLIBRARY='libplatform-python$(LDVERSION).so'
+ BLDLIBRARY='-L. -lplatform-python$(LDVERSION)'
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
INSTSONAME="$LDLIBRARY".$SOVERSION
if test "$with_pydebug" != yes
then
- PY3LIBRARY=libpython3.so
+ PY3LIBRARY=libplatform-python3.so
fi
;;
hp*|HP*)
case `uname -m` in
ia64)
- LDLIBRARY='libpython$(LDVERSION).so'
+ LDLIBRARY='libplatform-python$(LDVERSION).so'
;;
*)
- LDLIBRARY='libpython$(LDVERSION).sl'
+ LDLIBRARY='libplatform-python$(LDVERSION).sl'
;;
esac
- BLDLIBRARY='-Wl,+b,$(LIBDIR) -L. -lpython$(LDVERSION)'
+ BLDLIBRARY='-Wl,+b,$(LIBDIR) -L. -lplatform-python$(LDVERSION)'
RUNSHARED=SHLIB_PATH=`pwd`${SHLIB_PATH:+:${SHLIB_PATH}}
;;
Darwin*)
- LDLIBRARY='libpython$(LDVERSION).dylib'
- BLDLIBRARY='-L. -lpython$(LDVERSION)'
+ LDLIBRARY='libplatform-python$(LDVERSION).dylib'
+ BLDLIBRARY='-L. -lplatform-python$(LDVERSION)'
RUNSHARED=DYLD_LIBRARY_PATH=`pwd`${DYLD_LIBRARY_PATH:+:${DYLD_LIBRARY_PATH}}
;;
AIX*)
- LDLIBRARY='libpython$(LDVERSION).so'
+ LDLIBRARY='libplatform-python$(LDVERSION).so'
RUNSHARED=LIBPATH=`pwd`${LIBPATH:+:${LIBPATH}}
;;
@@ -6008,7 +6008,7 @@ else # shared is disabled
case $ac_sys_system in
CYGWIN*)
BLDLIBRARY='$(LIBRARY)'
- LDLIBRARY='libpython$(LDVERSION).dll.a'
+ LDLIBRARY='libplatform-python$(LDVERSION).dll.a'
;;
esac
fi
@@ -9448,7 +9448,7 @@ $as_echo "$CFLAGSFORSHARED" >&6; }
# For platforms on which shared libraries are not allowed to have unresolved
# symbols, this must be set to $(LIBS) (expanded by make). We do this even
# if it is not required, since it creates a dependency of the shared library
-# to LIBS. This, in turn, means that applications linking the shared libpython
+# to LIBS. This, in turn, means that applications linking the shared libplatform-python
# don't need to link LIBS explicitly. The default should be only changed
# on systems where this approach causes problems.
diff --git a/configure.ac b/configure.ac
index 74b0e57..ce28eeb 100644
--- a/configure.ac
+++ b/configure.ac
@@ -987,7 +987,7 @@ AC_SUBST(LIBRARY)
AC_MSG_CHECKING(LIBRARY)
if test -z "$LIBRARY"
then
- LIBRARY='libpython$(VERSION)$(ABIFLAGS).a'
+ LIBRARY='libplatform-python$(VERSION)$(ABIFLAGS).a'
fi
AC_MSG_RESULT($LIBRARY)
@@ -1134,48 +1134,48 @@ if test $enable_shared = "yes"; then
AC_DEFINE(Py_ENABLE_SHARED, 1, [Defined if Python is built as a shared library.])
case $ac_sys_system in
CYGWIN*)
- LDLIBRARY='libpython$(LDVERSION).dll.a'
- DLLLIBRARY='libpython$(LDVERSION).dll'
+ LDLIBRARY='libplatform-python$(LDVERSION).dll.a'
+ DLLLIBRARY='libplatform-python$(LDVERSION).dll'
;;
SunOS*)
- LDLIBRARY='libpython$(LDVERSION).so'
- BLDLIBRARY='-Wl,-R,$(LIBDIR) -L. -lpython$(LDVERSION)'
+ LDLIBRARY='libplatform-python$(LDVERSION).so'
+ BLDLIBRARY='-Wl,-R,$(LIBDIR) -L. -lplatform-python$(LDVERSION)'
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
INSTSONAME="$LDLIBRARY".$SOVERSION
if test "$with_pydebug" != yes
then
- PY3LIBRARY=libpython3.so
+ PY3LIBRARY=libplatform-python3.so
fi
;;
Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*)
- LDLIBRARY='libpython$(LDVERSION).so'
- BLDLIBRARY='-L. -lpython$(LDVERSION)'
+ LDLIBRARY='libplatform-python$(LDVERSION).so'
+ BLDLIBRARY='-L. -lplatform-python$(LDVERSION)'
RUNSHARED=LD_LIBRARY_PATH=`pwd`${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
INSTSONAME="$LDLIBRARY".$SOVERSION
if test "$with_pydebug" != yes
then
- PY3LIBRARY=libpython3.so
+ PY3LIBRARY=libplatform-python3.so
fi
;;
hp*|HP*)
case `uname -m` in
ia64)
- LDLIBRARY='libpython$(LDVERSION).so'
+ LDLIBRARY='libplatform-python$(LDVERSION).so'
;;
*)
- LDLIBRARY='libpython$(LDVERSION).sl'
+ LDLIBRARY='libplatform-python$(LDVERSION).sl'
;;
esac
- BLDLIBRARY='-Wl,+b,$(LIBDIR) -L. -lpython$(LDVERSION)'
+ BLDLIBRARY='-Wl,+b,$(LIBDIR) -L. -lplatform-python$(LDVERSION)'
RUNSHARED=SHLIB_PATH=`pwd`${SHLIB_PATH:+:${SHLIB_PATH}}
;;
Darwin*)
- LDLIBRARY='libpython$(LDVERSION).dylib'
- BLDLIBRARY='-L. -lpython$(LDVERSION)'
+ LDLIBRARY='libplatform-python$(LDVERSION).dylib'
+ BLDLIBRARY='-L. -lplatform-python$(LDVERSION)'
RUNSHARED=DYLD_LIBRARY_PATH=`pwd`${DYLD_LIBRARY_PATH:+:${DYLD_LIBRARY_PATH}}
;;
AIX*)
- LDLIBRARY='libpython$(LDVERSION).so'
+ LDLIBRARY='libplatform-python$(LDVERSION).so'
RUNSHARED=LIBPATH=`pwd`${LIBPATH:+:${LIBPATH}}
;;
@@ -1185,7 +1185,7 @@ else # shared is disabled
case $ac_sys_system in
CYGWIN*)
BLDLIBRARY='$(LIBRARY)'
- LDLIBRARY='libpython$(LDVERSION).dll.a'
+ LDLIBRARY='libplatform-python$(LDVERSION).dll.a'
;;
esac
fi
@@ -2679,7 +2679,7 @@ AC_MSG_RESULT($CFLAGSFORSHARED)
# For platforms on which shared libraries are not allowed to have unresolved
# symbols, this must be set to $(LIBS) (expanded by make). We do this even
# if it is not required, since it creates a dependency of the shared library
-# to LIBS. This, in turn, means that applications linking the shared libpython
+# to LIBS. This, in turn, means that applications linking the shared libplatform-python
# don't need to link LIBS explicitly. The default should be only changed
# on systems where this approach causes problems.
AC_SUBST(SHLIBS)

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -1,30 +0,0 @@
diff -up Python-3.5.0/configure.ac.than Python-3.5.0/configure.ac
--- Python-3.5.0/configure.ac.than 2015-11-13 11:51:32.039560172 -0500
+++ Python-3.5.0/configure.ac 2015-11-13 11:52:11.670168157 -0500
@@ -804,9 +804,9 @@ cat >> conftest.c <<EOF
powerpc-linux-gnuspe
# elif defined(__powerpc64__)
# if defined(__LITTLE_ENDIAN__)
- powerpc64le-linux-gnu
+ ppc64le-linux-gnu
# else
- powerpc64-linux-gnu
+ ppc64-linux-gnu
# endif
# elif defined(__powerpc__)
powerpc-linux-gnu
diff -up Python-3.5.0/configure.than Python-3.5.0/configure
--- Python-3.5.0/configure.than 2015-11-13 12:13:19.039658399 -0500
+++ Python-3.5.0/configure 2015-11-13 12:13:35.199906857 -0500
@@ -5153,9 +5153,9 @@ cat >> conftest.c <<EOF
powerpc-linux-gnuspe
# elif defined(__powerpc64__)
# if defined(__LITTLE_ENDIAN__)
- powerpc64le-linux-gnu
+ ppc64le-linux-gnu
# else
- powerpc64-linux-gnu
+ ppc64-linux-gnu
# endif
# elif defined(__powerpc__)
powerpc-linux-gnu
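To check which triplet an interpreter was actually built with (indicative only; MULTIARCH may be empty on builds that do not define a platform triplet):

    import sysconfig

    # A little-endian ppc64 build patched as above reports 'ppc64le-linux-gnu'
    # instead of 'powerpc64le-linux-gnu' in values like these.
    print(sysconfig.get_config_var('MULTIARCH'))
    print(sysconfig.get_config_var('EXT_SUFFIX'))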


@@ -1 +0,0 @@
SHA512 (Python-3.6.2.tar.xz) = a8270a09a9e9b39f69ece6cdade2fa964665d2107b5acbad4453f1b921107b329c697c137185928fb4a576fc0f2ae2a98dbf26a8b7ea17219e990ddbc216db8b