DRAFT [thud] Import python3 v3.7.2 from openembedded-core@058d613af39a24cffe6053e0ff98b503c73e920e
This commit is contained in:
parent
bcbb03f0d7
commit
025ce66020
|
@ -0,0 +1,25 @@
|
||||||
|
From 23294c6ba6896115828293fdb7e67b47b38ba675 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
Date: Fri, 25 Jan 2019 19:04:13 +0100
|
||||||
|
Subject: [PATCH] Do not add /usr/lib/termcap to linker flags to avoid host
|
||||||
|
contamination
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [oe-core specific]
|
||||||
|
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
|
||||||
|
---
|
||||||
|
setup.py | 1 -
|
||||||
|
1 file changed, 1 deletion(-)
|
||||||
|
|
||||||
|
diff --git a/setup.py b/setup.py
|
||||||
|
index b4357e3..fbec00d 100644
|
||||||
|
--- a/setup.py
|
||||||
|
+++ b/setup.py
|
||||||
|
@@ -856,7 +856,6 @@ class PyBuildExt(build_ext):
|
||||||
|
'termcap'):
|
||||||
|
readline_libs.append('termcap')
|
||||||
|
exts.append( Extension('readline', ['readline.c'],
|
||||||
|
- library_dirs=['/usr/lib/termcap'],
|
||||||
|
extra_link_args=readline_extra_link_args,
|
||||||
|
libraries=readline_libs) )
|
||||||
|
else:
|
|
@ -0,0 +1,196 @@
|
||||||
|
From 0fbdad1eaf541a8e92be81f39514cd249b3b0801 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
Date: Tue, 5 Feb 2019 15:52:02 +0100
|
||||||
|
Subject: [PATCH] Do not hardcode "lib" as location for modules, site-packages
|
||||||
|
and lib-dynload
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [oe-core specific]
|
||||||
|
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
|
||||||
|
---
|
||||||
|
Include/pythonrun.h | 2 ++
|
||||||
|
Lib/site.py | 4 ++--
|
||||||
|
Makefile.pre.in | 5 +++--
|
||||||
|
Modules/getpath.c | 18 ++++++++++++------
|
||||||
|
Python/getplatform.c | 10 ++++++++++
|
||||||
|
Python/sysmodule.c | 2 ++
|
||||||
|
6 files changed, 31 insertions(+), 10 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Include/pythonrun.h b/Include/pythonrun.h
|
||||||
|
index 6f0c6fc..0a17edd 100644
|
||||||
|
--- a/Include/pythonrun.h
|
||||||
|
+++ b/Include/pythonrun.h
|
||||||
|
@@ -7,6 +7,8 @@
|
||||||
|
extern "C" {
|
||||||
|
#endif
|
||||||
|
|
||||||
|
+PyAPI_FUNC(const char *) Py_GetLib(void);
|
||||||
|
+
|
||||||
|
#ifndef Py_LIMITED_API
|
||||||
|
PyAPI_FUNC(int) PyRun_SimpleStringFlags(const char *, PyCompilerFlags *);
|
||||||
|
PyAPI_FUNC(int) PyRun_AnyFileFlags(FILE *, const char *, PyCompilerFlags *);
|
||||||
|
diff --git a/Lib/site.py b/Lib/site.py
|
||||||
|
index ffd132b..b55f6d8 100644
|
||||||
|
--- a/Lib/site.py
|
||||||
|
+++ b/Lib/site.py
|
||||||
|
@@ -334,12 +334,12 @@ def getsitepackages(prefixes=None):
|
||||||
|
seen.add(prefix)
|
||||||
|
|
||||||
|
if os.sep == '/':
|
||||||
|
- sitepackages.append(os.path.join(prefix, "lib",
|
||||||
|
+ sitepackages.append(os.path.join(prefix, sys.lib,
|
||||||
|
"python%d.%d" % sys.version_info[:2],
|
||||||
|
"site-packages"))
|
||||||
|
else:
|
||||||
|
sitepackages.append(prefix)
|
||||||
|
- sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
|
||||||
|
+ sitepackages.append(os.path.join(prefix, sys.lib, "site-packages"))
|
||||||
|
return sitepackages
|
||||||
|
|
||||||
|
def addsitepackages(known_paths, prefixes=None):
|
||||||
|
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||||
|
index 6e81b2f..671a20e 100644
|
||||||
|
--- a/Makefile.pre.in
|
||||||
|
+++ b/Makefile.pre.in
|
||||||
|
@@ -142,7 +142,7 @@ LIBDIR= @libdir@
|
||||||
|
MANDIR= @mandir@
|
||||||
|
INCLUDEDIR= @includedir@
|
||||||
|
CONFINCLUDEDIR= $(exec_prefix)/include
|
||||||
|
-SCRIPTDIR= $(prefix)/lib
|
||||||
|
+SCRIPTDIR= @libdir@
|
||||||
|
ABIFLAGS= @ABIFLAGS@
|
||||||
|
|
||||||
|
# Detailed destination directories
|
||||||
|
@@ -768,6 +768,7 @@ Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile
|
||||||
|
-DEXEC_PREFIX='"$(exec_prefix)"' \
|
||||||
|
-DVERSION='"$(VERSION)"' \
|
||||||
|
-DVPATH='"$(VPATH)"' \
|
||||||
|
+ -DLIB='"$(LIB)"' \
|
||||||
|
-o $@ $(srcdir)/Modules/getpath.c
|
||||||
|
|
||||||
|
Programs/python.o: $(srcdir)/Programs/python.c
|
||||||
|
@@ -856,7 +857,7 @@ regen-opcode:
|
||||||
|
Python/compile.o Python/symtable.o Python/ast_unparse.o Python/ast.o: $(srcdir)/Include/graminit.h $(srcdir)/Include/Python-ast.h
|
||||||
|
|
||||||
|
Python/getplatform.o: $(srcdir)/Python/getplatform.c
|
||||||
|
- $(CC) -c $(PY_CORE_CFLAGS) -DPLATFORM='"$(MACHDEP)"' -o $@ $(srcdir)/Python/getplatform.c
|
||||||
|
+ $(CC) -c $(PY_CORE_CFLAGS) -DPLATFORM='"$(MACHDEP)"' -DLIB='"$(LIB)"' -o $@ $(srcdir)/Python/getplatform.c
|
||||||
|
|
||||||
|
Python/importdl.o: $(srcdir)/Python/importdl.c
|
||||||
|
$(CC) -c $(PY_CORE_CFLAGS) -I$(DLINCLDIR) -o $@ $(srcdir)/Python/importdl.c
|
||||||
|
diff --git a/Modules/getpath.c b/Modules/getpath.c
|
||||||
|
index e6a3e8e..0c62af6 100644
|
||||||
|
--- a/Modules/getpath.c
|
||||||
|
+++ b/Modules/getpath.c
|
||||||
|
@@ -123,6 +123,7 @@ typedef struct {
|
||||||
|
wchar_t *exec_prefix; /* EXEC_PREFIX define */
|
||||||
|
|
||||||
|
wchar_t *lib_python; /* "lib/pythonX.Y" */
|
||||||
|
+ wchar_t *multilib_python; /* "lib[suffix]/pythonX.Y" */
|
||||||
|
wchar_t argv0_path[MAXPATHLEN+1];
|
||||||
|
wchar_t zip_path[MAXPATHLEN+1]; /* ".../lib/pythonXY.zip" */
|
||||||
|
|
||||||
|
@@ -314,7 +315,7 @@ search_for_prefix(const _PyCoreConfig *core_config,
|
||||||
|
if (delim) {
|
||||||
|
*delim = L'\0';
|
||||||
|
}
|
||||||
|
- joinpath(prefix, calculate->lib_python);
|
||||||
|
+ joinpath(prefix, calculate->multilib_python);
|
||||||
|
joinpath(prefix, LANDMARK);
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
@@ -343,7 +344,7 @@ search_for_prefix(const _PyCoreConfig *core_config,
|
||||||
|
copy_absolute(prefix, calculate->argv0_path, MAXPATHLEN+1);
|
||||||
|
do {
|
||||||
|
n = wcslen(prefix);
|
||||||
|
- joinpath(prefix, calculate->lib_python);
|
||||||
|
+ joinpath(prefix, calculate->multilib_python);
|
||||||
|
joinpath(prefix, LANDMARK);
|
||||||
|
if (ismodule(prefix)) {
|
||||||
|
return 1;
|
||||||
|
@@ -355,7 +356,7 @@ search_for_prefix(const _PyCoreConfig *core_config,
|
||||||
|
/* Look at configure's PREFIX */
|
||||||
|
wcsncpy(prefix, calculate->prefix, MAXPATHLEN);
|
||||||
|
prefix[MAXPATHLEN] = L'\0';
|
||||||
|
- joinpath(prefix, calculate->lib_python);
|
||||||
|
+ joinpath(prefix, calculate->multilib_python);
|
||||||
|
joinpath(prefix, LANDMARK);
|
||||||
|
if (ismodule(prefix)) {
|
||||||
|
return 1;
|
||||||
|
@@ -427,7 +428,7 @@ search_for_exec_prefix(const _PyCoreConfig *core_config,
|
||||||
|
wcsncpy(exec_prefix, core_config->home, MAXPATHLEN);
|
||||||
|
}
|
||||||
|
exec_prefix[MAXPATHLEN] = L'\0';
|
||||||
|
- joinpath(exec_prefix, calculate->lib_python);
|
||||||
|
+ joinpath(exec_prefix, calculate->multilib_python);
|
||||||
|
joinpath(exec_prefix, L"lib-dynload");
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
@@ -464,7 +465,7 @@ search_for_exec_prefix(const _PyCoreConfig *core_config,
|
||||||
|
copy_absolute(exec_prefix, calculate->argv0_path, MAXPATHLEN+1);
|
||||||
|
do {
|
||||||
|
n = wcslen(exec_prefix);
|
||||||
|
- joinpath(exec_prefix, calculate->lib_python);
|
||||||
|
+ joinpath(exec_prefix, calculate->multilib_python);
|
||||||
|
joinpath(exec_prefix, L"lib-dynload");
|
||||||
|
if (isdir(exec_prefix)) {
|
||||||
|
return 1;
|
||||||
|
@@ -476,7 +477,7 @@ search_for_exec_prefix(const _PyCoreConfig *core_config,
|
||||||
|
/* Look at configure's EXEC_PREFIX */
|
||||||
|
wcsncpy(exec_prefix, calculate->exec_prefix, MAXPATHLEN);
|
||||||
|
exec_prefix[MAXPATHLEN] = L'\0';
|
||||||
|
- joinpath(exec_prefix, calculate->lib_python);
|
||||||
|
+ joinpath(exec_prefix, calculate->multilib_python);
|
||||||
|
joinpath(exec_prefix, L"lib-dynload");
|
||||||
|
if (isdir(exec_prefix)) {
|
||||||
|
return 1;
|
||||||
|
@@ -871,6 +872,10 @@ calculate_init(PyCalculatePath *calculate,
|
||||||
|
if (!calculate->lib_python) {
|
||||||
|
return DECODE_LOCALE_ERR("EXEC_PREFIX define", len);
|
||||||
|
}
|
||||||
|
+ calculate->multilib_python = Py_DecodeLocale(LIB "/python" VERSION, &len);
|
||||||
|
+ if (!calculate->multilib_python) {
|
||||||
|
+ return DECODE_LOCALE_ERR("EXEC_PREFIX define", len);
|
||||||
|
+ }
|
||||||
|
return _Py_INIT_OK();
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -882,6 +887,7 @@ calculate_free(PyCalculatePath *calculate)
|
||||||
|
PyMem_RawFree(calculate->prefix);
|
||||||
|
PyMem_RawFree(calculate->exec_prefix);
|
||||||
|
PyMem_RawFree(calculate->lib_python);
|
||||||
|
+ PyMem_RawFree(calculate->multilib_python);
|
||||||
|
PyMem_RawFree(calculate->path_env);
|
||||||
|
}
|
||||||
|
|
||||||
|
diff --git a/Python/getplatform.c b/Python/getplatform.c
|
||||||
|
index 81a0f7a..d55396b 100644
|
||||||
|
--- a/Python/getplatform.c
|
||||||
|
+++ b/Python/getplatform.c
|
||||||
|
@@ -10,3 +10,13 @@ Py_GetPlatform(void)
|
||||||
|
{
|
||||||
|
return PLATFORM;
|
||||||
|
}
|
||||||
|
+
|
||||||
|
+#ifndef LIB
|
||||||
|
+#define LIB "lib"
|
||||||
|
+#endif
|
||||||
|
+
|
||||||
|
+const char *
|
||||||
|
+Py_GetLib(void)
|
||||||
|
+{
|
||||||
|
+ return LIB;
|
||||||
|
+}
|
||||||
|
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
|
||||||
|
index efe5b29..de77b17 100644
|
||||||
|
--- a/Python/sysmodule.c
|
||||||
|
+++ b/Python/sysmodule.c
|
||||||
|
@@ -2319,6 +2319,8 @@ _PySys_BeginInit(PyObject **sysmod)
|
||||||
|
PyUnicode_FromString(Py_GetCopyright()));
|
||||||
|
SET_SYS_FROM_STRING("platform",
|
||||||
|
PyUnicode_FromString(Py_GetPlatform()));
|
||||||
|
+ SET_SYS_FROM_STRING("lib",
|
||||||
|
+ PyUnicode_FromString(Py_GetLib()));
|
||||||
|
SET_SYS_FROM_STRING("maxsize",
|
||||||
|
PyLong_FromSsize_t(PY_SSIZE_T_MAX));
|
||||||
|
SET_SYS_FROM_STRING("float_info",
|
|
@ -0,0 +1,35 @@
|
||||||
|
From 148861fa16f2aaacd518770f337ea54b5182f981 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
Date: Tue, 29 Jan 2019 15:03:01 +0100
|
||||||
|
Subject: [PATCH] Do not use the shell version of python-config that was
|
||||||
|
introduced in 3.4
|
||||||
|
|
||||||
|
Revert instead to the original python version: it has our tweaks and
|
||||||
|
outputs directories correctly.
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [oe-specific]
|
||||||
|
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
---
|
||||||
|
Makefile.pre.in | 9 +++------
|
||||||
|
1 file changed, 3 insertions(+), 6 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||||
|
index 2d2e11f..cc19942 100644
|
||||||
|
--- a/Makefile.pre.in
|
||||||
|
+++ b/Makefile.pre.in
|
||||||
|
@@ -1431,12 +1431,9 @@ python-config: $(srcdir)/Misc/python-config.in Misc/python-config.sh
|
||||||
|
sed -e "s,@EXENAME@,$(BINDIR)/python$(LDVERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config.py
|
||||||
|
@ # Replace makefile compat. variable references with shell script compat. ones; $(VAR) -> ${VAR}
|
||||||
|
LC_ALL=C sed -e 's,\$$(\([A-Za-z0-9_]*\)),\$$\{\1\},g' < Misc/python-config.sh >python-config
|
||||||
|
- @ # On Darwin, always use the python version of the script, the shell
|
||||||
|
- @ # version doesn't use the compiler customizations that are provided
|
||||||
|
- @ # in python (_osx_support.py).
|
||||||
|
- @if test `uname -s` = Darwin; then \
|
||||||
|
- cp python-config.py python-config; \
|
||||||
|
- fi
|
||||||
|
+ @ # In OpenEmbedded, always use the python version of the script, the shell
|
||||||
|
+ @ # version is broken in multiple ways, and doesn't return correct directories
|
||||||
|
+ cp python-config.py python-config
|
||||||
|
|
||||||
|
|
||||||
|
# Install the include files
|
|
@ -0,0 +1,31 @@
|
||||||
|
From a21f4f8fa5e5c0601898740b4ac08ec84f41e190 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
Date: Thu, 18 Apr 2019 17:11:06 +0200
|
||||||
|
Subject: [PATCH] Lib/sysconfig.py: fix another place where 'lib' is hardcoded
|
||||||
|
as the library path
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [oe-core specific]
|
||||||
|
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
---
|
||||||
|
Lib/sysconfig.py | 8 ++++----
|
||||||
|
1 file changed, 4 insertions(+), 4 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
|
||||||
|
index d15cec8..87fa5e6 100644
|
||||||
|
--- a/Lib/sysconfig.py
|
||||||
|
+++ b/Lib/sysconfig.py
|
||||||
|
@@ -20,10 +20,10 @@ __all__ = [
|
||||||
|
|
||||||
|
_INSTALL_SCHEMES = {
|
||||||
|
'posix_prefix': {
|
||||||
|
- 'stdlib': '{installed_base}/lib/python{py_version_short}',
|
||||||
|
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
|
||||||
|
- 'purelib': '{base}/lib/python{py_version_short}/site-packages',
|
||||||
|
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
|
||||||
|
+ 'stdlib': '{LIBDEST}',
|
||||||
|
+ 'platstdlib': '{LIBDEST}',
|
||||||
|
+ 'purelib': '{LIBDEST}/site-packages',
|
||||||
|
+ 'platlib': '{LIBDEST}/site-packages',
|
||||||
|
'include':
|
||||||
|
'{installed_base}/include/python{py_version_short}{abiflags}',
|
||||||
|
'platinclude':
|
|
@ -0,0 +1,25 @@
|
||||||
|
From cf6a9100902484e4d028ee88742dd2487b014a98 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
Date: Wed, 30 Jan 2019 12:41:04 +0100
|
||||||
|
Subject: [PATCH] Makefile.pre: use qemu wrapper when gathering profile data
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [oe-core specific]
|
||||||
|
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
---
|
||||||
|
Makefile.pre.in | 3 +--
|
||||||
|
1 file changed, 1 insertion(+), 2 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||||
|
index a3a02a7..d5503dd 100644
|
||||||
|
--- a/Makefile.pre.in
|
||||||
|
+++ b/Makefile.pre.in
|
||||||
|
@@ -507,8 +507,7 @@ build_all_generate_profile:
|
||||||
|
$(MAKE) @DEF_MAKE_RULE@ CFLAGS_NODIST="$(CFLAGS_NODIST) $(PGO_PROF_GEN_FLAG)" LDFLAGS_NODIST="$(LDFLAGS_NODIST) $(PGO_PROF_GEN_FLAG)" LIBS="$(LIBS)"
|
||||||
|
|
||||||
|
run_profile_task:
|
||||||
|
- @ # FIXME: can't run for a cross build
|
||||||
|
- $(LLVM_PROF_FILE) $(RUNSHARED) ./$(BUILDPYTHON) $(PROFILE_TASK) || true
|
||||||
|
+ ./pgo-wrapper ./python -m test.regrtest --pgo test_grammar test_opcodes test_dict test_builtin test_exceptions test_types test_support || true
|
||||||
|
|
||||||
|
build_all_merge_profile:
|
||||||
|
$(LLVM_PROF_MERGER)
|
|
@ -0,0 +1,42 @@
|
||||||
|
From 4865615a2bc2b78c739e4c33f536712c7f9af061 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
Date: Thu, 31 Jan 2019 16:46:30 +0100
|
||||||
|
Subject: [PATCH] distutils/sysconfig: append
|
||||||
|
STAGING_LIBDIR/python-sysconfigdata to sys.path
|
||||||
|
|
||||||
|
So that target configuration can be used when running native python
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [oe-core specific]
|
||||||
|
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||||
|
|
||||||
|
---
|
||||||
|
Lib/distutils/sysconfig.py | 2 ++
|
||||||
|
Lib/sysconfig.py | 2 ++
|
||||||
|
2 files changed, 4 insertions(+)
|
||||||
|
|
||||||
|
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
|
||||||
|
index e07a6c8..6b8c129 100644
|
||||||
|
--- a/Lib/distutils/sysconfig.py
|
||||||
|
+++ b/Lib/distutils/sysconfig.py
|
||||||
|
@@ -421,6 +421,8 @@ def _init_posix():
|
||||||
|
platform=sys.platform,
|
||||||
|
multiarch=getattr(sys.implementation, '_multiarch', ''),
|
||||||
|
))
|
||||||
|
+ if 'STAGING_LIBDIR' in os.environ:
|
||||||
|
+ sys.path.append(os.environ['STAGING_LIBDIR']+'/python-sysconfigdata')
|
||||||
|
_temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)
|
||||||
|
build_time_vars = _temp.build_time_vars
|
||||||
|
global _config_vars
|
||||||
|
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
|
||||||
|
index 9ee4d31..e586abd 100644
|
||||||
|
--- a/Lib/sysconfig.py
|
||||||
|
+++ b/Lib/sysconfig.py
|
||||||
|
@@ -412,6 +412,8 @@ def _init_posix(vars):
|
||||||
|
"""Initialize the module as appropriate for POSIX systems."""
|
||||||
|
# _sysconfigdata is generated at build time, see _generate_posix_vars()
|
||||||
|
name = _get_sysconfigdata_name()
|
||||||
|
+ if 'STAGING_LIBDIR' in os.environ:
|
||||||
|
+ sys.path.append(os.environ['STAGING_LIBDIR']+'/python-sysconfigdata')
|
||||||
|
_temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)
|
||||||
|
build_time_vars = _temp.build_time_vars
|
||||||
|
vars.update(build_time_vars)
|
|
@ -0,0 +1,47 @@
|
||||||
|
From ffe7797637f08cd6ee4c82e2d67462c5e194d30a Mon Sep 17 00:00:00 2001
|
||||||
|
From: Jaewon Lee <jaewon.lee@xilinx.com>
|
||||||
|
Date: Thu, 25 Apr 2019 15:34:26 -0700
|
||||||
|
Subject: [PATCH] main.c: if OEPYTHON3HOME is set use instead of PYTHONHOME
|
||||||
|
|
||||||
|
There is one variable PYTHONHOME to determine where libraries are coming
|
||||||
|
from for both python2 and python3. This becomes an issue if only one has
|
||||||
|
libraries in the specified PYTHONHOME path, but they are using the same
|
||||||
|
PYTHONHOME. Creating another variable OEPYTHON3HOME to allow for a way
|
||||||
|
to set a different path for python3
|
||||||
|
|
||||||
|
Signed-off-by: Jaewon Lee <jaewon.lee@xilinx.com>
|
||||||
|
---
|
||||||
|
Modules/main.c | 17 +++++++++++++----
|
||||||
|
1 file changed, 13 insertions(+), 4 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Modules/main.c b/Modules/main.c
|
||||||
|
index a745381..b553e30 100644
|
||||||
|
--- a/Modules/main.c
|
||||||
|
+++ b/Modules/main.c
|
||||||
|
@@ -1855,10 +1855,19 @@ config_init_home(_PyCoreConfig *config)
|
||||||
|
}
|
||||||
|
return _Py_INIT_OK();
|
||||||
|
}
|
||||||
|
-
|
||||||
|
- int res = config_get_env_var_dup(&home, L"PYTHONHOME", "PYTHONHOME");
|
||||||
|
- if (res < 0) {
|
||||||
|
- return DECODE_LOCALE_ERR("PYTHONHOME", res);
|
||||||
|
+ int res;
|
||||||
|
+ const char *oepython3home = config_get_env_var("OEPYTHON3HOME");
|
||||||
|
+ if (oepython3home) {
|
||||||
|
+ res = config_get_env_var_dup(&home, L"OEPYTHON3HOME", "OEPYTHON3HOME");
|
||||||
|
+ if (res < 0) {
|
||||||
|
+ return DECODE_LOCALE_ERR("OEPYTHON3HOME", res);
|
||||||
|
+ }
|
||||||
|
+ }
|
||||||
|
+ else {
|
||||||
|
+ res = config_get_env_var_dup(&home, L"PYTHONHOME", "PYTHONHOME");
|
||||||
|
+ if (res < 0) {
|
||||||
|
+ return DECODE_LOCALE_ERR("PYTHONHOME", res);
|
||||||
|
+ }
|
||||||
|
}
|
||||||
|
config->home = home;
|
||||||
|
return _Py_INIT_OK();
|
||||||
|
--
|
||||||
|
2.7.4
|
||||||
|
|
|
@ -0,0 +1,120 @@
|
||||||
|
From 2645317fef09afe31b01bb2c1d4fe5b9afdbb11a Mon Sep 17 00:00:00 2001
|
||||||
|
From: Changqing Li <changqing.li@windriver.com>
|
||||||
|
Date: Mon, 22 Oct 2018 15:19:51 +0800
|
||||||
|
Subject: [PATCH] python3: use cc_basename to replace CC for checking compiler
|
||||||
|
|
||||||
|
When working path contains "clang"/"gcc"/"icc", it might be part of $CC
|
||||||
|
because of the "--sysroot" parameter. That could cause judgement error
|
||||||
|
about clang/gcc/icc compilers. e.g.
|
||||||
|
When "icc" is containded in working path, below errors are reported when
|
||||||
|
compiling python3:
|
||||||
|
x86_64-wrs-linux-gcc: error: strict: No such file or directory
|
||||||
|
x86_64-wrs-linux-gcc: error: unrecognized command line option '-fp-model'
|
||||||
|
|
||||||
|
Here use cc_basename to replace CC for checking compiler to avoid such
|
||||||
|
kind of issue.
|
||||||
|
|
||||||
|
Upstream-Status: Pending
|
||||||
|
|
||||||
|
Signed-off-by: Li Zhou <li.zhou@windriver.com>
|
||||||
|
|
||||||
|
patch originally from Li Zhou, I just rework it to new version
|
||||||
|
|
||||||
|
Signed-off-by: Changqing Li <changqing.li@windriver.com>
|
||||||
|
---
|
||||||
|
configure.ac | 19 ++++++++++---------
|
||||||
|
1 file changed, 10 insertions(+), 9 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/configure.ac b/configure.ac
|
||||||
|
index a7de901..4a3681f 100644
|
||||||
|
--- a/configure.ac
|
||||||
|
+++ b/configure.ac
|
||||||
|
@@ -54,6 +54,7 @@ AC_CONFIG_HEADER(pyconfig.h)
|
||||||
|
AC_CANONICAL_HOST
|
||||||
|
AC_SUBST(build)
|
||||||
|
AC_SUBST(host)
|
||||||
|
+LT_INIT
|
||||||
|
|
||||||
|
# pybuilddir.txt will be created by --generate-posix-vars in the Makefile
|
||||||
|
rm -f pybuilddir.txt
|
||||||
|
@@ -695,7 +696,7 @@ AC_MSG_RESULT($with_cxx_main)
|
||||||
|
preset_cxx="$CXX"
|
||||||
|
if test -z "$CXX"
|
||||||
|
then
|
||||||
|
- case "$CC" in
|
||||||
|
+ case "$cc_basename" in
|
||||||
|
gcc) AC_PATH_TOOL(CXX, [g++], [g++], [notfound]) ;;
|
||||||
|
cc) AC_PATH_TOOL(CXX, [c++], [c++], [notfound]) ;;
|
||||||
|
clang|*/clang) AC_PATH_TOOL(CXX, [clang++], [clang++], [notfound]) ;;
|
||||||
|
@@ -979,7 +980,7 @@ rmdir CaseSensitiveTestDir
|
||||||
|
|
||||||
|
case $ac_sys_system in
|
||||||
|
hp*|HP*)
|
||||||
|
- case $CC in
|
||||||
|
+ case $cc_basename in
|
||||||
|
cc|*/cc) CC="$CC -Ae";;
|
||||||
|
esac;;
|
||||||
|
esac
|
||||||
|
@@ -1336,7 +1337,7 @@ else
|
||||||
|
fi],
|
||||||
|
[AC_MSG_RESULT(no)])
|
||||||
|
if test "$Py_LTO" = 'true' ; then
|
||||||
|
- case $CC in
|
||||||
|
+ case $cc_basename in
|
||||||
|
*clang*)
|
||||||
|
AC_SUBST(LLVM_AR)
|
||||||
|
AC_PATH_TARGET_TOOL(LLVM_AR, llvm-ar, '', ${llvm_path})
|
||||||
|
@@ -1426,7 +1427,7 @@ then
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
LLVM_PROF_ERR=no
|
||||||
|
-case $CC in
|
||||||
|
+case $cc_basename in
|
||||||
|
*clang*)
|
||||||
|
# Any changes made here should be reflected in the GCC+Darwin case below
|
||||||
|
PGO_PROF_GEN_FLAG="-fprofile-instr-generate"
|
||||||
|
@@ -1500,7 +1501,7 @@ then
|
||||||
|
WRAP="-fwrapv"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- case $CC in
|
||||||
|
+ case $cc_basename in
|
||||||
|
*clang*)
|
||||||
|
cc_is_clang=1
|
||||||
|
;;
|
||||||
|
@@ -1623,7 +1624,7 @@ yes)
|
||||||
|
|
||||||
|
# ICC doesn't recognize the option, but only emits a warning
|
||||||
|
## XXX does it emit an unused result warning and can it be disabled?
|
||||||
|
- case "$CC" in
|
||||||
|
+ case "$cc_basename" in
|
||||||
|
*icc*)
|
||||||
|
ac_cv_disable_unused_result_warning=no
|
||||||
|
;;
|
||||||
|
@@ -1965,7 +1966,7 @@ yes)
|
||||||
|
esac
|
||||||
|
|
||||||
|
# ICC needs -fp-model strict or floats behave badly
|
||||||
|
-case "$CC" in
|
||||||
|
+case "$cc_basename" in
|
||||||
|
*icc*)
|
||||||
|
CFLAGS_NODIST="$CFLAGS_NODIST -fp-model strict"
|
||||||
|
;;
|
||||||
|
@@ -2727,7 +2728,7 @@ then
|
||||||
|
then
|
||||||
|
LINKFORSHARED="-Wl,--export-dynamic"
|
||||||
|
fi;;
|
||||||
|
- SunOS/5*) case $CC in
|
||||||
|
+ SunOS/5*) case $cc_basename in
|
||||||
|
*gcc*)
|
||||||
|
if $CC -Xlinker --help 2>&1 | grep export-dynamic >/dev/null
|
||||||
|
then
|
||||||
|
@@ -5429,7 +5430,7 @@ if test "$have_gcc_asm_for_x87" = yes; then
|
||||||
|
# Some versions of gcc miscompile inline asm:
|
||||||
|
# http://gcc.gnu.org/bugzilla/show_bug.cgi?id=46491
|
||||||
|
# http://gcc.gnu.org/ml/gcc/2010-11/msg00366.html
|
||||||
|
- case $CC in
|
||||||
|
+ case $cc_basename in
|
||||||
|
*gcc*)
|
||||||
|
AC_MSG_CHECKING(for gcc ipa-pure-const bug)
|
||||||
|
saved_cflags="$CFLAGS"
|
|
@ -0,0 +1,199 @@
|
||||||
|
From fa96a7fd19e17b9c6b4dd01c3c3774fb382dddc6 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Ross Burton <ross.burton@intel.com>
|
||||||
|
Date: Wed, 5 Sep 2018 11:45:52 +0100
|
||||||
|
Subject: [PATCH] Don't do runtime test to get float byte order
|
||||||
|
|
||||||
|
Python uses AC_RUN_IFELSE to determine the byte order for floats and doubles,
|
||||||
|
and falls back onto "I don't know" if it can't run code. This results in
|
||||||
|
crippled floating point numbers in Python, and the regression tests fail.
|
||||||
|
|
||||||
|
Instead of running code, take a macro from autoconf-archive which compiles C
|
||||||
|
with a special double in which has an ASCII representation, and then greps the
|
||||||
|
binary to identify the format.
|
||||||
|
|
||||||
|
Upstream-Status: Submitted [https://bugs.python.org/issue34585]
|
||||||
|
Signed-off-by: Ross Burton <ross.burton@intel.com>
|
||||||
|
---
|
||||||
|
configure.ac | 72 +++------------------------
|
||||||
|
m4/ax_c_float_words_bigendian.m4 | 83 ++++++++++++++++++++++++++++++++
|
||||||
|
2 files changed, 90 insertions(+), 65 deletions(-)
|
||||||
|
create mode 100644 m4/ax_c_float_words_bigendian.m4
|
||||||
|
|
||||||
|
diff --git a/configure.ac b/configure.ac
|
||||||
|
index 4a3681f..4ab19a6 100644
|
||||||
|
--- a/configure.ac
|
||||||
|
+++ b/configure.ac
|
||||||
|
@@ -4328,77 +4328,19 @@ fi
|
||||||
|
# * Check for various properties of floating point *
|
||||||
|
# **************************************************
|
||||||
|
|
||||||
|
-AC_MSG_CHECKING(whether C doubles are little-endian IEEE 754 binary64)
|
||||||
|
-AC_CACHE_VAL(ac_cv_little_endian_double, [
|
||||||
|
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
|
||||||
|
-#include <string.h>
|
||||||
|
-int main() {
|
||||||
|
- double x = 9006104071832581.0;
|
||||||
|
- if (memcmp(&x, "\x05\x04\x03\x02\x01\xff\x3f\x43", 8) == 0)
|
||||||
|
- return 0;
|
||||||
|
- else
|
||||||
|
- return 1;
|
||||||
|
-}
|
||||||
|
-]])],
|
||||||
|
-[ac_cv_little_endian_double=yes],
|
||||||
|
-[ac_cv_little_endian_double=no],
|
||||||
|
-[ac_cv_little_endian_double=no])])
|
||||||
|
-AC_MSG_RESULT($ac_cv_little_endian_double)
|
||||||
|
-if test "$ac_cv_little_endian_double" = yes
|
||||||
|
-then
|
||||||
|
- AC_DEFINE(DOUBLE_IS_LITTLE_ENDIAN_IEEE754, 1,
|
||||||
|
- [Define if C doubles are 64-bit IEEE 754 binary format, stored
|
||||||
|
- with the least significant byte first])
|
||||||
|
-fi
|
||||||
|
-
|
||||||
|
-AC_MSG_CHECKING(whether C doubles are big-endian IEEE 754 binary64)
|
||||||
|
-AC_CACHE_VAL(ac_cv_big_endian_double, [
|
||||||
|
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
|
||||||
|
-#include <string.h>
|
||||||
|
-int main() {
|
||||||
|
- double x = 9006104071832581.0;
|
||||||
|
- if (memcmp(&x, "\x43\x3f\xff\x01\x02\x03\x04\x05", 8) == 0)
|
||||||
|
- return 0;
|
||||||
|
- else
|
||||||
|
- return 1;
|
||||||
|
-}
|
||||||
|
-]])],
|
||||||
|
-[ac_cv_big_endian_double=yes],
|
||||||
|
-[ac_cv_big_endian_double=no],
|
||||||
|
-[ac_cv_big_endian_double=no])])
|
||||||
|
-AC_MSG_RESULT($ac_cv_big_endian_double)
|
||||||
|
-if test "$ac_cv_big_endian_double" = yes
|
||||||
|
+AX_C_FLOAT_WORDS_BIGENDIAN
|
||||||
|
+if test "$ax_cv_c_float_words_bigendian" = "yes"
|
||||||
|
then
|
||||||
|
AC_DEFINE(DOUBLE_IS_BIG_ENDIAN_IEEE754, 1,
|
||||||
|
[Define if C doubles are 64-bit IEEE 754 binary format, stored
|
||||||
|
with the most significant byte first])
|
||||||
|
-fi
|
||||||
|
-
|
||||||
|
-# Some ARM platforms use a mixed-endian representation for doubles.
|
||||||
|
-# While Python doesn't currently have full support for these platforms
|
||||||
|
-# (see e.g., issue 1762561), we can at least make sure that float <-> string
|
||||||
|
-# conversions work.
|
||||||
|
-AC_MSG_CHECKING(whether C doubles are ARM mixed-endian IEEE 754 binary64)
|
||||||
|
-AC_CACHE_VAL(ac_cv_mixed_endian_double, [
|
||||||
|
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
|
||||||
|
-#include <string.h>
|
||||||
|
-int main() {
|
||||||
|
- double x = 9006104071832581.0;
|
||||||
|
- if (memcmp(&x, "\x01\xff\x3f\x43\x05\x04\x03\x02", 8) == 0)
|
||||||
|
- return 0;
|
||||||
|
- else
|
||||||
|
- return 1;
|
||||||
|
-}
|
||||||
|
-]])],
|
||||||
|
-[ac_cv_mixed_endian_double=yes],
|
||||||
|
-[ac_cv_mixed_endian_double=no],
|
||||||
|
-[ac_cv_mixed_endian_double=no])])
|
||||||
|
-AC_MSG_RESULT($ac_cv_mixed_endian_double)
|
||||||
|
-if test "$ac_cv_mixed_endian_double" = yes
|
||||||
|
+elif test "$ax_cv_c_float_words_bigendian" = "no"
|
||||||
|
then
|
||||||
|
- AC_DEFINE(DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754, 1,
|
||||||
|
+ AC_DEFINE(DOUBLE_IS_LITTLE_ENDIAN_IEEE754, 1,
|
||||||
|
[Define if C doubles are 64-bit IEEE 754 binary format, stored
|
||||||
|
- in ARM mixed-endian order (byte order 45670123)])
|
||||||
|
+ with the least significant byte first])
|
||||||
|
+else
|
||||||
|
+ AC_MSG_ERROR([Cannot identify floating point byte order])
|
||||||
|
fi
|
||||||
|
|
||||||
|
# The short float repr introduced in Python 3.1 requires the
|
||||||
|
diff --git a/m4/ax_c_float_words_bigendian.m4 b/m4/ax_c_float_words_bigendian.m4
|
||||||
|
new file mode 100644
|
||||||
|
index 0000000..216b90d
|
||||||
|
--- /dev/null
|
||||||
|
+++ b/m4/ax_c_float_words_bigendian.m4
|
||||||
|
@@ -0,0 +1,83 @@
|
||||||
|
+# ===============================================================================
|
||||||
|
+# https://www.gnu.org/software/autoconf-archive/ax_c_float_words_bigendian.html
|
||||||
|
+# ===============================================================================
|
||||||
|
+#
|
||||||
|
+# SYNOPSIS
|
||||||
|
+#
|
||||||
|
+# AX_C_FLOAT_WORDS_BIGENDIAN([ACTION-IF-TRUE], [ACTION-IF-FALSE], [ACTION-IF-UNKNOWN])
|
||||||
|
+#
|
||||||
|
+# DESCRIPTION
|
||||||
|
+#
|
||||||
|
+# Checks the ordering of words within a multi-word float. This check is
|
||||||
|
+# necessary because on some systems (e.g. certain ARM systems), the float
|
||||||
|
+# word ordering can be different from the byte ordering. In a multi-word
|
||||||
|
+# float context, "big-endian" implies that the word containing the sign
|
||||||
|
+# bit is found in the memory location with the lowest address. This
|
||||||
|
+# implementation was inspired by the AC_C_BIGENDIAN macro in autoconf.
|
||||||
|
+#
|
||||||
|
+# The endianness is detected by first compiling C code that contains a
|
||||||
|
+# special double float value, then grepping the resulting object file for
|
||||||
|
+# certain strings of ASCII values. The double is specially crafted to have
|
||||||
|
+# a binary representation that corresponds with a simple string. In this
|
||||||
|
+# implementation, the string "noonsees" was selected because the
|
||||||
|
+# individual word values ("noon" and "sees") are palindromes, thus making
|
||||||
|
+# this test byte-order agnostic. If grep finds the string "noonsees" in
|
||||||
|
+# the object file, the target platform stores float words in big-endian
|
||||||
|
+# order. If grep finds "seesnoon", float words are in little-endian order.
|
||||||
|
+# If neither value is found, the user is instructed to specify the
|
||||||
|
+# ordering.
|
||||||
|
+#
|
||||||
|
+# LICENSE
|
||||||
|
+#
|
||||||
|
+# Copyright (c) 2008 Daniel Amelang <dan@amelang.net>
|
||||||
|
+#
|
||||||
|
+# Copying and distribution of this file, with or without modification, are
|
||||||
|
+# permitted in any medium without royalty provided the copyright notice
|
||||||
|
+# and this notice are preserved. This file is offered as-is, without any
|
||||||
|
+# warranty.
|
||||||
|
+
|
||||||
|
+#serial 11
|
||||||
|
+
|
||||||
|
+AC_DEFUN([AX_C_FLOAT_WORDS_BIGENDIAN],
|
||||||
|
+ [AC_CACHE_CHECK(whether float word ordering is bigendian,
|
||||||
|
+ ax_cv_c_float_words_bigendian, [
|
||||||
|
+
|
||||||
|
+ax_cv_c_float_words_bigendian=unknown
|
||||||
|
+AC_COMPILE_IFELSE([AC_LANG_SOURCE([[
|
||||||
|
+
|
||||||
|
+double d = 90904234967036810337470478905505011476211692735615632014797120844053488865816695273723469097858056257517020191247487429516932130503560650002327564517570778480236724525140520121371739201496540132640109977779420565776568942592.0;
|
||||||
|
+
|
||||||
|
+]])], [
|
||||||
|
+
|
||||||
|
+if grep noonsees conftest.$ac_objext >/dev/null ; then
|
||||||
|
+ ax_cv_c_float_words_bigendian=yes
|
||||||
|
+fi
|
||||||
|
+if grep seesnoon conftest.$ac_objext >/dev/null ; then
|
||||||
|
+ if test "$ax_cv_c_float_words_bigendian" = unknown; then
|
||||||
|
+ ax_cv_c_float_words_bigendian=no
|
||||||
|
+ else
|
||||||
|
+ ax_cv_c_float_words_bigendian=unknown
|
||||||
|
+ fi
|
||||||
|
+fi
|
||||||
|
+
|
||||||
|
+])])
|
||||||
|
+
|
||||||
|
+case $ax_cv_c_float_words_bigendian in
|
||||||
|
+ yes)
|
||||||
|
+ m4_default([$1],
|
||||||
|
+ [AC_DEFINE([FLOAT_WORDS_BIGENDIAN], 1,
|
||||||
|
+ [Define to 1 if your system stores words within floats
|
||||||
|
+ with the most significant word first])]) ;;
|
||||||
|
+ no)
|
||||||
|
+ $2 ;;
|
||||||
|
+ *)
|
||||||
|
+ m4_default([$3],
|
||||||
|
+ [AC_MSG_ERROR([
|
||||||
|
+
|
||||||
|
+Unknown float word ordering. You need to manually preset
|
||||||
|
+ax_cv_c_float_words_bigendian=no (or yes) according to your system.
|
||||||
|
+
|
||||||
|
+ ])]) ;;
|
||||||
|
+esac
|
||||||
|
+
|
||||||
|
+])# AX_C_FLOAT_WORDS_BIGENDIAN
|
|
@ -0,0 +1,80 @@
|
||||||
|
From b881a79adcd4ae5ac8fe4f49d0fc77c47f777919 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Hongxu Jia <hongxu.jia@windriver.com>
|
||||||
|
Date: Fri, 4 Aug 2017 11:16:14 +0800
|
||||||
|
Subject: [PATCH] setup.py: pass missing libraries to Extension for
|
||||||
|
multiprocessing module
|
||||||
|
|
||||||
|
In the following commit:
|
||||||
|
...
|
||||||
|
commit e711cafab13efc9c1fe6c5cd75826401445eb585
|
||||||
|
Author: Benjamin Peterson <benjamin@python.org>
|
||||||
|
Date: Wed Jun 11 16:44:04 2008 +0000
|
||||||
|
|
||||||
|
Merged revisions 64104,64117 via svnmerge from
|
||||||
|
svn+ssh://pythondev@svn.python.org/python/trunk
|
||||||
|
...
|
||||||
|
(see diff in setup.py)
|
||||||
|
It assigned libraries for multiprocessing module according
|
||||||
|
the host_platform, but not pass it to Extension.
|
||||||
|
|
||||||
|
In glibc, the following commit caused two definition of
|
||||||
|
sem_getvalue are different.
|
||||||
|
https://sourceware.org/git/?p=glibc.git;a=commit;h=042e1521c794a945edc43b5bfa7e69ad70420524
|
||||||
|
(see diff in nptl/sem_getvalue.c for detail)
|
||||||
|
`__new_sem_getvalue' is the latest sem_getvalue@@GLIBC_2.1
|
||||||
|
and `__old_sem_getvalue' is to compat the old version
|
||||||
|
sem_getvalue@GLIBC_2.0.
|
||||||
|
|
||||||
|
To build python for embedded Linux systems:
|
||||||
|
http://www.yoctoproject.org/docs/2.3.1/yocto-project-qs/yocto-project-qs.html
|
||||||
|
If not explicitly link to library pthread (-lpthread), it will
|
||||||
|
load glibc's sem_getvalue randomly at runtime.
|
||||||
|
|
||||||
|
Such as build python on linux x86_64 host and run the python
|
||||||
|
on linux x86_32 target. If not link library pthread, it caused
|
||||||
|
multiprocessing bounded semaphore could not work correctly.
|
||||||
|
...
|
||||||
|
>>> import multiprocessing
|
||||||
|
>>> pool_sema = multiprocessing.BoundedSemaphore(value=1)
|
||||||
|
>>> pool_sema.acquire()
|
||||||
|
True
|
||||||
|
>>> pool_sema.release()
|
||||||
|
Traceback (most recent call last):
|
||||||
|
File "<stdin>", line 1, in <module>
|
||||||
|
ValueError: semaphore or lock released too many times
|
||||||
|
...
|
||||||
|
|
||||||
|
And the semaphore issue also caused multiprocessing.Queue().put() hung.
|
||||||
|
|
||||||
|
Upstream-Status: Submitted [https://github.com/python/cpython/pull/2999]
|
||||||
|
|
||||||
|
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
|
||||||
|
---
|
||||||
|
setup.py | 7 +++++--
|
||||||
|
1 file changed, 5 insertions(+), 2 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/setup.py b/setup.py
|
||||||
|
index b7a36a6..658ead3 100644
|
||||||
|
--- a/setup.py
|
||||||
|
+++ b/setup.py
|
||||||
|
@@ -1584,8 +1584,10 @@ class PyBuildExt(build_ext):
|
||||||
|
elif host_platform.startswith('netbsd'):
|
||||||
|
macros = dict()
|
||||||
|
libraries = []
|
||||||
|
-
|
||||||
|
- else: # Linux and other unices
|
||||||
|
+ elif host_platform.startswith(('linux')):
|
||||||
|
+ macros = dict()
|
||||||
|
+ libraries = ['pthread']
|
||||||
|
+ else: # Other unices
|
||||||
|
macros = dict()
|
||||||
|
libraries = ['rt']
|
||||||
|
|
||||||
|
@@ -1603,6 +1605,7 @@ class PyBuildExt(build_ext):
|
||||||
|
|
||||||
|
exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
|
||||||
|
define_macros=list(macros.items()),
|
||||||
|
+ libraries=libraries,
|
||||||
|
include_dirs=["Modules/_multiprocessing"]))
|
||||||
|
# End multiprocessing
|
||||||
|
|
|
@ -0,0 +1,54 @@
|
||||||
|
From 1397979ee445ff6826aa5469511e003539f77bb2 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Khem Raj <raj.khem@gmail.com>
|
||||||
|
Date: Tue, 14 May 2013 15:00:26 -0700
|
||||||
|
Subject: [PATCH] python3: Add target and native recipes
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [embedded specific]
|
||||||
|
|
||||||
|
02/2015 Rebased for Python 3.4.2
|
||||||
|
|
||||||
|
# The proper prefix is inside our staging area.
|
||||||
|
# Signed-Off: Michael 'Mickey' Lauer <mickey@vanille-media.de>
|
||||||
|
# Signed-off-by: Phil Blundell <philb@gnu.org>
|
||||||
|
# Signed-off-by: Khem Raj <raj.khem@gmail.com>
|
||||||
|
# Signed-off-by: Alejandro Hernandez <alejandro.hernandez@linux.intel.com>
|
||||||
|
|
||||||
|
---
|
||||||
|
Lib/distutils/sysconfig.py | 10 ++++++++--
|
||||||
|
1 file changed, 8 insertions(+), 2 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
|
||||||
|
index 6b8c129..3ca7f79 100644
|
||||||
|
--- a/Lib/distutils/sysconfig.py
|
||||||
|
+++ b/Lib/distutils/sysconfig.py
|
||||||
|
@@ -84,7 +84,9 @@ def get_python_inc(plat_specific=0, prefix=None):
|
||||||
|
If 'prefix' is supplied, use it instead of sys.base_prefix or
|
||||||
|
sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
|
||||||
|
"""
|
||||||
|
- if prefix is None:
|
||||||
|
+ if prefix is None and os.environ['STAGING_INCDIR'] != "":
|
||||||
|
+ prefix = os.environ['STAGING_INCDIR'].rstrip('include')
|
||||||
|
+ elif prefix is None:
|
||||||
|
prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
|
||||||
|
if os.name == "posix":
|
||||||
|
if python_build:
|
||||||
|
@@ -122,6 +124,10 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
|
||||||
|
If 'prefix' is supplied, use it instead of sys.base_prefix or
|
||||||
|
sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
|
||||||
|
"""
|
||||||
|
+ lib_basename = os.environ['STAGING_LIBDIR'].split('/')[-1]
|
||||||
|
+ if prefix is None and os.environ['STAGING_LIBDIR'] != "":
|
||||||
|
+ prefix = os.environ['STAGING_LIBDIR'].rstrip(lib_basename)
|
||||||
|
+
|
||||||
|
if prefix is None:
|
||||||
|
if standard_lib:
|
||||||
|
prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
|
||||||
|
@@ -130,7 +136,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
|
||||||
|
|
||||||
|
if os.name == "posix":
|
||||||
|
libpython = os.path.join(prefix,
|
||||||
|
- "lib", "python" + get_python_version())
|
||||||
|
+ lib_basename, "python" + get_python_version())
|
||||||
|
if standard_lib:
|
||||||
|
return libpython
|
||||||
|
else:
|
|
@ -0,0 +1,36 @@
|
||||||
|
From fead48c8b501a8d7c3db21df2e599f90f38f11d3 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Andrei Gherzan <andrei@gherzan.ro>
|
||||||
|
Date: Mon, 28 Jan 2019 15:57:54 +0000
|
||||||
|
Subject: [PATCH] _tkinter module needs tk module along with tcl. tk is not yet
|
||||||
|
integrated in yocto so we skip the check for this module. Avoid a warning by
|
||||||
|
not adding this module to missing variable.
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [distribution]
|
||||||
|
|
||||||
|
Also simply disable the tk module since its not in DEPENDS.
|
||||||
|
Signed-off-by: Andrei Gherzan <andrei@gherzan.ro>
|
||||||
|
|
||||||
|
---
|
||||||
|
setup.py | 8 +++++---
|
||||||
|
1 file changed, 5 insertions(+), 3 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/setup.py b/setup.py
|
||||||
|
index fbec00d..b7a36a6 100644
|
||||||
|
--- a/setup.py
|
||||||
|
+++ b/setup.py
|
||||||
|
@@ -1623,10 +1623,12 @@ class PyBuildExt(build_ext):
|
||||||
|
self.extensions.extend(exts)
|
||||||
|
|
||||||
|
# Call the method for detecting whether _tkinter can be compiled
|
||||||
|
- self.detect_tkinter(inc_dirs, lib_dirs)
|
||||||
|
+ # self.detect_tkinter(inc_dirs, lib_dirs)
|
||||||
|
|
||||||
|
- if '_tkinter' not in [e.name for e in self.extensions]:
|
||||||
|
- missing.append('_tkinter')
|
||||||
|
+ # tkinter module will not be avalaible as yocto
|
||||||
|
+ # doesn't have tk integrated (yet)
|
||||||
|
+ #if '_tkinter' not in [e.name for e in self.extensions]:
|
||||||
|
+ # missing.append('_tkinter')
|
||||||
|
|
||||||
|
# Build the _uuid module if possible
|
||||||
|
uuid_incs = find_file("uuid.h", inc_dirs, ["/usr/include/uuid"])
|
|
@ -0,0 +1,32 @@
|
||||||
|
From 62336285cba38017b35cb761c03f0c7e80a671a3 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Mark Hatle <mark.hatle@windriver.com>
|
||||||
|
Date: Wed, 21 Sep 2011 20:55:33 -0500
|
||||||
|
Subject: [PATCH] Lib/cgi.py: Update the script as mentioned in the comment
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [distribution]
|
||||||
|
|
||||||
|
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
|
||||||
|
|
||||||
|
---
|
||||||
|
Lib/cgi.py | 11 +----------
|
||||||
|
1 file changed, 1 insertion(+), 10 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/Lib/cgi.py b/Lib/cgi.py
|
||||||
|
index 8cf6687..094c7b4 100755
|
||||||
|
--- a/Lib/cgi.py
|
||||||
|
+++ b/Lib/cgi.py
|
||||||
|
@@ -1,13 +1,4 @@
|
||||||
|
-#! /usr/local/bin/python
|
||||||
|
-
|
||||||
|
-# NOTE: the above "/usr/local/bin/python" is NOT a mistake. It is
|
||||||
|
-# intentionally NOT "/usr/bin/env python". On many systems
|
||||||
|
-# (e.g. Solaris), /usr/local/bin is not in $PATH as passed to CGI
|
||||||
|
-# scripts, and /usr/local/bin is the default directory where Python is
|
||||||
|
-# installed, so /usr/bin/env would be unable to find python. Granted,
|
||||||
|
-# binary installations by Linux vendors often install Python in
|
||||||
|
-# /usr/bin. So let those vendors patch cgi.py to match their choice
|
||||||
|
-# of installation.
|
||||||
|
+#! /usr/bin/env python
|
||||||
|
|
||||||
|
"""Support module for CGI (Common Gateway Interface) scripts.
|
||||||
|
|
|
@ -0,0 +1,17 @@
|
||||||
|
#!/usr/bin/env python3
# Scan a Python build log for the two summary sections that setup.py
# prints when optional extension modules were skipped or failed to
# build; echo them back and exit non-zero so the build can be failed.
import sys

log_text = open(sys.argv[1]).read()

skipped_start = log_text.find("The necessary bits to build these optional modules were not found")
skipped_end = log_text.find("To find the necessary bits, look in setup.py in detect_modules() for the module's name.")
if skipped_start != -1:
    print("%s" % (log_text[skipped_start:skipped_end]))

failed_start = log_text.find("Failed to build these modules:")
if failed_start != -1:
    # The failed-modules list ends at the first blank line after it.
    failed_end = log_text.find("\n\n", failed_start)
    print("%s" % (log_text[failed_start:failed_end]))

if skipped_start != -1 or failed_start != -1:
    sys.exit(1)
|
||||||
|
|
|
@ -0,0 +1,433 @@
|
||||||
|
# This script is used as a bitbake task to create a new python manifest
|
||||||
|
# $ bitbake python -c create_manifest
|
||||||
|
#
|
||||||
|
# Our goal is to keep python-core as small as posible and add other python
|
||||||
|
# packages only when the user needs them, hence why we split upstream python
|
||||||
|
# into several packages.
|
||||||
|
#
|
||||||
|
# In a very simplistic way what this does is:
|
||||||
|
# Launch python and see specifically what is required for it to run at a minimum
|
||||||
|
#
|
||||||
|
# Go through the python-manifest file and launch a separate task for every single
|
||||||
|
# one of the files on each package, this task will check what was required for that
|
||||||
|
# specific module to run, these modules will be called dependencies.
|
||||||
|
# The output of such task will be a list of the modules or dependencies that were
|
||||||
|
# found for that file.
|
||||||
|
#
|
||||||
|
# Such output will be parsed by this script, we will look for each dependency on the
|
||||||
|
# manifest and if we find that another package already includes it, then we will add
|
||||||
|
# that package as an RDEPENDS to the package we are currently checking; in case we dont
|
||||||
|
# find the current dependency on any other package we will add it to the current package
|
||||||
|
# as part of FILES.
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# This way we will create a new manifest from the data structure that was built during
|
||||||
|
# this process, on this new manifest each package will contain specifically only
|
||||||
|
# what it needs to run.
|
||||||
|
#
|
||||||
|
# There are some caveats which we try to deal with, such as repeated files on different
|
||||||
|
# packages, packages that include folders, wildcards, and special packages.
|
||||||
|
# Its also important to note that this method only works for python files, and shared
|
||||||
|
# libraries. Static libraries, header files and binaries need to be dealt with manually.
|
||||||
|
#
|
||||||
|
# This script differs from its python2 version mostly on how shared libraries are handled
|
||||||
|
# The manifest file for python3 has an extra field which contains the cached files for
|
||||||
|
# each package.
|
||||||
|
# Tha method to handle cached files does not work when a module includes a folder which
|
||||||
|
# itself contains the pycache folder, gladly this is almost never the case.
|
||||||
|
#
|
||||||
|
# Author: Alejandro Enedino Hernandez Samaniego "aehs29" <aehs29 at gmail dot com>
|
||||||
|
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import collections
|
||||||
|
|
||||||
|
# Get python version from ${PYTHON_MAJMIN}
|
||||||
|
pyversion = str(sys.argv[1])
|
||||||
|
|
||||||
|
# Hack to get native python search path (for folders), not fond of it but it works for now
|
||||||
|
pivot = 'recipe-sysroot-native'
|
||||||
|
for p in sys.path:
|
||||||
|
if pivot in p:
|
||||||
|
nativelibfolder = p[:p.find(pivot)+len(pivot)]
|
||||||
|
|
||||||
|
# Empty dict to hold the whole manifest
|
||||||
|
new_manifest = collections.OrderedDict()
|
||||||
|
|
||||||
|
# Check for repeated files, folders and wildcards
|
||||||
|
allfiles = []
|
||||||
|
repeated = []
|
||||||
|
wildcards = []
|
||||||
|
|
||||||
|
hasfolders = []
|
||||||
|
allfolders = []
|
||||||
|
|
||||||
|
def isFolder(value):
    """Return True when a manifest entry refers to a directory on disk.

    ${PYTHON_MAJMIN} is expanded to the current python version and
    ${libdir} is tried against each multilib variant of the native
    sysroot (lib, lib64, lib32), since the native python may live in
    any of them.
    """
    value = value.replace('${PYTHON_MAJMIN}', pyversion)
    candidates = (
        value.replace('${libdir}', nativelibfolder + '/usr/lib'),
        value.replace('${libdir}', nativelibfolder + '/usr/lib64'),
        value.replace('${libdir}', nativelibfolder + '/usr/lib32'),
    )
    return any(os.path.isdir(path) for path in candidates)
|
||||||
|
|
||||||
|
def isCached(item):
    """Return True when *item* is a path inside a __pycache__ directory.

    Cached byte-compiled files always live under a ``__pycache__``
    folder, so a substring test is sufficient for the manifest paths
    handled by this script.
    """
    # Idiom fix: the original spelled this as
    # "if cond: return True else: return False".
    return '__pycache__' in item
|
||||||
|
|
||||||
|
def prepend_comments(comments, json_manifest):
    """Insert *comments* at the top of the file *json_manifest*.

    JSON itself cannot carry comments, so the comment header is kept as
    plain text before the JSON body and re-attached here after the
    manifest has been regenerated.
    """
    with open(json_manifest, 'r+') as manifest:
        body = manifest.read()
        manifest.seek(0)
        manifest.write(comments)
        manifest.write(body)
|
||||||
|
|
||||||
|
# Read existing JSON manifest
|
||||||
|
with open('python3-manifest.json') as manifest:
|
||||||
|
# The JSON format doesn't allow comments so we hack the call to keep the comments using a marker
|
||||||
|
manifest_str = manifest.read()
|
||||||
|
json_start = manifest_str.find('# EOC') + 6 # EOC + \n
|
||||||
|
manifest.seek(0)
|
||||||
|
comments = manifest.read(json_start)
|
||||||
|
manifest_str = manifest.read()
|
||||||
|
old_manifest = json.loads(manifest_str, object_pairs_hook=collections.OrderedDict)
|
||||||
|
|
||||||
|
#
|
||||||
|
# First pass to get core-package functionality, because we base everything on the fact that core is actually working
|
||||||
|
# Not exactly the same so it should not be a function
|
||||||
|
#
|
||||||
|
|
||||||
|
print ('Getting dependencies for package: core')
|
||||||
|
|
||||||
|
|
||||||
|
# This special call gets the core dependencies and
|
||||||
|
# appends to the old manifest so it doesnt hurt what it
|
||||||
|
# currently holds.
|
||||||
|
# This way when other packages check for dependencies
|
||||||
|
# on the new core package, they will still find them
|
||||||
|
# even when checking the old_manifest
|
||||||
|
|
||||||
|
output = subprocess.check_output([sys.executable, 'get_module_deps3.py', 'python-core-package']).decode('utf8')
|
||||||
|
for coredep in output.split():
|
||||||
|
coredep = coredep.replace(pyversion,'${PYTHON_MAJMIN}')
|
||||||
|
if isCached(coredep):
|
||||||
|
if coredep not in old_manifest['core']['cached']:
|
||||||
|
old_manifest['core']['cached'].append(coredep)
|
||||||
|
else:
|
||||||
|
if coredep not in old_manifest['core']['files']:
|
||||||
|
old_manifest['core']['files'].append(coredep)
|
||||||
|
|
||||||
|
|
||||||
|
# The second step is to loop through the existing files contained in the core package
|
||||||
|
# according to the old manifest, identify if they are modules, or some other type
|
||||||
|
# of file that we cant import (directories, binaries, configs) in which case we
|
||||||
|
# can only assume they were added correctly (manually) so we ignore those and
|
||||||
|
# pass them to the manifest directly.
|
||||||
|
|
||||||
|
for filedep in old_manifest['core']['files']:
|
||||||
|
if isFolder(filedep):
|
||||||
|
if isCached(filedep):
|
||||||
|
if filedep not in old_manifest['core']['cached']:
|
||||||
|
old_manifest['core']['cached'].append(filedep)
|
||||||
|
else:
|
||||||
|
if filedep not in old_manifest['core']['files']:
|
||||||
|
old_manifest['core']['files'].append(filedep)
|
||||||
|
continue
|
||||||
|
if '${bindir}' in filedep:
|
||||||
|
if filedep not in old_manifest['core']['files']:
|
||||||
|
old_manifest['core']['files'].append(filedep)
|
||||||
|
continue
|
||||||
|
if filedep == '':
|
||||||
|
continue
|
||||||
|
if '${includedir}' in filedep:
|
||||||
|
if filedep not in old_manifest['core']['files']:
|
||||||
|
old_manifest['core']['files'].append(filedep)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Get actual module name , shouldnt be affected by libdir/bindir, etc.
|
||||||
|
pymodule = os.path.splitext(os.path.basename(os.path.normpath(filedep)))[0]
|
||||||
|
|
||||||
|
|
||||||
|
# We now know that were dealing with a python module, so we can import it
|
||||||
|
# and check what its dependencies are.
|
||||||
|
# We launch a separate task for each module for deterministic behavior.
|
||||||
|
# Each module will only import what is necessary for it to work in specific.
|
||||||
|
# The output of each task will contain each module's dependencies
|
||||||
|
|
||||||
|
print ('Getting dependencies for module: %s' % pymodule)
|
||||||
|
output = subprocess.check_output([sys.executable, 'get_module_deps3.py', '%s' % pymodule]).decode('utf8')
|
||||||
|
print ('The following dependencies were found for module %s:\n' % pymodule)
|
||||||
|
print (output)
|
||||||
|
|
||||||
|
|
||||||
|
for pymodule_dep in output.split():
|
||||||
|
pymodule_dep = pymodule_dep.replace(pyversion,'${PYTHON_MAJMIN}')
|
||||||
|
|
||||||
|
if isCached(pymodule_dep):
|
||||||
|
if pymodule_dep not in old_manifest['core']['cached']:
|
||||||
|
old_manifest['core']['cached'].append(pymodule_dep)
|
||||||
|
else:
|
||||||
|
if pymodule_dep not in old_manifest['core']['files']:
|
||||||
|
old_manifest['core']['files'].append(pymodule_dep)
|
||||||
|
|
||||||
|
|
||||||
|
# At this point we are done with the core package.
|
||||||
|
# The old_manifest dictionary is updated only for the core package because
|
||||||
|
# all others will use this a base.
|
||||||
|
|
||||||
|
|
||||||
|
# To improve the script speed, we check which packages contain directories
|
||||||
|
# since we will be looping through (only) those later.
|
||||||
|
for pypkg in old_manifest:
|
||||||
|
for filedep in old_manifest[pypkg]['files']:
|
||||||
|
if isFolder(filedep):
|
||||||
|
print ('%s is a folder' % filedep)
|
||||||
|
if pypkg not in hasfolders:
|
||||||
|
hasfolders.append(pypkg)
|
||||||
|
if filedep not in allfolders:
|
||||||
|
allfolders.append(filedep)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# This is the main loop that will handle each package.
|
||||||
|
# It works in a similar fashion than the step before, but
|
||||||
|
# we will now be updating a new dictionary that will eventually
|
||||||
|
# become the new manifest.
|
||||||
|
#
|
||||||
|
# The following loops though all packages in the manifest,
|
||||||
|
# through all files on each of them, and checks whether or not
|
||||||
|
# they are modules and can be imported.
|
||||||
|
# If they can be imported, then it checks for dependencies for
|
||||||
|
# each of them by launching a separate task.
|
||||||
|
# The output of that task is then parsed and the manifest is updated
|
||||||
|
# accordingly, wether it should add the module on FILES for the current package
|
||||||
|
# or if that module already belongs to another package then the current one
|
||||||
|
# will RDEPEND on it
|
||||||
|
|
||||||
|
for pypkg in old_manifest:
|
||||||
|
# Use an empty dict as data structure to hold data for each package and fill it up
|
||||||
|
new_manifest[pypkg] = collections.OrderedDict()
|
||||||
|
new_manifest[pypkg]['summary'] = old_manifest[pypkg]['summary']
|
||||||
|
new_manifest[pypkg]['rdepends'] = []
|
||||||
|
new_manifest[pypkg]['files'] = []
|
||||||
|
new_manifest[pypkg]['cached'] = old_manifest[pypkg]['cached']
|
||||||
|
|
||||||
|
# All packages should depend on core
|
||||||
|
if pypkg != 'core':
|
||||||
|
new_manifest[pypkg]['rdepends'].append('core')
|
||||||
|
new_manifest[pypkg]['cached'] = []
|
||||||
|
|
||||||
|
print('\n')
|
||||||
|
print('--------------------------')
|
||||||
|
print ('Handling package %s' % pypkg)
|
||||||
|
print('--------------------------')
|
||||||
|
|
||||||
|
# Handle special cases, we assume that when they were manually added
|
||||||
|
# to the manifest we knew what we were doing.
|
||||||
|
special_packages = ['misc', 'modules', 'dev', 'tests']
|
||||||
|
if pypkg in special_packages or 'staticdev' in pypkg:
|
||||||
|
print('Passing %s package directly' % pypkg)
|
||||||
|
new_manifest[pypkg] = old_manifest[pypkg]
|
||||||
|
continue
|
||||||
|
|
||||||
|
for filedep in old_manifest[pypkg]['files']:
|
||||||
|
# We already handled core on the first pass, we can ignore it now
|
||||||
|
if pypkg == 'core':
|
||||||
|
if filedep not in new_manifest[pypkg]['files']:
|
||||||
|
new_manifest[pypkg]['files'].append(filedep)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Handle/ignore what we cant import
|
||||||
|
if isFolder(filedep):
|
||||||
|
new_manifest[pypkg]['files'].append(filedep)
|
||||||
|
# Asyncio (and others) are both the package and the folder name, we should not skip those...
|
||||||
|
path,mod = os.path.split(filedep)
|
||||||
|
if mod != pypkg:
|
||||||
|
continue
|
||||||
|
if '${bindir}' in filedep:
|
||||||
|
if filedep not in new_manifest[pypkg]['files']:
|
||||||
|
new_manifest[pypkg]['files'].append(filedep)
|
||||||
|
continue
|
||||||
|
if filedep == '':
|
||||||
|
continue
|
||||||
|
if '${includedir}' in filedep:
|
||||||
|
if filedep not in new_manifest[pypkg]['files']:
|
||||||
|
new_manifest[pypkg]['files'].append(filedep)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Get actual module name , shouldnt be affected by libdir/bindir, etc.
|
||||||
|
# We need to check if the imported module comes from another (e.g. sqlite3.dump)
|
||||||
|
path,pymodule = os.path.split(filedep)
|
||||||
|
path = os.path.basename(path)
|
||||||
|
pymodule = os.path.splitext(os.path.basename(pymodule))[0]
|
||||||
|
|
||||||
|
# If this condition is met, it means we need to import it from another module
|
||||||
|
# or its the folder itself (e.g. unittest)
|
||||||
|
if path == pypkg:
|
||||||
|
if pymodule:
|
||||||
|
pymodule = path + '.' + pymodule
|
||||||
|
else:
|
||||||
|
pymodule = path
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# We now know that were dealing with a python module, so we can import it
|
||||||
|
# and check what its dependencies are.
|
||||||
|
# We launch a separate task for each module for deterministic behavior.
|
||||||
|
# Each module will only import what is necessary for it to work in specific.
|
||||||
|
# The output of each task will contain each module's dependencies
|
||||||
|
|
||||||
|
print ('\nGetting dependencies for module: %s' % pymodule)
|
||||||
|
output = subprocess.check_output([sys.executable, 'get_module_deps3.py', '%s' % pymodule]).decode('utf8')
|
||||||
|
print ('The following dependencies were found for module %s:\n' % pymodule)
|
||||||
|
print (output)
|
||||||
|
|
||||||
|
reportFILES = []
|
||||||
|
reportRDEPS = []
|
||||||
|
|
||||||
|
for pymodule_dep in output.split():
|
||||||
|
|
||||||
|
# Warning: This first part is ugly
|
||||||
|
# One of the dependencies that was found, could be inside of one of the folders included by another package
|
||||||
|
# We need to check if this happens so we can add the package containing the folder as an rdependency
|
||||||
|
# e.g. Folder encodings contained in codecs
|
||||||
|
# This would be solved if no packages included any folders
|
||||||
|
|
||||||
|
# This can be done in two ways:
|
||||||
|
# 1 - We assume that if we take out the filename from the path we would get
|
||||||
|
# the folder string, then we would check if folder string is in the list of folders
|
||||||
|
# This would not work if a package contains a folder which contains another folder
|
||||||
|
# e.g. path/folder1/folder2/filename folder_string= path/folder1/folder2
|
||||||
|
# folder_string would not match any value contained in the list of folders
|
||||||
|
#
|
||||||
|
# 2 - We do it the other way around, checking if the folder is contained in the path
|
||||||
|
# e.g. path/folder1/folder2/filename folder_string= path/folder1/folder2
|
||||||
|
# is folder_string inside path/folder1/folder2/filename?,
|
||||||
|
# Yes, it works, but we waste a couple of milliseconds.
|
||||||
|
|
||||||
|
pymodule_dep = pymodule_dep.replace(pyversion,'${PYTHON_MAJMIN}')
|
||||||
|
inFolders = False
|
||||||
|
for folder in allfolders:
|
||||||
|
# The module could have a directory named after it, e.g. xml, if we take out the filename from the path
|
||||||
|
# we'll end up with ${libdir}, and we want ${libdir}/xml
|
||||||
|
if isFolder(pymodule_dep):
|
||||||
|
check_path = pymodule_dep
|
||||||
|
else:
|
||||||
|
check_path = os.path.dirname(pymodule_dep)
|
||||||
|
if folder in check_path :
|
||||||
|
inFolders = True # Did we find a folder?
|
||||||
|
folderFound = False # Second flag to break inner for
|
||||||
|
# Loop only through packages which contain folders
|
||||||
|
for pypkg_with_folder in hasfolders:
|
||||||
|
if (folderFound == False):
|
||||||
|
# print('Checking folder %s on package %s' % (pymodule_dep,pypkg_with_folder))
|
||||||
|
for folder_dep in old_manifest[pypkg_with_folder]['files'] or folder_dep in old_manifest[pypkg_with_folder]['cached']:
|
||||||
|
if folder_dep == folder:
|
||||||
|
print ('%s folder found in %s' % (folder, pypkg_with_folder))
|
||||||
|
folderFound = True
|
||||||
|
if pypkg_with_folder not in new_manifest[pypkg]['rdepends'] and pypkg_with_folder != pypkg:
|
||||||
|
new_manifest[pypkg]['rdepends'].append(pypkg_with_folder)
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
|
||||||
|
# A folder was found so we're done with this item, we can go on
|
||||||
|
if inFolders:
|
||||||
|
continue
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# No directories beyond this point
|
||||||
|
# We might already have this module on the dictionary since it could depend on a (previously checked) module
|
||||||
|
if pymodule_dep not in new_manifest[pypkg]['files'] and pymodule_dep not in new_manifest[pypkg]['cached']:
|
||||||
|
# Handle core as a special package, we already did it so we pass it to NEW data structure directly
|
||||||
|
if pypkg == 'core':
|
||||||
|
print('Adding %s to %s FILES' % (pymodule_dep, pypkg))
|
||||||
|
if pymodule_dep.endswith('*'):
|
||||||
|
wildcards.append(pymodule_dep)
|
||||||
|
if isCached(pymodule_dep):
|
||||||
|
new_manifest[pypkg]['cached'].append(pymodule_dep)
|
||||||
|
else:
|
||||||
|
new_manifest[pypkg]['files'].append(pymodule_dep)
|
||||||
|
|
||||||
|
# Check for repeated files
|
||||||
|
if pymodule_dep not in allfiles:
|
||||||
|
allfiles.append(pymodule_dep)
|
||||||
|
else:
|
||||||
|
if pymodule_dep not in repeated:
|
||||||
|
repeated.append(pymodule_dep)
|
||||||
|
else:
|
||||||
|
|
||||||
|
|
||||||
|
# Last step: Figure out if we this belongs to FILES or RDEPENDS
|
||||||
|
# We check if this module is already contained on another package, so we add that one
|
||||||
|
# as an RDEPENDS, or if its not, it means it should be contained on the current
|
||||||
|
# package, and we should add it to FILES
|
||||||
|
for possible_rdep in old_manifest:
|
||||||
|
# Debug
|
||||||
|
# print('Checking %s ' % pymodule_dep + ' in %s' % possible_rdep)
|
||||||
|
if pymodule_dep in old_manifest[possible_rdep]['files'] or pymodule_dep in old_manifest[possible_rdep]['cached']:
|
||||||
|
# Since were nesting, we need to check its not the same pypkg
|
||||||
|
if(possible_rdep != pypkg):
|
||||||
|
if possible_rdep not in new_manifest[pypkg]['rdepends']:
|
||||||
|
# Add it to the new manifest data struct as RDEPENDS since it contains something this module needs
|
||||||
|
reportRDEPS.append('Adding %s to %s RDEPENDS, because it contains %s\n' % (possible_rdep, pypkg, pymodule_dep))
|
||||||
|
new_manifest[pypkg]['rdepends'].append(possible_rdep)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
|
||||||
|
# Since this module wasnt found on another package, it is not an RDEP,
|
||||||
|
# so we add it to FILES for this package.
|
||||||
|
# A module shouldn't contain itself (${libdir}/python3/sqlite3 shouldnt be on sqlite3 files)
|
||||||
|
if os.path.basename(pymodule_dep) != pypkg:
|
||||||
|
reportFILES.append(('Adding %s to %s FILES\n' % (pymodule_dep, pypkg)))
|
||||||
|
if isCached(pymodule_dep):
|
||||||
|
new_manifest[pypkg]['cached'].append(pymodule_dep)
|
||||||
|
else:
|
||||||
|
new_manifest[pypkg]['files'].append(pymodule_dep)
|
||||||
|
if pymodule_dep.endswith('*'):
|
||||||
|
wildcards.append(pymodule_dep)
|
||||||
|
if pymodule_dep not in allfiles:
|
||||||
|
allfiles.append(pymodule_dep)
|
||||||
|
else:
|
||||||
|
if pymodule_dep not in repeated:
|
||||||
|
repeated.append(pymodule_dep)
|
||||||
|
|
||||||
|
print('\n')
|
||||||
|
print('#################################')
|
||||||
|
print('Summary for module %s' % pymodule)
|
||||||
|
print('FILES found for module %s:' % pymodule)
|
||||||
|
print(''.join(reportFILES))
|
||||||
|
print('RDEPENDS found for module %s:' % pymodule)
|
||||||
|
print(''.join(reportRDEPS))
|
||||||
|
print('#################################')
|
||||||
|
|
||||||
|
print('The following FILES contain wildcards, please check if they are necessary')
|
||||||
|
print(wildcards)
|
||||||
|
print('The following FILES contain folders, please check if they are necessary')
|
||||||
|
print(hasfolders)
|
||||||
|
|
||||||
|
|
||||||
|
# Sort it just so it looks nicer
|
||||||
|
for pypkg in new_manifest:
|
||||||
|
new_manifest[pypkg]['files'].sort()
|
||||||
|
new_manifest[pypkg]['cached'].sort()
|
||||||
|
new_manifest[pypkg]['rdepends'].sort()
|
||||||
|
|
||||||
|
# Create the manifest from the data structure that was built
|
||||||
|
with open('python3-manifest.json.new','w') as outfile:
|
||||||
|
json.dump(new_manifest,outfile, indent=4)
|
||||||
|
outfile.write('\n')
|
||||||
|
|
||||||
|
prepend_comments(comments,'python3-manifest.json.new')
|
||||||
|
|
||||||
|
if (repeated):
|
||||||
|
error_msg = '\n\nERROR:\n'
|
||||||
|
error_msg += 'The following files are repeated (contained in more than one package),\n'
|
||||||
|
error_msg += 'this is likely to happen when new files are introduced after an upgrade,\n'
|
||||||
|
error_msg += 'please check which package should get it,\n modify the manifest accordingly and re-run the create_manifest task:\n'
|
||||||
|
error_msg += '\n'.join(repeated)
|
||||||
|
error_msg += '\n'
|
||||||
|
sys.exit(error_msg)
|
||||||
|
|
|
@ -0,0 +1,146 @@
|
||||||
|
# This script is launched on separate task for each python module
|
||||||
|
# It checks for dependencies for that specific module and prints
|
||||||
|
# them out, the output of this execution will have all dependencies
|
||||||
|
# for a specific module, which will be parsed an dealt on create_manifest.py
|
||||||
|
#
|
||||||
|
# Author: Alejandro Enedino Hernandez Samaniego "aehs29" <aehs29@gmail.com>
|
||||||
|
|
||||||
|
# We can get a log per module, for all the dependencies that were found, but its messy.
|
||||||
|
debug=False
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
# We can get a list of the modules which are currently required to run python
|
||||||
|
# so we run python-core and get its modules, we then import what we need
|
||||||
|
# and check what modules are currently running, if we substract them from the
|
||||||
|
# modules we had initially, we get the dependencies for the module we imported.
|
||||||
|
|
||||||
|
# We use importlib to achieve this, so we also need to know what modules importlib needs
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
core_deps=set(sys.modules)
|
||||||
|
|
||||||
|
def fix_path(dep_path):
    """Translate a host/sysroot build path into a packageable target path.

    Strips everything up to and including the 'recipe-sysroot-native'
    pivot, then rewrites the standard prefixes (/usr/bin, /usr/lib32,
    /usr/lib64, /usr/lib, /usr/include) into BitBake path variables so
    the result can be used directly in python3-manifest.json.  For
    __init__ files the containing package directory is returned instead
    of the file itself.
    """
    import os
    # We DONT want the path on our HOST system
    pivot = 'recipe-sysroot-native'
    # NOTE(review): if the pivot is absent, find() returns -1 and this
    # slices off len(pivot)-1 leading characters -- callers are expected
    # to pass sysroot paths; confirm against get_module_deps usage.
    dep_path = dep_path[dep_path.find(pivot) + len(pivot):]

    if '/usr/bin' in dep_path:
        # Bug fix: the original read replace('/usr/bin''${bindir}') --
        # adjacent string literals concatenate, so replace() was called
        # with a single argument and raised TypeError for any /usr/bin path.
        dep_path = dep_path.replace('/usr/bin', '${bindir}')

    # Handle multilib, is there a better way?
    # lib32/lib64 must be checked before the plain '/usr/lib' substring.
    if '/usr/lib32' in dep_path:
        dep_path = dep_path.replace('/usr/lib32', '${libdir}')
    if '/usr/lib64' in dep_path:
        dep_path = dep_path.replace('/usr/lib64', '${libdir}')
    if '/usr/lib' in dep_path:
        dep_path = dep_path.replace('/usr/lib', '${libdir}')
    if '/usr/include' in dep_path:
        dep_path = dep_path.replace('/usr/include', '${includedir}')
    if '__init__.' in dep_path:
        # A package __init__ file stands for the whole package directory
        dep_path = os.path.split(dep_path)[0]
    return dep_path
|
||||||
|
|
||||||
|
|
||||||
|
# Module to import was passed as an argument
current_module = str(sys.argv[1]).rstrip()
if(debug==True):
    log = open('log_%s' % current_module,'w')
    log.write('Module %s generated the following dependencies:\n' % current_module)
try:
    importlib.import_module('%s' % current_module)
except ImportError as e:
    # A module that fails to import simply contributes no new entries to
    # sys.modules; we carry on and report whatever did get loaded.
    if (debug==True):
        log.write('Module was not found')
    pass


# Get current module dependencies, dif will contain a list of specific deps for this module
module_deps=set(sys.modules)

# We handle the core package (1st pass on create_manifest.py) as a special case
if current_module == 'python-core-package':
    dif = core_deps
else:
    # We know this is not the core package, so there must be a difference.
    dif = module_deps-core_deps


# Check where each dependency came from; each dependency's (templated)
# path is printed on stdout for create_manifest.py to consume.
for item in dif:
    dep_path=''
    try:
        if (debug==True):
            log.write('Calling: sys.modules[' + '%s' % item + '].__file__\n')
        dep_path = sys.modules['%s' % item].__file__
    except AttributeError as e:
        # Deals with thread (builtin module) not having __file__ attribute
        if debug==True:
            log.write(item + ' ')
            log.write(str(e))
            log.write('\n')
        pass
    except NameError as e:
        # Deals with NameError: name 'dep_path' is not defined
        # because module is not found (wasn't compiled?), e.g. bddsm
        if (debug==True):
            log.write(item+' ')
            log.write(str(e))
        pass

    # Site-customize is a special case since we (OpenEmbedded) put it there manually
    if 'sitecustomize' in dep_path:
        dep_path = '${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py'
        # Prints out result, which is what will be used by create_manifest
        print (dep_path)
        continue

    dep_path = fix_path(dep_path)

    import sysconfig
    soabi=sysconfig.get_config_var('SOABI')
    # Check if its a shared library and deconstruct it
    # (replace the ABI tag with '*' so the manifest matches any build)
    if soabi in dep_path:
        if (debug==True):
            log.write('Shared library found in %s' % dep_path)
        dep_path = dep_path.replace(soabi,'*')
        print (dep_path)
        continue

    if (debug==True):
        log.write(dep_path+'\n')
    # Prints out result, which is what will be used by create_manifest
    print (dep_path)


    import imp
    cpython_tag = imp.get_tag()
    cached=''
    # Theres no naive way to find *.pyc files on python3
    try:
        if (debug==True):
            log.write('Calling: sys.modules[' + '%s' % item + '].__cached__\n')
        cached = sys.modules['%s' % item].__cached__
    except AttributeError as e:
        # Deals with thread (builtin module) not having __cached__ attribute
        if debug==True:
            log.write(item + ' ')
            log.write(str(e))
            log.write('\n')
        pass
    except NameError as e:
        # Deals with NameError: name 'cached' is not defined
        if (debug==True):
            log.write(item+' ')
            log.write(str(e))
        pass
    # NOTE(review): after an AttributeError, cached is still '' (not None),
    # so an empty line is printed for builtin modules -- presumably
    # filtered out by create_manifest.py; confirm.
    if cached is not None:
        if (debug==True):
            log.write(cached)
        cached = fix_path(cached)
        cached = cached.replace(cpython_tag,'*')
        print (cached)

if debug==True:
    log.close()
|
|
@ -0,0 +1,46 @@
|
||||||
|
python-config: Revert to using distutils.sysconfig
|
||||||
|
|
||||||
|
The newer sysconfig module shares some code with distutils.sysconfig, but the
modifications in 12-distutils-prefix-is-inside-staging-area.patch make distutils.sysconfig
|
||||||
|
|
||||||
|
affect the native runtime as well as cross building. Use the old, patched
|
||||||
|
implementation which returns paths in the staging directory and for the target,
|
||||||
|
as appropriate.
|
||||||
|
|
||||||
|
Upstream-Status: Inappropriate [Embedded Specific]
|
||||||
|
|
||||||
|
Signed-off-by: Tyler Hall <tylerwhall@gmail.com>
|
||||||
|
:
|
||||||
|
Index: Python-3.3.3/Misc/python-config.in
|
||||||
|
===================================================================
|
||||||
|
--- Python-3.3.3.orig/Misc/python-config.in
|
||||||
|
+++ Python-3.3.3/Misc/python-config.in
|
||||||
|
@@ -4,7 +4,7 @@
|
||||||
|
import getopt
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
-import sysconfig
|
||||||
|
+from distutils import sysconfig
|
||||||
|
|
||||||
|
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
|
||||||
|
'ldflags', 'extension-suffix', 'help', 'abiflags', 'configdir']
|
||||||
|
@@ -32,14 +32,14 @@ if '--help' in opt_flags:
|
||||||
|
|
||||||
|
for opt in opt_flags:
|
||||||
|
if opt == '--prefix':
|
||||||
|
- print(sysconfig.get_config_var('prefix'))
|
||||||
|
+ print(sysconfig.PREFIX)
|
||||||
|
|
||||||
|
elif opt == '--exec-prefix':
|
||||||
|
- print(sysconfig.get_config_var('exec_prefix'))
|
||||||
|
+ print(sysconfig.EXEC_PREFIX)
|
||||||
|
|
||||||
|
elif opt in ('--includes', '--cflags'):
|
||||||
|
- flags = ['-I' + sysconfig.get_path('include'),
|
||||||
|
- '-I' + sysconfig.get_path('platinclude')]
|
||||||
|
+ flags = ['-I' + sysconfig.get_python_inc(),
|
||||||
|
+ '-I' + sysconfig.get_python_inc(plat_specific=True)]
|
||||||
|
if opt == '--cflags':
|
||||||
|
flags.extend(getvar('CFLAGS').split())
|
||||||
|
print(' '.join(flags))
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,3 @@
|
||||||
|
#!/bin/sh
# Run the CPython regression test suite and rewrite its verbose per-test
# results into the ptest PASS:/FAIL:/SKIP: format.
#
# Fix: the original used '/\.\.\. [ERROR|FAIL]/', which is a sed *bracket
# expression* (it matches any single one of the characters E R O | F A I L),
# not alternation; use two separate address expressions so only lines really
# ending in "... ERROR" or "... FAIL" are marked as failures.
python3 -m test -v | sed -u \
	-e '/\.\.\. ok/ s/^/PASS: /g' \
	-e '/\.\.\. ERROR/ s/^/FAIL: /g' \
	-e '/\.\.\. FAIL/ s/^/FAIL: /g' \
	-e '/\.\.\. skipped/ s/^/SKIP: /g' \
	-e 's/ \.\.\. ok//g' \
	-e 's/ \.\.\. ERROR//g' \
	-e 's/ \.\.\. FAIL//g' \
	-e 's/ \.\.\. skipped//g'
|
|
@ -0,0 +1,299 @@
|
||||||
|
SUMMARY = "The Python Programming Language"
|
||||||
|
HOMEPAGE = "http://www.python.org"
|
||||||
|
LICENSE = "PSFv2"
|
||||||
|
SECTION = "devel/python"
|
||||||
|
|
||||||
|
LIC_FILES_CHKSUM = "file://LICENSE;md5=f257cc14f81685691652a3d3e1b5d754"
|
||||||
|
|
||||||
|
SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \
|
||||||
|
file://run-ptest \
|
||||||
|
file://create_manifest3.py \
|
||||||
|
file://get_module_deps3.py \
|
||||||
|
file://python3-manifest.json \
|
||||||
|
file://check_build_completeness.py \
|
||||||
|
file://cgi_py.patch \
|
||||||
|
file://0001-Do-not-add-usr-lib-termcap-to-linker-flags-to-avoid-.patch \
|
||||||
|
${@bb.utils.contains('PACKAGECONFIG', 'tk', '', 'file://avoid_warning_about_tkinter.patch', d)} \
|
||||||
|
file://0001-Do-not-use-the-shell-version-of-python-config-that-w.patch \
|
||||||
|
file://python-config.patch \
|
||||||
|
file://0001-Makefile.pre-use-qemu-wrapper-when-gathering-profile.patch \
|
||||||
|
file://0001-Do-not-hardcode-lib-as-location-for-site-packages-an.patch \
|
||||||
|
file://0001-python3-use-cc_basename-to-replace-CC-for-checking-c.patch \
|
||||||
|
file://0002-Don-t-do-runtime-test-to-get-float-byte-order.patch \
|
||||||
|
file://0003-setup.py-pass-missing-libraries-to-Extension-for-mul.patch \
|
||||||
|
file://0001-Lib-sysconfig.py-fix-another-place-where-lib-is-hard.patch \
|
||||||
|
"
|
||||||
|
|
||||||
|
SRC_URI_append_class-native = " \
|
||||||
|
file://0001-distutils-sysconfig-append-STAGING_LIBDIR-python-sys.patch \
|
||||||
|
file://12-distutils-prefix-is-inside-staging-area.patch \
|
||||||
|
"
|
||||||
|
SRC_URI_append_class-nativesdk = " \
|
||||||
|
file://0001-main.c-if-OEPYTHON3HOME-is-set-use-instead-of-PYTHON.patch \
|
||||||
|
"
|
||||||
|
|
||||||
|
SRC_URI[md5sum] = "df6ec36011808205beda239c72f947cb"
|
||||||
|
SRC_URI[sha256sum] = "d83fe8ce51b1bb48bbcf0550fd265b9a75cdfdfa93f916f9e700aef8444bf1bb"
|
||||||
|
|
||||||
|
# exclude pre-releases for both python 2.x and 3.x
|
||||||
|
UPSTREAM_CHECK_REGEX = "[Pp]ython-(?P<pver>\d+(\.\d+)+).tar"
|
||||||
|
|
||||||
|
CVE_PRODUCT = "python"
|
||||||
|
|
||||||
|
PYTHON_MAJMIN = "3.7"
|
||||||
|
PYTHON_BINABI = "${PYTHON_MAJMIN}m"
|
||||||
|
|
||||||
|
S = "${WORKDIR}/Python-${PV}"
|
||||||
|
|
||||||
|
BBCLASSEXTEND = "native nativesdk"
|
||||||
|
|
||||||
|
inherit autotools pkgconfig qemu ptest multilib_header update-alternatives
|
||||||
|
|
||||||
|
MULTILIB_SUFFIX = "${@d.getVar('base_libdir',1).split('/')[-1]}"
|
||||||
|
|
||||||
|
ALTERNATIVE_${PN}-dev = "python-config"
|
||||||
|
ALTERNATIVE_LINK_NAME[python-config] = "${bindir}/python${PYTHON_BINABI}-config"
|
||||||
|
ALTERNATIVE_TARGET[python-config] = "${bindir}/python${PYTHON_BINABI}-config-${MULTILIB_SUFFIX}"
|
||||||
|
|
||||||
|
|
||||||
|
DEPENDS = "bzip2-replacement-native libffi bzip2 gdbm openssl sqlite3 zlib virtual/libintl xz virtual/crypt util-linux libtirpc libnsl2"
|
||||||
|
DEPENDS_append_class-target = " python3-native"
|
||||||
|
DEPENDS_append_class-nativesdk = " python3-native"
|
||||||
|
|
||||||
|
EXTRA_OECONF = " --without-ensurepip --enable-shared"
|
||||||
|
EXTRA_OECONF_append_class-native = " --bindir=${bindir}/${PN}"
|
||||||
|
|
||||||
|
|
||||||
|
EXTRANATIVEPATH += "python3-native"
|
||||||
|
|
||||||
|
CACHED_CONFIGUREVARS = " \
|
||||||
|
ac_cv_file__dev_ptmx=yes \
|
||||||
|
ac_cv_file__dev_ptc=no \
|
||||||
|
ac_cv_working_tzset=yes \
|
||||||
|
"
|
||||||
|
|
||||||
|
PACKAGECONFIG_class-target ??= "readline ${@bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'pgo', '', d)}"
|
||||||
|
PACKAGECONFIG_class-native ??= "readline"
|
||||||
|
PACKAGECONFIG_class-nativesdk ??= "readline"
|
||||||
|
PACKAGECONFIG[readline] = ",,readline"
|
||||||
|
# Use profile guided optimisation by running PyBench inside qemu-user
|
||||||
|
PACKAGECONFIG[pgo] = "--enable-optimizations,,qemu-native"
|
||||||
|
PACKAGECONFIG[tk] = ",,tk"
|
||||||
|
|
||||||
|
CPPFLAGS_append = " -I${STAGING_INCDIR}/ncursesw -I${STAGING_INCDIR}/uuid"
|
||||||
|
|
||||||
|
EXTRA_OEMAKE = '\
|
||||||
|
STAGING_LIBDIR=${STAGING_LIBDIR} \
|
||||||
|
STAGING_INCDIR=${STAGING_INCDIR} \
|
||||||
|
LIB=${baselib} \
|
||||||
|
'
|
||||||
|
|
||||||
|
do_compile_prepend_class-target() {
	# When profile-guided optimisation is enabled, the profiling run cannot
	# execute target binaries on the build host directly; generate a
	# qemu-user wrapper script that the PGO task invokes instead.
	if ${@bb.utils.contains('PACKAGECONFIG', 'pgo', 'true', 'false', d)}; then
		qemu_binary="${@qemu_wrapper_cmdline(d, '${STAGING_DIR_TARGET}', ['${B}', '${STAGING_DIR_TARGET}/${base_libdir}'])}"
		cat >pgo-wrapper <<EOF
#!/bin/sh
cd ${B}
$qemu_binary "\$@"
EOF
		chmod +x pgo-wrapper
	fi
}
|
||||||
|
|
||||||
|
do_install_prepend() {
	# Scan the compile log and fail early if do_compile silently skipped
	# building any extension modules (script shipped in SRC_URI).
	${WORKDIR}/check_build_completeness.py ${T}/log.do_compile
}
|
||||||
|
|
||||||
|
do_install_append_class-target() {
	# Rename pyconfig.h per-arch so multilib variants can coexist
	oe_multilib_header python${PYTHON_BINABI}/pyconfig.h
}
|
||||||
|
|
||||||
|
do_install_append_class-native() {
	# Make sure we use /usr/bin/env python
	# (rewrite the shebang of every installed script that hardcodes the
	# native interpreter path)
	for PYTHSCRIPT in `grep -rIl ${bindir}/${PN}/python ${D}${bindir}/${PN}`; do
		sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' $PYTHSCRIPT
	done
	# Add a symlink to the native Python so that scripts can just invoke
	# "nativepython" and get the right one without needing absolute paths
	# (these often end up too long for the #! parser in the kernel as the
	# buffer is 128 bytes long).
	ln -s python3-native/python3 ${D}${bindir}/nativepython3
}
|
||||||
|
|
||||||
|
do_install_append() {
	# Stage an extra copy of _sysconfigdata with its LIBDIR/INCLUDEDIR
	# entries rewritten to point into the staging sysroot, so that
	# cross-compiling extension modules picks up staged headers/libs.
	# NOTE(review): assumes find matches exactly one _sysconfig*.py file.
	mkdir -p ${D}${libdir}/python-sysconfigdata
	sysconfigfile=`find ${D} -name _sysconfig*.py`
	cp $sysconfigfile ${D}${libdir}/python-sysconfigdata/_sysconfigdata.py

	sed -i \
		-e "s,^ 'LIBDIR'.*, 'LIBDIR': '${STAGING_LIBDIR}'\,,g" \
		-e "s,^ 'INCLUDEDIR'.*, 'INCLUDEDIR': '${STAGING_INCDIR}'\,,g" \
		-e "s,^ 'CONFINCLUDEDIR'.*, 'CONFINCLUDEDIR': '${STAGING_INCDIR}'\,,g" \
		-e "/^ 'INCLDIRSTOMAKE'/{N; s,/usr/include,${STAGING_INCDIR},g}" \
		-e "/^ 'INCLUDEPY'/s,/usr/include,${STAGING_INCDIR},g" \
		${D}${libdir}/python-sysconfigdata/_sysconfigdata.py
}
|
||||||
|
|
||||||
|
do_install_append_class-nativesdk () {
	# Wrap the SDK interpreter so the relocated SDK can find its home
	# (OEPYTHON3HOME, honoured via the nativesdk-only main.c patch in
	# SRC_URI) and a usable terminfo database.
	create_wrapper ${D}${bindir}/python${PYTHON_MAJMIN} OEPYTHON3HOME='${prefix}' TERMINFO_DIRS='${sysconfdir}/terminfo:/etc/terminfo:/usr/share/terminfo:/usr/share/misc/terminfo:/lib/terminfo' PYTHONNOUSERSITE='1'
}
|
||||||
|
|
||||||
|
SSTATE_SCAN_FILES += "Makefile _sysconfigdata.py"
|
||||||
|
PACKAGE_PREPROCESS_FUNCS += "py_package_preprocess"
|
||||||
|
|
||||||
|
py_package_preprocess () {
	# Remove references to buildmachine paths in target Makefile and _sysconfigdata
	# (and in python3-config) so packages are reproducible and carry no
	# host contamination.
	sed -i -e 's:--sysroot=${STAGING_DIR_TARGET}::g' -e s:'--with-libtool-sysroot=${STAGING_DIR_TARGET}'::g \
		-e 's|${DEBUG_PREFIX_MAP}||g' \
		-e 's:${HOSTTOOLS_DIR}/::g' \
		-e 's:${RECIPE_SYSROOT_NATIVE}::g' \
		-e 's:${RECIPE_SYSROOT}::g' \
		-e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
		${PKGD}/${prefix}/lib/python${PYTHON_MAJMIN}/config-${PYTHON_MAJMIN}${PYTHON_ABI}*/Makefile \
		${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata*.py \
		${PKGD}/${bindir}/python${PYTHON_BINABI}-config

	# Recompile _sysconfigdata after modifying it
	# (one .pyc per optimisation level: plain, .opt-1 and .opt-2)
	cd ${PKGD}
	sysconfigfile=`find . -name _sysconfigdata_*.py`
	${STAGING_BINDIR_NATIVE}/python3-native/python3 \
		-c "from py_compile import compile; compile('$sysconfigfile')"
	${STAGING_BINDIR_NATIVE}/python3-native/python3 \
		-c "from py_compile import compile; compile('$sysconfigfile', optimize=1)"
	${STAGING_BINDIR_NATIVE}/python3-native/python3 \
		-c "from py_compile import compile; compile('$sysconfigfile', optimize=2)"
	cd -

	# Suffix python3-config per-multilib so update-alternatives can manage it
	mv ${PKGD}/${bindir}/python${PYTHON_BINABI}-config ${PKGD}/${bindir}/python${PYTHON_BINABI}-config-${MULTILIB_SUFFIX}

	#Remove the unneeded copy of target sysconfig data
	rm -rf ${PKGD}/${libdir}/python-sysconfigdata
}
|
||||||
|
|
||||||
|
# We want bytecode precompiled .py files (.pyc's) by default
|
||||||
|
# but the user may set it on their own conf
|
||||||
|
INCLUDE_PYCS ?= "1"
|
||||||
|
|
||||||
|
python(){
    # Anonymous function, run at parse time: read python3-manifest.json and
    # synthesize the PACKAGES / FILES_* / RDEPENDS_* / SUMMARY_* variables
    # for every sub-package described by the manifest.
    import collections, json

    filename = os.path.join(d.getVar('THISDIR'), 'python3', 'python3-manifest.json')
    # This python changes the datastore based on the contents of a file, so mark
    # that dependency.
    bb.parse.mark_dependency(d, filename)

    with open(filename) as manifest_file:
        # The manifest starts with a commented header terminated by '# EOC';
        # skip past it and parse the remainder as (order-preserving) JSON.
        manifest_str = manifest_file.read()
        json_start = manifest_str.find('# EOC') + 6
        manifest_file.seek(json_start)
        manifest_str = manifest_file.read()
        python_manifest = json.loads(manifest_str, object_pairs_hook=collections.OrderedDict)

    # First set RPROVIDES for -native case
    # Hardcoded since it cant be python3-native-foo, should be python3-foo-native
    pn = 'python3'
    rprovides = d.getVar('RPROVIDES').split()

    for key in python_manifest:
        pypackage = pn + '-' + key + '-native'
        if pypackage not in rprovides:
            rprovides.append(pypackage)

    d.setVar('RPROVIDES_class-native', ' '.join(rprovides))

    # Then work on the target
    include_pycs = d.getVar('INCLUDE_PYCS')

    packages = d.getVar('PACKAGES').split()
    pn = d.getVar('PN')

    newpackages=[]
    for key in python_manifest:
        pypackage= pn + '-' + key

        if pypackage not in packages:
            # We need to prepend, otherwise python-misc gets everything
            # so we use a new variable
            newpackages.append(pypackage)

        # "Build" python's manifest FILES, RDEPENDS and SUMMARY
        d.setVar('FILES_' + pypackage, '')
        for value in python_manifest[key]['files']:
            d.appendVar('FILES_' + pypackage, ' ' + value)

        # Add cached files (precompiled .pyc) only when INCLUDE_PYCS is set
        if include_pycs == '1':
            for value in python_manifest[key]['cached']:
                d.appendVar('FILES_' + pypackage, ' ' + value)

        for value in python_manifest[key]['rdepends']:
            # Make it work with or without $PN
            if '${PN}' in value:
                value=value.split('-')[1]
            d.appendVar('RDEPENDS_' + pypackage, ' ' + pn + '-' + value)
        d.setVar('SUMMARY_' + pypackage, python_manifest[key]['summary'])

    # Prepending so to avoid python-misc getting everything
    packages = newpackages + packages
    d.setVar('PACKAGES', ' '.join(packages))
    d.setVar('ALLOW_EMPTY_${PN}-modules', '1')
}
|
||||||
|
|
||||||
|
# Files needed to create a new manifest
|
||||||
|
|
||||||
|
do_create_manifest() {
	# This task should be run with every new release of Python.
	# We must ensure that PACKAGECONFIG enables everything when creating
	# a new manifest, this is to base our new manifest on a complete
	# native python build, containing all dependencies, otherwise the task
	# wont be able to find the required files.
	# e.g. BerkeleyDB is an optional build dependency so it may or may not
	# be present, we must ensure it is.

	cd ${WORKDIR}
	# This needs to be executed by python-native and NOT by HOST's python
	nativepython3 create_manifest3.py ${PYTHON_MAJMIN}
	# Copy the regenerated manifest back into the layer metadata so it can
	# be reviewed and committed.
	cp python3-manifest.json.new ${THISDIR}/python3/python3-manifest.json
}
|
||||||
|
|
||||||
|
# bitbake python -c create_manifest
|
||||||
|
addtask do_create_manifest
|
||||||
|
|
||||||
|
# Make sure we have native python ready when we create a new manifest
|
||||||
|
do_create_manifest[depends] += "${PN}:do_prepare_recipe_sysroot"
|
||||||
|
do_create_manifest[depends] += "${PN}:do_patch"
|
||||||
|
|
||||||
|
# manual dependency additions
|
||||||
|
RPROVIDES_${PN}-modules = "${PN}"
|
||||||
|
RRECOMMENDS_${PN}-core_append_class-nativesdk = " nativesdk-python3-modules"
|
||||||
|
RRECOMMENDS_${PN}-crypt_append_class-target = " openssl ca-certificates"
|
||||||
|
RRECOMMENDS_${PN}-crypt_append_class-nativesdk = " openssl ca-certificates"
|
||||||
|
|
||||||
|
FILES_${PN}-pydoc += "${bindir}/pydoc${PYTHON_MAJMIN} ${bindir}/pydoc3"
|
||||||
|
FILES_${PN}-idle += "${bindir}/idle3 ${bindir}/idle${PYTHON_MAJMIN}"
|
||||||
|
|
||||||
|
# provide python-pyvenv from python3-venv
|
||||||
|
RPROVIDES_${PN}-venv += "python3-pyvenv"
|
||||||
|
|
||||||
|
# package libpython3
|
||||||
|
PACKAGES =+ "libpython3 libpython3-staticdev"
|
||||||
|
FILES_libpython3 = "${libdir}/libpython*.so.*"
|
||||||
|
FILES_libpython3-staticdev += "${prefix}/lib/python${PYTHON_MAJMIN}/config-${PYTHON_BINABI}-*/libpython${PYTHON_BINABI}.a"
|
||||||
|
INSANE_SKIP_${PN}-dev += "dev-elf"
|
||||||
|
|
||||||
|
# catch all the rest (unsorted)
|
||||||
|
PACKAGES += "${PN}-misc"
|
||||||
|
RDEPENDS_${PN}-misc += "python3-core python3-email python3-codecs"
|
||||||
|
RDEPENDS_${PN}-modules_append_class-target = " python3-misc"
|
||||||
|
RDEPENDS_${PN}-modules_append_class-nativesdk = " python3-misc"
|
||||||
|
FILES_${PN}-misc = "${libdir}/python${PYTHON_MAJMIN} ${libdir}/python${PYTHON_MAJMIN}/lib-dynload"
|
||||||
|
|
||||||
|
# catch manpage
|
||||||
|
PACKAGES += "${PN}-man"
|
||||||
|
FILES_${PN}-man = "${datadir}/man"
|
||||||
|
|
||||||
|
RDEPENDS_${PN}-ptest = "${PN}-modules ${PN}-tests unzip bzip2 libgcc tzdata-europe coreutils sed"
|
||||||
|
RDEPENDS_${PN}-ptest_append_libc-glibc = " locale-base-tr-tr.iso-8859-9"
|
||||||
|
RDEPENDS_${PN}-tkinter += "${@bb.utils.contains('PACKAGECONFIG', 'tk', 'tk', '', d)}"
|
||||||
|
RDEPENDS_${PN}-dev = ""
|
||||||
|
|
Loading…
Reference in New Issue