author     Andrew Geissler <geissonator@yahoo.com>  2020-04-13 21:39:40 +0300
committer  Andrew Geissler <geissonator@yahoo.com>  2020-05-05 16:30:44 +0300
commit     82c905dc58a36aeae40b1b273a12f63fb1973cf4 (patch)
tree       38caf00263451b5036435cdc36e035b25d32e623 /poky/meta/classes
parent     83ecb75644b3d677c274188f9ac0b2374d6f6925 (diff)
download   openbmc-82c905dc58a36aeae40b1b273a12f63fb1973cf4.tar.xz
meta-openembedded and poky: subtree updates
Squash of the following due to dependencies among them and OpenBMC changes:

meta-openembedded: subtree update:d0748372d2..9201611135
meta-openembedded: subtree update:9201611135..17fd382f34
poky: subtree update:9052e5b32a..2e11d97b6c
poky: subtree update:2e11d97b6c..a8544811d7

The change log was too large for the jenkins plugin to handle, therefore it
has been removed. Here is the first and last commit of each subtree:

meta-openembedded:d0748372d2 cppzmq: bump to version 4.6.0
meta-openembedded:17fd382f34 mpv: Remove X11 dependency
poky:9052e5b32a package_ipk: Remove pointless comment to trigger rebuild
poky:a8544811d7 pbzip2: Fix license warning

Change-Id: If0fc6c37629642ee207a4ca2f7aa501a2c673cd6
Signed-off-by: Andrew Geissler <geissonator@yahoo.com>
Diffstat (limited to 'poky/meta/classes')
-rw-r--r--  poky/meta/classes/archiver.bbclass | 136
-rw-r--r--  poky/meta/classes/autotools.bbclass | 2
-rw-r--r--  poky/meta/classes/base.bbclass | 72
-rw-r--r--  poky/meta/classes/buildhistory.bbclass | 7
-rw-r--r--  poky/meta/classes/buildstats.bbclass | 2
-rw-r--r--  poky/meta/classes/chrpath.bbclass | 34
-rw-r--r--  poky/meta/classes/cmake.bbclass | 25
-rw-r--r--  poky/meta/classes/core-image.bbclass | 2
-rw-r--r--  poky/meta/classes/cross.bbclass | 2
-rw-r--r--  poky/meta/classes/crosssdk.bbclass | 7
-rw-r--r--  poky/meta/classes/cve-check.bbclass | 29
-rw-r--r--  poky/meta/classes/devicetree.bbclass | 2
-rw-r--r--  poky/meta/classes/distutils-base.bbclass | 4
-rw-r--r--  poky/meta/classes/distutils.bbclass | 87
-rw-r--r--  poky/meta/classes/distutils3.bbclass | 38
-rw-r--r--  poky/meta/classes/externalsrc.bbclass | 5
-rw-r--r--  poky/meta/classes/features_check.bbclass | 3
-rw-r--r--  poky/meta/classes/fontcache.bbclass | 2
-rw-r--r--  poky/meta/classes/go.bbclass | 3
-rw-r--r--  poky/meta/classes/goarch.bbclass | 9
-rw-r--r--  poky/meta/classes/grub-efi-cfg.bbclass | 6
-rw-r--r--  poky/meta/classes/icecc.bbclass | 2
-rw-r--r--  poky/meta/classes/image-prelink.bbclass | 17
-rw-r--r--  poky/meta/classes/image.bbclass | 6
-rw-r--r--  poky/meta/classes/image_types.bbclass | 10
-rw-r--r--  poky/meta/classes/image_types_wic.bbclass | 16
-rw-r--r--  poky/meta/classes/insane.bbclass | 84
-rw-r--r--  poky/meta/classes/kernel-fitimage.bbclass | 5
-rw-r--r--  poky/meta/classes/kernel-yocto.bbclass | 76
-rw-r--r--  poky/meta/classes/kernel.bbclass | 21
-rw-r--r--  poky/meta/classes/kernelsrc.bbclass | 2
-rw-r--r--  poky/meta/classes/libc-common.bbclass | 37
-rw-r--r--  poky/meta/classes/libc-package.bbclass | 1
-rw-r--r--  poky/meta/classes/license.bbclass | 39
-rw-r--r--  poky/meta/classes/license_image.bbclass | 5
-rw-r--r--  poky/meta/classes/linuxloader.bbclass | 31
-rw-r--r--  poky/meta/classes/manpages.bbclass | 9
-rw-r--r--  poky/meta/classes/meson.bbclass | 37
-rw-r--r--  poky/meta/classes/mime-xdg.bbclass | 74
-rw-r--r--  poky/meta/classes/mime.bbclass | 62
-rw-r--r--  poky/meta/classes/multilib.bbclass | 9
-rw-r--r--  poky/meta/classes/native.bbclass | 2
-rw-r--r--  poky/meta/classes/nativesdk.bbclass | 1
-rw-r--r--  poky/meta/classes/npm.bbclass | 363
-rw-r--r--  poky/meta/classes/package.bbclass | 191
-rw-r--r--  poky/meta/classes/package_ipk.bbclass | 8
-rw-r--r--  poky/meta/classes/patch.bbclass | 7
-rw-r--r--  poky/meta/classes/populate_sdk_base.bbclass | 10
-rw-r--r--  poky/meta/classes/populate_sdk_ext.bbclass | 20
-rw-r--r--  poky/meta/classes/python-dir.bbclass | 5
-rw-r--r--  poky/meta/classes/python3-dir.bbclass | 4
-rw-r--r--  poky/meta/classes/pythonnative.bbclass | 27
-rw-r--r--  poky/meta/classes/qemu.bbclass | 3
-rw-r--r--  poky/meta/classes/qemuboot.bbclass | 20
-rw-r--r--  poky/meta/classes/reproducible_build.bbclass | 49
-rw-r--r--  poky/meta/classes/reproducible_build_simple.bbclass | 1
-rw-r--r--  poky/meta/classes/sanity.bbclass | 41
-rw-r--r--  poky/meta/classes/setuptools.bbclass | 3
-rw-r--r--  poky/meta/classes/siteinfo.bbclass | 10
-rw-r--r--  poky/meta/classes/sstate.bbclass | 113
-rw-r--r--  poky/meta/classes/staging.bbclass | 30
-rw-r--r--  poky/meta/classes/testimage.bbclass | 69
-rw-r--r--  poky/meta/classes/texinfo.bbclass | 8
-rw-r--r--  poky/meta/classes/toaster.bbclass | 26
-rw-r--r--  poky/meta/classes/uninative.bbclass | 2
-rw-r--r--  poky/meta/classes/vala.bbclass | 2
66 files changed, 1430 insertions, 605 deletions
diff --git a/poky/meta/classes/archiver.bbclass b/poky/meta/classes/archiver.bbclass
index 7c46cff91..48b4913a9 100644
--- a/poky/meta/classes/archiver.bbclass
+++ b/poky/meta/classes/archiver.bbclass
@@ -2,25 +2,42 @@
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archive for:
-# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
-# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
-# 3) configured source: ARCHIVER_MODE[src] = "configured"
-# 4) The patches between do_unpack and do_patch:
-# ARCHIVER_MODE[diff] = "1"
-# And you can set the one that you'd like to exclude from the diff:
-# ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
-# 5) The environment data, similar to 'bitbake -e recipe':
-# ARCHIVER_MODE[dumpdata] = "1"
-# 6) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
-# 7) Whether output the .src.rpm package:
-# ARCHIVER_MODE[srpm] = "1"
-# 8) Filter the license, the recipe whose license in
-# COPYLEFT_LICENSE_INCLUDE will be included, and in
-# COPYLEFT_LICENSE_EXCLUDE will be excluded.
-# COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
-# COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
-# 9) The recipe type that will be archived:
-# COPYLEFT_RECIPE_TYPES = 'target'
+# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
+# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
+# 3) configured source: ARCHIVER_MODE[src] = "configured"
+# 4) source mirror: ARCHIVER_MODE[src] = "mirror"
+# 5) The patches between do_unpack and do_patch:
+# ARCHIVER_MODE[diff] = "1"
+# And you can set the paths you'd like to exclude from the diff:
+# ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
+# 6) The environment data, similar to 'bitbake -e recipe':
+# ARCHIVER_MODE[dumpdata] = "1"
+# 7) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
+# 8) Whether to output the .src.rpm package:
+# ARCHIVER_MODE[srpm] = "1"
+# 9) Filter by license: recipes whose license is in
+# COPYLEFT_LICENSE_INCLUDE will be included, and those whose license is in
+# COPYLEFT_LICENSE_EXCLUDE will be excluded.
+# COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
+# COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
+# 10) The recipe type that will be archived:
+# COPYLEFT_RECIPE_TYPES = 'target'
+# 11) The source mirror mode:
+# ARCHIVER_MODE[mirror] = "split" (default): Sources are split into
+# per-recipe directories in a similar way to other archiver modes.
+# Post-processing may be required to produce a single mirror directory.
+# This does however allow inspection of duplicate sources and more
+# intelligent handling.
+# ARCHIVER_MODE[mirror] = "combined": All sources are placed into a single
+# directory suitable for direct use as a mirror. Duplicate sources are
+# ignored.
+# 12) Source mirror exclusions:
+# ARCHIVER_MIRROR_EXCLUDE is a list of prefixes to exclude from the mirror.
+# This may be used for sources which you are already publishing yourself
+# (e.g. if the URI starts with 'https://mysite.com/' and your mirror is
+# going to be published to the same site). It may also be used to exclude
+# local files (with the prefix 'file://') if these will be provided as part
+# of an archive of the layers themselves.
#
# Create archive for all the recipe types
@@ -33,6 +50,7 @@ ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"
+ARCHIVER_MODE[mirror] ?= "split"
DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
@@ -41,6 +59,10 @@ ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
+# When producing a combined mirror directory, allow duplicates for the case
+# where multiple recipes use the same SRC_URI.
+ARCHIVER_COMBINED_MIRRORDIR = "${ARCHIVER_TOPDIR}/mirror"
+SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}/mirror"
do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
@@ -106,6 +128,8 @@ python () {
elif hasTask("do_configure"):
d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
+ elif ar_src == "mirror":
+ d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_mirror' % pn)
elif ar_src:
bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)
@@ -281,6 +305,79 @@ python do_ar_configured() {
create_tarball(d, srcdir, 'configured', ar_outdir)
}
+python do_ar_mirror() {
+ import subprocess
+
+ src_uri = (d.getVar('SRC_URI') or '').split()
+ if len(src_uri) == 0:
+ return
+
+ dl_dir = d.getVar('DL_DIR')
+ mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
+ mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
+ have_mirror_tarballs = d.getVar('BB_GENERATE_MIRROR_TARBALLS')
+
+ if mirror_mode == 'combined':
+ destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
+ elif mirror_mode == 'split':
+ destdir = d.getVar('ARCHIVER_OUTDIR')
+ else:
+ bb.fatal('Invalid ARCHIVER_MODE[mirror]: %s' % (mirror_mode))
+
+ if not have_mirror_tarballs:
+ bb.fatal('Using `ARCHIVER_MODE[src] = "mirror"` depends on setting `BB_GENERATE_MIRROR_TARBALLS = "1"`')
+
+ def is_excluded(url):
+ for prefix in mirror_exclusions:
+ if url.startswith(prefix):
+ return True
+ return False
+
+ bb.note('Archiving the source as a mirror...')
+
+ bb.utils.mkdirhier(destdir)
+
+ fetcher = bb.fetch2.Fetch(src_uri, d)
+
+ for url in fetcher.urls:
+ if is_excluded(url):
+ bb.note('Skipping excluded url: %s' % (url))
+ continue
+
+ bb.note('Archiving url: %s' % (url))
+ ud = fetcher.ud[url]
+ ud.setup_localpath(d)
+ localpath = None
+
+ # Check for mirror tarballs first. We will archive the first mirror
+ # tarball that we find as it's assumed that we just need one.
+ for mirror_fname in ud.mirrortarballs:
+ mirror_path = os.path.join(dl_dir, mirror_fname)
+ if os.path.exists(mirror_path):
+ bb.note('Found mirror tarball: %s' % (mirror_path))
+ localpath = mirror_path
+ break
+
+ if len(ud.mirrortarballs) and not localpath:
+ bb.warn('Mirror tarballs are listed for a source but none are present. ' \
+ 'Falling back to original download.\n' \
+ 'SRC_URI = %s' % (url))
+
+ # Check original download
+ if not localpath:
+ bb.note('Using original download: %s' % (ud.localpath))
+ localpath = ud.localpath
+
+ if not localpath or not os.path.exists(localpath):
+ bb.fatal('Original download is missing for a source.\n' \
+ 'SRC_URI = %s' % (url))
+
+ # We now have an appropriate localpath
+ bb.note('Copying source mirror')
+ cmd = 'cp -fpPRH %s %s' % (localpath, destdir)
+ subprocess.check_call(cmd, shell=True)
+}
+
def exclude_useless_paths(tarinfo):
if tarinfo.isdir():
if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
@@ -483,6 +580,7 @@ addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
+addtask do_ar_mirror after do_fetch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives before do_build
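
Usage sketch for the new mirror mode added above (the exclusion URL is
illustrative; all variables are the ones introduced in this class):

    INHERIT += "archiver"
    BB_GENERATE_MIRROR_TARBALLS = "1"
    ARCHIVER_MODE[src] = "mirror"
    ARCHIVER_MODE[mirror] = "combined"
    # Skip sources we already publish ourselves, and local files
    ARCHIVER_MIRROR_EXCLUDE = "https://mysite.com/ file://"

With "combined" mode the fetched tarballs should all land in a single
directory (ARCHIVER_COMBINED_MIRRORDIR above) that can be published directly
as a source mirror.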
diff --git a/poky/meta/classes/autotools.bbclass b/poky/meta/classes/autotools.bbclass
index 3d22ad025..6c2a33ac7 100644
--- a/poky/meta/classes/autotools.bbclass
+++ b/poky/meta/classes/autotools.bbclass
@@ -90,7 +90,7 @@ oe_runconf () {
cfgscript=`python3 -c "import os; print(os.path.relpath(os.path.dirname('${CONFIGURE_SCRIPT}'), '.'))"`/$cfgscript_name
if [ -x "$cfgscript" ] ; then
bbnote "Running $cfgscript ${CONFIGUREOPTS} ${EXTRA_OECONF} $@"
- if ! ${CACHED_CONFIGUREVARS} $cfgscript ${CONFIGUREOPTS} ${EXTRA_OECONF} "$@"; then
+ if ! ${CACHED_CONFIGUREVARS} CONFIG_SHELL=/bin/bash $cfgscript ${CONFIGUREOPTS} ${EXTRA_OECONF} "$@"; then
bbnote "The following config.log files may provide further information."
bbnote `find ${B} -ignore_readdir_race -type f -name config.log`
bbfatal_log "configure failed"
diff --git a/poky/meta/classes/base.bbclass b/poky/meta/classes/base.bbclass
index 1cea3a221..45f9435fd 100644
--- a/poky/meta/classes/base.bbclass
+++ b/poky/meta/classes/base.bbclass
@@ -127,6 +127,9 @@ def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
for tool in tools:
desttool = os.path.join(dest, tool)
if not os.path.exists(desttool):
+ # clean up dead symlink
+ if os.path.islink(desttool):
+ os.unlink(desttool)
srctool = bb.utils.which(path, tool, executable=True)
# gcc/g++ may link to ccache on some hosts, e.g.,
# /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
@@ -138,11 +141,6 @@ def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
os.symlink(srctool, desttool)
else:
notfound.append(tool)
- # Force "python" -> "python2"
- desttool = os.path.join(dest, "python")
- if not os.path.exists(desttool):
- srctool = "python2"
- os.symlink(srctool, desttool)
if notfound and fatal:
bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))
@@ -395,7 +393,7 @@ python () {
# These take the form:
#
# PACKAGECONFIG ??= "<default options>"
- # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends"
+ # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig"
pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
if pkgconfigflags:
pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
@@ -442,8 +440,8 @@ python () {
for flag, flagval in sorted(pkgconfigflags.items()):
items = flagval.split(",")
num = len(items)
- if num > 5:
- bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend can be specified!"
+ if num > 6:
+ bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!"
% (d.getVar('PN'), flag))
if flag in pkgconfig:
@@ -457,6 +455,20 @@ python () {
extraconf.append(items[0])
elif num >= 2 and items[1]:
extraconf.append(items[1])
+
+ if num >= 6 and items[5]:
+ conflicts = set(items[5].split())
+ invalid = conflicts.difference(set(pkgconfigflags.keys()))
+ if invalid:
+ bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified."
+ % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid)))
+
+ if flag in pkgconfig:
+ intersec = conflicts.intersection(set(pkgconfig))
+ if intersec:
+ bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG."
+ % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec)))
+
appendVar('DEPENDS', extradeps)
appendVar('RDEPENDS_${PN}', extrardeps)
appendVar('RRECOMMENDS_${PN}', extrarrecs)
@@ -498,7 +510,7 @@ python () {
d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
need_machine = d.getVar('COMPATIBLE_MACHINE')
- if need_machine:
+ if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
import re
compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
for m in compat_machines:
@@ -507,7 +519,7 @@ python () {
else:
raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
- source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
+ source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
if not source_mirror_fetch:
need_host = d.getVar('COMPATIBLE_HOST')
if need_host:
@@ -531,45 +543,46 @@ python () {
bad_licenses = expand_wildcard_licenses(d, bad_licenses)
whitelist = []
- incompatwl = []
for lic in bad_licenses:
spdx_license = return_spdx(d, lic)
whitelist.extend((d.getVar("WHITELIST_" + lic) or "").split())
if spdx_license:
whitelist.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())
+
+ if pn in whitelist:
'''
We need to track what we are whitelisting and why. If pn is
incompatible we need to be able to note that the image that
is created may in fact contain incompatible licenses despite
INCOMPATIBLE_LICENSE being set.
'''
- incompatwl.extend((d.getVar("WHITELIST_" + lic) or "").split())
- if spdx_license:
- incompatwl.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())
-
- if not pn in whitelist:
+ bb.note("Including %s as buildable despite it having an incompatible license because it has been whitelisted" % pn)
+ else:
pkgs = d.getVar('PACKAGES').split()
- skipped_pkgs = []
+ skipped_pkgs = {}
unskipped_pkgs = []
for pkg in pkgs:
- if incompatible_license(d, bad_licenses, pkg):
- skipped_pkgs.append(pkg)
+ incompatible_lic = incompatible_license(d, bad_licenses, pkg)
+ if incompatible_lic:
+ skipped_pkgs[pkg] = incompatible_lic
else:
unskipped_pkgs.append(pkg)
- all_skipped = skipped_pkgs and not unskipped_pkgs
if unskipped_pkgs:
for pkg in skipped_pkgs:
- bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
+ bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
mlprefix = d.getVar('MLPREFIX')
- d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
+ d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, ' '.join(skipped_pkgs[pkg]))
for pkg in unskipped_pkgs:
- bb.debug(1, "INCLUDING the package " + pkg)
- elif all_skipped or incompatible_license(d, bad_licenses):
- bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
- raise bb.parse.SkipRecipe("it has an incompatible license: %s" % license)
- elif pn in whitelist:
- if pn in incompatwl:
- bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")
+ bb.debug(1, "Including the package %s" % pkg)
+ else:
+ incompatible_lic = incompatible_license(d, bad_licenses)
+ for pkg in skipped_pkgs:
+ incompatible_lic += skipped_pkgs[pkg]
+ incompatible_lic = sorted(list(set(incompatible_lic)))
+
+ if incompatible_lic:
+ bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
+ raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))
# Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
# superset of all per-package licenses. We do not do advanced (pattern)
@@ -606,6 +619,7 @@ python () {
# Mercurial packages should DEPEND on mercurial-native
elif scheme == "hg":
needsrcrev = True
+ d.appendVar("EXTRANATIVEPATH", ' python3-native ')
d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')
# Perforce packages support SRCREV = "${AUTOREV}"
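
A hypothetical recipe fragment exercising the new sixth PACKAGECONFIG field
(the gnutls/openssl feature names are made up for illustration):

    PACKAGECONFIG ??= "gnutls"
    PACKAGECONFIG[gnutls]  = "--with-gnutls,--without-gnutls,gnutls,,,openssl"
    PACKAGECONFIG[openssl] = "--with-openssl,--without-openssl,openssl,,,gnutls"

With this, selecting both gnutls and openssl in PACKAGECONFIG aborts the
parse via the bb.fatal() above instead of silently producing a broken
configuration.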
diff --git a/poky/meta/classes/buildhistory.bbclass b/poky/meta/classes/buildhistory.bbclass
index affdf272d..eb7295570 100644
--- a/poky/meta/classes/buildhistory.bbclass
+++ b/poky/meta/classes/buildhistory.bbclass
@@ -40,6 +40,7 @@ BUILDHISTORY_SDK_FILES ?= "conf/local.conf conf/bblayers.conf conf/auto.conf con
BUILDHISTORY_COMMIT ?= "1"
BUILDHISTORY_COMMIT_AUTHOR ?= "buildhistory <buildhistory@${DISTRO}>"
BUILDHISTORY_PUSH_REPO ?= ""
+BUILDHISTORY_TAG ?= "build"
SSTATEPOSTINSTFUNCS_append = " buildhistory_emit_pkghistory"
# We want to avoid influencing the signatures of sstate tasks - first the function itself:
@@ -824,9 +825,9 @@ END
if [ ! -e .git ] ; then
git init -q
else
- git tag -f build-minus-3 build-minus-2 > /dev/null 2>&1 || true
- git tag -f build-minus-2 build-minus-1 > /dev/null 2>&1 || true
- git tag -f build-minus-1 > /dev/null 2>&1 || true
+ git tag -f ${BUILDHISTORY_TAG}-minus-3 ${BUILDHISTORY_TAG}-minus-2 > /dev/null 2>&1 || true
+ git tag -f ${BUILDHISTORY_TAG}-minus-2 ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
+ git tag -f ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
fi
check_git_config
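
For example, a builder could set the following in local.conf (tag name
illustrative) so that several machines can share one buildhistory repository
without clobbering each other's rotating tags:

    BUILDHISTORY_TAG = "ci-qemux86"

which rotates ci-qemux86-minus-1/-2/-3 instead of the default build-minus-*
tags.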
diff --git a/poky/meta/classes/buildstats.bbclass b/poky/meta/classes/buildstats.bbclass
index 960653c70..2590c60c6 100644
--- a/poky/meta/classes/buildstats.bbclass
+++ b/poky/meta/classes/buildstats.bbclass
@@ -100,7 +100,7 @@ def write_task_data(status, logfile, e, d):
f.write("rusage %s: %s\n" % (i, getattr(resources, i)))
for i in rusages:
f.write("Child rusage %s: %s\n" % (i, getattr(childres, i)))
- if status is "passed":
+ if status == "passed":
f.write("Status: PASSED \n")
else:
f.write("Status: FAILED \n")
diff --git a/poky/meta/classes/chrpath.bbclass b/poky/meta/classes/chrpath.bbclass
index 2870c10d5..26b984c4d 100644
--- a/poky/meta/classes/chrpath.bbclass
+++ b/poky/meta/classes/chrpath.bbclass
@@ -2,15 +2,18 @@ CHRPATH_BIN ?= "chrpath"
PREPROCESS_RELOCATE_DIRS ?= ""
def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = False):
- import subprocess as sub
+ import subprocess, oe.qa
- p = sub.Popen([cmd, '-l', fpath],stdout=sub.PIPE,stderr=sub.PIPE)
- out, err = p.communicate()
- # If returned successfully, process stdout for results
- if p.returncode != 0:
- return
+ with oe.qa.ELFFile(fpath) as elf:
+ try:
+ elf.open()
+ except oe.qa.NotELFFileError:
+ return
- out = out.decode('utf-8')
+ try:
+ out = subprocess.check_output([cmd, "-l", fpath], universal_newlines=True)
+ except subprocess.CalledProcessError:
+ return
# Handle RUNPATH as well as RPATH
out = out.replace("RUNPATH=","RPATH=")
@@ -44,10 +47,11 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlin
args = ":".join(new_rpaths)
#bb.note("Setting rpath for %s to %s" %(fpath, args))
- p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE)
- out, err = p.communicate()
- if p.returncode != 0:
- bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN'), p.returncode, out, err))
+ try:
+ subprocess.check_output([cmd, "-r", args, fpath],
+ stderr=subprocess.PIPE, universal_newlines=True)
+ except subprocess.CalledProcessError as e:
+ bb.fatal("chrpath command failed with exit code %d:\n%s\n%s" % (e.returncode, e.stdout, e.stderr))
def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = False):
import subprocess as sub
@@ -72,6 +76,10 @@ def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardli
out, err = p.communicate()
def process_dir(rootdir, directory, d, break_hardlinks = False):
+ bb.debug(2, "Checking %s for binaries to process" % directory)
+ if not os.path.exists(directory):
+ return
+
import stat
rootdir = os.path.normpath(rootdir)
@@ -80,10 +88,6 @@ def process_dir(rootdir, directory, d, break_hardlinks = False):
baseprefix = os.path.normpath(d.expand('${base_prefix}'))
hostos = d.getVar("HOST_OS")
- #bb.debug("Checking %s for binaries to process" % directory)
- if not os.path.exists(directory):
- return
-
if "linux" in hostos:
process_file = process_file_linux
elif "darwin" in hostos:
diff --git a/poky/meta/classes/cmake.bbclass b/poky/meta/classes/cmake.bbclass
index 291f1e8d4..94ed8061b 100644
--- a/poky/meta/classes/cmake.bbclass
+++ b/poky/meta/classes/cmake.bbclass
@@ -63,8 +63,9 @@ OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM_class-native = "BOTH"
EXTRA_OECMAKE_append = " ${PACKAGECONFIG_CONFARGS}"
-EXTRA_OECMAKE_BUILD_prepend_task-compile = "${PARALLEL_MAKE} "
-EXTRA_OECMAKE_BUILD_prepend_task-install = "${PARALLEL_MAKEINST} "
+export CMAKE_BUILD_PARALLEL_LEVEL
+CMAKE_BUILD_PARALLEL_LEVEL_task-compile = "${@oe.utils.parallel_make(d, False)}"
+CMAKE_BUILD_PARALLEL_LEVEL_task-install = "${@oe.utils.parallel_make(d, True)}"
OECMAKE_TARGET_COMPILE ?= "all"
OECMAKE_TARGET_INSTALL ?= "install"
@@ -120,6 +121,9 @@ set( ENV{QT_CONF_PATH} ${WORKDIR}/qt.conf )
# directory as rpath by default
set( CMAKE_INSTALL_RPATH ${OECMAKE_RPATH} )
+# Use RPATHs relative to build directory for reproducibility
+set( CMAKE_BUILD_RPATH_USE_ORIGIN ON )
+
# Use our cmake modules
list(APPEND CMAKE_MODULE_PATH "${STAGING_DATADIR}/cmake/Modules/")
@@ -171,6 +175,10 @@ cmake_do_configure() {
-DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir'), d.getVar('prefix') + '/')} \
-DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir'), d.getVar('prefix') + '/')} \
-DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix') + '/')} \
+ -DPYTHON_EXECUTABLE:PATH=${PYTHON} \
+ -DPython_EXECUTABLE:PATH=${PYTHON} \
+ -DPython3_EXECUTABLE:PATH=${PYTHON} \
+ -DLIB_SUFFIX=${@d.getVar('baselib').replace('lib', '')} \
-DCMAKE_INSTALL_SO_NO_EXE=0 \
-DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \
-DCMAKE_NO_SYSTEM_FROM_IMPORTED=1 \
@@ -178,9 +186,18 @@ cmake_do_configure() {
-Wno-dev
}
+# To disable verbose cmake logs for a given recipe, or globally via
+# configuration metadata (e.g. local.conf), add the following:
+#
+# CMAKE_VERBOSE = ""
+#
+
+CMAKE_VERBOSE ??= "VERBOSE=1"
+
+# Then run do_compile again
cmake_runcmake_build() {
- bbnote ${DESTDIR:+DESTDIR=${DESTDIR} }VERBOSE=1 cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
- eval ${DESTDIR:+DESTDIR=${DESTDIR} }VERBOSE=1 cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
+ bbnote ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
+ eval ${DESTDIR:+DESTDIR=${DESTDIR} }${CMAKE_VERBOSE} cmake --build '${B}' "$@" -- ${EXTRA_OECMAKE_BUILD}
}
cmake_do_compile() {
diff --git a/poky/meta/classes/core-image.bbclass b/poky/meta/classes/core-image.bbclass
index 94f112c39..88ca27214 100644
--- a/poky/meta/classes/core-image.bbclass
+++ b/poky/meta/classes/core-image.bbclass
@@ -26,6 +26,7 @@
# - debug-tweaks - makes an image suitable for development, e.g. allowing passwordless root logins
# - empty-root-password
# - allow-empty-password
+# - allow-root-login
# - post-install-logging
# - dev-pkgs - development packages (headers, etc.) for all installed packages in the rootfs
# - dbg-pkgs - debug symbol packages for all installed packages in the rootfs
@@ -33,6 +34,7 @@
# - bash-completion-pkgs - bash-completion packages for recipes using bash-completion bbclass
# - ptest-pkgs - ptest packages for all ptest-enabled recipes
# - read-only-rootfs - tweaks an image to support read-only rootfs
+# - stateless-rootfs - systemctl-native not run, image populated by systemd at runtime
# - splash - bootup splash screen
#
FEATURE_PACKAGES_x11 = "packagegroup-core-x11"
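
The two new features are used like the existing ones, e.g. (an illustrative
sketch, not a recommended production setting):

    # local.conf: permit root login on a development image
    EXTRA_IMAGE_FEATURES += "allow-root-login"
    # image recipe: defer rootfs population to systemd at first boot
    IMAGE_FEATURES += "stateless-rootfs"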
diff --git a/poky/meta/classes/cross.bbclass b/poky/meta/classes/cross.bbclass
index f832561da..bfec91d04 100644
--- a/poky/meta/classes/cross.bbclass
+++ b/poky/meta/classes/cross.bbclass
@@ -70,7 +70,6 @@ libdir = "${exec_prefix}/lib/${CROSS_TARGET_SYS_DIR}"
libexecdir = "${exec_prefix}/libexec/${CROSS_TARGET_SYS_DIR}"
do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}/"
-do_populate_sysroot[stamp-extra-info] = ""
do_packagedata[stamp-extra-info] = ""
do_install () {
@@ -97,3 +96,4 @@ python do_addto_recipe_sysroot () {
bb.build.exec_func("extend_recipe_sysroot", d)
}
addtask addto_recipe_sysroot after do_populate_sysroot
+do_addto_recipe_sysroot[deptask] = "do_populate_sysroot"
diff --git a/poky/meta/classes/crosssdk.bbclass b/poky/meta/classes/crosssdk.bbclass
index c0c0bfee1..04aecb694 100644
--- a/poky/meta/classes/crosssdk.bbclass
+++ b/poky/meta/classes/crosssdk.bbclass
@@ -5,9 +5,15 @@ NATIVESDKLIBC ?= "libc-glibc"
LIBCOVERRIDE = ":${NATIVESDKLIBC}"
MACHINEOVERRIDES = ""
PACKAGE_ARCH = "${SDK_ARCH}"
+
python () {
# set TUNE_PKGARCH to SDK_ARCH
d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH'))
+ # Set features here to prevent appends and distro features backfill
+ # from modifying nativesdk distro features
+ features = set(d.getVar("DISTRO_FEATURES_NATIVESDK").split())
+ filtered = set(bb.utils.filter("DISTRO_FEATURES", d.getVar("DISTRO_FEATURES_FILTER_NATIVESDK"), d).split())
+ d.setVar("DISTRO_FEATURES", " ".join(sorted(features | filtered)))
}
STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}"
@@ -37,7 +43,6 @@ target_prefix = "${SDKPATHNATIVE}${prefix_nativesdk}"
target_exec_prefix = "${SDKPATHNATIVE}${prefix_nativesdk}"
baselib = "lib"
-do_populate_sysroot[stamp-extra-info] = ""
do_packagedata[stamp-extra-info] = ""
# Need to force this to ensure consistency across architectures
diff --git a/poky/meta/classes/cve-check.bbclass b/poky/meta/classes/cve-check.bbclass
index 19ed5548b..2a530a048 100644
--- a/poky/meta/classes/cve-check.bbclass
+++ b/poky/meta/classes/cve-check.bbclass
@@ -26,7 +26,7 @@ CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"
CVE_CHECK_DB_DIR ?= "${DL_DIR}/CVE_CHECK"
-CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_1.0.db"
+CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_1.1.db"
CVE_CHECK_LOG ?= "${T}/cve.log"
CVE_CHECK_TMP_FILE ?= "${TMPDIR}/cve_check"
@@ -52,11 +52,14 @@ python do_cve_check () {
"""
if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")):
- patched_cves = get_patches_cves(d)
- patched, unpatched = check_cves(d, patched_cves)
+ try:
+ patched_cves = get_patches_cves(d)
+ except FileNotFoundError:
+ bb.fatal("Failure in searching patches")
+ whitelisted, patched, unpatched = check_cves(d, patched_cves)
if patched or unpatched:
cve_data = get_cve_info(d, patched + unpatched)
- cve_write_data(d, patched, unpatched, cve_data)
+ cve_write_data(d, patched, unpatched, whitelisted, cve_data)
else:
bb.note("No CVE database found, skipping CVE check")
@@ -129,6 +132,10 @@ def get_patches_cves(d):
for url in src_patches(d):
patch_file = bb.fetch.decodeurl(url)[2]
+ if not os.path.isfile(patch_file):
+ bb.error("File Not found: %s" % patch_file)
+ raise FileNotFoundError
+
# Check patch file name for CVE ID
fname_match = cve_file_name_match.search(patch_file)
if fname_match:
@@ -172,13 +179,13 @@ def check_cves(d, patched_cves):
products = d.getVar("CVE_PRODUCT").split()
# If this has been unset then we're not scanning for CVEs here (for example, image recipes)
if not products:
- return ([], [])
+ return ([], [], [])
pv = d.getVar("CVE_VERSION").split("+git")[0]
# If the recipe has been whitelisted we return empty lists
if d.getVar("PN") in d.getVar("CVE_CHECK_PN_WHITELIST").split():
bb.note("Recipe has been whitelisted, skipping check")
- return ([], [])
+ return ([], [], [])
old_cve_whitelist = d.getVar("CVE_CHECK_CVE_WHITELIST")
if old_cve_whitelist:
@@ -214,7 +221,7 @@ def check_cves(d, patched_cves):
(_, _, _, version_start, operator_start, version_end, operator_end) = row
#bb.debug(2, "Evaluating row " + str(row))
- if (operator_start == '=' and pv == version_start):
+ if (operator_start == '=' and pv == version_start) or version_start == '-':
vulnerable = True
else:
if operator_start:
@@ -256,7 +263,7 @@ def check_cves(d, patched_cves):
conn.close()
- return (list(patched_cves), cves_unpatched)
+ return (list(cve_whitelist), list(patched_cves), cves_unpatched)
def get_cve_info(d, cves):
"""
@@ -280,7 +287,7 @@ def get_cve_info(d, cves):
conn.close()
return cve_data
-def cve_write_data(d, patched, unpatched, cve_data):
+def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
"""
Write CVE information in WORKDIR; and to CVE_CHECK_DIR, and
CVE manifest if enabled.
@@ -296,7 +303,9 @@ def cve_write_data(d, patched, unpatched, cve_data):
write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
write_string += "PACKAGE VERSION: %s\n" % d.getVar("PV")
write_string += "CVE: %s\n" % cve
- if cve in patched:
+ if cve in whitelisted:
+ write_string += "CVE STATUS: Whitelisted\n"
+ elif cve in patched:
write_string += "CVE STATUS: Patched\n"
else:
unpatched_cves.append(cve)
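
To make use of the whitelist reporting added above, a build can exclude whole
recipes or single CVE IDs; a sketch (recipe name and CVE ID are illustrative,
and the per-CVE variable is assumed to be CVE_CHECK_WHITELIST, the
replacement implied by the "old" CVE_CHECK_CVE_WHITELIST handling above):

    # Skip scanning an entire recipe
    CVE_CHECK_PN_WHITELIST += "my-recipe"
    # Mark individual CVEs as not applicable
    CVE_CHECK_WHITELIST += "CVE-2020-12345"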
diff --git a/poky/meta/classes/devicetree.bbclass b/poky/meta/classes/devicetree.bbclass
index d8779c794..c772ab2ab 100644
--- a/poky/meta/classes/devicetree.bbclass
+++ b/poky/meta/classes/devicetree.bbclass
@@ -59,7 +59,7 @@ DT_BOOT_CPU ??= "0"
DTC_FLAGS ?= "-R ${DT_RESERVED_MAP} -b ${DT_BOOT_CPU}"
DTC_PPFLAGS ?= "-nostdinc -undef -D__DTS__ -x assembler-with-cpp"
-DTC_BFLAGS ?= "-p ${DT_PADDING_SIZE}"
+DTC_BFLAGS ?= "-p ${DT_PADDING_SIZE} -@"
DTC_OFLAGS ?= "-p 0 -@ -H epapr"
python () {
diff --git a/poky/meta/classes/distutils-base.bbclass b/poky/meta/classes/distutils-base.bbclass
deleted file mode 100644
index 9f398d705..000000000
--- a/poky/meta/classes/distutils-base.bbclass
+++ /dev/null
@@ -1,4 +0,0 @@
-DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}"
-RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}"
-
-inherit distutils-common-base pythonnative
diff --git a/poky/meta/classes/distutils.bbclass b/poky/meta/classes/distutils.bbclass
deleted file mode 100644
index 3759b5826..000000000
--- a/poky/meta/classes/distutils.bbclass
+++ /dev/null
@@ -1,87 +0,0 @@
-inherit distutils-base
-
-DISTUTILS_BUILD_ARGS ?= ""
-DISTUTILS_STAGE_HEADERS_ARGS ?= "--install-dir=${STAGING_INCDIR}/${PYTHON_DIR}"
-DISTUTILS_STAGE_ALL_ARGS ?= "--prefix=${STAGING_DIR_HOST}${prefix} \
- --install-data=${STAGING_DATADIR}"
-DISTUTILS_INSTALL_ARGS ?= "--root=${D} \
- --prefix=${prefix} \
- --install-lib=${PYTHON_SITEPACKAGES_DIR} \
- --install-data=${datadir}"
-
-DISTUTILS_PYTHON = "python"
-DISTUTILS_PYTHON_class-native = "nativepython"
-
-distutils_do_configure() {
- if [ "${CLEANBROKEN}" != "1" ] ; then
- NO_FETCH_BUILD=1 \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py clean ${DISTUTILS_BUILD_ARGS}
- fi
-}
-
-distutils_do_compile() {
- NO_FETCH_BUILD=1 \
- STAGING_INCDIR=${STAGING_INCDIR} \
- STAGING_LIBDIR=${STAGING_LIBDIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py build ${DISTUTILS_BUILD_ARGS} || \
- bbfatal_log "'${PYTHON_PN} setup.py build ${DISTUTILS_BUILD_ARGS}' execution failed."
-}
-
-distutils_stage_headers() {
- install -d ${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR}
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install_headers ${DISTUTILS_STAGE_HEADERS_ARGS} || \
- bbfatal_log "'${PYTHON_PN} setup.py install_headers ${DISTUTILS_STAGE_HEADERS_ARGS}' execution for stage_headers failed."
-}
-
-distutils_stage_all() {
- STAGING_INCDIR=${STAGING_INCDIR} \
- STAGING_LIBDIR=${STAGING_LIBDIR} \
- install -d ${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR}
- PYTHONPATH=${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install ${DISTUTILS_STAGE_ALL_ARGS} || \
- bbfatal_log "'${PYTHON_PN} setup.py install ${DISTUTILS_STAGE_ALL_ARGS}' execution for stage_all failed."
-}
-
-distutils_do_install() {
- install -d ${D}${PYTHON_SITEPACKAGES_DIR}
- STAGING_INCDIR=${STAGING_INCDIR} \
- STAGING_LIBDIR=${STAGING_LIBDIR} \
- PYTHONPATH=${D}${PYTHON_SITEPACKAGES_DIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install ${DISTUTILS_INSTALL_ARGS} || \
- bbfatal_log "'${PYTHON_PN} setup.py install ${DISTUTILS_INSTALL_ARGS}' execution failed."
-
- # support filenames with *spaces*
- # only modify file if it contains path and recompile it
- find ${D} -name "*.py" -exec grep -q ${D} {} \; \
- -exec sed -i -e s:${D}::g {} \; \
- -exec ${STAGING_BINDIR_NATIVE}/python-native/python -mcompileall {} \;
-
- for i in ${D}${bindir}/* ${D}${sbindir}/*; do
- if [ -f "$i" ]; then
- sed -i -e s:${PYTHON}:${USRBINPATH}/env\ ${DISTUTILS_PYTHON}:g $i
- sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i
- fi
- done
-
- rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/easy-install.pth
- rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/site.py*
-
- #
- # FIXME: Bandaid against wrong datadir computation
- #
- if [ -e ${D}${datadir}/share ]; then
- mv -f ${D}${datadir}/share/* ${D}${datadir}/
- rmdir ${D}${datadir}/share
- fi
-
- # Fix backport modules
- if [ -e ${STAGING_LIBDIR}/${PYTHON_DIR}/site-packages/backports/__init__.py ] &&
- [ -e ${D}${PYTHON_SITEPACKAGES_DIR}/backports/__init__.py ]; then
- rm ${D}${PYTHON_SITEPACKAGES_DIR}/backports/__init__.py;
- rm ${D}${PYTHON_SITEPACKAGES_DIR}/backports/__init__.pyc;
- fi
-}
-
-EXPORT_FUNCTIONS do_configure do_compile do_install
-
-export LDSHARED="${CCLD} -shared"
diff --git a/poky/meta/classes/distutils3.bbclass b/poky/meta/classes/distutils3.bbclass
index 05a24bfe2..7356b5245 100644
--- a/poky/meta/classes/distutils3.bbclass
+++ b/poky/meta/classes/distutils3.bbclass
@@ -1,10 +1,9 @@
inherit distutils3-base
+B = "${WORKDIR}/build"
+distutils_do_configure[cleandirs] = "${B}"
+
DISTUTILS_BUILD_ARGS ?= ""
-DISTUTILS_BUILD_EXT_ARGS ?= ""
-DISTUTILS_STAGE_HEADERS_ARGS ?= "--install-dir=${STAGING_INCDIR}/${PYTHON_DIR}"
-DISTUTILS_STAGE_ALL_ARGS ?= "--prefix=${STAGING_DIR_HOST}${prefix} \
- --install-data=${STAGING_DATADIR}"
DISTUTILS_INSTALL_ARGS ?= "--root=${D} \
--prefix=${prefix} \
--install-lib=${PYTHON_SITEPACKAGES_DIR} \
@@ -14,45 +13,28 @@ DISTUTILS_PYTHON = "python3"
DISTUTILS_PYTHON_class-native = "nativepython3"
distutils3_do_configure() {
- if [ "${CLEANBROKEN}" != "1" ] ; then
- NO_FETCH_BUILD=1 \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py clean ${DISTUTILS_BUILD_ARGS}
- fi
+ :
}
distutils3_do_compile() {
+ cd ${S}
NO_FETCH_BUILD=1 \
STAGING_INCDIR=${STAGING_INCDIR} \
STAGING_LIBDIR=${STAGING_LIBDIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py \
- build ${DISTUTILS_BUILD_ARGS} || \
+ ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} ${S}/setup.py \
+ build --build-base=${B} ${DISTUTILS_BUILD_ARGS} || \
bbfatal_log "'${PYTHON_PN} setup.py build ${DISTUTILS_BUILD_ARGS}' execution failed."
}
distutils3_do_compile[vardepsexclude] = "MACHINE"
-distutils3_stage_headers() {
- install -d ${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR}
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install_headers ${DISTUTILS_STAGE_HEADERS_ARGS} || \
- bbfatal_log "'${PYTHON_PN} setup.py install_headers ${DISTUTILS_STAGE_HEADERS_ARGS}' execution for stage_headers failed."
-}
-distutils3_stage_headers[vardepsexclude] = "MACHINE"
-
-distutils3_stage_all() {
- STAGING_INCDIR=${STAGING_INCDIR} \
- STAGING_LIBDIR=${STAGING_LIBDIR} \
- install -d ${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR}
- PYTHONPATH=${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install ${DISTUTILS_STAGE_ALL_ARGS} || \
- bbfatal_log "'${PYTHON_PN} setup.py install ${DISTUTILS_STAGE_ALL_ARGS}' execution for stage_all failed."
-}
-distutils3_stage_all[vardepsexclude] = "MACHINE"
-
distutils3_do_install() {
+ cd ${S}
install -d ${D}${PYTHON_SITEPACKAGES_DIR}
STAGING_INCDIR=${STAGING_INCDIR} \
STAGING_LIBDIR=${STAGING_LIBDIR} \
PYTHONPATH=${D}${PYTHON_SITEPACKAGES_DIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install ${DISTUTILS_INSTALL_ARGS} || \
+ ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} ${S}/setup.py \
+ build --build-base=${B} install --skip-build ${DISTUTILS_INSTALL_ARGS} || \
bbfatal_log "'${PYTHON_PN} setup.py install ${DISTUTILS_INSTALL_ARGS}' execution failed."
# support filenames with *spaces*
diff --git a/poky/meta/classes/externalsrc.bbclass b/poky/meta/classes/externalsrc.bbclass
index ea59d02ed..d20012998 100644
--- a/poky/meta/classes/externalsrc.bbclass
+++ b/poky/meta/classes/externalsrc.bbclass
@@ -73,9 +73,8 @@ python () {
d.setVar('SRC_URI', ' '.join(local_srcuri))
- if '{SRCPV}' in d.getVar('PV', False):
- # Dummy value because the default function can't be called with blank SRC_URI
- d.setVar('SRCPV', '999')
+ # Dummy value because the default function can't be called with blank SRC_URI
+ d.setVar('SRCPV', '999')
if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
d.setVar('CONFIGUREOPT_DEPTRACK', '')
diff --git a/poky/meta/classes/features_check.bbclass b/poky/meta/classes/features_check.bbclass
index 391fbe1c9..876d32e31 100644
--- a/poky/meta/classes/features_check.bbclass
+++ b/poky/meta/classes/features_check.bbclass
@@ -23,6 +23,9 @@
# Copyright 2013 (C) O.S. Systems Software LTDA.
python () {
+ if d.getVar('PARSE_ALL_RECIPES', False):
+ return
+
# Assume at least one var is set.
distro_features = set((d.getVar('DISTRO_FEATURES') or '').split())
diff --git a/poky/meta/classes/fontcache.bbclass b/poky/meta/classes/fontcache.bbclass
index 13f9df159..97e7f17f0 100644
--- a/poky/meta/classes/fontcache.bbclass
+++ b/poky/meta/classes/fontcache.bbclass
@@ -20,7 +20,7 @@ if [ -n "$D" ] ; then
$INTERCEPT_DIR/postinst_intercept update_font_cache ${PKG} mlprefix=${MLPREFIX} binprefix=${MLPREFIX} \
'bindir="${bindir}"' \
'libdir="${libdir}"' \
- 'libexecdir="${libexecdir}"' \
+ 'libexecdir="${libexecdir}"' \
'base_libdir="${base_libdir}"' \
'fontconfigcachedir="${FONTCONFIG_CACHE_DIR}"' \
'fontconfigcacheparams="${FONTCONFIG_CACHE_PARAMS}"' \
diff --git a/poky/meta/classes/go.bbclass b/poky/meta/classes/go.bbclass
index e40e55689..c99689ac5 100644
--- a/poky/meta/classes/go.bbclass
+++ b/poky/meta/classes/go.bbclass
@@ -53,6 +53,7 @@ GOTOOLDIR_class-native = "${STAGING_LIBDIR_NATIVE}/go/pkg/tool/${BUILD_GOTUPLE}"
export GOTOOLDIR
export CGO_ENABLED ?= "1"
+export CGO_ENABLED_riscv64 = "0"
export CGO_CFLAGS ?= "${CFLAGS}"
export CGO_CPPFLAGS ?= "${CPPFLAGS}"
export CGO_CXXFLAGS ?= "${CXXFLAGS}"
@@ -147,7 +148,7 @@ INSANE_SKIP_${PN} += "ldflags"
# doesn't support -buildmode=pie, so skip the QA checking for mips and its
# variants.
python() {
- if 'mips' in d.getVar('TARGET_ARCH'):
+ if 'mips' in d.getVar('TARGET_ARCH') or 'riscv' in d.getVar('TARGET_ARCH'):
d.appendVar('INSANE_SKIP_%s' % d.getVar('PN'), " textrel")
else:
d.appendVar('GOBUILDFLAGS', ' -buildmode=pie')
diff --git a/poky/meta/classes/goarch.bbclass b/poky/meta/classes/goarch.bbclass
index 166dea9dc..1099b9576 100644
--- a/poky/meta/classes/goarch.bbclass
+++ b/poky/meta/classes/goarch.bbclass
@@ -6,12 +6,18 @@ HOST_GOARCH = "${@go_map_arch(d.getVar('HOST_ARCH'), d)}"
HOST_GOARM = "${@go_map_arm(d.getVar('HOST_ARCH'), d)}"
HOST_GO386 = "${@go_map_386(d.getVar('HOST_ARCH'), d.getVar('TUNE_FEATURES'), d)}"
HOST_GOMIPS = "${@go_map_mips(d.getVar('HOST_ARCH'), d.getVar('TUNE_FEATURES'), d)}"
+HOST_GOARM_class-native = "7"
+HOST_GO386_class-native = "sse2"
+HOST_GOMIPS_class-native = "hardfloat"
HOST_GOTUPLE = "${HOST_GOOS}_${HOST_GOARCH}"
TARGET_GOOS = "${@go_map_os(d.getVar('TARGET_OS'), d)}"
TARGET_GOARCH = "${@go_map_arch(d.getVar('TARGET_ARCH'), d)}"
TARGET_GOARM = "${@go_map_arm(d.getVar('TARGET_ARCH'), d)}"
TARGET_GO386 = "${@go_map_386(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'), d)}"
TARGET_GOMIPS = "${@go_map_mips(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'), d)}"
+TARGET_GOARM_class-native = "7"
+TARGET_GO386_class-native = "sse2"
+TARGET_GOMIPS_class-native = "hardfloat"
TARGET_GOTUPLE = "${TARGET_GOOS}_${TARGET_GOARCH}"
GO_BUILD_BINDIR = "${@['bin/${HOST_GOTUPLE}','bin'][d.getVar('BUILD_GOTUPLE') == d.getVar('HOST_GOTUPLE')]}"
@@ -31,6 +37,7 @@ GO_DYNLINK_aarch64 = "1"
GO_DYNLINK_x86 = "1"
GO_DYNLINK_x86-64 = "1"
GO_DYNLINK_powerpc64 = "1"
+GO_DYNLINK_powerpc64le = "1"
GO_DYNLINK_class-native = ""
GO_DYNLINK_class-nativesdk = ""
@@ -40,6 +47,7 @@ COMPATIBLE_HOST_linux-gnux32 = "null"
COMPATIBLE_HOST_linux-muslx32 = "null"
COMPATIBLE_HOST_powerpc = "null"
COMPATIBLE_HOST_powerpc64 = "null"
+COMPATIBLE_HOST_powerpc64le = "null"
COMPATIBLE_HOST_mipsarchn32 = "null"
ARM_INSTRUCTION_SET_armv4 = "arm"
@@ -47,7 +55,6 @@ ARM_INSTRUCTION_SET_armv5 = "arm"
ARM_INSTRUCTION_SET_armv6 = "arm"
TUNE_CCARGS_remove = "-march=mips32r2"
-SECURITY_CFLAGS_mipsarch = "${SECURITY_NOPIE_CFLAGS}"
SECURITY_NOPIE_CFLAGS ??= ""
# go can't be built with ccache:
diff --git a/poky/meta/classes/grub-efi-cfg.bbclass b/poky/meta/classes/grub-efi-cfg.bbclass
index 8b5ff20c7..3a2cdd698 100644
--- a/poky/meta/classes/grub-efi-cfg.bbclass
+++ b/poky/meta/classes/grub-efi-cfg.bbclass
@@ -88,6 +88,12 @@ python build_efi_cfg() {
for label in labels.split():
localdata = d.createCopy()
+ overrides = localdata.getVar('OVERRIDES')
+ if not overrides:
+ bb.fatal('OVERRIDES not defined')
+
+ localdata.setVar('OVERRIDES', 'grub_' + label + ':' + overrides)
+
for btype in btypes:
cfgfile.write('\nmenuentry \'%s%s\'{\n' % (label, btype[0]))
lb = label
diff --git a/poky/meta/classes/icecc.bbclass b/poky/meta/classes/icecc.bbclass
index bc3d6f4cc..d095305ed 100644
--- a/poky/meta/classes/icecc.bbclass
+++ b/poky/meta/classes/icecc.bbclass
@@ -105,7 +105,7 @@ def icecc_dep_prepend(d):
return "icecc-create-env-native"
return ""
-DEPENDS_prepend += "${@icecc_dep_prepend(d)} "
+DEPENDS_prepend = "${@icecc_dep_prepend(d)} "
get_cross_kernel_cc[vardepsexclude] += "KERNEL_CC"
def get_cross_kernel_cc(bb,d):
diff --git a/poky/meta/classes/image-prelink.bbclass b/poky/meta/classes/image-prelink.bbclass
index 04dd57c94..ebf6e6d7e 100644
--- a/poky/meta/classes/image-prelink.bbclass
+++ b/poky/meta/classes/image-prelink.bbclass
@@ -17,6 +17,16 @@ prelink_image () {
pre_prelink_size=`du -ks ${IMAGE_ROOTFS} | awk '{size = $1 ; print size }'`
echo "Size before prelinking $pre_prelink_size."
+ # The filesystem may not contain sysconfdir so establish what is present
+ # to enable cleanup after temporary creation of sysconfdir if needed
+ presentdir="${IMAGE_ROOTFS}${sysconfdir}"
+ while [ "${IMAGE_ROOTFS}" != "${presentdir}" ] ; do
+ [ ! -d "${presentdir}" ] || break
+ presentdir=`dirname "${presentdir}"`
+ done
+
+ mkdir -p "${IMAGE_ROOTFS}${sysconfdir}"
+
# We need a prelink conf on the filesystem, add one if it's missing
if [ ! -e ${IMAGE_ROOTFS}${sysconfdir}/prelink.conf ]; then
cp ${STAGING_ETCDIR_NATIVE}/prelink.conf \
@@ -59,6 +69,13 @@ prelink_image () {
rm $ldsoconf
fi
+ # Remove any directories temporarily created for sysconfdir
+ cleanupdir="${IMAGE_ROOTFS}${sysconfdir}"
+ while [ "${presentdir}" != "${cleanupdir}" ] ; do
+ rmdir "${cleanupdir}"
+ cleanupdir=`dirname ${cleanupdir}`
+ done
+
pre_prelink_size=`du -ks ${IMAGE_ROOTFS} | awk '{size = $1 ; print size }'`
echo "Size after prelinking $pre_prelink_size."
}
diff --git a/poky/meta/classes/image.bbclass b/poky/meta/classes/image.bbclass
index c2824395c..694b58fc9 100644
--- a/poky/meta/classes/image.bbclass
+++ b/poky/meta/classes/image.bbclass
@@ -62,10 +62,7 @@ def check_image_features(d):
valid_features = (d.getVarFlag('IMAGE_FEATURES', 'validitems') or "").split()
valid_features += d.getVarFlags('COMPLEMENTARY_GLOB').keys()
for var in d:
- if var.startswith("PACKAGE_GROUP_"):
- bb.warn("PACKAGE_GROUP is deprecated, please use FEATURE_PACKAGES instead")
- valid_features.append(var[14:])
- elif var.startswith("FEATURE_PACKAGES_"):
+ if var.startswith("FEATURE_PACKAGES_"):
valid_features.append(var[17:])
valid_features.sort()
@@ -609,6 +606,7 @@ do_patch[noexec] = "1"
do_configure[noexec] = "1"
do_compile[noexec] = "1"
do_install[noexec] = "1"
+deltask do_populate_lic
deltask do_populate_sysroot
do_package[noexec] = "1"
deltask do_package_qa
diff --git a/poky/meta/classes/image_types.bbclass b/poky/meta/classes/image_types.bbclass
index 2eeffbb36..f82f1d886 100644
--- a/poky/meta/classes/image_types.bbclass
+++ b/poky/meta/classes/image_types.bbclass
@@ -59,6 +59,8 @@ XZ_INTEGRITY_CHECK ?= "crc32"
ZIP_COMPRESSION_LEVEL ?= "-9"
+ZSTD_COMPRESSION_LEVEL ?= "-3"
+
JFFS2_SUM_EXTRA_ARGS ?= ""
IMAGE_CMD_jffs2 = "mkfs.jffs2 --root=${IMAGE_ROOTFS} --faketime --output=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.jffs2 ${EXTRA_IMAGECMD}"
@@ -128,7 +130,7 @@ IMAGE_CMD_tar = "${IMAGE_CMD_TAR} --numeric-owner -cf ${IMGDEPLOYDIR}/${IMAGE_NA
do_image_cpio[cleandirs] += "${WORKDIR}/cpio_append"
IMAGE_CMD_cpio () {
- (cd ${IMAGE_ROOTFS} && find . | cpio -o -H newc >${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cpio)
+ (cd ${IMAGE_ROOTFS} && find . | sort | cpio --reproducible -o -H newc >${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cpio)
# We only need the /init symlink if we're building the real
# image. The -dbg image doesn't need it! By being clever
# about this we also avoid 'touch' below failing, as it
@@ -269,7 +271,7 @@ IMAGE_TYPES = " \
hddimg \
squashfs squashfs-xz squashfs-lzo squashfs-lz4 \
ubi ubifs multiubi \
- tar tar.gz tar.bz2 tar.xz tar.lz4 \
+ tar tar.gz tar.bz2 tar.xz tar.lz4 tar.zst \
cpio cpio.gz cpio.xz cpio.lzma cpio.lz4 \
wic wic.gz wic.bz2 wic.lzma \
container \
@@ -282,7 +284,7 @@ IMAGE_TYPES = " \
# CONVERSION_CMD/DEPENDS.
COMPRESSIONTYPES ?= ""
-CONVERSIONTYPES = "gz bz2 lzma xz lz4 lzo zip sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap u-boot vmdk vdi qcow2 base64 ${COMPRESSIONTYPES}"
+CONVERSIONTYPES = "gz bz2 lzma xz lz4 lzo zip zst sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap u-boot vmdk vdi qcow2 base64 ${COMPRESSIONTYPES}"
CONVERSION_CMD_lzma = "lzma -k -f -7 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
CONVERSION_CMD_gz = "gzip -f -9 -n -c --rsyncable ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.gz"
CONVERSION_CMD_bz2 = "pbzip2 -f -k ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
@@ -290,6 +292,7 @@ CONVERSION_CMD_xz = "xz -f -k -c ${XZ_COMPRESSION_LEVEL} ${XZ_DEFAULTS} --check=
CONVERSION_CMD_lz4 = "lz4 -9 -z -l ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.lz4"
CONVERSION_CMD_lzo = "lzop -9 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
CONVERSION_CMD_zip = "zip ${ZIP_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zip ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
+CONVERSION_CMD_zst = "zstd -f -k -T0 -c ${ZSTD_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zst"
CONVERSION_CMD_sum = "sumtool -i ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} -o ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sum ${JFFS2_SUM_EXTRA_ARGS}"
CONVERSION_CMD_md5sum = "md5sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.md5sum"
CONVERSION_CMD_sha1sum = "sha1sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha1sum"
@@ -310,6 +313,7 @@ CONVERSION_DEPENDS_xz = "xz-native"
CONVERSION_DEPENDS_lz4 = "lz4-native"
CONVERSION_DEPENDS_lzo = "lzop-native"
CONVERSION_DEPENDS_zip = "zip-native"
+CONVERSION_DEPENDS_zst = "zstd-native"
CONVERSION_DEPENDS_sum = "mtd-utils-native"
CONVERSION_DEPENDS_bmap = "bmap-tools-native"
CONVERSION_DEPENDS_u-boot = "u-boot-tools-native"
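
With the zst conversion wired up above, a zstd-compressed rootfs tarball is a
two-line addition to local.conf (the level shown is just an example; the
default set above is -3):

    IMAGE_FSTYPES += "tar.zst"
    ZSTD_COMPRESSION_LEVEL = "-19"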
diff --git a/poky/meta/classes/image_types_wic.bbclass b/poky/meta/classes/image_types_wic.bbclass
index f350dc272..96ed0473e 100644
--- a/poky/meta/classes/image_types_wic.bbclass
+++ b/poky/meta/classes/image_types_wic.bbclass
@@ -4,7 +4,7 @@ WICVARS ?= "\
BBLAYERS IMGDEPLOYDIR DEPLOY_DIR_IMAGE FAKEROOTCMD IMAGE_BASENAME IMAGE_BOOT_FILES \
IMAGE_LINK_NAME IMAGE_ROOTFS INITRAMFS_FSTYPES INITRD INITRD_LIVE ISODIR RECIPE_SYSROOT_NATIVE \
ROOTFS_SIZE STAGING_DATADIR STAGING_DIR STAGING_LIBDIR TARGET_SYS \
- KERNEL_IMAGETYPE MACHINE INITRAMFS_IMAGE INITRAMFS_IMAGE_BUNDLE INITRAMFS_LINK_NAME"
+ KERNEL_IMAGETYPE MACHINE INITRAMFS_IMAGE INITRAMFS_IMAGE_BUNDLE INITRAMFS_LINK_NAME APPEND"
inherit ${@bb.utils.contains('INITRAMFS_IMAGE_BUNDLE', '1', 'kernel-artifact-names', '', d)}
@@ -27,16 +27,16 @@ WIC_CREATE_EXTRA_ARGS ?= ""
IMAGE_CMD_wic () {
out="${IMGDEPLOYDIR}/${IMAGE_NAME}"
+ build_wic="${WORKDIR}/build-wic"
wks="${WKS_FULL_PATH}"
if [ -z "$wks" ]; then
bbfatal "No kickstart files from WKS_FILES were found: ${WKS_FILES}. Please set WKS_FILE or WKS_FILES appropriately."
fi
-
- BUILDDIR="${TOPDIR}" wic create "$wks" --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" -o "$out/" ${WIC_CREATE_EXTRA_ARGS}
- mv "$out/$(basename "${wks%.wks}")"*.direct "$out${IMAGE_NAME_SUFFIX}.wic"
- rm -rf "$out/"
+ BUILDDIR="${TOPDIR}" PSEUDO_UNLOAD=1 wic create "$wks" --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" -o "$build_wic/" ${WIC_CREATE_EXTRA_ARGS}
+ mv "$build_wic/$(basename "${wks%.wks}")"*.direct "$out${IMAGE_NAME_SUFFIX}.wic"
}
IMAGE_CMD_wic[vardepsexclude] = "WKS_FULL_PATH WKS_FILES TOPDIR"
+do_image_wic[cleandirs] = "${WORKDIR}/build-wic"
# Rebuild when the wks file or vars in WICVARS change
USING_WIC = "${@bb.utils.contains_any('IMAGE_FSTYPES', 'wic ' + ' '.join('wic.%s' % c for c in '${CONVERSIONTYPES}'.split()), '1', '', d)}"
@@ -46,6 +46,7 @@ do_image_wic[depends] += "${@' '.join('%s-native:do_populate_sysroot' % r for r
# We ensure all artifacts are deployed (e.g. virtual/bootloader)
do_image_wic[recrdeptask] += "do_deploy"
+do_image_wic[deptask] += "do_image_complete"
WKS_FILE_DEPENDS_DEFAULT = '${@bb.utils.contains_any("BUILD_ARCH", [ 'x86_64', 'i686' ], "syslinux-native", "",d)}'
WKS_FILE_DEPENDS_DEFAULT += "bmap-tools-native cdrtools-native btrfs-tools-native squashfs-tools-native e2fsprogs-native"
@@ -84,6 +85,10 @@ python do_write_wks_template () {
bb.utils.copyfile(wks_file, "%s/%s" % (depdir, basename + '-' + os.path.basename(wks_file)))
}
+do_flush_pseudodb() {
+ ${FAKEROOTENV} ${FAKEROOTCMD} -S
+}
+
python () {
if d.getVar('USING_WIC'):
wks_file_u = d.getVar('WKS_FULL_PATH', False)
@@ -137,6 +142,7 @@ python do_rootfs_wicenv () {
depdir = d.getVar('IMGDEPLOYDIR')
bb.utils.copyfile(os.path.join(outdir, basename) + '.env', os.path.join(depdir, basename) + '.env')
}
+addtask do_flush_pseudodb after do_image before do_image_wic
addtask do_rootfs_wicenv after do_image before do_image_wic
do_rootfs_wicenv[vardeps] += "${WICVARS}"
do_rootfs_wicenv[prefuncs] = 'set_image_size'
diff --git a/poky/meta/classes/insane.bbclass b/poky/meta/classes/insane.bbclass
index f856cf6a2..7fc8f33a9 100644
--- a/poky/meta/classes/insane.bbclass
+++ b/poky/meta/classes/insane.bbclass
@@ -28,13 +28,14 @@ WARN_QA ?= "ldflags useless-rpaths rpaths staticdev libdir xorg-driver-abi \
pn-overrides infodir build-deps src-uri-bad \
unknown-configure-option symlink-to-sysroot multilib \
invalid-packageconfig host-user-contaminated uppercase-pn patch-fuzz \
+ mime mime-xdg \
"
ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
split-strip packages-list pkgv-undefined var-undefined \
version-going-backwards expanded-d invalid-chars \
license-checksum dev-elf file-rdeps configure-unsafe \
- configure-gettext perllocalpod \
+ configure-gettext perllocalpod shebang-size \
"
# Add usrmerge QA check based on distro feature
ERROR_QA_append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"
@@ -82,6 +83,29 @@ def package_qa_add_message(messages, section, new_msg):
else:
messages[section] = messages[section] + "\n" + new_msg
+QAPATHTEST[shebang-size] = "package_qa_check_shebang_size"
+def package_qa_check_shebang_size(path, name, d, elf, messages):
+ if os.path.islink(path) or elf:
+ return
+
+ try:
+ with open(path, 'rb') as f:
+ stanza = f.readline(130)
+ except IOError:
+ return
+
+ if stanza.startswith(b'#!'):
+ # Shebang found, now check its length
+ try:
+ stanza = stanza.decode("utf-8")
+ except UnicodeDecodeError:
+ #If it is not a text file, it is not a script
+ return
+
+ if len(stanza) > 129:
+ package_qa_add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d)))
+ return
+
QAPATHTEST[libexec] = "package_qa_check_libexec"
def package_qa_check_libexec(path,name, d, elf, messages):
@@ -181,10 +205,50 @@ def package_qa_check_staticdev(path, name, d, elf, messages):
libgcc.a, libgcov.a will be skipped in their packages
"""
- if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a"):
+ if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path:
package_qa_add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
(name, package_qa_clean_path(path,d)))
+QAPATHTEST[mime] = "package_qa_check_mime"
+def package_qa_check_mime(path, name, d, elf, messages):
+ """
+ Check if package installs mime types to /usr/share/mime/packages
+ while not inheriting mime.bbclass
+ """
+
+ if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d):
+ package_qa_add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
+ (name, package_qa_clean_path(path,d)))
+
+QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg"
+def package_qa_check_mime_xdg(path, name, d, elf, messages):
+ """
+ Check if package installs desktop file containing MimeType and requires
+ mime-xdg.bbclass to create /usr/share/applications/mimeinfo.cache
+ """
+
+ if d.getVar("datadir") + "/applications" in path and path.endswith('.desktop') and not bb.data.inherits_class("mime-xdg", d):
+ mime_type_found = False
+ try:
+ with open(path, 'r') as f:
+ for line in f.read().split('\n'):
+ if 'MimeType' in line:
+ mime_type_found = True
+                        break
+ except:
+ # At least libreoffice installs symlinks with absolute paths that are dangling here.
+ # We could implement some magic but for few (one) recipes it is not worth the effort so just warn:
+ wstr = "%s cannot open %s - is it a symlink with absolute path?\n" % (name, package_qa_clean_path(path,d))
+ wstr += "Please check if (linked) file contains key 'MimeType'.\n"
+ pkgname = name
+ if name == d.getVar('PN'):
+ pkgname = '${PN}'
+ wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP_%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
+ package_qa_add_message(messages, "mime-xdg", wstr)
+ if mime_type_found:
+ package_qa_add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s path '%s'" % \
+ (name, package_qa_clean_path(path,d)))
+
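
Both new scans only look for the literal key 'MimeType' anywhere in the
installed file. A self-contained sketch of the mime-xdg decision, using a
hypothetical desktop entry:

    import os
    import tempfile

    # Hypothetical desktop entry; any line containing 'MimeType' means the
    # recipe should inherit mime-xdg (or set INSANE_SKIP accordingly).
    DESKTOP = "\n".join([
        "[Desktop Entry]",
        "Type=Application",
        "Name=Example Viewer",
        "Exec=example-viewer %f",
        "MimeType=image/png;image/jpeg;",
    ])

    def needs_mime_xdg(path):
        try:
            with open(path, 'r') as f:
                return any('MimeType' in line for line in f)
        except OSError:
            # Dangling symlink etc.; the QA test warns instead of failing.
            return None

    with tempfile.TemporaryDirectory() as tmp:
        path = os.path.join(tmp, "example.desktop")
        with open(path, 'w') as f:
            f.write(DESKTOP)
        print(needs_mime_xdg(path))  # True
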
def package_qa_check_libdir(d):
"""
Check for wrong library installation paths. For instance, catch
@@ -373,11 +437,10 @@ def package_qa_hash_style(path, name, d, elf, messages):
for line in phdrs.split("\n"):
if "SYMTAB" in line:
has_syms = True
- if "GNU_HASH" in line:
+ if "GNU_HASH" or "DT_MIPS_XHASH" in line:
sane = True
- if "[mips32]" in line or "[mips64]" in line:
+ if ("[mips32]" in line or "[mips64]" in line) and d.getVar('TCLIBC') == "musl":
sane = True
-
if has_syms and not sane:
package_qa_add_message(messages, "ldflags", "No GNU_HASH in the ELF binary %s, didn't pass LDFLAGS?" % path)
@@ -893,9 +956,9 @@ def package_qa_check_src_uri(pn, d, messages):
if "${PN}" in d.getVar("SRC_URI", False):
package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
- pn = d.getVar("SRC_URI")
- if re.search(r"github\.com/.+/.+/archive/.+", pn):
- package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub archives" % pn, d)
+ for url in d.getVar("SRC_URI").split():
+ if re.search(r"github\.com/.+/.+/archive/.+", url):
+ package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub archives" % pn, d)
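
The rewritten loop also keeps `pn` intact for the error message (the previous
code overwrote it with the SRC_URI value) and checks each URL on its own. For
reference, the pattern flags GitHub's autogenerated /archive/ tarballs, whose
contents can be regenerated and change checksum, while release assets pass.
A sketch with made-up URLs:

    import re

    UNSTABLE = re.compile(r"github\.com/.+/.+/archive/.+")

    for url in [
        "https://github.com/example/project/archive/v1.0.tar.gz",
        "https://github.com/example/project/releases/download/v1.0/project-1.0.tar.gz",
        "git://github.com/example/project;protocol=https",
    ]:
        print("flagged" if UNSTABLE.search(url) else "ok", url)
    # Only the /archive/ URL is flagged.
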
# The PACKAGE FUNC to scan each package
@@ -937,14 +1000,13 @@ python do_package_qa () {
pkgdest = d.getVar('PKGDEST')
packages = set((d.getVar('PACKAGES') or '').split())
- cpath = oe.cachedpath.CachedPath()
global pkgfiles
pkgfiles = {}
for pkg in packages:
pkgfiles[pkg] = []
- for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
+ for walkroot, dirs, files in os.walk(os.path.join(pkgdest, pkg)):
for file in files:
- pkgfiles[pkg].append(walkroot + os.sep + file)
+ pkgfiles[pkg].append(os.path.join(walkroot, file))
# no packages should be scanned
if not packages:
diff --git a/poky/meta/classes/kernel-fitimage.bbclass b/poky/meta/classes/kernel-fitimage.bbclass
index ec18a3d69..72b05ff8d 100644
--- a/poky/meta/classes/kernel-fitimage.bbclass
+++ b/poky/meta/classes/kernel-fitimage.bbclass
@@ -391,11 +391,10 @@ fitimage_assemble() {
if [ -n "${EXTERNAL_KERNEL_DEVICETREE}" ]; then
dtbcount=1
- for DTBFILE in ${EXTERNAL_KERNEL_DEVICETREE}/*.dtb; do
- DTB=`basename ${DTBFILE}`
+ for DTB in $(find "${EXTERNAL_KERNEL_DEVICETREE}" \( -name '*.dtb' -o -name '*.dtbo' \) -printf '%P\n' | sort); do
DTB=$(echo "${DTB}" | tr '/' '_')
DTBS="${DTBS} ${DTB}"
- fitimage_emit_section_dtb ${1} ${DTB} ${DTBFILE}
+ fitimage_emit_section_dtb ${1} ${DTB} "${EXTERNAL_KERNEL_DEVICETREE}/${DTB}"
done
fi
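
The find-based loop picks up .dtbo overlays as well, and -printf '%P\n' yields
paths relative to EXTERNAL_KERNEL_DEVICETREE, so devicetrees in subdirectories
get distinct FIT section names once '/' is mapped to '_'. A rough Python
equivalent of the enumeration, against a hypothetical directory:

    import os

    def enumerate_dtbs(root):
        # Mimic: find root ( -name '*.dtb' -o -name '*.dtbo' ) -printf '%P\n' | sort
        found = []
        for walkroot, dirs, files in os.walk(root):
            for f in files:
                if f.endswith((".dtb", ".dtbo")):
                    found.append(os.path.relpath(os.path.join(walkroot, f), root))
        return sorted(found)

    # e.g. overlays/extra.dtbo -> section name overlays_extra.dtbo
    for rel in enumerate_dtbs("/path/to/external-devicetrees"):
        print(rel, "->", rel.replace("/", "_"))
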
diff --git a/poky/meta/classes/kernel-yocto.bbclass b/poky/meta/classes/kernel-yocto.bbclass
index ed9bcfa57..5bc627066 100644
--- a/poky/meta/classes/kernel-yocto.bbclass
+++ b/poky/meta/classes/kernel-yocto.bbclass
@@ -1,8 +1,24 @@
# remove tasks that modify the source tree in case externalsrc is inherited
-SRCTREECOVEREDTASKS += "do_kernel_configme do_validate_branches do_kernel_configcheck do_kernel_checkout do_fetch do_unpack do_patch"
+SRCTREECOVEREDTASKS += "do_validate_branches do_kernel_configcheck do_kernel_checkout do_fetch do_unpack do_patch"
PATCH_GIT_USER_EMAIL ?= "kernel-yocto@oe"
PATCH_GIT_USER_NAME ?= "OpenEmbedded"
+# The distro or local.conf should set this, but if nobody cares...
+LINUX_KERNEL_TYPE ??= "standard"
+
+# KMETA ?= ""
+KBRANCH ?= "master"
+KMACHINE ?= "${MACHINE}"
+SRCREV_FORMAT ?= "meta_machine"
+
+# LEVELS:
+# 0: no reporting
+# 1: report options that are specified, but not in the final config
+# 2: report options that are not hardware related, but set by a BSP
+KCONF_AUDIT_LEVEL ?= "1"
+KCONF_BSP_AUDIT_LEVEL ?= "0"
+KMETA_AUDIT ?= "yes"
+
# returns local (absolute) path names for all valid patches in the
# src_uri
def find_patches(d,subdir):
@@ -31,7 +47,7 @@ def find_sccs(d):
base, ext = os.path.splitext(os.path.basename(s))
if ext and ext in [".scc", ".cfg"]:
sources_list.append(s)
- elif base and base in 'defconfig':
+ elif base and 'defconfig' in base:
sources_list.append(s)
return sources_list
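
The direction of the substring test matters here: the old `base in 'defconfig'`
accepted any substring of the word "defconfig" (such as "config") and rejected
names like "mydefconfig"; the new test matches any defconfig-style name. A
comparison over a few illustrative basenames:

    for base in ["defconfig", "mydefconfig", "defconfig_custom", "config"]:
        old = base in 'defconfig'
        new = 'defconfig' in base
        print("%-17s old=%-5s new=%s" % (base, old, new))
    # "config" matched before but does not now; "mydefconfig" is the reverse.
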
@@ -83,13 +99,6 @@ do_kernel_metadata() {
fi
fi
- machine_branch="${@ get_machine_branch(d, "${KBRANCH}" )}"
- machine_srcrev="${SRCREV_machine}"
- if [ -z "${machine_srcrev}" ]; then
- # fallback to SRCREV if a non machine_meta tree is being built
- machine_srcrev="${SRCREV}"
- fi
-
# In a similar manner to the kernel itself:
#
# defconfig: $(obj)/conf
@@ -182,6 +191,14 @@ do_kernel_metadata() {
if [ $? -ne 0 ]; then
bbfatal_log "Could not locate BSP definition for ${KMACHINE}/${LINUX_KERNEL_TYPE} and no defconfig was provided"
fi
+
+ # if the bsp definition has "define KMETA_EXTERNAL_BSP t",
+ # then we need to set a flag that will instruct the next
+ # steps to use the BSP as both configuration and patches.
+ grep -q KMETA_EXTERNAL_BSP $bsp_definition
+ if [ $? -eq 0 ]; then
+ KMETA_EXTERNAL_BSPS="t"
+ fi
fi
meta_dir=$(kgit --meta)
@@ -195,6 +212,13 @@ do_kernel_metadata() {
fi
fi
+ # if KMETA_EXTERNAL_BSPS has been set, or it has been detected from
+ # the bsp definition, then we inject the bsp_definition into the
+ # patch phase below. we'll piggy back on the sccs variable.
+ if [ -n "${KMETA_EXTERNAL_BSPS}" ]; then
+ sccs="${bsp_definition} ${sccs}"
+ fi
+
# run2: only generate patches for elements that have been passed on the SRC_URI
elements="`echo -n ${sccs} ${patches} ${KERNEL_FEATURES}`"
if [ -n "${elements}" ]; then
@@ -293,7 +317,7 @@ do_kernel_checkout() {
}
do_kernel_checkout[dirs] = "${S}"
-addtask kernel_checkout before do_kernel_metadata after do_unpack
+addtask kernel_checkout before do_kernel_metadata after do_symlink_kernsrc
addtask kernel_metadata after do_validate_branches do_unpack before do_patch
do_kernel_metadata[depends] = "kern-tools-native:do_populate_sysroot"
do_validate_branches[depends] = "kern-tools-native:do_populate_sysroot"
@@ -301,10 +325,9 @@ do_validate_branches[depends] = "kern-tools-native:do_populate_sysroot"
do_kernel_configme[depends] += "virtual/${TARGET_PREFIX}binutils:do_populate_sysroot"
do_kernel_configme[depends] += "virtual/${TARGET_PREFIX}gcc:do_populate_sysroot"
do_kernel_configme[depends] += "bc-native:do_populate_sysroot bison-native:do_populate_sysroot"
+do_kernel_configme[depends] += "kern-tools-native:do_populate_sysroot"
do_kernel_configme[dirs] += "${S} ${B}"
do_kernel_configme() {
- set +e
-
# translate the kconfig_mode into something that merge_config.sh
# understands
case ${KCONFIG_MODE} in
@@ -330,13 +353,20 @@ do_kernel_configme() {
bbfatal_log "Could not find configuration queue (${meta_dir}/config.queue)"
fi
- CFLAGS="${CFLAGS} ${TOOLCHAIN_OPTIONS}" HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" HOSTCPP="${BUILD_CPP}" CC="${KERNEL_CC}" ARCH=${ARCH} merge_config.sh -O ${B} ${config_flags} ${configs} > ${meta_dir}/cfg/merge_config_build.log 2>&1
- if [ $? -ne 0 ]; then
- bbfatal_log "Could not configure ${KMACHINE}-${LINUX_KERNEL_TYPE}"
+ CFLAGS="${CFLAGS} ${TOOLCHAIN_OPTIONS}" HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" HOSTCPP="${BUILD_CPP}" CC="${KERNEL_CC}" LD="${KERNEL_LD}" ARCH=${ARCH} merge_config.sh -O ${B} ${config_flags} ${configs} > ${meta_dir}/cfg/merge_config_build.log 2>&1
+ if [ $? -ne 0 -o ! -f ${B}/.config ]; then
+ bberror "Could not generate a .config for ${KMACHINE}-${LINUX_KERNEL_TYPE}"
+ if [ ${KCONF_AUDIT_LEVEL} -gt 1 ]; then
+ bbfatal_log "`cat ${meta_dir}/cfg/merge_config_build.log`"
+ else
+ bbfatal_log "Details can be found at: ${S}/${meta_dir}/cfg/merge_config_build.log"
+ fi
fi
- echo "# Global settings from linux recipe" >> ${B}/.config
- echo "CONFIG_LOCALVERSION="\"${LINUX_VERSION_EXTENSION}\" >> ${B}/.config
+ if [ ! -z "${LINUX_VERSION_EXTENSION}" ]; then
+ echo "# Global settings from linux recipe" >> ${B}/.config
+ echo "CONFIG_LOCALVERSION="\"${LINUX_VERSION_EXTENSION}\" >> ${B}/.config
+ fi
}
addtask kernel_configme before do_configure after do_patch
@@ -355,6 +385,7 @@ python do_kernel_configcheck() {
env = os.environ.copy()
env['PATH'] = "%s:%s%s" % (d.getVar('PATH'), s, "/scripts/util/")
+ env['LD'] = "${KERNEL_LD}"
try:
configs = subprocess.check_output(['scc', '--configs', '-o', s + '/.kernel-meta'], env=env).decode('utf-8')
@@ -458,4 +489,15 @@ python () {
# If diffconfig is available, ensure it runs after kernel_configme
if 'do_diffconfig' in d:
bb.build.addtask('do_diffconfig', None, 'do_kernel_configme', d)
+
+ externalsrc = d.getVar('EXTERNALSRC')
+ if externalsrc:
+ # If we deltask do_patch, do_kernel_configme is left without
+ # dependencies and runs too early
+ d.setVarFlag('do_kernel_configme', 'deps', (d.getVarFlag('do_kernel_configme', 'deps', False) or []) + ['do_unpack'])
}
+
+# extra tasks
+addtask kernel_version_sanity_check after do_kernel_metadata do_kernel_checkout before do_compile
+addtask validate_branches before do_patch after do_kernel_checkout
+addtask kernel_configcheck after do_configure before do_compile
diff --git a/poky/meta/classes/kernel.bbclass b/poky/meta/classes/kernel.bbclass
index ebcb79a52..a72464546 100644
--- a/poky/meta/classes/kernel.bbclass
+++ b/poky/meta/classes/kernel.bbclass
@@ -294,14 +294,10 @@ kernel_do_compile() {
# kernel sources do not use do_unpack, so SOURCE_DATE_EPOCH may not
# be set....
if [ "${SOURCE_DATE_EPOCH}" = "" -o "${SOURCE_DATE_EPOCH}" = "0" ]; then
- olddir=`pwd`
- cd ${S}
- SOURCE_DATE_EPOCH=`git log -1 --pretty=%ct`
- # git repo not guaranteed, so fall back to REPRODUCIBLE_TIMESTAMP_ROOTFS
- if [ $? -ne 0 ]; then
- SOURCE_DATE_EPOCH=${REPRODUCIBLE_TIMESTAMP_ROOTFS}
- fi
- cd $olddir
+ # The source directory is not necessarily a git repository, so we
+ # specify the git-dir to ensure that git does not query a
+ # repository in any parent directory.
+ SOURCE_DATE_EPOCH=`git --git-dir="${S}/.git" log -1 --pretty=%ct 2>/dev/null || echo "${REPRODUCIBLE_TIMESTAMP_ROOTFS}"`
fi
ts=`LC_ALL=C date -d @$SOURCE_DATE_EPOCH`
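
The same fallback pattern, sketched in Python in case it is useful elsewhere;
the fallback value is an arbitrary placeholder standing in for
REPRODUCIBLE_TIMESTAMP_ROOTFS:

    import subprocess

    def source_date_epoch(srcdir, fallback="1577836800"):
        # --git-dir pins git to srcdir/.git, so a repository in a parent
        # directory is never picked up by accident.
        try:
            out = subprocess.check_output(
                ["git", "--git-dir", srcdir + "/.git", "log", "-1", "--pretty=%ct"],
                stderr=subprocess.DEVNULL)
            return out.decode().strip()
        except (subprocess.CalledProcessError, FileNotFoundError):
            return fallback

    print(source_date_epoch("/path/to/kernel/source"))
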
@@ -508,7 +504,7 @@ sysroot_stage_all () {
:
}
-KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} CC="${KERNEL_CC}" O=${B} olddefconfig || oe_runmake -C ${S} O=${B} CC="${KERNEL_CC}" oldnoconfig"
+KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} CC="${KERNEL_CC}" LD="${KERNEL_LD}" O=${B} olddefconfig || oe_runmake -C ${S} O=${B} CC="${KERNEL_CC}" LD="${KERNEL_LD}" oldnoconfig"
python check_oldest_kernel() {
oldest_kernel = d.getVar('OLDEST_KERNEL')
@@ -570,9 +566,9 @@ RDEPENDS_${KERNEL_PACKAGE_NAME} = "${KERNEL_PACKAGE_NAME}-base"
# Allow machines to override this dependency if kernel image files are
# not wanted in images as standard
RDEPENDS_${KERNEL_PACKAGE_NAME}-base ?= "${KERNEL_PACKAGE_NAME}-image"
-PKG_${KERNEL_PACKAGE_NAME}-image = "${KERNEL_PACKAGE_NAME}-image-${@legitimize_package_name('${KERNEL_VERSION}')}"
+PKG_${KERNEL_PACKAGE_NAME}-image = "${KERNEL_PACKAGE_NAME}-image-${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}"
RDEPENDS_${KERNEL_PACKAGE_NAME}-image += "${@oe.utils.conditional('KERNEL_IMAGETYPE', 'vmlinux', '${KERNEL_PACKAGE_NAME}-vmlinux', '', d)}"
-PKG_${KERNEL_PACKAGE_NAME}-base = "${KERNEL_PACKAGE_NAME}-${@legitimize_package_name('${KERNEL_VERSION}')}"
+PKG_${KERNEL_PACKAGE_NAME}-base = "${KERNEL_PACKAGE_NAME}-${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}"
RPROVIDES_${KERNEL_PACKAGE_NAME}-base += "${KERNEL_PACKAGE_NAME}-${KERNEL_VERSION}"
ALLOW_EMPTY_${KERNEL_PACKAGE_NAME} = "1"
ALLOW_EMPTY_${KERNEL_PACKAGE_NAME}-base = "1"
@@ -613,6 +609,9 @@ do_kernel_link_images() {
if [ -f ../../../vmlinuz.bin ]; then
ln -sf ../../../vmlinuz.bin
fi
+ if [ -f ../../../vmlinux.64 ]; then
+ ln -sf ../../../vmlinux.64
+ fi
}
addtask kernel_link_images after do_compile before do_strip
diff --git a/poky/meta/classes/kernelsrc.bbclass b/poky/meta/classes/kernelsrc.bbclass
index 675d40ec9..a951ba332 100644
--- a/poky/meta/classes/kernelsrc.bbclass
+++ b/poky/meta/classes/kernelsrc.bbclass
@@ -1,7 +1,7 @@
S = "${STAGING_KERNEL_DIR}"
deltask do_fetch
deltask do_unpack
-do_patch[depends] += "virtual/kernel:do_patch"
+do_patch[depends] += "virtual/kernel:do_shared_workdir"
do_patch[noexec] = "1"
do_package[depends] += "virtual/kernel:do_populate_sysroot"
KERNEL_VERSION = "${@get_kernelversion_file("${STAGING_KERNEL_BUILDDIR}")}"
diff --git a/poky/meta/classes/libc-common.bbclass b/poky/meta/classes/libc-common.bbclass
deleted file mode 100644
index 0e351b674..000000000
--- a/poky/meta/classes/libc-common.bbclass
+++ /dev/null
@@ -1,37 +0,0 @@
-do_install() {
- oe_runmake install_root=${D} install
- install -Dm 0644 ${WORKDIR}/etc/ld.so.conf ${D}/${sysconfdir}/ld.so.conf
- install -d ${D}${localedir}
- make -f ${WORKDIR}/generate-supported.mk IN="${S}/localedata/SUPPORTED" OUT="${WORKDIR}/SUPPORTED"
- # get rid of some broken files...
- for i in ${GLIBC_BROKEN_LOCALES}; do
- sed -i "/$i/d" ${WORKDIR}/SUPPORTED
- done
- rm -f ${D}${sysconfdir}/rpc
- rm -rf ${D}${datadir}/zoneinfo
- rm -rf ${D}${libexecdir}/getconf
-}
-
-def get_libc_fpu_setting(bb, d):
- if d.getVar('TARGET_FPU') in [ 'soft', 'ppc-efd' ]:
- return "--without-fp"
- return ""
-
-python populate_packages_prepend () {
- if d.getVar('DEBIAN_NAMES'):
- pkgs = d.getVar('PACKAGES').split()
- bpn = d.getVar('BPN')
- prefix = d.getVar('MLPREFIX') or ""
- # Set the base package...
- d.setVar('PKG_' + prefix + bpn, prefix + 'libc6')
- libcprefix = prefix + bpn + '-'
- for p in pkgs:
- # And all the subpackages.
- if p.startswith(libcprefix):
- renamed = p.replace(bpn, 'libc6', 1)
- d.setVar('PKG_' + p, renamed)
- # For backward compatibility with old -dbg package
- d.appendVar('RPROVIDES_' + libcprefix + 'dbg', ' ' + prefix + 'libc-dbg')
- d.appendVar('RCONFLICTS_' + libcprefix + 'dbg', ' ' + prefix + 'libc-dbg')
- d.appendVar('RREPLACES_' + libcprefix + 'dbg', ' ' + prefix + 'libc-dbg')
-}
diff --git a/poky/meta/classes/libc-package.bbclass b/poky/meta/classes/libc-package.bbclass
index de816bcec..de3b4250c 100644
--- a/poky/meta/classes/libc-package.bbclass
+++ b/poky/meta/classes/libc-package.bbclass
@@ -248,6 +248,7 @@ python package_do_split_gconvs () {
"sh4": " --uint32-align=4 --big-endian ", \
"powerpc": " --uint32-align=4 --big-endian ", \
"powerpc64": " --uint32-align=4 --big-endian ", \
+ "powerpc64le": " --uint32-align=4 --little-endian ", \
"mips": " --uint32-align=4 --big-endian ", \
"mipsisa32r6": " --uint32-align=4 --big-endian ", \
"mips64": " --uint32-align=4 --big-endian ", \
diff --git a/poky/meta/classes/license.bbclass b/poky/meta/classes/license.bbclass
index 648a4d789..f90176d6c 100644
--- a/poky/meta/classes/license.bbclass
+++ b/poky/meta/classes/license.bbclass
@@ -252,7 +252,7 @@ def canonical_license(d, license):
"""
Return the canonical (SPDX) form of the license if available (so GPLv3
becomes GPL-3.0), for the license named 'X+', return canonical form of
- 'X' if availabel and the tailing '+' (so GPLv3+ becomes GPL-3.0+),
+ 'X' if available and the tailing '+' (so GPLv3+ becomes GPL-3.0+),
or the passed license if there is no canonical form.
"""
lic = d.getVarFlag('SPDXLICENSEMAP', license) or ""
@@ -262,10 +262,29 @@ def canonical_license(d, license):
lic += '+'
return lic or license
+def available_licenses(d):
+ """
+ Return the available licenses by searching the directories specified by
+ COMMON_LICENSE_DIR and LICENSE_PATH.
+ """
+ lic_dirs = ((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' +
+ (d.getVar('LICENSE_PATH') or '')).split()
+
+ licenses = []
+ for lic_dir in lic_dirs:
+ licenses += os.listdir(lic_dir)
+
+ licenses = sorted(licenses)
+ return licenses
+
+# Only determine the list of all available licenses once. This assumes that any
+# additions to LICENSE_PATH have been done before this file is parsed.
+AVAILABLE_LICENSES := "${@' '.join(available_licenses(d))}"
+
def expand_wildcard_licenses(d, wildcard_licenses):
"""
- Return actual spdx format license names if wildcard used. We expand
- wildcards from SPDXLICENSEMAP flags and SRC_DISTRIBUTE_LICENSES values.
+ Return actual spdx format license names if wildcards are used. We expand
+ wildcards from SPDXLICENSEMAP flags and AVAILABLE_LICENSES.
"""
import fnmatch
licenses = wildcard_licenses[:]
@@ -274,7 +293,7 @@ def expand_wildcard_licenses(d, wildcard_licenses):
spdxflags = fnmatch.filter(spdxmapkeys, wld_lic)
licenses += [d.getVarFlag('SPDXLICENSEMAP', flag) for flag in spdxflags]
- spdx_lics = (d.getVar('SRC_DISTRIBUTE_LICENSES', False) or '').split()
+ spdx_lics = d.getVar('AVAILABLE_LICENSES').split()
for wld_lic in wildcard_licenses:
licenses += fnmatch.filter(spdx_lics, wld_lic)
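
With AVAILABLE_LICENSES, wildcards now expand against whatever license texts
are actually present on disk. A sketch of the fnmatch step, with an
illustrative subset of licenses:

    import fnmatch

    # Illustrative subset, as if collected from COMMON_LICENSE_DIR.
    available = ["Apache-2.0", "GPL-2.0", "GPL-3.0", "GPL-3.0+", "LGPL-2.1", "MIT"]

    def expand(wildcard_licenses, available):
        licenses = list(wildcard_licenses)
        for wld_lic in wildcard_licenses:
            licenses += fnmatch.filter(available, wld_lic)
        # For this sketch, drop the wildcard patterns themselves afterwards.
        return [lic for lic in licenses if "*" not in lic]

    print(expand(["*GPL-3.0*"], available))  # ['GPL-3.0', 'GPL-3.0+']
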
@@ -291,15 +310,21 @@ def incompatible_pkg_license(d, dont_want_licenses, license):
# Handles an "or" or two license sets provided by
# flattened_licenses(), pick one that works if possible.
def choose_lic_set(a, b):
- return a if all(oe.license.license_ok(canonical_license(d, lic),
+ return a if all(oe.license.license_ok(canonical_license(d, lic),
dont_want_licenses) for lic in a) else b
try:
licenses = oe.license.flattened_licenses(license, choose_lic_set)
except oe.license.LicenseError as exc:
bb.fatal('%s: %s' % (d.getVar('P'), exc))
- return any(not oe.license.license_ok(canonical_license(d, l), \
- dont_want_licenses) for l in licenses)
+
+ incompatible_lic = []
+ for l in licenses:
+ license = canonical_license(d, l)
+ if not oe.license.license_ok(license, dont_want_licenses):
+ incompatible_lic.append(license)
+
+ return sorted(incompatible_lic)
def incompatible_license(d, dont_want_licenses, package=None):
"""
diff --git a/poky/meta/classes/license_image.bbclass b/poky/meta/classes/license_image.bbclass
index b5399b6d9..a8c72da3c 100644
--- a/poky/meta/classes/license_image.bbclass
+++ b/poky/meta/classes/license_image.bbclass
@@ -51,8 +51,9 @@ def write_license_files(d, license_manifest, pkg_dic, rootfs=True):
for pkg in sorted(pkg_dic):
if bad_licenses and pkg not in whitelist:
try:
- if incompatible_pkg_license(d, bad_licenses, pkg_dic[pkg]["LICENSE"]):
- bb.fatal("Package %s has an incompatible license %s and cannot be installed into the image." %(pkg, pkg_dic[pkg]["LICENSE"]))
+ licenses = incompatible_pkg_license(d, bad_licenses, pkg_dic[pkg]["LICENSE"])
+ if licenses:
+ bb.fatal("Package %s cannot be installed into the image because it has incompatible license(s): %s" %(pkg, ' '.join(licenses)))
(pkg_dic[pkg]["LICENSE"], pkg_dic[pkg]["LICENSES"]) = \
oe.license.manifest_licenses(pkg_dic[pkg]["LICENSE"],
bad_licenses, canonical_license, d)
diff --git a/poky/meta/classes/linuxloader.bbclass b/poky/meta/classes/linuxloader.bbclass
index c0fbf2683..ec0e0556d 100644
--- a/poky/meta/classes/linuxloader.bbclass
+++ b/poky/meta/classes/linuxloader.bbclass
@@ -1,27 +1,31 @@
-def get_musl_loader(d):
+def get_musl_loader_arch(d):
import re
- dynamic_loader = None
+ ldso_arch = None
targetarch = d.getVar("TARGET_ARCH")
if targetarch.startswith("microblaze"):
- dynamic_loader = "${base_libdir}/ld-musl-microblaze${@bb.utils.contains('TUNE_FEATURES', 'bigendian', '', 'el' ,d)}.so.1"
+ ldso_arch = "microblaze${@bb.utils.contains('TUNE_FEATURES', 'bigendian', '', 'el' ,d)}"
elif targetarch.startswith("mips"):
- dynamic_loader = "${base_libdir}/ld-musl-mips${ABIEXTENSION}${MIPSPKGSFX_BYTE}${MIPSPKGSFX_R6}${MIPSPKGSFX_ENDIAN}${@['', '-sf'][d.getVar('TARGET_FPU') == 'soft']}.so.1"
+ ldso_arch = "mips${ABIEXTENSION}${MIPSPKGSFX_BYTE}${MIPSPKGSFX_R6}${MIPSPKGSFX_ENDIAN}${@['', '-sf'][d.getVar('TARGET_FPU') == 'soft']}"
elif targetarch == "powerpc":
- dynamic_loader = "${base_libdir}/ld-musl-powerpc${@['', '-sf'][d.getVar('TARGET_FPU') == 'soft']}.so.1"
+ ldso_arch = "powerpc${@['', '-sf'][d.getVar('TARGET_FPU') == 'soft']}"
elif targetarch == "powerpc64":
- dynamic_loader = "${base_libdir}/ld-musl-powerpc64.so.1"
+ ldso_arch = "powerpc64"
elif targetarch == "x86_64":
- dynamic_loader = "${base_libdir}/ld-musl-x86_64.so.1"
+ ldso_arch = "x86_64"
elif re.search("i.86", targetarch):
- dynamic_loader = "${base_libdir}/ld-musl-i386.so.1"
+ ldso_arch = "i386"
elif targetarch.startswith("arm"):
- dynamic_loader = "${base_libdir}/ld-musl-arm${ARMPKGSFX_ENDIAN}${ARMPKGSFX_EABI}.so.1"
+ ldso_arch = "arm${ARMPKGSFX_ENDIAN}${ARMPKGSFX_EABI}"
elif targetarch.startswith("aarch64"):
- dynamic_loader = "${base_libdir}/ld-musl-aarch64${ARMPKGSFX_ENDIAN_64}.so.1"
+ ldso_arch = "aarch64${ARMPKGSFX_ENDIAN_64}"
elif targetarch.startswith("riscv64"):
- dynamic_loader = "${base_libdir}/ld-musl-riscv64${@['', '-sf'][d.getVar('TARGET_FPU') == 'soft']}.so.1"
- return dynamic_loader
+ ldso_arch = "riscv64${@['', '-sf'][d.getVar('TARGET_FPU') == 'soft']}"
+ return ldso_arch
+
+def get_musl_loader(d):
+ import re
+ return "/lib/ld-musl-" + get_musl_loader_arch(d) + ".so.1"
def get_glibc_loader(d):
import re
@@ -41,7 +45,7 @@ def get_glibc_loader(d):
elif re.search("i.86", targetarch):
dynamic_loader = "${base_libdir}/ld-linux.so.2"
elif targetarch == "arm":
- dynamic_loader = "${base_libdir}/ld-linux.so.3"
+ dynamic_loader = "${base_libdir}/ld-linux${@['-armhf', ''][d.getVar('TARGET_FPU') == 'soft']}.so.3"
elif targetarch.startswith("aarch64"):
dynamic_loader = "${base_libdir}/ld-linux-aarch64${ARMPKGSFX_ENDIAN_64}.so.1"
elif targetarch.startswith("riscv64"):
@@ -62,4 +66,5 @@ def get_linuxloader(d):
get_linuxloader[vardepvalue] = "${@get_linuxloader(d)}"
get_musl_loader[vardepvalue] = "${@get_musl_loader(d)}"
+get_musl_loader_arch[vardepvalue] = "${@get_musl_loader_arch(d)}"
get_glibc_loader[vardepvalue] = "${@get_glibc_loader(d)}"
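
The split lets callers reuse just the architecture suffix while the loader
path stays "/lib/ld-musl-<arch>.so.1". In simplified form, with illustrative
mappings only (the real function derives the suffix from TARGET_ARCH,
TUNE_FEATURES and TARGET_FPU):

    def musl_ldso_arch(target_arch, soft_float=False):
        sf = "-sf" if soft_float else ""
        table = {
            "x86_64": "x86_64",
            "i686": "i386",
            "powerpc": "powerpc" + sf,
            "riscv64": "riscv64" + sf,
        }
        return table.get(target_arch, target_arch)

    def musl_loader(target_arch, soft_float=False):
        return "/lib/ld-musl-%s.so.1" % musl_ldso_arch(target_arch, soft_float)

    print(musl_loader("riscv64", soft_float=True))  # /lib/ld-musl-riscv64-sf.so.1
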
diff --git a/poky/meta/classes/manpages.bbclass b/poky/meta/classes/manpages.bbclass
index 50c254763..1e6678064 100644
--- a/poky/meta/classes/manpages.bbclass
+++ b/poky/meta/classes/manpages.bbclass
@@ -18,8 +18,15 @@ pkg_postinst_append_${MAN_PKG} () {
if test -n "$D"; then
if ${@bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'true','false', d)}; then
sed "s:\(\s\)/:\1$D/:g" $D${sysconfdir}/man_db.conf | ${@qemu_run_binary(d, '$D', '${bindir}/mandb')} -C - -u -q $D${mandir}
+ chown -R root:root $D${mandir}
mkdir -p $D${localstatedir}/cache/man
- mv $D${mandir}/index.db $D${localstatedir}/cache/man
+ cd $D${mandir}
+ find . -name index.db | while read index; do
+ mkdir -p $D${localstatedir}/cache/man/$(dirname ${index})
+ mv ${index} $D${localstatedir}/cache/man/${index}
+ chown man:man $D${localstatedir}/cache/man/${index}
+ done
+ cd -
else
$INTERCEPT_DIR/postinst_intercept delay_to_first_boot ${PKG} mlprefix=${MLPREFIX}
fi
diff --git a/poky/meta/classes/meson.bbclass b/poky/meta/classes/meson.bbclass
index efa623407..06034e8b4 100644
--- a/poky/meta/classes/meson.bbclass
+++ b/poky/meta/classes/meson.bbclass
@@ -12,8 +12,9 @@ MESON_SOURCEPATH = "${S}"
def noprefix(var, d):
return d.getVar(var).replace(d.getVar('prefix') + '/', '', 1)
+MESON_BUILDTYPE ?= "plain"
MESONOPTS = " --prefix ${prefix} \
- --buildtype plain \
+ --buildtype ${MESON_BUILDTYPE} \
--bindir ${@noprefix('bindir', d)} \
--sbindir ${@noprefix('sbindir', d)} \
--datadir ${@noprefix('datadir', d)} \
@@ -24,7 +25,8 @@ MESONOPTS = " --prefix ${prefix} \
--infodir ${@noprefix('infodir', d)} \
--sysconfdir ${sysconfdir} \
--localstatedir ${localstatedir} \
- --sharedstatedir ${sharedstatedir} "
+ --sharedstatedir ${sharedstatedir} \
+ --wrap-mode nodownload"
EXTRA_OEMESON_append = " ${PACKAGECONFIG_CONFARGS}"
@@ -43,7 +45,7 @@ def meson_cpu_family(var, d):
arch = d.getVar(var)
if arch == 'powerpc':
return 'ppc'
- elif arch == 'powerpc64':
+ elif arch == 'powerpc64' or arch == 'powerpc64le':
return 'ppc64'
elif arch == 'armeb':
return 'arm'
@@ -55,11 +57,20 @@ def meson_cpu_family(var, d):
return 'mips64'
elif re.match(r"i[3-6]86", arch):
return "x86"
- elif arch == "microblazeel" or arch == "microblazeeb":
+ elif arch == "microblazeel":
return "microblaze"
else:
return arch
+# Map our OS values to what Meson expects:
+# https://mesonbuild.com/Reference-tables.html#operating-system-names
+def meson_operating_system(var, d):
+ os = d.getVar(var)
+ if "mingw" in os:
+ return "windows"
+ else:
+ return os
+
def meson_endian(prefix, d):
arch, os = d.getVar(prefix + "_ARCH"), d.getVar(prefix + "_OS")
sitedata = siteinfo_data_for_machine(arch, os, d)
@@ -80,7 +91,6 @@ c = ${@meson_array('CC', d)}
cpp = ${@meson_array('CXX', d)}
ar = ${@meson_array('AR', d)}
nm = ${@meson_array('NM', d)}
-ld = ${@meson_array('LD', d)}
strip = ${@meson_array('STRIP', d)}
readelf = ${@meson_array('READELF', d)}
pkgconfig = 'pkg-config'
@@ -95,13 +105,13 @@ cpp_link_args = ${@meson_array('LDFLAGS', d)}
gtkdoc_exe_wrapper = '${B}/gtkdoc-qemuwrapper'
[host_machine]
-system = '${HOST_OS}'
+system = '${@meson_operating_system('HOST_OS', d)}'
cpu_family = '${@meson_cpu_family('HOST_ARCH', d)}'
cpu = '${HOST_ARCH}'
endian = '${@meson_endian('HOST', d)}'
[target_machine]
-system = '${TARGET_OS}'
+system = '${@meson_operating_system('TARGET_OS', d)}'
cpu_family = '${@meson_cpu_family('TARGET_ARCH', d)}'
cpu = '${TARGET_ARCH}'
endian = '${@meson_endian('TARGET', d)}'
@@ -111,6 +121,10 @@ EOF
CONFIGURE_FILES = "meson.build"
meson_do_configure() {
+    # Meson requires this to be 'bfd', 'lld' or 'gold' from 0.53 onwards
+ # https://github.com/mesonbuild/meson/commit/ef9aeb188ea2bc7353e59916c18901cde90fa2b3
+ unset LD
+
# Work around "Meson fails if /tmp is mounted with noexec #2972"
mkdir -p "${B}/meson-private/tmp"
export TMPDIR="${B}/meson-private/tmp"
@@ -147,6 +161,15 @@ meson_do_configure_prepend_class-native() {
export PKG_CONFIG="pkg-config-native"
}
+python meson_do_qa_configure() {
+ import re
+ warn_re = re.compile(r"^WARNING: Cross property (.+) is using default value (.+)$", re.MULTILINE)
+ log = open(d.expand("${B}/meson-logs/meson-log.txt")).read()
+ for (prop, value) in warn_re.findall(log):
+ bb.warn("Meson cross property %s used without explicit assignment, defaulting to %s" % (prop, value))
+}
+do_configure[postfuncs] += "meson_do_qa_configure"
+
do_compile[progress] = "outof:^\[(\d+)/(\d+)\]\s+"
meson_do_compile() {
ninja -v ${PARALLEL_MAKE}
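
meson_do_qa_configure surfaces Meson's own warnings about cross properties
that were left at their defaults. A sketch of the extraction against sample
log lines (the property names here are made up):

    import re

    log = ("WARNING: Cross property needs_exe_wrapper is using default value True\n"
           "some unrelated configure output\n"
           "WARNING: Cross property sys_root is using default value /\n")

    warn_re = re.compile(r"^WARNING: Cross property (.+) is using default value (.+)$",
                         re.MULTILINE)
    for prop, value in warn_re.findall(log):
        print("cross property %s defaulted to %s" % (prop, value))
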
diff --git a/poky/meta/classes/mime-xdg.bbclass b/poky/meta/classes/mime-xdg.bbclass
new file mode 100644
index 000000000..642a5b759
--- /dev/null
+++ b/poky/meta/classes/mime-xdg.bbclass
@@ -0,0 +1,74 @@
+#
+# This class creates mime <-> application associations based on entry
+# 'MimeType' in *.desktop files
+#
+
+DEPENDS += "desktop-file-utils"
+PACKAGE_WRITE_DEPS += "desktop-file-utils-native"
+DESKTOPDIR = "${datadir}/applications"
+
+# There are recipes out there installing their .desktop files as absolute
+# symlinks. For us these are dangling and cannot be introspected for "MimeType"
+# easily. By addding package-names to MIME_XDG_PACKAGES, packager can force
+# proper update-desktop-database handling. Note that all introspection is
+# skipped for MIME_XDG_PACKAGES not empty
+MIME_XDG_PACKAGES ?= ""
+
+mime_xdg_postinst() {
+if [ "x$D" != "x" ]; then
+ $INTERCEPT_DIR/postinst_intercept update_desktop_database ${PKG} \
+ mlprefix=${MLPREFIX} \
+ desktop_dir=${DESKTOPDIR}
+else
+ update-desktop-database $D${DESKTOPDIR}
+fi
+}
+
+mime_xdg_postrm() {
+if [ "x$D" != "x" ]; then
+ $INTERCEPT_DIR/postinst_intercept update_desktop_database ${PKG} \
+ mlprefix=${MLPREFIX} \
+ desktop_dir=${DESKTOPDIR}
+else
+ update-desktop-database $D${DESKTOPDIR}
+fi
+}
+
+python populate_packages_append () {
+ packages = d.getVar('PACKAGES').split()
+ pkgdest = d.getVar('PKGDEST')
+ desktop_base = d.getVar('DESKTOPDIR')
+ forced_mime_xdg_pkgs = (d.getVar('MIME_XDG_PACKAGES') or '').split()
+
+ for pkg in packages:
+ desktops_with_mime_found = pkg in forced_mime_xdg_pkgs
+ if d.getVar('MIME_XDG_PACKAGES') == '':
+ desktop_dir = '%s/%s%s' % (pkgdest, pkg, desktop_base)
+ if os.path.exists(desktop_dir):
+ for df in os.listdir(desktop_dir):
+ if df.endswith('.desktop'):
+ try:
+ with open(desktop_dir + '/'+ df, 'r') as f:
+ for line in f.read().split('\n'):
+ if 'MimeType' in line:
+ desktops_with_mime_found = True
+                                        break
+                        except:
+                            bb.warn('Could not open %s. Set MIME_XDG_PACKAGES in recipe or add mime-xdg to INSANE_SKIP.' % (desktop_dir + '/' + df))
+ if desktops_with_mime_found:
+ break
+ if desktops_with_mime_found:
+ bb.note("adding mime-xdg postinst and postrm scripts to %s" % pkg)
+ postinst = d.getVar('pkg_postinst_%s' % pkg)
+ if not postinst:
+ postinst = '#!/bin/sh\n'
+ postinst += d.getVar('mime_xdg_postinst')
+ d.setVar('pkg_postinst_%s' % pkg, postinst)
+ postrm = d.getVar('pkg_postrm_%s' % pkg)
+ if not postrm:
+ postrm = '#!/bin/sh\n'
+ postrm += d.getVar('mime_xdg_postrm')
+ d.setVar('pkg_postrm_%s' % pkg, postrm)
+ bb.note("adding desktop-file-utils dependency to %s" % pkg)
+ d.appendVar('RDEPENDS_' + pkg, " " + d.getVar('MLPREFIX')+"desktop-file-utils")
+}
diff --git a/poky/meta/classes/mime.bbclass b/poky/meta/classes/mime.bbclass
index 6c7b868f7..c9072adf3 100644
--- a/poky/meta/classes/mime.bbclass
+++ b/poky/meta/classes/mime.bbclass
@@ -1,46 +1,47 @@
-DEPENDS += "shared-mime-info"
+#
+# This class is used by recipes installing mime types
+#
+
+DEPENDS += "${@bb.utils.contains('BPN', 'shared-mime-info', '', 'shared-mime-info', d)}"
PACKAGE_WRITE_DEPS += "shared-mime-info-native"
+MIMEDIR = "${datadir}/mime"
mime_postinst() {
-if [ "$1" = configure ]; then
- UPDATEMIMEDB=`which update-mime-database`
- if [ -x "$UPDATEMIMEDB" ] ; then
- echo "Updating MIME database... this may take a while."
- $UPDATEMIMEDB $D${datadir}/mime
- else
- echo "Missing update-mime-database, update of mime database failed!"
- exit 1
- fi
+if [ "x$D" != "x" ]; then
+ $INTERCEPT_DIR/postinst_intercept update_mime_database ${PKG} \
+ mlprefix=${MLPREFIX} \
+ mimedir=${MIMEDIR}
+else
+ echo "Updating MIME database... this may take a while."
+ update-mime-database $D${MIMEDIR}
fi
}
mime_postrm() {
-if [ "$1" = remove ] || [ "$1" = upgrade ]; then
- UPDATEMIMEDB=`which update-mime-database`
- if [ -x "$UPDATEMIMEDB" ] ; then
- echo "Updating MIME database... this may take a while."
- $UPDATEMIMEDB $D${datadir}/mime
- else
- echo "Missing update-mime-database, update of mime database failed!"
- exit 1
- fi
+if [ "x$D" != "x" ]; then
+ $INTERCEPT_DIR/postinst_intercept update_mime_database ${PKG} \
+ mlprefix=${MLPREFIX} \
+ mimedir=${MIMEDIR}
+else
+ echo "Updating MIME database... this may take a while."
+ update-mime-database $D${MIMEDIR}
fi
}
python populate_packages_append () {
- import re
packages = d.getVar('PACKAGES').split()
pkgdest = d.getVar('PKGDEST')
+ mimedir = d.getVar('MIMEDIR')
for pkg in packages:
- mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg)
- mimes = []
- mime_re = re.compile(".*\.xml$")
- if os.path.exists(mime_dir):
- for f in os.listdir(mime_dir):
- if mime_re.match(f):
- mimes.append(f)
- if mimes:
+ mime_packages_dir = '%s/%s%s/packages' % (pkgdest, pkg, mimedir)
+ mimes_types_found = False
+ if os.path.exists(mime_packages_dir):
+ for f in os.listdir(mime_packages_dir):
+ if f.endswith('.xml'):
+ mimes_types_found = True
+ break
+ if mimes_types_found:
bb.note("adding mime postinst and postrm scripts to %s" % pkg)
postinst = d.getVar('pkg_postinst_%s' % pkg)
if not postinst:
@@ -52,6 +53,7 @@ python populate_packages_append () {
postrm = '#!/bin/sh\n'
postrm += d.getVar('mime_postrm')
d.setVar('pkg_postrm_%s' % pkg, postrm)
- bb.note("adding shared-mime-info-data dependency to %s" % pkg)
- d.appendVar('RDEPENDS_' + pkg, " " + d.getVar('MLPREFIX')+"shared-mime-info-data")
+ if pkg != 'shared-mime-info-data':
+ bb.note("adding shared-mime-info-data dependency to %s" % pkg)
+ d.appendVar('RDEPENDS_' + pkg, " " + d.getVar('MLPREFIX')+"shared-mime-info-data")
}
diff --git a/poky/meta/classes/multilib.bbclass b/poky/meta/classes/multilib.bbclass
index 1a9295d36..ee677da1e 100644
--- a/poky/meta/classes/multilib.bbclass
+++ b/poky/meta/classes/multilib.bbclass
@@ -184,11 +184,12 @@ python do_package_qa_multilib() {
for i in values:
if i.startswith('virtual/'):
i = i[len('virtual/'):]
- if (not i.startswith('kernel-module')) and (not i.startswith(mlprefix)) and \
- (not 'cross-canadian' in i) and (not i.startswith("nativesdk-")) and \
- (not i.startswith("rtld")) and (not i.startswith('kernel-vmlinux')) \
- and (not i.startswith("kernel-image")) and (not i.startswith("/")):
+
+ if (not (i.startswith(mlprefix) or i.startswith("kernel-") \
+ or ('cross-canadian' in i) or i.startswith("nativesdk-") \
+ or i.startswith("rtld") or i.startswith("/"))):
candidates.append(i)
+
if len(candidates) > 0:
msg = "%s package %s - suspicious values '%s' in %s" \
% (d.getVar('PN'), pkg, ' '.join(candidates), var)
diff --git a/poky/meta/classes/native.bbclass b/poky/meta/classes/native.bbclass
index d5b6f6af8..08106e345 100644
--- a/poky/meta/classes/native.bbclass
+++ b/poky/meta/classes/native.bbclass
@@ -186,11 +186,11 @@ python do_addto_recipe_sysroot () {
bb.build.exec_func("extend_recipe_sysroot", d)
}
addtask addto_recipe_sysroot after do_populate_sysroot
+do_addto_recipe_sysroot[deptask] = "do_populate_sysroot"
inherit nopackages
do_packagedata[stamp-extra-info] = ""
-do_populate_sysroot[stamp-extra-info] = ""
USE_NLS = "no"
diff --git a/poky/meta/classes/nativesdk.bbclass b/poky/meta/classes/nativesdk.bbclass
index 03135aced..7b7571072 100644
--- a/poky/meta/classes/nativesdk.bbclass
+++ b/poky/meta/classes/nativesdk.bbclass
@@ -107,7 +107,6 @@ python () {
addhandler nativesdk_virtclass_handler
nativesdk_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise"
-do_populate_sysroot[stamp-extra-info] = ""
do_packagedata[stamp-extra-info] = ""
USE_NLS = "${SDKUSE_NLS}"
diff --git a/poky/meta/classes/npm.bbclass b/poky/meta/classes/npm.bbclass
index 4b1f0a39f..068032a1e 100644
--- a/poky/meta/classes/npm.bbclass
+++ b/poky/meta/classes/npm.bbclass
@@ -1,94 +1,307 @@
+# Copyright (C) 2020 Savoir-Faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This bbclass builds and installs an npm package to the target. The package
+# source files should be fetched in the calling recipe by using the SRC_URI
+# variable. The ${S} variable should be updated depending on your fetcher.
+#
+# Usage:
+# SRC_URI = "..."
+# inherit npm
+#
+# Optional variables:
+# NPM_ARCH:
+# Override the auto-generated npm architecture.
+#
+# NPM_INSTALL_DEV:
+# Set to 1 to also install devDependencies.
+
DEPENDS_prepend = "nodejs-native "
RDEPENDS_${PN}_prepend = "nodejs "
-S = "${WORKDIR}/npmpkg"
-def node_pkgname(d):
- bpn = d.getVar('BPN')
- if bpn.startswith("node-"):
- return bpn[5:]
- return bpn
+NPM_INSTALL_DEV ?= "0"
+
+def npm_target_arch_map(target_arch):
+ """Maps arch names to npm arch names"""
+ import re
+ if re.match("p(pc|owerpc)(|64)", target_arch):
+ return "ppc"
+ elif re.match("i.86$", target_arch):
+ return "ia32"
+ elif re.match("x86_64$", target_arch):
+ return "x64"
+ elif re.match("arm64$", target_arch):
+ return "arm"
+ return target_arch
+
+NPM_ARCH ?= "${@npm_target_arch_map(d.getVar("TARGET_ARCH"))}"
+
+NPM_PACKAGE = "${WORKDIR}/npm-package"
+NPM_CACHE = "${WORKDIR}/npm-cache"
+NPM_BUILD = "${WORKDIR}/npm-build"
-NPMPN ?= "${@node_pkgname(d)}"
+def npm_global_configs(d):
+ """Get the npm global configuration"""
+ configs = []
+ # Ensure no network access is done
+ configs.append(("offline", "true"))
+ configs.append(("proxy", "http://invalid"))
+ # Configure the cache directory
+ configs.append(("cache", d.getVar("NPM_CACHE")))
+ return configs
-NPM_INSTALLDIR = "${libdir}/node_modules/${NPMPN}"
+def npm_pack(env, srcdir, workdir):
+ """Run 'npm pack' on a specified directory"""
+ import shlex
+ cmd = "npm pack %s" % shlex.quote(srcdir)
+ configs = [("ignore-scripts", "true")]
+ tarball = env.run(cmd, configs=configs, workdir=workdir).strip("\n")
+ return os.path.join(workdir, tarball)
-# function maps arch names to npm arch names
-def npm_oe_arch_map(target_arch, d):
+python npm_do_configure() {
+ """
+ Step one: configure the npm cache and the main npm package
+
+    All dependencies have been fetched and patched in the source directory.
+    They have to be packed (this removes unneeded files) and added to the npm
+ cache to be available for the next step.
+
+ The main package and its associated manifest file and shrinkwrap file have
+ to be configured to take into account these cached dependencies.
+ """
+ import base64
+ import copy
+ import json
import re
- if re.match('p(pc|owerpc)(|64)', target_arch): return 'ppc'
- elif re.match('i.86$', target_arch): return 'ia32'
- elif re.match('x86_64$', target_arch): return 'x64'
- elif re.match('arm64$', target_arch): return 'arm'
- return target_arch
+ import shlex
+ import tempfile
+ from bb.fetch2.npm import NpmEnvironment
+ from bb.fetch2.npm import npm_unpack
+ from bb.fetch2.npmsw import foreach_dependencies
+ from bb.progress import OutOfProgressHandler
-NPM_ARCH ?= "${@npm_oe_arch_map(d.getVar('TARGET_ARCH'), d)}"
-NPM_INSTALL_DEV ?= "0"
+ bb.utils.remove(d.getVar("NPM_CACHE"), recurse=True)
+ bb.utils.remove(d.getVar("NPM_PACKAGE"), recurse=True)
+
+ env = NpmEnvironment(d, configs=npm_global_configs(d))
+
+ def _npm_cache_add(tarball):
+ """Run 'npm cache add' for a specified tarball"""
+ cmd = "npm cache add %s" % shlex.quote(tarball)
+ env.run(cmd)
+
+ def _npm_integrity(tarball):
+ """Return the npm integrity of a specified tarball"""
+ sha512 = bb.utils.sha512_file(tarball)
+ return "sha512-" + base64.b64encode(bytes.fromhex(sha512)).decode()
+
+ def _npm_version(tarball):
+ """Return the version of a specified tarball"""
+ regex = r"-(\d+\.\d+\.\d+(-.*)?(\+.*)?)\.tgz"
+ return re.search(regex, tarball).group(1)
+
+ def _npmsw_dependency_dict(orig, deptree):
+ """
+ Return the sub dictionary in the 'orig' dictionary corresponding to the
+ 'deptree' dependency tree. This function follows the shrinkwrap file
+ format.
+ """
+ ptr = orig
+ for dep in deptree:
+ if "dependencies" not in ptr:
+ ptr["dependencies"] = {}
+ ptr = ptr["dependencies"]
+ if dep not in ptr:
+ ptr[dep] = {}
+ ptr = ptr[dep]
+ return ptr
+
+ # Manage the manifest file and shrinkwrap files
+ orig_manifest_file = d.expand("${S}/package.json")
+ orig_shrinkwrap_file = d.expand("${S}/npm-shrinkwrap.json")
+ cached_manifest_file = d.expand("${NPM_PACKAGE}/package.json")
+ cached_shrinkwrap_file = d.expand("${NPM_PACKAGE}/npm-shrinkwrap.json")
+
+ with open(orig_manifest_file, "r") as f:
+ orig_manifest = json.load(f)
+
+ cached_manifest = copy.deepcopy(orig_manifest)
+ cached_manifest.pop("dependencies", None)
+ cached_manifest.pop("devDependencies", None)
+
+ with open(orig_shrinkwrap_file, "r") as f:
+ orig_shrinkwrap = json.load(f)
+
+ cached_shrinkwrap = copy.deepcopy(orig_shrinkwrap)
+ cached_shrinkwrap.pop("dependencies", None)
+
+ # Manage the dependencies
+ progress = OutOfProgressHandler(d, r"^(\d+)/(\d+)$")
+ progress_total = 1 # also count the main package
+ progress_done = 0
+
+ def _count_dependency(name, params, deptree):
+ nonlocal progress_total
+ progress_total += 1
+
+ def _cache_dependency(name, params, deptree):
+ destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
+ destsuffix = os.path.join(*destsubdirs)
+ with tempfile.TemporaryDirectory() as tmpdir:
+ # Add the dependency to the npm cache
+ destdir = os.path.join(d.getVar("S"), destsuffix)
+ tarball = npm_pack(env, destdir, tmpdir)
+ _npm_cache_add(tarball)
+ # Add its signature to the cached shrinkwrap
+ dep = _npmsw_dependency_dict(cached_shrinkwrap, deptree)
+ dep["version"] = _npm_version(tarball)
+ dep["integrity"] = _npm_integrity(tarball)
+ if params.get("dev", False):
+ dep["dev"] = True
+ # Display progress
+ nonlocal progress_done
+ progress_done += 1
+ progress.write("%d/%d" % (progress_done, progress_total))
+
+ dev = bb.utils.to_boolean(d.getVar("NPM_INSTALL_DEV"), False)
+ foreach_dependencies(orig_shrinkwrap, _count_dependency, dev)
+ foreach_dependencies(orig_shrinkwrap, _cache_dependency, dev)
+
+ # Configure the main package
+ with tempfile.TemporaryDirectory() as tmpdir:
+ tarball = npm_pack(env, d.getVar("S"), tmpdir)
+ npm_unpack(tarball, d.getVar("NPM_PACKAGE"), d)
+
+ # Configure the cached manifest file and cached shrinkwrap file
+ def _update_manifest(depkey):
+ for name in orig_manifest.get(depkey, {}):
+ version = cached_shrinkwrap["dependencies"][name]["version"]
+ if depkey not in cached_manifest:
+ cached_manifest[depkey] = {}
+ cached_manifest[depkey][name] = version
-npm_do_compile() {
- # Copy in any additionally fetched modules
- if [ -d ${WORKDIR}/node_modules ] ; then
- cp -a ${WORKDIR}/node_modules ${S}/
- fi
- # changing the home directory to the working directory, the .npmrc will
- # be created in this directory
- export HOME=${WORKDIR}
- if [ "${NPM_INSTALL_DEV}" = "1" ]; then
- npm config set dev true
- else
- npm config set dev false
- fi
- npm set cache ${WORKDIR}/npm_cache
- # clear cache before every build
- npm cache clear --force
- # Install pkg into ${S} without going to the registry
- if [ "${NPM_INSTALL_DEV}" = "1" ]; then
- npm --arch=${NPM_ARCH} --target_arch=${NPM_ARCH} --no-registry install
- else
- npm --arch=${NPM_ARCH} --target_arch=${NPM_ARCH} --production --no-registry install
- fi
+ _update_manifest("dependencies")
+
+ if dev:
+ _update_manifest("devDependencies")
+
+ with open(cached_manifest_file, "w") as f:
+ json.dump(cached_manifest, f, indent=2)
+
+ with open(cached_shrinkwrap_file, "w") as f:
+ json.dump(cached_shrinkwrap, f, indent=2)
}
-npm_do_install() {
- # changing the home directory to the working directory, the .npmrc will
- # be created in this directory
- export HOME=${WORKDIR}
- mkdir -p ${D}${libdir}/node_modules
- local NPM_PACKFILE=$(npm pack .)
- npm install --prefix ${D}${prefix} -g --arch=${NPM_ARCH} --target_arch=${NPM_ARCH} --production --no-registry ${NPM_PACKFILE}
- ln -fs node_modules ${D}${libdir}/node
- find ${D}${NPM_INSTALLDIR} -type f \( -name "*.a" -o -name "*.d" -o -name "*.o" \) -delete
- if [ -d ${D}${prefix}/etc ] ; then
- # This will be empty
- rmdir ${D}${prefix}/etc
- fi
+python npm_do_compile() {
+ """
+ Step two: install the npm package
+
+ Use the configured main package and the cached dependencies to run the
+ installation process. The installation is done in a directory which is
+ not the destination directory yet.
+
+ A combination of 'npm pack' and 'npm install' is used to ensure that the
+ installed files are actual copies instead of symbolic links (which is the
+ default npm behavior).
+ """
+ import shlex
+ import tempfile
+ from bb.fetch2.npm import NpmEnvironment
+
+ bb.utils.remove(d.getVar("NPM_BUILD"), recurse=True)
+
+ env = NpmEnvironment(d, configs=npm_global_configs(d))
+
+ dev = bb.utils.to_boolean(d.getVar("NPM_INSTALL_DEV"), False)
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ args = []
+ configs = []
+
+ if dev:
+ configs.append(("also", "development"))
+ else:
+ configs.append(("only", "production"))
+
+ # Report as many logs as possible for debugging purpose
+ configs.append(("loglevel", "silly"))
+
+ # Configure the installation to be done globally in the build directory
+ configs.append(("global", "true"))
+ configs.append(("prefix", d.getVar("NPM_BUILD")))
+
+ # Add node-gyp configuration
+ configs.append(("arch", d.getVar("NPM_ARCH")))
+ configs.append(("release", "true"))
+ sysroot = d.getVar("RECIPE_SYSROOT_NATIVE")
+ nodedir = os.path.join(sysroot, d.getVar("prefix_native").strip("/"))
+ configs.append(("nodedir", nodedir))
+ bindir = os.path.join(sysroot, d.getVar("bindir_native").strip("/"))
+ pythondir = os.path.join(bindir, "python-native", "python")
+ configs.append(("python", pythondir))
+
+ # Add node-pre-gyp configuration
+ args.append(("target_arch", d.getVar("NPM_ARCH")))
+ args.append(("build-from-source", "true"))
+
+ # Pack and install the main package
+ tarball = npm_pack(env, d.getVar("NPM_PACKAGE"), tmpdir)
+ env.run("npm install %s" % shlex.quote(tarball), args=args, configs=configs)
}
-python populate_packages_prepend () {
- instdir = d.expand('${D}${NPM_INSTALLDIR}')
- extrapackages = oe.package.npm_split_package_dirs(instdir)
- pkgnames = extrapackages.keys()
- d.prependVar('PACKAGES', '%s ' % ' '.join(pkgnames))
- for pkgname in pkgnames:
- pkgrelpath, pdata = extrapackages[pkgname]
- pkgpath = '${NPM_INSTALLDIR}/' + pkgrelpath
- # package names can't have underscores but npm packages sometimes use them
- oe_pkg_name = pkgname.replace('_', '-')
- expanded_pkgname = d.expand(oe_pkg_name)
- d.setVar('FILES_%s' % expanded_pkgname, pkgpath)
- if pdata:
- version = pdata.get('version', None)
- if version:
- d.setVar('PKGV_%s' % expanded_pkgname, version)
- description = pdata.get('description', None)
- if description:
- d.setVar('SUMMARY_%s' % expanded_pkgname, description.replace(u"\u2018", "'").replace(u"\u2019", "'"))
- d.appendVar('RDEPENDS_%s' % d.getVar('PN'), ' %s' % ' '.join(pkgnames).replace('_', '-'))
+npm_do_install() {
+ # Step three: final install
+ #
+    # The previous installation has to be filtered to remove some extra files.
+
+ rm -rf ${D}
+
+ # Copy the entire lib and bin directories
+ install -d ${D}/${nonarch_libdir}
+ cp --no-preserve=ownership --recursive ${NPM_BUILD}/lib/. ${D}/${nonarch_libdir}
+
+ if [ -d "${NPM_BUILD}/bin" ]
+ then
+ install -d ${D}/${bindir}
+ cp --no-preserve=ownership --recursive ${NPM_BUILD}/bin/. ${D}/${bindir}
+ fi
+
+ # If the package (or its dependencies) uses node-gyp to build native addons,
+ # object files, static libraries or other temporary files can be hidden in
+ # the lib directory. To reduce the package size and to avoid QA issues
+ # (staticdev with static library files) these files must be removed.
+ local GYP_REGEX=".*/build/Release/[^/]*.node"
+
+ # Remove any node-gyp directory in ${D} to remove temporary build files
+ for GYP_D_FILE in $(find ${D} -regex "${GYP_REGEX}")
+ do
+ local GYP_D_DIR=${GYP_D_FILE%/Release/*}
+
+ rm --recursive --force ${GYP_D_DIR}
+ done
+
+ # Copy only the node-gyp release files
+ for GYP_B_FILE in $(find ${NPM_BUILD} -regex "${GYP_REGEX}")
+ do
+ local GYP_D_FILE=${D}/${prefix}/${GYP_B_FILE#${NPM_BUILD}}
+
+ install -d ${GYP_D_FILE%/*}
+ install -m 755 ${GYP_B_FILE} ${GYP_D_FILE}
+ done
+
+ # Remove the shrinkwrap file which does not need to be packed
+ rm -f ${D}/${nonarch_libdir}/node_modules/*/npm-shrinkwrap.json
+ rm -f ${D}/${nonarch_libdir}/node_modules/@*/*/npm-shrinkwrap.json
+
+ # node(1) is using /usr/lib/node as default include directory and npm(1) is
+ # using /usr/lib/node_modules as install directory. Let's make both happy.
+ ln -fs node_modules ${D}/${nonarch_libdir}/node
}
FILES_${PN} += " \
${bindir} \
- ${libdir}/node \
- ${NPM_INSTALLDIR} \
+ ${nonarch_libdir} \
"
-EXPORT_FUNCTIONS do_compile do_install
+EXPORT_FUNCTIONS do_configure do_compile do_install
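
Two of the helpers above are easy to sanity-check in isolation: the integrity
string is the base64 encoding of the raw sha512 digest in npm's SRI format
(the class uses bb.utils.sha512_file; plain hashlib is equivalent), and the
version is parsed out of the tarball name produced by 'npm pack'. A
standalone sketch:

    import base64
    import hashlib
    import re

    def npm_integrity(tarball):
        # sha512 digest of the tarball, base64-encoded, SRI-style.
        with open(tarball, "rb") as f:
            digest = hashlib.sha512(f.read()).digest()
        return "sha512-" + base64.b64encode(digest).decode()

    def npm_version(tarball):
        # 'npm pack' names tarballs <name>-<semver>.tgz.
        return re.search(r"-(\d+\.\d+\.\d+(-.*)?(\+.*)?)\.tgz", tarball).group(1)

    print(npm_version("example-package-1.2.3.tgz"))       # 1.2.3
    print(npm_version("example-package-1.2.3-rc.1.tgz"))  # 1.2.3-rc.1
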
diff --git a/poky/meta/classes/package.bbclass b/poky/meta/classes/package.bbclass
index e0d6ff670..0b5cf4774 100644
--- a/poky/meta/classes/package.bbclass
+++ b/poky/meta/classes/package.bbclass
@@ -245,6 +245,8 @@ python () {
deps = ""
for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
deps += " %s:do_populate_sysroot" % dep
+ if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
+ deps += ' xz-native:do_populate_sysroot'
d.appendVarFlag('do_package', 'depends', deps)
# shlibs requires any DEPENDS to have already packaged for the *.list files
@@ -416,6 +418,126 @@ def splitdebuginfo(file, dvar, debugdir, debuglibdir, debugappend, debugsrcdir,
return (file, sources)
+def splitstaticdebuginfo(file, dvar, debugstaticdir, debugstaticlibdir, debugstaticappend, debugsrcdir, d):
+ # Unlike the function above, there is no way to split a static library
+    # into two components. So to get similar results we will copy the unmodified
+ # static library (containing the debug symbols) into a new directory.
+ # We will then strip (preserving symbols) the static library in the
+ # typical location.
+ #
+ # return a mapping of files:debugsources
+
+ import stat
+ import shutil
+
+ src = file[len(dvar):]
+ dest = debugstaticlibdir + os.path.dirname(src) + debugstaticdir + "/" + os.path.basename(src) + debugstaticappend
+ debugfile = dvar + dest
+ sources = []
+
+ # Copy the file...
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+ #bb.note("Copy %s -> %s" % (file, debugfile))
+
+ dvar = d.getVar('PKGD')
+
+ newmode = None
+    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
+ origmode = os.stat(file)[stat.ST_MODE]
+ newmode = origmode | stat.S_IWRITE | stat.S_IREAD
+ os.chmod(file, newmode)
+
+ # We need to extract the debug src information here...
+ if debugsrcdir:
+ sources = source_info(file, d)
+
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+
+ # Copy the unmodified item to the debug directory
+ shutil.copy2(file, debugfile)
+
+ if newmode:
+ os.chmod(file, origmode)
+
+ return (file, sources)
+
+def inject_minidebuginfo(file, dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d):
+ # Extract just the symbols from debuginfo into minidebuginfo,
+ # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
+ # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
+
+ import subprocess
+
+ readelf = d.getVar('READELF')
+ nm = d.getVar('NM')
+ objcopy = d.getVar('OBJCOPY')
+
+ minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
+
+ src = file[len(dvar):]
+ dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
+ debugfile = dvar + dest
+ minidebugfile = minidebuginfodir + src + '.minidebug'
+ bb.utils.mkdirhier(os.path.dirname(minidebugfile))
+
+ # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
+ # so skip it.
+ if not os.path.exists(debugfile):
+ bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
+ return
+
+ # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
+ # We will exclude all of these from minidebuginfo to save space.
+ remove_section_names = []
+ for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
+ fields = line.split()
+ if len(fields) < 8:
+ continue
+ name = fields[0]
+ type = fields[1]
+ flags = fields[7]
+ # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
+ if name.startswith('.debug_'):
+ continue
+ if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
+ remove_section_names.append(name)
+
+ # List dynamic symbols in the binary. We can exclude these from minidebuginfo
+ # because they are always present in the binary.
+ dynsyms = set()
+ for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
+ dynsyms.add(line.split()[0])
+
+ # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
+ # These are the ones we want to keep in minidebuginfo.
+ keep_symbols_file = minidebugfile + '.symlist'
+ found_any_symbols = False
+ with open(keep_symbols_file, 'w') as f:
+ for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
+ fields = line.split('|')
+ if len(fields) < 7:
+ continue
+ name = fields[0].strip()
+ type = fields[3].strip()
+ if type == 'FUNC' and name not in dynsyms:
+ f.write('{}\n'.format(name))
+ found_any_symbols = True
+
+ if not found_any_symbols:
+ bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
+ return
+
+ bb.utils.remove(minidebugfile)
+ bb.utils.remove(minidebugfile + '.xz')
+
+ subprocess.check_call([objcopy, '-S'] +
+ ['--remove-section={}'.format(s) for s in remove_section_names] +
+ ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
+
+ subprocess.check_call(['xz', '--keep', minidebugfile])
+
+ subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
+
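
Whether the injection worked can be verified by looking for the
.gnu_debugdata section in the stripped binary; gdb picks the section up
automatically. A sketch, assuming readelf is on PATH and with a hypothetical
binary path:

    import subprocess

    def has_minidebuginfo(binary, readelf="readelf"):
        # The injected section shows up in the section header table.
        out = subprocess.check_output([readelf, "-W", "-S", binary],
                                      universal_newlines=True)
        return ".gnu_debugdata" in out

    print(has_minidebuginfo("/path/to/stripped/binary"))
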
def copydebugsources(debugsrcdir, sources, d):
# The debug src information written out to sourcefile is further processed
# and copied to the destination here.
@@ -492,7 +614,7 @@ def copydebugsources(debugsrcdir, sources, d):
# Package data handling routines
#
-def get_package_mapping (pkg, basepkg, d):
+def get_package_mapping (pkg, basepkg, d, depversions=None):
import oe.packagedata
data = oe.packagedata.read_subpkgdata(pkg, d)
@@ -503,6 +625,14 @@ def get_package_mapping (pkg, basepkg, d):
if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
and data[key] == basepkg:
return pkg
+ if depversions == []:
+ # Avoid returning a mapping if the renamed package rprovides its original name
+ rprovkey = "RPROVIDES_%s" % pkg
+ if rprovkey in data:
+ if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
+ bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
+ return pkg
+ # Do map to rewritten package name
return data[key]
return pkg
@@ -523,8 +653,10 @@ def runtime_mapping_rename (varname, pkg, d):
new_depends = {}
deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
- for depend in deps:
- new_depend = get_package_mapping(depend, pkg, d)
+ for depend, depversions in deps.items():
+ new_depend = get_package_mapping(depend, pkg, d, depversions)
+ if depend != new_depend:
+ bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
new_depends[new_depend] = deps[depend]
d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
@@ -916,25 +1048,37 @@ python split_and_strip_files () {
if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
# Single debug-file-directory style debug info
debugappend = ".debug"
+ debugstaticappend = ""
debugdir = ""
+ debugstaticdir = ""
debuglibdir = "/usr/lib/debug"
+ debugstaticlibdir = "/usr/lib/debug-static"
debugsrcdir = "/usr/src/debug"
elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
# Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
debugappend = ""
+ debugstaticappend = ""
debugdir = "/.debug"
+ debugstaticdir = "/.debug-static"
debuglibdir = ""
+ debugstaticlibdir = ""
debugsrcdir = ""
elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
debugappend = ""
+ debugstaticappend = ""
debugdir = "/.debug"
+ debugstaticdir = "/.debug-static"
debuglibdir = ""
+ debugstaticlibdir = ""
debugsrcdir = "/usr/src/debug"
else:
# Original OE-core, a.k.a. ".debug", style debug info
debugappend = ""
+ debugstaticappend = ""
debugdir = "/.debug"
+ debugstaticdir = "/.debug-static"
debuglibdir = ""
+ debugstaticlibdir = ""
debugsrcdir = "/usr/src/debug"
#
@@ -955,12 +1099,6 @@ python split_and_strip_files () {
for root, dirs, files in cpath.walk(dvar):
for f in files:
file = os.path.join(root, f)
- if file.endswith(".ko") and file.find("/lib/modules/") != -1:
- kernmods.append(file)
- continue
- if oe.package.is_static_lib(file):
- staticlibs.append(file)
- continue
# Skip debug files
if debugappend and file.endswith(debugappend):
@@ -971,6 +1109,13 @@ python split_and_strip_files () {
if file in skipfiles:
continue
+ if file.endswith(".ko") and file.find("/lib/modules/") != -1:
+ kernmods.append(file)
+ continue
+ if oe.package.is_static_lib(file):
+ staticlibs.append(file)
+ continue
+
try:
ltarget = cpath.realpath(file, dvar, False)
s = cpath.lstat(ltarget)
@@ -1050,8 +1195,11 @@ python split_and_strip_files () {
results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d))
if debugsrcdir and not targetos.startswith("mingw"):
- for file in staticlibs:
- results.extend(source_info(file, d, fatal=False))
+ if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
+ results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, debugstaticdir, debugstaticlibdir, debugstaticappend, debugsrcdir, d))
+ else:
+ for file in staticlibs:
+ results.append( (file,source_info(file, d)) )
sources = set()
for r in results:
@@ -1120,9 +1268,17 @@ python split_and_strip_files () {
sfiles.append((file, elf_file, strip))
for f in kernmods:
sfiles.append((f, 16, strip))
+ if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
+ for f in staticlibs:
+ sfiles.append((f, 16, strip))
oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
+ # Build "minidebuginfo" and reinject it back into the stripped binaries
+ if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
+ oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
+ extraargs=(dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d))
+
#
# End of strip
#
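
inject_minidebuginfo itself is not shown in this hunk; for orientation, the MiniDebugInfo convention it implements (as described in the GDB documentation) keeps an xz-compressed, symbols-only ELF inside a .gnu_debugdata section of the stripped binary. A rough sketch of that general recipe, not the patch's actual implementation (a real one also filters which symbols are kept):

import subprocess

def add_minidebuginfo(binary):
    mini = binary + ".mini"
    # Keep only the debug/symbol data in a side file...
    subprocess.check_call(["objcopy", "--only-keep-debug", binary, mini])
    # ...compress it...
    subprocess.check_call(["xz", "--force", mini])   # writes mini + ".xz"
    # ...and attach it to the stripped binary as .gnu_debugdata.
    subprocess.check_call(["objcopy", "--add-section",
                           ".gnu_debugdata=%s.xz" % mini, binary])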
@@ -1187,7 +1343,7 @@ python populate_packages () {
dir = os.sep
for f in (files + dirs):
path = "." + os.path.join(dir, f)
- if "/.debug/" in path or path.endswith("/.debug"):
+ if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
debug.append(path)
for pkg in packages:
@@ -1263,8 +1419,9 @@ python populate_packages () {
# Handle LICENSE_EXCLUSION
package_list = []
for pkg in packages:
- if d.getVar('LICENSE_EXCLUSION-' + pkg):
- msg = "%s has an incompatible license. Excluding from packaging." % pkg
+ licenses = d.getVar('LICENSE_EXCLUSION-' + pkg)
+ if licenses:
+ msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
package_qa_handle_error("incompatible-license", msg, d)
else:
package_list.append(pkg)
@@ -2108,10 +2265,12 @@ python package_depchains() {
# iteration, we need to list them here:
PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS"
-def gen_packagevar(d):
+def gen_packagevar(d, pkgvars="PACKAGEVARS"):
ret = []
pkgs = (d.getVar("PACKAGES") or "").split()
- vars = (d.getVar("PACKAGEVARS") or "").split()
+ vars = (d.getVar(pkgvars) or "").split()
+ for v in vars:
+ ret.append(v)
for p in pkgs:
for v in vars:
ret.append(v + "_" + p)
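
A quick illustration of what the extended helper yields, using a stub in place of the datastore (values invented; the function body is condensed to the lines shown above):

class StubData:
    def __init__(self, values): self.values = values
    def getVar(self, name): return self.values.get(name)

def gen_packagevar(d, pkgvars="PACKAGEVARS"):
    ret = []
    pkgs = (d.getVar("PACKAGES") or "").split()
    vars = (d.getVar(pkgvars) or "").split()
    ret.extend(vars)                      # base variables first...
    for p in pkgs:
        for v in vars:
            ret.append(v + "_" + p)       # ...then the per-package forms
    return " ".join(ret)

d = StubData({"PACKAGES": "foo foo-dev", "IPKEXTRAVARS": "PRIORITY MAINTAINER"})
print(gen_packagevar(d, "IPKEXTRAVARS"))
# PRIORITY MAINTAINER PRIORITY_foo MAINTAINER_foo PRIORITY_foo-dev MAINTAINER_foo-dev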
diff --git a/poky/meta/classes/package_ipk.bbclass b/poky/meta/classes/package_ipk.bbclass
index 4f2397703..c008559e4 100644
--- a/poky/meta/classes/package_ipk.bbclass
+++ b/poky/meta/classes/package_ipk.bbclass
@@ -45,6 +45,7 @@ def ipk_write_pkg(pkg, d):
import subprocess
import textwrap
import collections
+ import glob
def cleanupcontrol(root):
for p in ['CONTROL', 'DEBIAN']:
@@ -101,8 +102,7 @@ def ipk_write_pkg(pkg, d):
bb.utils.mkdirhier(pkgoutdir)
os.chdir(root)
cleanupcontrol(root)
- from glob import glob
- g = glob('*')
+ g = glob.glob('*')
if not g and localdata.getVar('ALLOW_EMPTY', False) != "1":
bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
return
@@ -237,6 +237,10 @@ def ipk_write_pkg(pkg, d):
cleanupcontrol(root)
bb.utils.unlockfile(lf)
+# Have to list any variables referenced as X_<pkg> that aren't in pkgdata here
+IPKEXTRAVARS = "PRIORITY MAINTAINER PACKAGE_ARCH HOMEPAGE"
+ipk_write_pkg[vardeps] += "${@gen_packagevar(d, 'IPKEXTRAVARS')}"
+
# Otherwise allarch packages may change depending on override configuration
ipk_write_pkg[vardepsexclude] = "OVERRIDES"
diff --git a/poky/meta/classes/patch.bbclass b/poky/meta/classes/patch.bbclass
index cd241f1c8..25ec089ae 100644
--- a/poky/meta/classes/patch.bbclass
+++ b/poky/meta/classes/patch.bbclass
@@ -5,6 +5,13 @@ QUILTRCFILE ?= "${STAGING_ETCDIR_NATIVE}/quiltrc"
PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_sysroot"
+# There is a bug in patch 2.7.3 and earlier where index lines
+# in patches can change file modes when they shouldn't:
+# http://git.savannah.gnu.org/cgit/patch.git/patch/?id=82b800c9552a088a241457948219d25ce0a407a4
+# This leaks into debug sources in particular. Add the dependency
+# to target recipes to avoid this problem until we can rely on 2.7.4 or later.
+PATCHDEPENDENCY_append_class-target = " patch-replacement-native:do_populate_sysroot"
+
PATCH_GIT_USER_NAME ?= "OpenEmbedded"
PATCH_GIT_USER_EMAIL ?= "oe.patch@oe"
diff --git a/poky/meta/classes/populate_sdk_base.bbclass b/poky/meta/classes/populate_sdk_base.bbclass
index d03465b6f..f85c3b9f6 100644
--- a/poky/meta/classes/populate_sdk_base.bbclass
+++ b/poky/meta/classes/populate_sdk_base.bbclass
@@ -21,6 +21,7 @@ def complementary_globs(featurevar, d):
SDKIMAGE_FEATURES ??= "dev-pkgs dbg-pkgs src-pkgs ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'doc-pkgs', '', d)}"
SDKIMAGE_INSTALL_COMPLEMENTARY = '${@complementary_globs("SDKIMAGE_FEATURES", d)}'
+SDKIMAGE_INSTALL_COMPLEMENTARY[vardeps] += "SDKIMAGE_FEATURES"
PACKAGE_ARCHS_append_task-populate-sdk = " sdk-provides-dummy-target"
SDK_PACKAGE_ARCHS += "sdk-provides-dummy-${SDKPKGSUFFIX}"
@@ -48,6 +49,8 @@ TOOLCHAIN_OUTPUTNAME ?= "${SDK_NAME}-toolchain-${SDK_VERSION}"
# Default archived SDK's suffix
SDK_ARCHIVE_TYPE ?= "tar.xz"
+SDK_XZ_COMPRESSION_LEVEL ?= "-9"
+SDK_XZ_OPTIONS ?= "${XZ_DEFAULTS} ${SDK_XZ_COMPRESSION_LEVEL}"
# To support different sdk types according to SDK_ARCHIVE_TYPE; zip and tar.xz are currently supported
python () {
@@ -58,7 +61,7 @@ python () {
d.setVar('SDK_ARCHIVE_CMD', 'cd ${SDK_OUTPUT}/${SDKPATH}; zip -r ${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.${SDK_ARCHIVE_TYPE} .')
else:
d.setVar('SDK_ARCHIVE_DEPENDS', 'xz-native')
- d.setVar('SDK_ARCHIVE_CMD', 'cd ${SDK_OUTPUT}/${SDKPATH}; tar ${SDKTAROPTS} -cf - . | xz -T 0 -9 > ${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.${SDK_ARCHIVE_TYPE}')
+ d.setVar('SDK_ARCHIVE_CMD', 'cd ${SDK_OUTPUT}/${SDKPATH}; tar ${SDKTAROPTS} -cf - . | xz ${SDK_XZ_OPTIONS} > ${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.${SDK_ARCHIVE_TYPE}')
}
SDK_RDEPENDS = "${TOOLCHAIN_TARGET_TASK} ${TOOLCHAIN_HOST_TASK}"
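
Because the compression settings are now ordinary variables, they can be tuned from a distro or local configuration without patching the class; for example (illustrative overrides, not part of the patch):

SDK_XZ_COMPRESSION_LEVEL = "-6"
# or replace the options wholesale, e.g. to enable extreme mode:
SDK_XZ_OPTIONS = "${XZ_DEFAULTS} -9e"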
@@ -182,6 +185,11 @@ fakeroot create_sdk_files() {
# Escape special characters like '+' and '.' in the SDKPATH
escaped_sdkpath=$(echo ${SDKPATH} |sed -e "s:[\+\.]:\\\\\\\\\0:g")
sed -i -e "s:##DEFAULT_INSTALL_DIR##:$escaped_sdkpath:" ${SDK_OUTPUT}/${SDKPATH}/relocate_sdk.py
+
+ mkdir -p ${SDK_OUTPUT}/${SDKPATHNATIVE}${sysconfdir}/
+ echo '${SDKPATHNATIVE}${libdir_nativesdk}
+${SDKPATHNATIVE}${base_libdir_nativesdk}
+include /etc/ld.so.conf' > ${SDK_OUTPUT}/${SDKPATHNATIVE}${sysconfdir}/ld.so.conf
}
python check_sdk_sysroots() {
diff --git a/poky/meta/classes/populate_sdk_ext.bbclass b/poky/meta/classes/populate_sdk_ext.bbclass
index 05cfc1cc1..9f26cfc13 100644
--- a/poky/meta/classes/populate_sdk_ext.bbclass
+++ b/poky/meta/classes/populate_sdk_ext.bbclass
@@ -124,7 +124,7 @@ SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME') or d.getVar('DISTR
def clean_esdk_builddir(d, sdkbasepath):
"""Clean up traces of the fake build for create_filtered_tasklist()"""
import shutil
- cleanpaths = 'cache conf/sanity_info tmp'.split()
+ cleanpaths = ['cache', 'tmp']
for pth in cleanpaths:
fullpth = os.path.join(sdkbasepath, pth)
if os.path.isdir(fullpth):
@@ -179,7 +179,9 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
# will effectively do
clean_esdk_builddir(d, sdkbasepath)
finally:
- os.replace(sdkbasepath + '/conf/local.conf.bak', sdkbasepath + '/conf/local.conf')
+ localconf = sdkbasepath + '/conf/local.conf'
+ if os.path.exists(localconf + '.bak'):
+ os.replace(localconf + '.bak', localconf)
python copy_buildsystem () {
import re
@@ -386,9 +388,13 @@ python copy_buildsystem () {
bb.utils.mkdirhier(os.path.join(baseoutpath, 'cache'))
shutil.copyfile(builddir + '/cache/bb_unihashes.dat', baseoutpath + '/cache/bb_unihashes.dat')
- # Write a templateconf.cfg
- with open(baseoutpath + '/conf/templateconf.cfg', 'w') as f:
- f.write('meta/conf\n')
+ # Use the templateconf.cfg file from builddir if it exists
+ if os.path.exists(builddir + '/conf/templateconf.cfg'):
+ shutil.copyfile(builddir + '/conf/templateconf.cfg', baseoutpath + '/conf/templateconf.cfg')
+ else:
+ # Write a templateconf.cfg
+ with open(baseoutpath + '/conf/templateconf.cfg', 'w') as f:
+ f.write('meta/conf\n')
# Ensure any variables set from the external environment (by way of
# BB_ENV_EXTRAWHITE) are set in the SDK's configuration
@@ -611,8 +617,8 @@ sdk_ext_preinst() {
exit 1
fi
# The relocation script used by buildtools installer requires python
- if ! command -v python > /dev/null; then
- echo "ERROR: The installer requires python, please install it first"
+ if ! command -v python3 > /dev/null; then
+ echo "ERROR: The installer requires python3, please install it first"
exit 1
fi
missing_utils=""
diff --git a/poky/meta/classes/python-dir.bbclass b/poky/meta/classes/python-dir.bbclass
deleted file mode 100644
index a11dc350b..000000000
--- a/poky/meta/classes/python-dir.bbclass
+++ /dev/null
@@ -1,5 +0,0 @@
-PYTHON_BASEVERSION = "2.7"
-PYTHON_ABI = ""
-PYTHON_DIR = "python${PYTHON_BASEVERSION}"
-PYTHON_PN = "python"
-PYTHON_SITEPACKAGES_DIR = "${libdir}/${PYTHON_DIR}/site-packages"
diff --git a/poky/meta/classes/python3-dir.bbclass b/poky/meta/classes/python3-dir.bbclass
index 7dd130bad..036d7140d 100644
--- a/poky/meta/classes/python3-dir.bbclass
+++ b/poky/meta/classes/python3-dir.bbclass
@@ -1,5 +1,5 @@
-PYTHON_BASEVERSION = "3.7"
-PYTHON_ABI = "m"
+PYTHON_BASEVERSION = "3.8"
+PYTHON_ABI = ""
PYTHON_DIR = "python${PYTHON_BASEVERSION}"
PYTHON_PN = "python3"
PYTHON_SITEPACKAGES_DIR = "${libdir}/${PYTHON_DIR}/site-packages"
diff --git a/poky/meta/classes/pythonnative.bbclass b/poky/meta/classes/pythonnative.bbclass
deleted file mode 100644
index 0e9019d1e..000000000
--- a/poky/meta/classes/pythonnative.bbclass
+++ /dev/null
@@ -1,27 +0,0 @@
-
-inherit python-dir
-
-PYTHON="${STAGING_BINDIR_NATIVE}/python-native/python"
-# PYTHON_EXECUTABLE is used by cmake
-PYTHON_EXECUTABLE="${PYTHON}"
-EXTRANATIVEPATH += "python-native"
-DEPENDS_append = " python-native "
-
-# python-config and other scripts are using distutils modules
-# which we patch to access these variables
-export STAGING_INCDIR
-export STAGING_LIBDIR
-
-# Packages can use
-# find_package(PythonInterp REQUIRED)
-# find_package(PythonLibs REQUIRED)
-# which ends up using libs/includes from build host
-# Therefore pre-empt that effort
-export PYTHON_LIBRARY="${STAGING_LIBDIR}/lib${PYTHON_DIR}${PYTHON_ABI}.so"
-export PYTHON_INCLUDE_DIR="${STAGING_INCDIR}/${PYTHON_DIR}${PYTHON_ABI}"
-
-# suppress host user's site-packages dirs.
-export PYTHONNOUSERSITE = "1"
-
-# autoconf macros will use their internal default preference otherwise
-export PYTHON
diff --git a/poky/meta/classes/qemu.bbclass b/poky/meta/classes/qemu.bbclass
index f5c578012..55bdff816 100644
--- a/poky/meta/classes/qemu.bbclass
+++ b/poky/meta/classes/qemu.bbclass
@@ -16,6 +16,8 @@ def qemu_target_binary(data):
target_arch = "ppc"
elif target_arch == "powerpc64":
target_arch = "ppc64"
+ elif target_arch == "powerpc64le":
+ target_arch = "ppc64le"
return "qemu-" + target_arch
@@ -62,3 +64,4 @@ QEMU_EXTRAOPTIONS_ppc64e5500 = " -cpu e500mc"
QEMU_EXTRAOPTIONS_ppce6500 = " -cpu e500mc"
QEMU_EXTRAOPTIONS_ppc64e6500 = " -cpu e500mc"
QEMU_EXTRAOPTIONS_ppc7400 = " -cpu 7400"
+QEMU_EXTRAOPTIONS_powerpc64le = " -cpu POWER8"
diff --git a/poky/meta/classes/qemuboot.bbclass b/poky/meta/classes/qemuboot.bbclass
index 15a9e63f2..3162e7a8e 100644
--- a/poky/meta/classes/qemuboot.bbclass
+++ b/poky/meta/classes/qemuboot.bbclass
@@ -36,6 +36,9 @@
# Note, runqemu will replace @MAC@ with a predefined mac, you can set
# a custom one, but that may cause conflicts when multiple qemus are
# running on the same host.
+# Note: If more than one interface of type -device virtio-net-device gets added,
+# QB_NETWORK_DEVICE_prepend might be used, since Qemu enumerates the eth*
+# devices in reverse order to -device arguments.
#
# QB_TAP_OPT: network option for 'tap' mode, e.g.,
# "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no"
@@ -43,6 +46,15 @@
#
# QB_SLIRP_OPT: network option for SLIRP mode, e.g., -netdev user,id=net0"
#
+# QB_CMDLINE_IP_SLIRP: If QB_NETWORK_DEVICE adds more than one network interface to qemu, usually the
+# ip= kernel command line argument needs to be changed accordingly. Details are documented
+# in the kernel documentation https://www.kernel.org/doc/Documentation/filesystems/nfs/nfsroot.txt
+# Example to configure only the first interface: "ip=eth0:dhcp"
+# QB_CMDLINE_IP_TAP: This parameter is similar to the QB_CMDLINE_IP_SLIRP parameter. Since the tap interface requires
+# static IP configuration, the @CLIENT@ and @GATEWAY@ placeholders are replaced by the IP and the gateway
+# address of the qemu guest by runqemu.
+# Example: "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0::eth0"
+#
# QB_ROOTFS_OPT: used as rootfs, e.g.,
# "-drive id=disk0,file=@ROOTFS@,if=none,format=raw -device virtio-blk-device,drive=disk0"
# Note, runqemu will replace "@ROOTFS@" with the one which is used, such as core-image-minimal-qemuarm64.ext4.
@@ -53,6 +65,10 @@
# " -device virtio-serial-device -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1 -device virtconsole,chardev=virtcon"
# Note, runqemu will replace "@PORT@" with the port number which is used.
#
+# QB_ROOTFS_EXTRA_OPT: extra options to be appended to the rootfs device in case there is none specified by QB_ROOTFS_OPT.
+# Can be used to automatically determine the image from the other variables
+# but define things like 'bootindex' when booting from EFI or 'readonly' when using squashfs
+# without the need to specify a dedicated qemu configuration
# Usage:
# IMAGE_CLASSES += "qemuboot"
# See "runqemu help" for more info
@@ -63,13 +79,15 @@ QB_DEFAULT_KERNEL ?= "${KERNEL_IMAGETYPE}"
QB_DEFAULT_FSTYPE ?= "ext4"
QB_OPT_APPEND ?= "-show-cursor"
QB_NETWORK_DEVICE ?= "-device virtio-net-pci,netdev=net0,mac=@MAC@"
+QB_CMDLINE_IP_SLIRP ?= "ip=dhcp"
+QB_CMDLINE_IP_TAP ?= "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0"
+QB_ROOTFS_EXTRA_OPT ?= ""
# This should be kept aligned with ROOT_VM
QB_DRIVE_TYPE ?= "/dev/sd"
# Create qemuboot.conf
addtask do_write_qemuboot_conf after do_rootfs before do_image
-IMGDEPLOYDIR ?= "${WORKDIR}/deploy-${PN}-image-complete"
def qemuboot_vars(d):
build_vars = ['MACHINE', 'TUNE_ARCH', 'DEPLOY_DIR_IMAGE',
diff --git a/poky/meta/classes/reproducible_build.bbclass b/poky/meta/classes/reproducible_build.bbclass
index 39b6e40ca..8da40f656 100644
--- a/poky/meta/classes/reproducible_build.bbclass
+++ b/poky/meta/classes/reproducible_build.bbclass
@@ -44,10 +44,12 @@ SDE_DEPLOYDIR = "${WORKDIR}/deploy-source-date-epoch"
SSTATETASKS += "do_deploy_source_date_epoch"
do_deploy_source_date_epoch () {
- echo "Deploying SDE to ${SDE_DIR}."
mkdir -p ${SDE_DEPLOYDIR}
if [ -e ${SDE_FILE} ]; then
+ echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}."
cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
+ else
+ echo "${SDE_FILE} not found!"
fi
}
@@ -56,7 +58,11 @@ python do_deploy_source_date_epoch_setscene () {
bb.utils.mkdirhier(d.getVar('SDE_DIR'))
sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt')
if os.path.exists(sde_file):
- os.rename(sde_file, d.getVar('SDE_FILE'))
+ target = d.getVar('SDE_FILE')
+ bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target))
+ os.rename(sde_file, target)
+ else:
+ bb.debug(1, "%s not found!" % sde_file)
}
do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}"
@@ -144,11 +150,12 @@ def fixed_source_date_epoch():
bb.debug(1, "No tarball or git repo found to determine SOURCE_DATE_EPOCH")
return 0
-python do_create_source_date_epoch_stamp() {
+python create_source_date_epoch_stamp() {
epochfile = d.getVar('SDE_FILE')
+ # If it exists, we need to regenerate it as the sources may have changed
if os.path.isfile(epochfile):
- bb.debug(1, "Reusing SOURCE_DATE_EPOCH from: %s" % epochfile)
- return
+ bb.debug(1, "Deleting existing SOURCE_DATE_EPOCH from: %s" % epochfile)
+ os.remove(epochfile)
sourcedir = d.getVar('S')
source_date_epoch = (
@@ -164,16 +171,32 @@ python do_create_source_date_epoch_stamp() {
f.write(str(source_date_epoch))
}
+def get_source_date_epoch_value(d):
+ cached = d.getVar('__CACHED_SOURCE_DATE_EPOCH')
+ if cached:
+ return cached
+
+ epochfile = d.getVar('SDE_FILE')
+ source_date_epoch = 0
+ if os.path.isfile(epochfile):
+ with open(epochfile, 'r') as f:
+ s = f.read()
+ try:
+ source_date_epoch = int(s)
+ except ValueError:
+ bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to 0" % s)
+ source_date_epoch = 0
+ bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
+ else:
+ bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, source_date_epoch))
+
+ d.setVar('__CACHED_SOURCE_DATE_EPOCH', str(source_date_epoch))
+ return str(source_date_epoch)
+
+export SOURCE_DATE_EPOCH ?= "${@get_source_date_epoch_value(d)}"
BB_HASHBASE_WHITELIST += "SOURCE_DATE_EPOCH"
python () {
if d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1':
- d.appendVarFlag("do_unpack", "postfuncs", " do_create_source_date_epoch_stamp")
- epochfile = d.getVar('SDE_FILE')
- source_date_epoch = "0"
- if os.path.isfile(epochfile):
- with open(epochfile, 'r') as f:
- source_date_epoch = f.read()
- bb.debug(1, "SOURCE_DATE_EPOCH: %s" % source_date_epoch)
- d.setVar('SOURCE_DATE_EPOCH', source_date_epoch)
+ d.appendVarFlag("do_unpack", "postfuncs", " create_source_date_epoch_stamp")
}
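
The net effect is that SOURCE_DATE_EPOCH is now read lazily from SDE_FILE and memoized in the datastore rather than set once at unpack time. The same read-with-fallback pattern as a standalone sketch (file name illustrative):

import os

_cache = {}

def read_epoch(epochfile):
    # Default to 0 when the file is missing or unparseable, and memoize.
    if epochfile in _cache:
        return _cache[epochfile]
    epoch = 0
    if os.path.isfile(epochfile):
        try:
            with open(epochfile) as f:
                epoch = int(f.read())
        except ValueError:
            epoch = 0
    _cache[epochfile] = epoch
    return epoch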
diff --git a/poky/meta/classes/reproducible_build_simple.bbclass b/poky/meta/classes/reproducible_build_simple.bbclass
index 8a60deef3..393372993 100644
--- a/poky/meta/classes/reproducible_build_simple.bbclass
+++ b/poky/meta/classes/reproducible_build_simple.bbclass
@@ -7,4 +7,3 @@ export PERL_HASH_SEED = "0"
export SOURCE_DATE_EPOCH ??= "1520598896"
REPRODUCIBLE_TIMESTAMP_ROOTFS ??= "1520598896"
-
diff --git a/poky/meta/classes/sanity.bbclass b/poky/meta/classes/sanity.bbclass
index 63ab6cf3d..9e8710173 100644
--- a/poky/meta/classes/sanity.bbclass
+++ b/poky/meta/classes/sanity.bbclass
@@ -2,7 +2,7 @@
# Sanity check the users setup for common misconfigurations
#
-SANITY_REQUIRED_UTILITIES ?= "patch diffstat makeinfo git bzip2 tar \
+SANITY_REQUIRED_UTILITIES ?= "patch diffstat git bzip2 tar \
gzip gawk chrpath wget cpio perl file which"
def bblayers_conf_file(d):
@@ -511,14 +511,38 @@ def check_make_version(sanity_data):
return None
-# Check if we're running on WSL (Windows Subsystem for Linux). Its known not to
-# work but we should tell the user that upfront.
+# Check if we're running on WSL (Windows Subsystem for Linux).
+# WSLv1 is known not to work, but WSLv2 should work properly as
+# long as the VHDX file is optimized often; let the user know
+# upfront.
+# More information on installing WSLv2 at:
+# https://docs.microsoft.com/en-us/windows/wsl/wsl2-install
def check_wsl(d):
with open("/proc/version", "r") as f:
verdata = f.readlines()
for l in verdata:
if "Microsoft" in l:
- return "OpenEmbedded doesn't work under WSL at this time, sorry"
+ return "OpenEmbedded doesn't work under WSLv1, please upgrade to WSLv2 if you want to run builds on Windows"
+ elif "microsoft" in l:
+ bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space")
+ return None
+
+# Require at least gcc version 5.0.
+#
+# This can be fixed on CentOS-7 with devtoolset-6+
+# https://www.softwarecollections.org/en/scls/rhscl/devtoolset-6/
+#
+# A less invasive fix is with scripts/install-buildtools (or with user
+# built buildtools-extended-tarball)
+#
+def check_gcc_version(sanity_data):
+ from distutils.version import LooseVersion
+ import subprocess
+
+ build_cc, version = oe.utils.get_host_compiler_version(sanity_data)
+ if build_cc.strip() == "gcc":
+ if LooseVersion(version) < LooseVersion("5.0"):
+ return "Your version of gcc is older than 5.0 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n"
return None
# Tar version 1.24 and onwards handle overwriting symlinks correctly
@@ -532,10 +556,8 @@ def check_tar_version(sanity_data):
except subprocess.CalledProcessError as e:
return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output)
version = result.split()[3]
- if LooseVersion(version) < LooseVersion("1.24"):
- return "Your version of tar is older than 1.24 and has bugs which will break builds. Please install a newer version of tar (1.28+).\n"
if LooseVersion(version) < LooseVersion("1.28"):
- return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the projects buildtools-tarball from our last release).\n"
+ return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n"
return None
# We use git parameters and functionality only found in 1.7.8 or later
@@ -634,6 +656,7 @@ def check_sanity_version_change(status, d):
except ImportError as e:
status.addresult('Your Python 3 is not a full install. Please install the module %s (see the Getting Started guide for further information).\n' % e.name)
+ status.addresult(check_gcc_version(d))
status.addresult(check_make_version(d))
status.addresult(check_patch_version(d))
status.addresult(check_tar_version(d))
@@ -806,7 +829,7 @@ def check_sanity_everybuild(status, d):
# If SDK_VENDOR looks like "-my-sdk" then the triples are badly formed so fail early
sdkvendor = d.getVar("SDK_VENDOR")
if not (sdkvendor.startswith("-") and sdkvendor.count("-") == 1):
- status.addresult("SDK_VENDOR should be of the form '-foosdk' with a single dash\n")
+ status.addresult("SDK_VENDOR should be of the form '-foosdk' with a single dash; found '%s'\n" % sdkvendor)
check_supported_distro(d)
@@ -929,7 +952,7 @@ def check_sanity(sanity_data):
last_tmpdir = ""
last_sstate_dir = ""
last_nativelsbstr = ""
- sanityverfile = sanity_data.expand("${TOPDIR}/conf/sanity_info")
+ sanityverfile = sanity_data.expand("${TOPDIR}/cache/sanity_info")
if os.path.exists(sanityverfile):
with open(sanityverfile, 'r') as f:
for line in f:
diff --git a/poky/meta/classes/setuptools.bbclass b/poky/meta/classes/setuptools.bbclass
deleted file mode 100644
index a923ea3c4..000000000
--- a/poky/meta/classes/setuptools.bbclass
+++ /dev/null
@@ -1,3 +0,0 @@
-inherit distutils
-
-DEPENDS += "python-setuptools-native"
diff --git a/poky/meta/classes/siteinfo.bbclass b/poky/meta/classes/siteinfo.bbclass
index 411e70478..1a048c053 100644
--- a/poky/meta/classes/siteinfo.bbclass
+++ b/poky/meta/classes/siteinfo.bbclass
@@ -35,7 +35,6 @@ def siteinfo_data_for_machine(arch, os, d):
"lm32": "endian-big bit-32",
"m68k": "endian-big bit-32",
"microblaze": "endian-big bit-32 microblaze-common",
- "microblazeeb": "endian-big bit-32 microblaze-common",
"microblazeel": "endian-little bit-32 microblaze-common",
"mips": "endian-big bit-32 mips-common",
"mips64": "endian-big bit-64 mips-common",
@@ -48,6 +47,7 @@ def siteinfo_data_for_machine(arch, os, d):
"powerpc": "endian-big bit-32 powerpc-common",
"nios2": "endian-little bit-32 nios2-common",
"powerpc64": "endian-big bit-64 powerpc-common",
+ "powerpc64le": "endian-little bit-64 powerpc-common",
"ppc": "endian-big bit-32 powerpc-common",
"ppc64": "endian-big bit-64 powerpc-common",
"ppc64le" : "endian-little bit-64 powerpc-common",
@@ -88,8 +88,6 @@ def siteinfo_data_for_machine(arch, os, d):
"arm-linux-musleabi": "arm-linux",
"armeb-linux-gnueabi": "armeb-linux",
"armeb-linux-musleabi": "armeb-linux",
- "microblazeeb-linux" : "microblaze-linux",
- "microblazeeb-linux-musl" : "microblaze-linux",
"microblazeel-linux" : "microblaze-linux",
"microblazeel-linux-musl" : "microblaze-linux",
"mips-linux-musl": "mips-linux",
@@ -106,8 +104,10 @@ def siteinfo_data_for_machine(arch, os, d):
"powerpc-linux-muslspe": "powerpc-linux powerpc32-linux",
"powerpc64-linux-gnuspe": "powerpc-linux powerpc64-linux",
"powerpc64-linux-muslspe": "powerpc-linux powerpc64-linux",
- "powerpc64-linux": "powerpc-linux",
- "powerpc64-linux-musl": "powerpc-linux",
+ "powerpc64-linux": "powerpc-linux powerpc64-linux",
+ "powerpc64-linux-musl": "powerpc-linux powerpc64-linux",
+ "powerpc64le-linux": "powerpc-linux powerpc64-linux",
+ "powerpc64le-linux-musl": "powerpc-linux powerpc64-linux",
"riscv32-linux": "riscv32-linux",
"riscv32-linux-musl": "riscv32-linux",
"riscv64-linux": "riscv64-linux",
diff --git a/poky/meta/classes/sstate.bbclass b/poky/meta/classes/sstate.bbclass
index 64808f8e1..c73c3b42a 100644
--- a/poky/meta/classes/sstate.bbclass
+++ b/poky/meta/classes/sstate.bbclass
@@ -3,19 +3,41 @@ SSTATE_VERSION = "3"
SSTATE_MANIFESTS ?= "${TMPDIR}/sstate-control"
SSTATE_MANFILEPREFIX = "${SSTATE_MANIFESTS}/manifest-${SSTATE_MANMACH}-${PN}"
-def generate_sstatefn(spec, hash, d):
+def generate_sstatefn(spec, hash, taskname, siginfo, d):
+ if taskname is None:
+ return ""
+ extension = ".tgz"
+ # 8 chars reserved for siginfo
+ limit = 254 - 8
+ if siginfo:
+ limit = 254
+ extension = ".tgz.siginfo"
if not hash:
hash = "INVALID"
- return hash[:2] + "/" + spec + hash
+ fn = spec + hash + "_" + taskname + extension
+ # If the filename is too long, attempt to reduce it
+ if len(fn) > limit:
+ components = spec.split(":")
+ # Fields 0,5,6 are mandatory, 1 is most useful, 2,3,4 are just for information
+ # 7 is for the separators
+ avail = (254 - len(hash + "_" + taskname + extension) - len(components[0]) - len(components[1]) - len(components[5]) - len(components[6]) - 7) // 3
+ components[2] = components[2][:avail]
+ components[3] = components[3][:avail]
+ components[4] = components[4][:avail]
+ spec = ":".join(components)
+ fn = spec + hash + "_" + taskname + extension
+ if len(fn) > limit:
+ bb.fatal("Unable to reduce sstate name to less than 255 chararacters")
+ return hash[:2] + "/" + hash[2:4] + "/" + fn
SSTATE_PKGARCH = "${PACKAGE_ARCH}"
SSTATE_PKGSPEC = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:"
SSTATE_SWSPEC = "sstate:${PN}::${PV}:${PR}::${SSTATE_VERSION}:"
-SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC'), d.getVar('BB_UNIHASH'), d)}"
+SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC'), d.getVar('BB_UNIHASH'), d.getVar('SSTATE_CURRTASK'), False, d)}"
SSTATE_PKG = "${SSTATE_DIR}/${SSTATE_PKGNAME}"
SSTATE_EXTRAPATH = ""
SSTATE_EXTRAPATHWILDCARD = ""
-SSTATE_PATHSPEC = "${SSTATE_DIR}/${SSTATE_EXTRAPATHWILDCARD}*/${SSTATE_PKGSPEC}"
+SSTATE_PATHSPEC = "${SSTATE_DIR}/${SSTATE_EXTRAPATHWILDCARD}*/*/${SSTATE_PKGSPEC}*_${SSTATE_PATH_CURRTASK}.tgz*"
# explicitly make PV to depend on evaluated value of PV variable
PV[vardepvalue] = "${PV}"
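
A worked example of the new layout may help: the archive name is now hash[:2]/hash[2:4]/<spec><hash>_<task>.tgz, and only when that exceeds the length limit are the three informational spec fields truncated. A condensed copy of the logic above, with invented sample values:

def sstate_fn(spec, hash, taskname, siginfo=False):
    ext = ".tgz.siginfo" if siginfo else ".tgz"
    limit = 254 if siginfo else 254 - 8
    fn = spec + hash + "_" + taskname + ext
    if len(fn) > limit:
        c = spec.split(":")
        avail = (254 - len(hash + "_" + taskname + ext)
                 - len(c[0]) - len(c[1]) - len(c[5]) - len(c[6]) - 7) // 3
        c[2], c[3], c[4] = c[2][:avail], c[3][:avail], c[4][:avail]
        fn = ":".join(c) + hash + "_" + taskname + ext
    return hash[:2] + "/" + hash[2:4] + "/" + fn

print(sstate_fn("sstate:zlib:core2-64-poky-linux:1.2.11:r0:core2-64:3:",
                "ab12cd34", "populate_sysroot"))
# ab/12/sstate:zlib:core2-64-poky-linux:1.2.11:r0:core2-64:3:ab12cd34_populate_sysroot.tgz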
@@ -317,8 +339,9 @@ def sstate_installpkg(ss, d):
from oe.gpg_sign import get_signer
sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task'])
- sstatefetch = d.getVar('SSTATE_PKGNAME') + '_' + ss['task'] + ".tgz"
- sstatepkg = d.getVar('SSTATE_PKG') + '_' + ss['task'] + ".tgz"
+ d.setVar("SSTATE_CURRTASK", ss['task'])
+ sstatefetch = d.getVar('SSTATE_PKGNAME')
+ sstatepkg = d.getVar('SSTATE_PKG')
if not os.path.exists(sstatepkg):
pstaging_fetch(sstatefetch, d)
@@ -330,7 +353,6 @@ def sstate_installpkg(ss, d):
sstate_clean(ss, d)
d.setVar('SSTATE_INSTDIR', sstateinst)
- d.setVar('SSTATE_PKG', sstatepkg)
if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False):
signer = get_signer(d, 'local')
@@ -440,8 +462,9 @@ python sstate_hardcode_path_unpack () {
def sstate_clean_cachefile(ss, d):
import oe.path
- sstatepkgfile = d.getVar('SSTATE_PATHSPEC') + "*_" + ss['task'] + ".tgz*"
if d.getVarFlag('do_%s' % ss['task'], 'task'):
+ d.setVar("SSTATE_PATH_CURRTASK", ss['task'])
+ sstatepkgfile = d.getVar('SSTATE_PATHSPEC')
bb.note("Removing %s" % sstatepkgfile)
oe.path.remove(sstatepkgfile)
@@ -612,10 +635,9 @@ def sstate_package(ss, d):
tmpdir = d.getVar('TMPDIR')
sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task'])
- sstatepkg = d.getVar('SSTATE_PKG') + '_'+ ss['task'] + ".tgz"
+ d.setVar("SSTATE_CURRTASK", ss['task'])
bb.utils.remove(sstatebuild, recurse=True)
bb.utils.mkdirhier(sstatebuild)
- bb.utils.mkdirhier(os.path.dirname(sstatepkg))
for state in ss['dirs']:
if not os.path.exists(state[1]):
continue
@@ -648,7 +670,6 @@ def sstate_package(ss, d):
os.rename(plain, pdir)
d.setVar('SSTATE_BUILDDIR', sstatebuild)
- d.setVar('SSTATE_PKG', sstatepkg)
d.setVar('SSTATE_INSTDIR', sstatebuild)
if d.getVar('SSTATE_SKIP_CREATION') == '1':
@@ -664,7 +685,12 @@ def sstate_package(ss, d):
# All hooks should run in SSTATE_BUILDDIR.
bb.build.exec_func(f, d, (sstatebuild,))
- bb.siggen.dump_this_task(sstatepkg + ".siginfo", d)
+ # SSTATE_PKG may have been changed by sstate_report_unihash
+ siginfo = d.getVar('SSTATE_PKG') + ".siginfo"
+ if not os.path.exists(siginfo):
+ bb.siggen.dump_this_task(siginfo, d)
+ else:
+ os.utime(siginfo, None)
return
@@ -748,18 +774,20 @@ sstate_task_postfunc[dirs] = "${WORKDIR}"
# set as SSTATE_BUILDDIR. Will be run from within SSTATE_BUILDDIR.
#
sstate_create_package () {
- TFILE=`mktemp ${SSTATE_PKG}.XXXXXXXX`
-
- # Exit earlu if it already exists
+ # Exit early if it already exists
if [ -e ${SSTATE_PKG} ]; then
+ touch ${SSTATE_PKG}
return
fi
- # Use pigz if available
- OPT="-czS"
- if [ -x "$(command -v pigz)" ]; then
- OPT="-I pigz -cS"
- fi
+ mkdir -p `dirname ${SSTATE_PKG}`
+ TFILE=`mktemp ${SSTATE_PKG}.XXXXXXXX`
+
+ # Use pigz if available
+ OPT="-czS"
+ if [ -x "$(command -v pigz)" ]; then
+ OPT="-I pigz -cS"
+ fi
# Need to handle empty directories
if [ "$(ls -A)" ]; then
@@ -776,10 +804,13 @@ sstate_create_package () {
chmod 0664 $TFILE
# Skip if it was already created by some other process
if [ ! -e ${SSTATE_PKG} ]; then
- mv -f $TFILE ${SSTATE_PKG}
+ # Move into place using ln to attempt an atomic op.
+ # Abort if it already exists
+ ln $TFILE ${SSTATE_PKG} && rm $TFILE
else
rm $TFILE
fi
+ touch ${SSTATE_PKG}
}
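
The ln-then-rm publish works because creating a hard link fails with EEXIST if the destination already exists, so "install if absent" is atomic even across racing builders sharing one sstate directory. The same pattern in Python (illustrative):

import errno, os

def publish(tmpfile, target):
    try:
        os.link(tmpfile, target)   # atomic: fails if target already exists
        os.unlink(tmpfile)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        os.unlink(tmpfile)         # another process won the race; keep its copy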
python sstate_sign_package () {
@@ -821,9 +852,6 @@ BB_HASHCHECK_FUNCTION = "sstate_checkhashes"
def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True, **kwargs):
found = set()
missed = set()
- extension = ".tgz"
- if siginfo:
- extension = extension + ".siginfo"
def gethash(task):
return sq_data['unihash'][task]
@@ -850,7 +878,7 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
spec, extrapath, tname = getpathcomponents(tid, d)
- sstatefile = d.expand("${SSTATE_DIR}/" + extrapath + generate_sstatefn(spec, gethash(tid), d) + "_" + tname + extension)
+ sstatefile = d.expand("${SSTATE_DIR}/" + extrapath + generate_sstatefn(spec, gethash(tid), tname, siginfo, d))
if os.path.exists(sstatefile):
bb.debug(2, "SState: Found valid sstate file %s" % sstatefile)
@@ -906,19 +934,22 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
missed.add(tid)
bb.debug(2, "SState: Unsuccessful fetch test for %s" % srcuri)
pass
- bb.event.fire(bb.event.ProcessProgress(msg, len(tasklist) - thread_worker.tasks.qsize()), d)
+ if len(tasklist) >= min_tasks:
+ bb.event.fire(bb.event.ProcessProgress(msg, len(tasklist) - thread_worker.tasks.qsize()), d)
tasklist = []
+ min_tasks = 100
for tid in sq_data['hash']:
if tid in found:
continue
spec, extrapath, tname = getpathcomponents(tid, d)
- sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(tid), d) + "_" + tname + extension)
+ sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(tid), tname, siginfo, d))
tasklist.append((tid, sstatefile))
if tasklist:
- msg = "Checking sstate mirror object availability"
- bb.event.fire(bb.event.ProcessStarted(msg, len(tasklist)), d)
+ if len(tasklist) >= min_tasks:
+ msg = "Checking sstate mirror object availability"
+ bb.event.fire(bb.event.ProcessStarted(msg, len(tasklist)), d)
import multiprocessing
nproc = min(multiprocessing.cpu_count(), len(tasklist))
@@ -932,22 +963,19 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
pool.wait_completion()
bb.event.disable_threadlock()
- bb.event.fire(bb.event.ProcessFinished(msg), d)
-
- # Likely checking an individual task hash again for multiconfig sharing of sstate tasks so skip reporting
- if len(sq_data['hash']) == 1:
- return found
+ if len(tasklist) >= min_tasks:
+ bb.event.fire(bb.event.ProcessFinished(msg), d)
inheritlist = d.getVar("INHERIT")
if "toaster" in inheritlist:
evdata = {'missed': [], 'found': []};
for tid in missed:
spec, extrapath, tname = getpathcomponents(tid, d)
- sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(tid), d) + "_" + tname + ".tgz")
+ sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(tid), tname, False, d))
evdata['missed'].append((bb.runqueue.fn_from_tid(tid), bb.runqueue.taskname_from_tid(tid), gethash(tid), sstatefile ) )
for tid in found:
spec, extrapath, tname = getpathcomponents(tid, d)
- sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(tid), d) + "_" + tname + ".tgz")
+ sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(tid), tname, False, d))
evdata['found'].append((bb.runqueue.fn_from_tid(tid), bb.runqueue.taskname_from_tid(tid), gethash(tid), sstatefile ) )
bb.event.fire(bb.event.MetadataEvent("MissedSstate", evdata), d)
@@ -1081,17 +1109,20 @@ addhandler sstate_eventhandler
sstate_eventhandler[eventmask] = "bb.build.TaskSucceeded"
python sstate_eventhandler() {
d = e.data
- # When we write an sstate package we rewrite the SSTATE_PKG
- spkg = d.getVar('SSTATE_PKG')
- if not spkg.endswith(".tgz"):
+ writtensstate = d.getVar('SSTATE_CURRTASK')
+ if not writtensstate:
taskname = d.getVar("BB_RUNTASK")[3:]
spec = d.getVar('SSTATE_PKGSPEC')
swspec = d.getVar('SSTATE_SWSPEC')
if taskname in ["fetch", "unpack", "patch", "populate_lic", "preconfigure"] and swspec:
d.setVar("SSTATE_PKGSPEC", "${SSTATE_SWSPEC}")
d.setVar("SSTATE_EXTRAPATH", "")
- sstatepkg = d.getVar('SSTATE_PKG')
- bb.siggen.dump_this_task(sstatepkg + '_' + taskname + ".tgz" ".siginfo", d)
+ d.setVar("SSTATE_CURRTASK", taskname)
+ siginfo = d.getVar('SSTATE_PKG') + ".siginfo"
+ if not os.path.exists(siginfo):
+ bb.siggen.dump_this_task(siginfo, d)
+ else:
+ os.utime(siginfo, None)
}
SSTATE_PRUNE_OBSOLETEWORKDIR ?= "1"
diff --git a/poky/meta/classes/staging.bbclass b/poky/meta/classes/staging.bbclass
index cca0b7e0d..5b04f88b2 100644
--- a/poky/meta/classes/staging.bbclass
+++ b/poky/meta/classes/staging.bbclass
@@ -32,7 +32,7 @@ SYSROOT_DIRS_BLACKLIST = " \
${datadir}/gtk-doc/html \
${datadir}/locale \
${datadir}/pixmaps \
- ${libdir}/${PN}/ptest \
+ ${libdir}/${BPN}/ptest \
"
sysroot_stage_dir() {
@@ -75,8 +75,8 @@ python sysroot_strip () {
dstdir = d.getVar('SYSROOT_DESTDIR')
pn = d.getVar('PN')
- libdir = os.path.abspath(dstdir + os.sep + d.getVar("libdir"))
- base_libdir = os.path.abspath(dstdir + os.sep + d.getVar("base_libdir"))
+ libdir = d.getVar("libdir")
+ base_libdir = d.getVar("base_libdir")
qa_already_stripped = 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split()
strip_cmd = d.getVar("STRIP")
@@ -277,11 +277,13 @@ python extend_recipe_sysroot() {
start = None
configuredeps = []
+ owntaskdeps = []
for dep in taskdepdata:
data = taskdepdata[dep]
if data[1] == mytaskname and data[0] == pn:
start = dep
- break
+ elif data[0] == pn:
+ owntaskdeps.append(data[1])
if start is None:
bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
@@ -427,7 +429,7 @@ python extend_recipe_sysroot() {
# Was likely already uninstalled
continue
potential.append(l)
- # We need to ensure not other task needs this dependency. We hold the sysroot
+ # We need to ensure no other task needs this dependency. We hold the sysroot
# lock so we can search the indexes to check
if potential:
for i in glob.glob(depdir + "/index.*"):
@@ -435,6 +437,11 @@ python extend_recipe_sysroot() {
continue
with open(i, "r") as f:
for l in f:
+ if l.startswith("TaskDeps:"):
+ prevtasks = l.split()[1:]
+ if mytaskname in prevtasks:
+ # We're a dependency of this task so we can clear items out of the sysroot
+ break
l = l.strip()
if l in potential:
potential.remove(l)
@@ -470,6 +477,7 @@ python extend_recipe_sysroot() {
elif os.path.lexists(depdir + "/" + c):
os.unlink(depdir + "/" + c)
+ binfiles = {}
# Now handle installs
for dep in configuredeps:
c = setscenedeps[dep][0]
@@ -562,7 +570,16 @@ python extend_recipe_sysroot() {
if l.endswith("/"):
staging_copydir(l, targetdir, dest, seendirs)
continue
- staging_copyfile(l, targetdir, dest, postinsts, seendirs)
+ if "/bin/" in l or "/sbin/" in l:
+ # defer /*bin/* files until last in case they need libs
+ binfiles[l] = (targetdir, dest)
+ else:
+ staging_copyfile(l, targetdir, dest, postinsts, seendirs)
+
+ # Handle deferred binfiles
+ for l in binfiles:
+ (targetdir, dest) = binfiles[l]
+ staging_copyfile(l, targetdir, dest, postinsts, seendirs)
bb.note("Installed into sysroot: %s" % str(msg_adding))
bb.note("Skipping as already exists in sysroot: %s" % str(msg_exists))
@@ -578,6 +595,7 @@ python extend_recipe_sysroot() {
os.symlink(manifests[dep], depdir + "/" + c + ".complete")
with open(taskindex, "w") as f:
+ f.write("TaskDeps: " + " ".join(owntaskdeps) + "\n")
for l in sorted(installed):
f.write(l + "\n")
diff --git a/poky/meta/classes/testimage.bbclass b/poky/meta/classes/testimage.bbclass
index 844ed8794..00f0c2983 100644
--- a/poky/meta/classes/testimage.bbclass
+++ b/poky/meta/classes/testimage.bbclass
@@ -31,9 +31,21 @@ TESTIMAGE_AUTO ??= "0"
# TEST_LOG_DIR contains a command ssh log and may contain information about what command is running, its output and return codes, and for qemu a boot log up to login.
# Booting is handled by this class, and it's not a test in itself.
# TEST_QEMUBOOT_TIMEOUT can be used to set the maximum time in seconds the launch code will wait for the login prompt.
+# TEST_OVERALL_TIMEOUT can be used to set the maximum time in seconds the tests will be allowed to run (defaults to no limit).
# TEST_QEMUPARAMS can be used to pass extra parameters to qemu, e.g. "-m 1024" for setting the amount of ram to 1 GB.
# TEST_RUNQEMUPARAMS can be used to pass extra parameters to runqemu, e.g. "gl" to enable OpenGL acceleration.
+# TESTIMAGE_BOOT_PATTERNS can be used to override certain patterns used to communicate with the target when booting,
+# if a pattern is not specifically present in this variable a default will be used when booting the target.
+# TESTIMAGE_BOOT_PATTERNS[<flag>] overrides the pattern used for that specific flag, where flag comes from a list of accepted flags
+# e.g. normally the system boots and waits for a login prompt (login:), after that it sends the command: "root\n" to log in as the root user
+# if we wanted to log in as the hypothetical "webserver" user for example we could set the following:
+# TESTIMAGE_BOOT_PATTERNS = "send_login_user search_login_succeeded"
+# TESTIMAGE_BOOT_PATTERNS[send_login_user] = "webserver\n"
+# TESTIMAGE_BOOT_PATTERNS[search_login_succeeded] = "webserver@[a-zA-Z0-9\-]+:~#"
+# The accepted flags are the following: search_reached_prompt, send_login_user, search_login_succeeded, search_cmd_finished.
+# They are prefixed with either search or send, to indicate whether the pattern is to be sent to or searched for in the target terminal
+
TEST_LOG_DIR ?= "${WORKDIR}/testimage"
TEST_EXPORT_DIR ?= "${TMPDIR}/testimage/${PN}"
@@ -46,7 +58,7 @@ BASICTESTSUITE = "\
ping date df ssh scp python perl gi ptest parselogs \
logrotate connman systemd oe_syslog pam stap ldd xorg \
kernelmodule gcc buildcpio buildlzip buildgalculator \
- dnf rpm opkg apt"
+ dnf rpm opkg apt weston"
DEFAULT_TEST_SUITES = "${BASICTESTSUITE}"
@@ -64,10 +76,13 @@ DEFAULT_TEST_SUITES_remove_qemumips64 = "${MIPSREMOVE}"
TEST_SUITES ?= "${DEFAULT_TEST_SUITES}"
TEST_QEMUBOOT_TIMEOUT ?= "1000"
+TEST_OVERALL_TIMEOUT ?= ""
TEST_TARGET ?= "qemu"
TEST_QEMUPARAMS ?= ""
TEST_RUNQEMUPARAMS ?= ""
+TESTIMAGE_BOOT_PATTERNS ?= ""
+
TESTIMAGEDEPENDS = ""
TESTIMAGEDEPENDS_append_qemuall = " qemu-native:do_populate_sysroot qemu-helper-native:do_populate_sysroot qemu-helper-native:do_addto_recipe_sysroot"
TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'cpio-native:do_populate_sysroot', '', d)}"
@@ -150,6 +165,29 @@ def get_testimage_json_result_dir(d):
def get_testimage_result_id(configuration):
return '%s_%s_%s_%s' % (configuration['TEST_TYPE'], configuration['IMAGE_BASENAME'], configuration['MACHINE'], configuration['STARTTIME'])
+def get_testimage_boot_patterns(d):
+ from collections import defaultdict
+ boot_patterns = defaultdict(str)
+ # Only accept certain values
+ accepted_patterns = ['search_reached_prompt', 'send_login_user', 'search_login_succeeded', 'search_cmd_finished']
+ # Not all patterns need to be overridden, e.g. perhaps we only want to change the user
+ boot_patterns_flags = d.getVarFlags('TESTIMAGE_BOOT_PATTERNS') or {}
+ if boot_patterns_flags:
+ patterns_set = [p for p in boot_patterns_flags.items() if p[0] in d.getVar('TESTIMAGE_BOOT_PATTERNS').split()]
+ for flag, flagval in patterns_set:
+ if flag not in accepted_patterns:
+ bb.fatal('Testimage: The only accepted boot patterns are: search_reached_prompt,send_login_user, \
+ search_login_succeeded,search_cmd_finished\n Make sure your TESTIMAGE_BOOT_PATTERNS=%s \
+ contains an accepted flag.' % d.getVar('TESTIMAGE_BOOT_PATTERNS'))
+ return
+ # We know the boot prompt is searched for in binary format; the others might be expressions
+ if flag == 'search_reached_prompt':
+ boot_patterns[flag] = flagval.encode()
+ else:
+ boot_patterns[flag] = flagval.encode().decode('unicode-escape')
+ return boot_patterns
+
+
def testimage_main(d):
import os
import json
@@ -168,7 +206,11 @@ def testimage_main(d):
"""
Catch SIGTERM from worker in order to stop qemu.
"""
- raise RuntimeError
+ os.kill(os.getpid(), signal.SIGINT)
+
+ def handle_test_timeout(timeout):
+ bb.warn("Global test timeout reached (%s seconds), stopping the tests." %(timeout))
+ os.kill(os.getpid(), signal.SIGINT)
testimage_sanity(d)
@@ -207,7 +249,7 @@ def testimage_main(d):
if d.getVar("TEST_TARGET") == "qemu":
fstypes = [fs for fs in fstypes if fs in supported_fstypes]
if not fstypes:
- bb.fatal('Unsupported image type built. Add a comptible image to '
+ bb.fatal('Unsupported image type built. Add a compatible image to '
'IMAGE_FSTYPES. Supported types: %s' %
', '.join(supported_fstypes))
qfstype = fstypes[0]
@@ -239,11 +281,14 @@ def testimage_main(d):
# Get use_kvm
kvm = oe.types.qemu_use_kvm(d.getVar('QEMU_USE_KVM'), d.getVar('TARGET_ARCH'))
+ # Get OVMF
+ ovmf = d.getVar("QEMU_USE_OVMF")
+
slirp = False
if d.getVar("QEMU_USE_SLIRP"):
slirp = True
- # TODO: We use the current implementatin of qemu runner because of
+ # TODO: We use the current implementation of qemu runner because of
# time constraints; qemu runner really needs a refactor too.
target_kwargs = { 'machine' : machine,
'rootfs' : rootfs,
@@ -256,8 +301,13 @@ def testimage_main(d):
'kvm' : kvm,
'slirp' : slirp,
'dump_dir' : d.getVar("TESTIMAGE_DUMP_DIR"),
+ 'serial_ports': len(d.getVar("SERIAL_CONSOLES").split()),
+ 'ovmf' : ovmf,
}
+ if d.getVar("TESTIMAGE_BOOT_PATTERNS"):
+ target_kwargs['boot_patterns'] = get_testimage_boot_patterns(d)
+
# TODO: Currently BBPATH is needed for custom loading of targets.
# It would be better to find these modules using introspection.
target_kwargs['target_modules_path'] = d.getVar('BBPATH')
@@ -319,10 +369,15 @@ def testimage_main(d):
# We need to check if runqemu ends unexpectedly
# or if the worker send us a SIGTERM
tc.target.start(params=d.getVar("TEST_QEMUPARAMS"), runqemuparams=d.getVar("TEST_RUNQEMUPARAMS"))
+ import threading
+ try:
+ threading.Timer(int(d.getVar("TEST_OVERALL_TIMEOUT")), handle_test_timeout, (int(d.getVar("TEST_OVERALL_TIMEOUT")),)).start()
+ except ValueError:
+ pass
results = tc.runTests()
- except (RuntimeError, BlockingIOError) as err:
- if isinstance(err, RuntimeError):
- bb.error('testimage received SIGTERM, shutting down...')
+ except (KeyboardInterrupt, BlockingIOError) as err:
+ if isinstance(err, KeyboardInterrupt):
+ bb.error('testimage interrupted, shutting down...')
else:
bb.error('runqemu failed, shutting down...')
if results:
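
The overall timeout is a one-shot watchdog thread that interrupts the main thread with SIGINT, which Python surfaces as the KeyboardInterrupt handled above. The skeleton of that pattern as a standalone sketch:

import os, signal, threading, time

def on_timeout(timeout):
    print("Global test timeout reached (%s seconds), stopping the tests." % timeout)
    os.kill(os.getpid(), signal.SIGINT)

threading.Timer(2, on_timeout, (2,)).start()
try:
    time.sleep(60)    # stand-in for tc.runTests()
except KeyboardInterrupt:
    print("interrupted, shutting down...")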
diff --git a/poky/meta/classes/texinfo.bbclass b/poky/meta/classes/texinfo.bbclass
index 6b0def0ea..f46bacabd 100644
--- a/poky/meta/classes/texinfo.bbclass
+++ b/poky/meta/classes/texinfo.bbclass
@@ -6,13 +6,13 @@
# Texinfo recipe, you can remove texinfo-native from ASSUME_PROVIDED and
# makeinfo from SANITY_REQUIRED_UTILITIES.
-TEXDEP = "texinfo-native"
+TEXDEP = "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'texinfo-replacement-native', 'texinfo-dummy-native', d)}"
TEXDEP_class-native = "texinfo-dummy-native"
TEXDEP_class-cross = "texinfo-dummy-native"
+TEXDEP_class-crosssdk = "texinfo-dummy-native"
+TEXDEP_class-cross-canadian = "texinfo-dummy-native"
DEPENDS_append = " ${TEXDEP}"
-PATH_prepend_class-native = "${STAGING_BINDIR_NATIVE}/texinfo-dummy-native:"
-PATH_prepend_class-cross = "${STAGING_BINDIR_NATIVE}/texinfo-dummy-native:"
# libtool-cross doesn't inherit cross
TEXDEP_pn-libtool-cross = "texinfo-dummy-native"
-PATH_prepend_pn-libtool-cross = "${STAGING_BINDIR_NATIVE}/texinfo-dummy-native:"
+
diff --git a/poky/meta/classes/toaster.bbclass b/poky/meta/classes/toaster.bbclass
index 6cef0b8f6..9518ddf7a 100644
--- a/poky/meta/classes/toaster.bbclass
+++ b/poky/meta/classes/toaster.bbclass
@@ -113,7 +113,7 @@ def _toaster_load_pkgdatafile(dirpath, filepath):
pass # ignore lines without valid key: value pairs
return pkgdata
-python toaster_package_dumpdata() {
+def _toaster_dumpdata(pkgdatadir, d):
"""
Dumps the data about the packages created by a recipe
"""
@@ -122,16 +122,24 @@ python toaster_package_dumpdata() {
if not d.getVar('PACKAGES'):
return
- pkgdatadir = d.getVar('PKGDESTWORK')
lpkgdata = {}
datadir = os.path.join(pkgdatadir, 'runtime')
# scan and send data for each generated package
- for datafile in os.listdir(datadir):
- if not datafile.endswith('.packaged'):
- lpkgdata = _toaster_load_pkgdatafile(datadir, datafile)
- # Fire an event containing the pkg data
- bb.event.fire(bb.event.MetadataEvent("SinglePackageInfo", lpkgdata), d)
+ if os.path.exists(datadir):
+ for datafile in os.listdir(datadir):
+ if not datafile.endswith('.packaged'):
+ lpkgdata = _toaster_load_pkgdatafile(datadir, datafile)
+ # Fire an event containing the pkg data
+ bb.event.fire(bb.event.MetadataEvent("SinglePackageInfo", lpkgdata), d)
+
+python toaster_package_dumpdata() {
+ _toaster_dumpdata(d.getVar('PKGDESTWORK'), d)
+}
+
+python toaster_packagedata_dumpdata() {
+ # This path needs to match do_packagedata[sstate-inputdirs]
+ _toaster_dumpdata(os.path.join(d.getVar('WORKDIR'), 'pkgdata-pdata-input'), d)
}
# 2. Dump output image files information
@@ -366,8 +374,8 @@ toaster_buildhistory_dump[eventmask] = "bb.event.BuildCompleted"
addhandler toaster_artifacts
toaster_artifacts[eventmask] = "bb.runqueue.runQueueTaskSkipped bb.runqueue.runQueueTaskCompleted"
-do_packagedata_setscene[postfuncs] += "toaster_package_dumpdata "
-do_packagedata_setscene[vardepsexclude] += "toaster_package_dumpdata "
+do_packagedata_setscene[postfuncs] += "toaster_packagedata_dumpdata "
+do_packagedata_setscene[vardepsexclude] += "toaster_packagedata_dumpdata "
do_package[postfuncs] += "toaster_package_dumpdata "
do_package[vardepsexclude] += "toaster_package_dumpdata "
diff --git a/poky/meta/classes/uninative.bbclass b/poky/meta/classes/uninative.bbclass
index 9f8645a36..70799bbf6 100644
--- a/poky/meta/classes/uninative.bbclass
+++ b/poky/meta/classes/uninative.bbclass
@@ -1,4 +1,4 @@
-UNINATIVE_LOADER ?= "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/lib/${@bb.utils.contains('BUILD_ARCH', 'x86_64', 'ld-linux-x86-64.so.2', '', d)}${@bb.utils.contains('BUILD_ARCH', 'i686', 'ld-linux.so.2', '', d)}${@bb.utils.contains('BUILD_ARCH', 'aarch64', 'ld-linux-aarch64.so.1', '', d)}"
+UNINATIVE_LOADER ?= "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/lib/${@bb.utils.contains('BUILD_ARCH', 'x86_64', 'ld-linux-x86-64.so.2', '', d)}${@bb.utils.contains('BUILD_ARCH', 'i686', 'ld-linux.so.2', '', d)}${@bb.utils.contains('BUILD_ARCH', 'aarch64', 'ld-linux-aarch64.so.1', '', d)}${@bb.utils.contains('BUILD_ARCH', 'ppc64le', 'ld64.so.2', '', d)}"
UNINATIVE_STAGING_DIR ?= "${STAGING_DIR}"
UNINATIVE_URL ?= "unset"
diff --git a/poky/meta/classes/vala.bbclass b/poky/meta/classes/vala.bbclass
index 615eb379a..bcaf68c5a 100644
--- a/poky/meta/classes/vala.bbclass
+++ b/poky/meta/classes/vala.bbclass
@@ -8,7 +8,7 @@ DEPENDS_append = " vala-native ${VALADEPENDS}"
# Our patched version of Vala looks in STAGING_DATADIR for .vapi files
export STAGING_DATADIR
# Upstream Vala >= 0.11 looks in XDG_DATA_DIRS for .vapi files
-export XDG_DATA_DIRS = "${STAGING_DATADIR}"
+export XDG_DATA_DIRS = "${STAGING_DATADIR}:${STAGING_LIBDIR}"
# Package additional files
FILES_${PN}-dev += "\