Diffstat (limited to 'poky/meta/classes')
 poky/meta/classes/archiver.bbclass            |  2
 poky/meta/classes/base.bbclass                |  6
 poky/meta/classes/buildhistory.bbclass        |  2
 poky/meta/classes/cve-check.bbclass           | 60
 poky/meta/classes/distutils3-base.bbclass     |  2
 poky/meta/classes/distutils3.bbclass          | 10
 poky/meta/classes/externalsrc.bbclass         | 16
 poky/meta/classes/image.bbclass               |  2
 poky/meta/classes/image_types.bbclass         |  2
 poky/meta/classes/image_types_wic.bbclass     | 20
 poky/meta/classes/kernel.bbclass              |  2
 poky/meta/classes/license.bbclass             |  1
 poky/meta/classes/npm.bbclass                 |  8
 poky/meta/classes/package.bbclass             | 61
 poky/meta/classes/package_rpm.bbclass         |  2
 poky/meta/classes/populate_sdk_base.bbclass   |  2
 poky/meta/classes/python3native.bbclass       |  2
 poky/meta/classes/python3targetconfig.bbclass | 17
 poky/meta/classes/report-error.bbclass        |  4
 poky/meta/classes/reproducible_build.bbclass  | 15
 poky/meta/classes/sanity.bbclass              | 25
 poky/meta/classes/scons.bbclass               |  3
 poky/meta/classes/sstate.bbclass              |  4
 poky/meta/classes/staging.bbclass             |  4
 24 files changed, 213 insertions(+), 59 deletions(-)
diff --git a/poky/meta/classes/archiver.bbclass b/poky/meta/classes/archiver.bbclass
index 990f1d4674..7ca35a573b 100644
--- a/poky/meta/classes/archiver.bbclass
+++ b/poky/meta/classes/archiver.bbclass
@@ -53,7 +53,7 @@ ARCHIVER_MODE[recipe] ?= "0"
ARCHIVER_MODE[mirror] ?= "split"
DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
-ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
+ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
diff --git a/poky/meta/classes/base.bbclass b/poky/meta/classes/base.bbclass
index 7aa2e144eb..8a1b5f79c1 100644
--- a/poky/meta/classes/base.bbclass
+++ b/poky/meta/classes/base.bbclass
@@ -231,6 +231,7 @@ python base_eventhandler() {
if isinstance(e, bb.event.ConfigParsed):
if not d.getVar("NATIVELSBSTRING", False):
d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
+ d.setVar("ORIGNATIVELSBSTRING", d.getVar("NATIVELSBSTRING", False))
d.setVar('BB_VERSION', bb.__version__)
# There might be no bb.event.ConfigParsed event if bitbake server is
@@ -388,6 +389,11 @@ python () {
oe.utils.features_backfill("DISTRO_FEATURES", d)
oe.utils.features_backfill("MACHINE_FEATURES", d)
+ if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")):
+ d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}")
+ if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")):
+ d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}")
+
# Handle PACKAGECONFIG
#
# These take the form:
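
The base.bbclass hunk above adds ${S} and ${B} to PSEUDO_IGNORE_PATHS whenever they are not the same directory as ${WORKDIR}, so pseudo stops tracking source and build trees it never needs to own. A minimal standalone sketch of the same normalise-and-compare logic, with a plain dict standing in for the BitBake datastore and made-up paths:

import os

def extend_pseudo_ignore_paths(d):
    """Mimic the base.bbclass hunk: ignore ${S} and ${B} under pseudo
    when they are not the same directory as ${WORKDIR}."""
    workdir = os.path.normpath(d["WORKDIR"])
    for var in ("S", "B"):
        if os.path.normpath(d[var]) != workdir:
            d["PSEUDO_IGNORE_PATHS"] += "," + d[var]
    return d["PSEUDO_IGNORE_PATHS"]

# Example: an autotools-style recipe where S and B sit below WORKDIR.
d = {
    "WORKDIR": "/build/tmp/work/foo/1.0-r0",
    "S": "/build/tmp/work/foo/1.0-r0/foo-1.0",
    "B": "/build/tmp/work/foo/1.0-r0/build",
    "PSEUDO_IGNORE_PATHS": "/usr/,/etc/",
}
print(extend_pseudo_ignore_paths(d))
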
diff --git a/poky/meta/classes/buildhistory.bbclass b/poky/meta/classes/buildhistory.bbclass
index 156324d339..8a1359acbe 100644
--- a/poky/meta/classes/buildhistory.bbclass
+++ b/poky/meta/classes/buildhistory.bbclass
@@ -852,7 +852,7 @@ END
}
python buildhistory_eventhandler() {
- if e.data.getVar('BUILDHISTORY_FEATURES').strip():
+ if (e.data.getVar('BUILDHISTORY_FEATURES') or "").strip():
reset = e.data.getVar("BUILDHISTORY_RESET")
olddir = e.data.getVar("BUILDHISTORY_OLD_DIR")
if isinstance(e, bb.event.BuildStarted):
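
The one-line buildhistory fix guards against getVar() returning None when BUILDHISTORY_FEATURES is unset; calling .strip() on None raises. A tiny illustration of the guard pattern, using a stand-in getVar rather than the real event datastore:

UNSET = {"BUILDHISTORY_FEATURES": None}

def getVar(name):
    # Stand-in for e.data.getVar(): BitBake returns None for unset variables.
    return UNSET.get(name)

# Old form: .strip() on None raises AttributeError when the variable is unset.
try:
    getVar("BUILDHISTORY_FEATURES").strip()
except AttributeError as exc:
    print("old form fails:", exc)

# Patched form: fall back to "" so an unset variable simply tests False.
if (getVar("BUILDHISTORY_FEATURES") or "").strip():
    print("buildhistory enabled")
else:
    print("buildhistory disabled")
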
diff --git a/poky/meta/classes/cve-check.bbclass b/poky/meta/classes/cve-check.bbclass
index 669da6c8e9..8086cf05e9 100644
--- a/poky/meta/classes/cve-check.bbclass
+++ b/poky/meta/classes/cve-check.bbclass
@@ -36,20 +36,26 @@ CVE_CHECK_SUMMARY_FILE_NAME ?= "cve-summary"
CVE_CHECK_SUMMARY_FILE ?= "${CVE_CHECK_SUMMARY_DIR}/${CVE_CHECK_SUMMARY_FILE_NAME}"
CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
+CVE_CHECK_RECIPE_FILE ?= "${CVE_CHECK_DIR}/${PN}"
CVE_CHECK_MANIFEST ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cve"
CVE_CHECK_COPY_FILES ??= "1"
CVE_CHECK_CREATE_MANIFEST ??= "1"
+CVE_CHECK_REPORT_PATCHED ??= "1"
+
# Whitelist for packages (PN)
CVE_CHECK_PN_WHITELIST ?= ""
# Whitelist for CVE. If a CVE is found, then it is considered patched.
# The value is a string containing space separated CVE values:
-#
+#
# CVE_CHECK_WHITELIST = 'CVE-2014-2524 CVE-2018-1234'
-#
+#
CVE_CHECK_WHITELIST ?= ""
+# set to "alphabetical" for version using single alphabetical character as increament release
+CVE_VERSION_SUFFIX ??= ""
+
python cve_save_summary_handler () {
import shutil
import datetime
@@ -118,7 +124,7 @@ python cve_check_write_rootfs_manifest () {
import shutil
if d.getVar("CVE_CHECK_COPY_FILES") == "1":
- deploy_file = os.path.join(d.getVar("CVE_CHECK_DIR"), d.getVar("PN"))
+ deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
if os.path.exists(deploy_file):
bb.utils.remove(deploy_file)
@@ -203,10 +209,11 @@ def check_cves(d, patched_cves):
"""
Connect to the NVD database and find unpatched cves.
"""
- from distutils.version import LooseVersion
+ from oe.cve_check import Version
pn = d.getVar("PN")
real_pv = d.getVar("PV")
+ suffix = d.getVar("CVE_VERSION_SUFFIX")
cves_unpatched = []
# CVE_PRODUCT can contain more than one product (eg. curl/libcurl)
@@ -260,8 +267,8 @@ def check_cves(d, patched_cves):
else:
if operator_start:
try:
- vulnerable_start = (operator_start == '>=' and LooseVersion(pv) >= LooseVersion(version_start))
- vulnerable_start |= (operator_start == '>' and LooseVersion(pv) > LooseVersion(version_start))
+ vulnerable_start = (operator_start == '>=' and Version(pv,suffix) >= Version(version_start,suffix))
+ vulnerable_start |= (operator_start == '>' and Version(pv,suffix) > Version(version_start,suffix))
except:
bb.warn("%s: Failed to compare %s %s %s for %s" %
(product, pv, operator_start, version_start, cve))
@@ -271,8 +278,8 @@ def check_cves(d, patched_cves):
if operator_end:
try:
- vulnerable_end = (operator_end == '<=' and LooseVersion(pv) <= LooseVersion(version_end))
- vulnerable_end |= (operator_end == '<' and LooseVersion(pv) < LooseVersion(version_end))
+ vulnerable_end = (operator_end == '<=' and Version(pv,suffix) <= Version(version_end,suffix) )
+ vulnerable_end |= (operator_end == '<' and Version(pv,suffix) < Version(version_end,suffix) )
except:
bb.warn("%s: Failed to compare %s %s %s for %s" %
(product, pv, operator_end, version_end, cve))
@@ -328,18 +335,25 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
"""
cve_file = d.getVar("CVE_CHECK_LOG")
+ fdir_name = d.getVar("FILE_DIRNAME")
+ layer = fdir_name.split("/")[-3]
+
nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId="
write_string = ""
unpatched_cves = []
bb.utils.mkdirhier(os.path.dirname(cve_file))
for cve in sorted(cve_data):
+ is_patched = cve in patched
+ if is_patched and (d.getVar("CVE_CHECK_REPORT_PATCHED") != "1"):
+ continue
+ write_string += "LAYER: %s\n" % layer
write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
write_string += "CVE: %s\n" % cve
if cve in whitelisted:
write_string += "CVE STATUS: Whitelisted\n"
- elif cve in patched:
+ elif is_patched:
write_string += "CVE STATUS: Patched\n"
else:
unpatched_cves.append(cve)
@@ -353,20 +367,20 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
if unpatched_cves:
bb.warn("Found unpatched CVE (%s), for more information check %s" % (" ".join(unpatched_cves),cve_file))
- with open(cve_file, "w") as f:
- bb.note("Writing file %s with CVE information" % cve_file)
- f.write(write_string)
-
- if d.getVar("CVE_CHECK_COPY_FILES") == "1":
- cve_dir = d.getVar("CVE_CHECK_DIR")
- bb.utils.mkdirhier(cve_dir)
- deploy_file = os.path.join(cve_dir, d.getVar("PN"))
- with open(deploy_file, "w") as f:
+ if write_string:
+ with open(cve_file, "w") as f:
+ bb.note("Writing file %s with CVE information" % cve_file)
f.write(write_string)
- if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
- cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
- bb.utils.mkdirhier(cvelogpath)
+ if d.getVar("CVE_CHECK_COPY_FILES") == "1":
+ deploy_file = d.getVar("CVE_CHECK_RECIPE_FILE")
+ bb.utils.mkdirhier(os.path.dirname(deploy_file))
+ with open(deploy_file, "w") as f:
+ f.write(write_string)
+
+ if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
+ cvelogpath = d.getVar("CVE_CHECK_SUMMARY_DIR")
+ bb.utils.mkdirhier(cvelogpath)
- with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
- f.write("%s" % write_string)
+ with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
+ f.write("%s" % write_string)
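
The cve-check changes replace distutils' LooseVersion with oe.cve_check.Version, which takes an optional suffix mode (the new CVE_VERSION_SUFFIX, e.g. "alphabetical") so releases like "1.0g" order sensibly against NVD version ranges. A rough sketch of the comparison semantics the range checks rely on; this is not the real oe.cve_check implementation, just an illustration:

import re
from functools import total_ordering

@total_ordering
class SimpleVersion:
    """Very small stand-in for oe.cve_check.Version: numeric dotted
    components, plus an optional single trailing letter treated as a
    release increment when suffix_mode == "alphabetical"."""
    def __init__(self, version, suffix_mode=""):
        m = re.match(r"^(\d+(?:\.\d+)*)([a-z]?)$", version)
        if not m:
            raise ValueError("unsupported version: %s" % version)
        self.release = tuple(int(p) for p in m.group(1).split("."))
        letter = m.group(2) if suffix_mode == "alphabetical" else ""
        self.suffix = ord(letter) if letter else 0

    def _key(self):
        return (self.release, self.suffix)

    def __eq__(self, other):
        return self._key() == other._key()

    def __lt__(self, other):
        return self._key() < other._key()

# The range checks in check_cves() boil down to comparisons like these:
pv = "1.0g"
print(SimpleVersion(pv, "alphabetical") >= SimpleVersion("1.0a", "alphabetical"))  # True
print(SimpleVersion(pv, "alphabetical") < SimpleVersion("1.1", "alphabetical"))    # True
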
diff --git a/poky/meta/classes/distutils3-base.bbclass b/poky/meta/classes/distutils3-base.bbclass
index 7dbf07ac4b..a277d1c7bc 100644
--- a/poky/meta/classes/distutils3-base.bbclass
+++ b/poky/meta/classes/distutils3-base.bbclass
@@ -1,5 +1,5 @@
DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}"
RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}"
-inherit distutils-common-base python3native
+inherit distutils-common-base python3native python3targetconfig
diff --git a/poky/meta/classes/distutils3.bbclass b/poky/meta/classes/distutils3.bbclass
index 7356b5245a..a916a8000c 100644
--- a/poky/meta/classes/distutils3.bbclass
+++ b/poky/meta/classes/distutils3.bbclass
@@ -12,28 +12,30 @@ DISTUTILS_INSTALL_ARGS ?= "--root=${D} \
DISTUTILS_PYTHON = "python3"
DISTUTILS_PYTHON_class-native = "nativepython3"
+DISTUTILS_SETUP_PATH ?= "${S}"
+
distutils3_do_configure() {
:
}
distutils3_do_compile() {
- cd ${S}
+ cd ${DISTUTILS_SETUP_PATH}
NO_FETCH_BUILD=1 \
STAGING_INCDIR=${STAGING_INCDIR} \
STAGING_LIBDIR=${STAGING_LIBDIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} ${S}/setup.py \
+ ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py \
build --build-base=${B} ${DISTUTILS_BUILD_ARGS} || \
bbfatal_log "'${PYTHON_PN} setup.py build ${DISTUTILS_BUILD_ARGS}' execution failed."
}
distutils3_do_compile[vardepsexclude] = "MACHINE"
distutils3_do_install() {
- cd ${S}
+ cd ${DISTUTILS_SETUP_PATH}
install -d ${D}${PYTHON_SITEPACKAGES_DIR}
STAGING_INCDIR=${STAGING_INCDIR} \
STAGING_LIBDIR=${STAGING_LIBDIR} \
PYTHONPATH=${D}${PYTHON_SITEPACKAGES_DIR} \
- ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} ${S}/setup.py \
+ ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py \
build --build-base=${B} install --skip-build ${DISTUTILS_INSTALL_ARGS} || \
bbfatal_log "'${PYTHON_PN} setup.py install ${DISTUTILS_INSTALL_ARGS}' execution failed."
diff --git a/poky/meta/classes/externalsrc.bbclass b/poky/meta/classes/externalsrc.bbclass
index d200129987..1d7300d65b 100644
--- a/poky/meta/classes/externalsrc.bbclass
+++ b/poky/meta/classes/externalsrc.bbclass
@@ -190,6 +190,7 @@ def srctree_hash_files(d, srcdir=None):
import shutil
import subprocess
import tempfile
+ import hashlib
s_dir = srcdir or d.getVar('EXTERNALSRC')
git_dir = None
@@ -197,6 +198,10 @@ def srctree_hash_files(d, srcdir=None):
try:
git_dir = os.path.join(s_dir,
subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
+ top_git_dir = os.path.join(s_dir, subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'],
+ stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
+ if git_dir == top_git_dir:
+ git_dir = None
except subprocess.CalledProcessError:
pass
@@ -210,7 +215,16 @@ def srctree_hash_files(d, srcdir=None):
env = os.environ.copy()
env['GIT_INDEX_FILE'] = tmp_index.name
subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
- sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
+ git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
+ submodule_helper = subprocess.check_output(['git', 'submodule--helper', 'list'], cwd=s_dir, env=env).decode("utf-8")
+ for line in submodule_helper.splitlines():
+ module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
+ proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ proc.communicate()
+ proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
+ stdout, _ = proc.communicate()
+ git_sha1 += stdout.decode("utf-8")
+ sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
with open(oe_hash_file, 'w') as fobj:
fobj.write(sha1)
ret = oe_hash_file + ':True'
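
With this change srctree_hash_files() concatenates the `git write-tree` output of the external source tree and of each submodule, then digests the concatenation with hashlib, so the task signature still changes when only a submodule changes. A minimal sketch of that combine-then-digest step, with the tree hashes faked instead of read from git:

import hashlib

def combined_srctree_hash(tree_hashes):
    """Combine one write-tree hash per repository (top-level first, then
    submodules) into a single signature, as the patched
    srctree_hash_files() does."""
    combined = "".join(tree_hashes)
    return hashlib.sha1(combined.encode("utf-8")).hexdigest()

# Fake values standing in for `git write-tree` output; in the class these
# come from subprocess calls against the external source tree.
hashes = [
    "9fceb02d0ae598e95dc970b74767f19372d61af8\n",   # top-level tree
    "1d3fcd5ced445d1abffe6c4c56c9a1f2f9e1d9f1\n",   # submodule
]
print(combined_srctree_hash(hashes))
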
diff --git a/poky/meta/classes/image.bbclass b/poky/meta/classes/image.bbclass
index 459d872b4a..42d2886505 100644
--- a/poky/meta/classes/image.bbclass
+++ b/poky/meta/classes/image.bbclass
@@ -180,6 +180,8 @@ LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IM
# aren't yet available.
PSEUDO_PASSWD = "${IMAGE_ROOTFS}:${STAGING_DIR_NATIVE}"
+PSEUDO_IGNORE_PATHS .= ",${WORKDIR}/intercept_scripts,${WORKDIR}/oe-rootfs-repo,${WORKDIR}/sstate-build-image_complete"
+
PACKAGE_EXCLUDE ??= ""
PACKAGE_EXCLUDE[type] = "list"
diff --git a/poky/meta/classes/image_types.bbclass b/poky/meta/classes/image_types.bbclass
index d81747527b..ff42ac9423 100644
--- a/poky/meta/classes/image_types.bbclass
+++ b/poky/meta/classes/image_types.bbclass
@@ -126,7 +126,7 @@ IMAGE_CMD_squashfs-lz4 = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAM
# required when extracting, but it seems prudent to use it in both cases.
IMAGE_CMD_TAR ?= "tar"
# ignore return code 1 "file changed as we read it" as other tasks(e.g. do_image_wic) may be hardlinking rootfs
-IMAGE_CMD_tar = "${IMAGE_CMD_TAR} --sort=name --numeric-owner -cf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.tar -C ${IMAGE_ROOTFS} . || [ $? -eq 1 ]"
+IMAGE_CMD_tar = "${IMAGE_CMD_TAR} --sort=name --format=posix --numeric-owner -cf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.tar -C ${IMAGE_ROOTFS} . || [ $? -eq 1 ]"
do_image_cpio[cleandirs] += "${WORKDIR}/cpio_append"
IMAGE_CMD_cpio () {
diff --git a/poky/meta/classes/image_types_wic.bbclass b/poky/meta/classes/image_types_wic.bbclass
index 196c86814e..ae00acc5ea 100644
--- a/poky/meta/classes/image_types_wic.bbclass
+++ b/poky/meta/classes/image_types_wic.bbclass
@@ -3,9 +3,9 @@
WICVARS ?= "\
BBLAYERS IMGDEPLOYDIR DEPLOY_DIR_IMAGE FAKEROOTCMD IMAGE_BASENAME IMAGE_BOOT_FILES \
IMAGE_LINK_NAME IMAGE_ROOTFS INITRAMFS_FSTYPES INITRD INITRD_LIVE ISODIR RECIPE_SYSROOT_NATIVE \
- ROOTFS_SIZE STAGING_DATADIR STAGING_DIR STAGING_LIBDIR TARGET_SYS \
+ ROOTFS_SIZE STAGING_DATADIR STAGING_DIR STAGING_LIBDIR TARGET_SYS HOSTTOOLS_DIR \
KERNEL_IMAGETYPE MACHINE INITRAMFS_IMAGE INITRAMFS_IMAGE_BUNDLE INITRAMFS_LINK_NAME APPEND \
- ASSUME_PROVIDED"
+ ASSUME_PROVIDED PSEUDO_IGNORE_PATHS"
inherit ${@bb.utils.contains('INITRAMFS_IMAGE_BUNDLE', '1', 'kernel-artifact-names', '', d)}
@@ -29,17 +29,24 @@ WIC_CREATE_EXTRA_ARGS ?= ""
IMAGE_CMD_wic () {
out="${IMGDEPLOYDIR}/${IMAGE_NAME}"
build_wic="${WORKDIR}/build-wic"
+ tmp_wic="${WORKDIR}/tmp-wic"
wks="${WKS_FULL_PATH}"
+ if [ -e "$tmp_wic" ]; then
+ # Ensure we don't have any junk leftover from a previously interrupted
+ # do_image_wic execution
+ rm -rf "$tmp_wic"
+ fi
if [ -z "$wks" ]; then
bbfatal "No kickstart files from WKS_FILES were found: ${WKS_FILES}. Please set WKS_FILE or WKS_FILES appropriately."
fi
-
- BUILDDIR="${TOPDIR}" wic create "$wks" --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" -o "$build_wic/" ${WIC_CREATE_EXTRA_ARGS}
+ BUILDDIR="${TOPDIR}" PSEUDO_UNLOAD=1 wic create "$wks" --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" -o "$build_wic/" -w "$tmp_wic" ${WIC_CREATE_EXTRA_ARGS}
mv "$build_wic/$(basename "${wks%.wks}")"*.direct "$out${IMAGE_NAME_SUFFIX}.wic"
}
IMAGE_CMD_wic[vardepsexclude] = "WKS_FULL_PATH WKS_FILES TOPDIR"
do_image_wic[cleandirs] = "${WORKDIR}/build-wic"
+PSEUDO_IGNORE_PATHS .= ",${WORKDIR}/build-wic"
+
# Rebuild when the wks file or vars in WICVARS change
USING_WIC = "${@bb.utils.contains_any('IMAGE_FSTYPES', 'wic ' + ' '.join('wic.%s' % c for c in '${CONVERSIONTYPES}'.split()), '1', '', d)}"
WKS_FILE_CHECKSUM = "${@'${WKS_FULL_PATH}:%s' % os.path.exists('${WKS_FULL_PATH}') if '${USING_WIC}' else ''}"
@@ -87,6 +94,10 @@ python do_write_wks_template () {
bb.utils.copyfile(wks_file, "%s/%s" % (depdir, basename + '-' + os.path.basename(wks_file)))
}
+do_flush_pseudodb() {
+ ${FAKEROOTENV} ${FAKEROOTCMD} -S
+}
+
python () {
if d.getVar('USING_WIC'):
wks_file_u = d.getVar('WKS_FULL_PATH', False)
@@ -140,6 +151,7 @@ python do_rootfs_wicenv () {
depdir = d.getVar('IMGDEPLOYDIR')
bb.utils.copyfile(os.path.join(outdir, basename) + '.env', os.path.join(depdir, basename) + '.env')
}
+addtask do_flush_pseudodb after do_rootfs before do_image do_image_qa
addtask do_rootfs_wicenv after do_image before do_image_wic
do_rootfs_wicenv[vardeps] += "${WICVARS}"
do_rootfs_wicenv[prefuncs] = 'set_image_size'
diff --git a/poky/meta/classes/kernel.bbclass b/poky/meta/classes/kernel.bbclass
index 2a65c001d9..83a574efcd 100644
--- a/poky/meta/classes/kernel.bbclass
+++ b/poky/meta/classes/kernel.bbclass
@@ -718,7 +718,7 @@ kernel_do_deploy() {
fi
if [ ! -z "${INITRAMFS_IMAGE}" -a x"${INITRAMFS_IMAGE_BUNDLE}" = x1 ]; then
- for imageType in ${KERNEL_IMAGETYPES} ; do
+ for imageType in ${KERNEL_IMAGETYPE_FOR_MAKE} ; do
if [ "$imageType" = "fitImage" ] ; then
continue
fi
diff --git a/poky/meta/classes/license.bbclass b/poky/meta/classes/license.bbclass
index f90176d6c0..dc91118340 100644
--- a/poky/meta/classes/license.bbclass
+++ b/poky/meta/classes/license.bbclass
@@ -31,6 +31,7 @@ python do_populate_lic() {
f.write("%s: %s\n" % (key, info[key]))
}
+PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '')).split())}"
# it would be better to copy them in do_install_append, but find_license_filesa is python
python perform_packagecopy_prepend () {
enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d)
diff --git a/poky/meta/classes/npm.bbclass b/poky/meta/classes/npm.bbclass
index 068032a1e5..bd01e247cd 100644
--- a/poky/meta/classes/npm.bbclass
+++ b/poky/meta/classes/npm.bbclass
@@ -17,8 +17,10 @@
# NPM_INSTALL_DEV:
# Set to 1 to also install devDependencies.
+inherit python3native
+
DEPENDS_prepend = "nodejs-native "
-RDEPENDS_${PN}_prepend = "nodejs "
+RDEPENDS_${PN}_append_class-target = " nodejs"
NPM_INSTALL_DEV ?= "0"
@@ -237,9 +239,7 @@ python npm_do_compile() {
sysroot = d.getVar("RECIPE_SYSROOT_NATIVE")
nodedir = os.path.join(sysroot, d.getVar("prefix_native").strip("/"))
configs.append(("nodedir", nodedir))
- bindir = os.path.join(sysroot, d.getVar("bindir_native").strip("/"))
- pythondir = os.path.join(bindir, "python-native", "python")
- configs.append(("python", pythondir))
+ configs.append(("python", d.getVar("PYTHON")))
# Add node-pre-gyp configuration
args.append(("target_arch", d.getVar("NPM_ARCH")))
diff --git a/poky/meta/classes/package.bbclass b/poky/meta/classes/package.bbclass
index 7c252dd46b..3ff74c9f31 100644
--- a/poky/meta/classes/package.bbclass
+++ b/poky/meta/classes/package.bbclass
@@ -7,7 +7,7 @@
#
# There are the following default steps but PACKAGEFUNCS can be extended:
#
-# a) package_get_auto_pr - get PRAUTO from remote PR service
+# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
#
# b) perform_packagecopy - Copy D into PKGD
#
@@ -585,12 +585,20 @@ def runtime_mapping_rename (varname, pkg, d):
#bb.note("%s after: %s" % (varname, d.getVar(varname)))
#
-# Package functions suitable for inclusion in PACKAGEFUNCS
+# Used by do_packagedata (and possibly other routines post do_package)
#
+package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
python package_get_auto_pr() {
import oe.prservice
- import re
+
+ def get_do_package_hash(pn):
+ if d.getVar("BB_RUNTASK") != "do_package":
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ for dep in taskdepdata:
+ if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
+ return taskdepdata[dep][6]
+ return None
# Support per recipe PRSERV_HOST
pn = d.getVar('PN')
@@ -602,15 +610,22 @@ python package_get_auto_pr() {
# PR Server not active, handle AUTOINC
if not d.getVar('PRSERV_HOST'):
- if 'AUTOINC' in pkgv:
- d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
+ d.setVar("PRSERV_PV_AUTOINC", "0")
return
auto_pr = None
pv = d.getVar("PV")
version = d.getVar("PRAUTOINX")
pkgarch = d.getVar("PACKAGE_ARCH")
- checksum = d.getVar("BB_TASKHASH")
+ checksum = get_do_package_hash(pn)
+
+ # If do_package isn't in the dependencies, we can't get the checksum...
+ if not checksum:
+ bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
+ #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ #for dep in taskdepdata:
+ # bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
+ return
if d.getVar('PRSERV_LOCKDOWN'):
auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
@@ -628,7 +643,7 @@ python package_get_auto_pr() {
srcpv = bb.fetch2.get_srcrev(d)
base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
value = conn.getPR(base_ver, pkgarch, srcpv)
- d.setVar("PKGV", pkgv.replace("AUTOINC", str(value)))
+ d.setVar("PRSERV_PV_AUTOINC", str(value))
auto_pr = conn.getPR(version, pkgarch, checksum)
except Exception as e:
@@ -638,6 +653,22 @@ python package_get_auto_pr() {
d.setVar('PRAUTO',str(auto_pr))
}
+#
+# Package functions suitable for inclusion in PACKAGEFUNCS
+#
+
+python package_convert_pr_autoinc() {
+ pkgv = d.getVar("PKGV")
+
+ # Adjust pkgv as necessary...
+ if 'AUTOINC' in pkgv:
+ d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))
+
+ # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
+ d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
+ d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
+}
+
LOCALEBASEPN ??= "${PN}"
python package_do_split_locales() {
@@ -1589,7 +1620,7 @@ if [ x"$D" = "x" ]; then
fi
}
-RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps"
+RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"
# Collect perfile run-time dependency metadata
# Output:
@@ -2251,7 +2282,7 @@ python do_package () {
package_qa_handle_error("var-undefined", msg, d)
return
- bb.build.exec_func("package_get_auto_pr", d)
+ bb.build.exec_func("package_convert_pr_autoinc", d)
###########################################################################
# Optimisations
@@ -2323,9 +2354,21 @@ addtask do_package_setscene
# Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both
# do_package_setscene and do_packagedata_setscene leading to races
python do_packagedata () {
+ bb.build.exec_func("package_get_auto_pr", d)
+
src = d.expand("${PKGDESTWORK}")
dest = d.expand("${WORKDIR}/pkgdata-pdata-input")
oe.path.copyhardlinktree(src, dest)
+
+ bb.build.exec_func("packagedata_translate_pr_autoinc", d)
+}
+do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input"
+
+# Translate the EXTENDPRAUTO and AUTOINC to the final values
+packagedata_translate_pr_autoinc() {
+ find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \
+ sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \
+ -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i
}
addtask packagedata before do_build after do_package
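
The package.bbclass rework defers the PR-service lookup from do_package to do_packagedata: do_package now only rewrites AUTOINC in PKGV to the @PRSERV_PV_AUTOINC@ placeholder (package_convert_pr_autoinc), and do_packagedata later resolves the placeholders with sed (packagedata_translate_pr_autoinc). A toy Python walk-through of that two-stage substitution, with a made-up value in place of the PR server answer:

# Stage 1 (do_package time): replace AUTOINC with a stable placeholder so
# the packaged metadata no longer depends on the PR server being reachable.
pkgv = "1.0+gitAUTOINC+deadbeef"
pkgv = pkgv.replace("AUTOINC", "@PRSERV_PV_AUTOINC@")
pkgdata_line = "PKGV: %s\n" % pkgv
print(pkgdata_line, end="")   # PKGV: 1.0+git@PRSERV_PV_AUTOINC@+deadbeef

# Stage 2 (do_packagedata time): query the PR server (value faked here)
# and translate the placeholder in the copied pkgdata files, which is what
# the sed call in packagedata_translate_pr_autoinc does.
prserv_pv_autoinc = "7"            # would come from conn.getPR(...)
resolved = pkgdata_line.replace("@PRSERV_PV_AUTOINC@", prserv_pv_autoinc)
print(resolved, end="")            # PKGV: 1.0+git7+deadbeef
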
diff --git a/poky/meta/classes/package_rpm.bbclass b/poky/meta/classes/package_rpm.bbclass
index 9145717f98..7de409197e 100644
--- a/poky/meta/classes/package_rpm.bbclass
+++ b/poky/meta/classes/package_rpm.bbclass
@@ -681,7 +681,9 @@ python do_package_rpm () {
cmd = cmd + " --define '_binary_payload w6T.xzdio'"
cmd = cmd + " --define '_source_payload w6T.xzdio'"
cmd = cmd + " --define 'clamp_mtime_to_source_date_epoch 1'"
+ cmd = cmd + " --define 'use_source_date_epoch_as_buildtime 1'"
cmd = cmd + " --define '_buildhost reproducible'"
+ cmd = cmd + " --define '__font_provides %{nil}'"
if perfiledeps:
cmd = cmd + " --define '__find_requires " + outdepends + "'"
cmd = cmd + " --define '__find_provides " + outprovides + "'"
diff --git a/poky/meta/classes/populate_sdk_base.bbclass b/poky/meta/classes/populate_sdk_base.bbclass
index 39a7cadaf8..6954237596 100644
--- a/poky/meta/classes/populate_sdk_base.bbclass
+++ b/poky/meta/classes/populate_sdk_base.bbclass
@@ -178,6 +178,8 @@ do_populate_sdk[sstate-inputdirs] = "${SDKDEPLOYDIR}"
do_populate_sdk[sstate-outputdirs] = "${SDK_DEPLOY}"
do_populate_sdk[stamp-extra-info] = "${MACHINE_ARCH}${SDKMACHINE}"
+PSEUDO_IGNORE_PATHS .= ",${SDKDEPLOYDIR},${WORKDIR}/oe-sdk-repo,${WORKDIR}/sstate-build-populate_sdk"
+
fakeroot create_sdk_files() {
cp ${COREBASE}/scripts/relocate_sdk.py ${SDK_OUTPUT}/${SDKPATH}/
diff --git a/poky/meta/classes/python3native.bbclass b/poky/meta/classes/python3native.bbclass
index d98fb4c758..2e3a88c126 100644
--- a/poky/meta/classes/python3native.bbclass
+++ b/poky/meta/classes/python3native.bbclass
@@ -17,8 +17,6 @@ export STAGING_LIBDIR
export PYTHON_LIBRARY="${STAGING_LIBDIR}/lib${PYTHON_DIR}${PYTHON_ABI}.so"
export PYTHON_INCLUDE_DIR="${STAGING_INCDIR}/${PYTHON_DIR}${PYTHON_ABI}"
-export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
-
# suppress host user's site-packages dirs.
export PYTHONNOUSERSITE = "1"
diff --git a/poky/meta/classes/python3targetconfig.bbclass b/poky/meta/classes/python3targetconfig.bbclass
new file mode 100644
index 0000000000..fc1025c207
--- /dev/null
+++ b/poky/meta/classes/python3targetconfig.bbclass
@@ -0,0 +1,17 @@
+inherit python3native
+
+EXTRA_PYTHON_DEPENDS ?= ""
+EXTRA_PYTHON_DEPENDS_class-target = "python3"
+DEPENDS_append = " ${EXTRA_PYTHON_DEPENDS}"
+
+do_configure_prepend_class-target() {
+ export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
+}
+
+do_compile_prepend_class-target() {
+ export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
+}
+
+do_install_prepend_class-target() {
+ export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
+}
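
The new python3targetconfig.bbclass exports _PYTHON_SYSCONFIGDATA_NAME during configure, compile and install so the native interpreter loads the target's _sysconfigdata module instead of its own. Roughly, CPython's sysconfig honours that variable as sketched below; the fallback module name is illustrative, since the real default depends on the interpreter build:

import importlib
import os

def load_sysconfigdata():
    """Sketch of how CPython's sysconfig picks its build-configuration
    module: the environment variable wins if it is set."""
    name = os.environ.get("_PYTHON_SYSCONFIGDATA_NAME",
                          "_sysconfigdata__linux_x86_64-linux-gnu")
    return importlib.import_module(name)

os.environ["_PYTHON_SYSCONFIGDATA_NAME"] = "_sysconfigdata"
try:
    data = load_sysconfigdata()
    print(data.build_time_vars.get("LIBDIR"))
except ImportError:
    # Outside a Yocto cross build there is usually no module literally named
    # "_sysconfigdata", so this just demonstrates the lookup order.
    print("no _sysconfigdata module on this host")
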
diff --git a/poky/meta/classes/report-error.bbclass b/poky/meta/classes/report-error.bbclass
index 1a12db1206..9cb6b0bd31 100644
--- a/poky/meta/classes/report-error.bbclass
+++ b/poky/meta/classes/report-error.bbclass
@@ -6,6 +6,8 @@
#
# Licensed under the MIT license, see COPYING.MIT for details
+inherit base
+
ERR_REPORT_DIR ?= "${LOG_DIR}/error-report"
def errorreport_getdata(e):
@@ -64,6 +66,8 @@ python errorreport_handler () {
data['failures'] = []
data['component'] = " ".join(e.getPkgs())
data['branch_commit'] = str(base_detect_branch(e.data)) + ": " + str(base_detect_revision(e.data))
+ data['bitbake_version'] = e.data.getVar("BB_VERSION")
+ data['layer_version'] = get_layers_branch_rev(e.data)
data['local_conf'] = get_conf_data(e, 'local.conf')
data['auto_conf'] = get_conf_data(e, 'auto.conf')
lock = bb.utils.lockfile(datafile + '.lock')
diff --git a/poky/meta/classes/reproducible_build.bbclass b/poky/meta/classes/reproducible_build.bbclass
index 2f3bd90b07..f06e00d70d 100644
--- a/poky/meta/classes/reproducible_build.bbclass
+++ b/poky/meta/classes/reproducible_build.bbclass
@@ -37,10 +37,13 @@
BUILD_REPRODUCIBLE_BINARIES ??= '1'
inherit ${@oe.utils.ifelse(d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1', 'reproducible_build_simple', '')}
-SDE_DIR ="${WORKDIR}/source-date-epoch"
+SDE_DIR = "${WORKDIR}/source-date-epoch"
SDE_FILE = "${SDE_DIR}/__source_date_epoch.txt"
SDE_DEPLOYDIR = "${WORKDIR}/deploy-source-date-epoch"
+# A SOURCE_DATE_EPOCH of '0' might be misinterpreted as no SDE
+export SOURCE_DATE_EPOCH_FALLBACK ??= "1302044400"
+
SSTATETASKS += "do_deploy_source_date_epoch"
do_deploy_source_date_epoch () {
@@ -93,15 +96,19 @@ def get_source_date_epoch_value(d):
return cached
epochfile = d.getVar('SDE_FILE')
- source_date_epoch = 0
+ source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
if os.path.isfile(epochfile):
with open(epochfile, 'r') as f:
s = f.read()
try:
source_date_epoch = int(s)
+ # workaround for old sstate with SDE_FILE content being 0 - use SOURCE_DATE_EPOCH_FALLBACK
+ if source_date_epoch == 0 :
+ source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
+ bb.warn("SOURCE_DATE_EPOCH value from sstate '%s' is deprecated/invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK '%s'" % (s, source_date_epoch))
except ValueError:
- bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to 0" % s)
- source_date_epoch = 0
+ bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK" % s)
+ source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
else:
bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, source_date_epoch))
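
The reproducible_build change stops treating 0 as a usable SOURCE_DATE_EPOCH: missing, unparsable, or zero values all resolve to SOURCE_DATE_EPOCH_FALLBACK. A condensed sketch of that decision logic (the file path is just an example; the fallback value matches the class default):

import os

SOURCE_DATE_EPOCH_FALLBACK = 1302044400  # same default as the class

def get_source_date_epoch(epochfile):
    """Mirror the patched get_source_date_epoch_value(): anything that is
    missing, non-numeric, or zero resolves to the fallback."""
    source_date_epoch = SOURCE_DATE_EPOCH_FALLBACK
    if os.path.isfile(epochfile):
        with open(epochfile) as f:
            raw = f.read().strip()
        try:
            source_date_epoch = int(raw)
            if source_date_epoch == 0:
                # old sstate objects may contain a literal 0
                source_date_epoch = SOURCE_DATE_EPOCH_FALLBACK
        except ValueError:
            source_date_epoch = SOURCE_DATE_EPOCH_FALLBACK
    return source_date_epoch

print(get_source_date_epoch("/nonexistent/__source_date_epoch.txt"))
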
diff --git a/poky/meta/classes/sanity.bbclass b/poky/meta/classes/sanity.bbclass
index 1486cce357..866d066288 100644
--- a/poky/meta/classes/sanity.bbclass
+++ b/poky/meta/classes/sanity.bbclass
@@ -619,6 +619,9 @@ def sanity_handle_abichanges(status, d):
f.write(current_abi)
elif int(abi) <= 11 and current_abi == "12":
status.addresult("The layout of TMPDIR changed for Recipe Specific Sysroots.\nConversion doesn't make sense and this change will rebuild everything so please delete TMPDIR (%s).\n" % d.getVar("TMPDIR"))
+ elif int(abi) <= 13 and current_abi == "14":
+ status.addresult("TMPDIR changed to include path filtering from the pseudo database.\nIt is recommended to use a clean TMPDIR with the new pseudo path filtering so TMPDIR (%s) would need to be removed to continue.\n" % d.getVar("TMPDIR"))
+
elif (abi != current_abi):
# Code to convert from one ABI to another could go here if possible.
status.addresult("Error, TMPDIR has changed its layout version number (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi))
@@ -700,6 +703,23 @@ def check_sanity_version_change(status, d):
if (tmpdirmode & stat.S_ISUID):
status.addresult("TMPDIR is setuid, please don't build in a setuid directory")
+ # Check that a user isn't building in a path in PSEUDO_IGNORE_PATHS
+ pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
+ workdir = d.getVar('WORKDIR', expand=True)
+ for i in pseudoignorepaths:
+ if i and workdir.startswith(i):
+ status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n")
+
+ # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap
+ pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
+ pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}"
+ pseudocontroldir = d.expand(pseudo_control_dir).split(",")
+ for i in pseudoignorepaths:
+ for j in pseudocontroldir:
+ if i and j:
+ if j.startswith(i):
+ status.addresult("A path included in PSEUDO_IGNORE_PATHS " + str(i) + " and the path " + str(j) + " overlap and this will break pseudo permission and ownership tracking. Please set the path " + str(j) + " to a different directory which does not overlap with pseudo controlled directories. \n")
+
# Some third-party software apparently relies on chmod etc. being suid root (!!)
import stat
suid_check_bins = "chown chmod mknod".split()
@@ -784,6 +804,11 @@ def check_sanity_everybuild(status, d):
if "." in paths or "./" in paths or "" in paths:
status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")
+ #Check if bitbake is present in PATH environment variable
+ bb_check = bb.utils.which(d.getVar('PATH'), 'bitbake')
+ if not bb_check:
+ bb.warn("bitbake binary is not found in PATH, did you source the script?")
+
# Check whether 'inherit' directive is found (used for a class to inherit)
# in conf file it's supposed to be uppercase INHERIT
inherit = d.getVar('inherit')
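
The new sanity checks reject configurations where PSEUDO_IGNORE_PATHS covers the build directory or any pseudo-controlled output directory (D, PKGD, PKGDEST, ...), since ignoring those paths would silently drop ownership and permission tracking. A standalone sketch of the overlap test, with example paths in place of the expanded BitBake variables:

def pseudo_path_conflicts(ignore_paths, controlled_paths):
    """Return (ignored, controlled) pairs where an ignored prefix would
    swallow a directory that pseudo must keep tracking - the condition
    the patched sanity.bbclass reports as an error."""
    conflicts = []
    for ignored in filter(None, ignore_paths):
        for controlled in filter(None, controlled_paths):
            if controlled.startswith(ignored):
                conflicts.append((ignored, controlled))
    return conflicts

ignore = "/home/user/src,/tmp/build/tmp/work".split(",")
controlled = ("/tmp/build/tmp/work/foo/1.0-r0/image,"
              "/tmp/build/tmp/work/foo/1.0-r0/package").split(",")
print(pseudo_path_conflicts(ignore, controlled))
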
diff --git a/poky/meta/classes/scons.bbclass b/poky/meta/classes/scons.bbclass
index 6b171ca8df..4f3ae502ef 100644
--- a/poky/meta/classes/scons.bbclass
+++ b/poky/meta/classes/scons.bbclass
@@ -5,7 +5,6 @@ DEPENDS += "python3-scons-native"
EXTRA_OESCONS ?= ""
do_configure() {
- unset _PYTHON_SYSCONFIGDATA_NAME
if [ -n "${CONFIGURESTAMPFILE}" ]; then
if [ -e "${CONFIGURESTAMPFILE}" -a "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" -a "${CLEANBROKEN}" != "1" ]; then
${STAGING_BINDIR_NATIVE}/scons --clean PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS}
@@ -17,13 +16,11 @@ do_configure() {
}
scons_do_compile() {
- unset _PYTHON_SYSCONFIGDATA_NAME
${STAGING_BINDIR_NATIVE}/scons ${PARALLEL_MAKE} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} || \
die "scons build execution failed."
}
scons_do_install() {
- unset _PYTHON_SYSCONFIGDATA_NAME
${STAGING_BINDIR_NATIVE}/scons install_root=${D}${prefix} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} install || \
die "scons install execution failed."
}
diff --git a/poky/meta/classes/sstate.bbclass b/poky/meta/classes/sstate.bbclass
index 66a96a7603..a8e169a10b 100644
--- a/poky/meta/classes/sstate.bbclass
+++ b/poky/meta/classes/sstate.bbclass
@@ -72,6 +72,7 @@ BB_HASHFILENAME = "False ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}"
SSTATE_ARCHS = " \
${BUILD_ARCH} \
+ ${BUILD_ARCH}_${ORIGNATIVELSBSTRING} \
${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS} \
${BUILD_ARCH}_${TARGET_ARCH} \
${SDK_ARCH}_${SDK_OS} \
@@ -80,6 +81,7 @@ SSTATE_ARCHS = " \
${PACKAGE_ARCH} \
${PACKAGE_EXTRA_ARCHS} \
${MACHINE_ARCH}"
+SSTATE_ARCHS[vardepsexclude] = "ORIGNATIVELSBSTRING"
SSTATE_MANMACH ?= "${SSTATE_PKGARCH}"
@@ -121,6 +123,8 @@ SSTATE_HASHEQUIV_REPORT_TASKDATA[doc] = "Report additional useful data to the \
python () {
if bb.data.inherits_class('native', d):
d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH', False))
+ if d.getVar("PN") == "pseudo-native":
+ d.appendVar('SSTATE_PKGARCH', '_${ORIGNATIVELSBSTRING}')
elif bb.data.inherits_class('crosssdk', d):
d.setVar('SSTATE_PKGARCH', d.expand("${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"))
elif bb.data.inherits_class('cross', d):
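
The sstate hunks key pseudo-native's shared-state architecture on ORIGNATIVELSBSTRING (the host distribution detected before any relabelling), so a pseudo binary built on one host distro is not reused on another, while every other native recipe keeps the plain BUILD_ARCH. A small sketch of that per-recipe decision, again with a plain dict for the datastore and example values:

def sstate_pkgarch(d):
    """Pick SSTATE_PKGARCH the way the patched anonymous python does for
    native recipes: only pseudo-native gets the host-distro suffix."""
    arch = d["BUILD_ARCH"]
    if d["PN"] == "pseudo-native":
        arch += "_" + d["ORIGNATIVELSBSTRING"]
    return arch

host = {"BUILD_ARCH": "x86_64", "ORIGNATIVELSBSTRING": "ubuntu-20.04"}
print(sstate_pkgarch({**host, "PN": "pseudo-native"}))  # x86_64_ubuntu-20.04
print(sstate_pkgarch({**host, "PN": "quilt-native"}))   # x86_64
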
diff --git a/poky/meta/classes/staging.bbclass b/poky/meta/classes/staging.bbclass
index 5b04f88b2d..506ce0665e 100644
--- a/poky/meta/classes/staging.bbclass
+++ b/poky/meta/classes/staging.bbclass
@@ -27,11 +27,15 @@ SYSROOT_DIRS_BLACKLIST = " \
${mandir} \
${docdir} \
${infodir} \
+ ${datadir}/X11/locale \
${datadir}/applications \
+ ${datadir}/bash-completion \
${datadir}/fonts \
${datadir}/gtk-doc/html \
+ ${datadir}/installed-tests \
${datadir}/locale \
${datadir}/pixmaps \
+ ${datadir}/terminfo \
${libdir}/${BPN}/ptest \
"