Diffstat (limited to 'poky/meta/classes')
58 files changed, 782 insertions, 439 deletions
diff --git a/poky/meta/classes/archiver.bbclass b/poky/meta/classes/archiver.bbclass index af9f010df..093e2d95a 100644 --- a/poky/meta/classes/archiver.bbclass +++ b/poky/meta/classes/archiver.bbclass @@ -78,6 +78,9 @@ python () { bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn) return + def hasTask(task): + return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False)) + ar_src = d.getVarFlag('ARCHIVER_MODE', 'src') ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata') ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe') @@ -98,9 +101,6 @@ python () { # There is a corner case with "gcc-source-${PV}" recipes, they don't have # the "do_configure" task, so we need to use "do_preconfigure" - def hasTask(task): - return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False)) - if hasTask("do_preconfigure"): d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn) elif hasTask("do_configure"): @@ -118,7 +118,11 @@ python () { # Output the SRPM package if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'): - if "package_rpm" in d.getVar('PACKAGE_CLASSES'): + if "package_rpm" not in d.getVar('PACKAGE_CLASSES'): + bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES") + + # Some recipes do not have any packaging tasks + if hasTask("do_package_write_rpm"): d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn) d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}') d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}') @@ -133,8 +137,6 @@ python () { d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn) elif ar_src == "configured": d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn) - else: - bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES") } # Take all the sources for a recipe and puts them in WORKDIR/archiver-work/. @@ -219,9 +221,10 @@ python do_ar_patched() { # Get the ARCHIVER_OUTDIR before we reset the WORKDIR ar_outdir = d.getVar('ARCHIVER_OUTDIR') - ar_workdir = d.getVar('ARCHIVER_WORKDIR') + if not is_work_shared(d): + ar_workdir = d.getVar('ARCHIVER_WORKDIR') + d.setVar('WORKDIR', ar_workdir) bb.note('Archiving the patched source...') - d.setVar('WORKDIR', ar_workdir) create_tarball(d, d.getVar('S'), 'patched', ar_outdir) } diff --git a/poky/meta/classes/autotools.bbclass b/poky/meta/classes/autotools.bbclass index 8768a6ad6..3d22ad025 100644 --- a/poky/meta/classes/autotools.bbclass +++ b/poky/meta/classes/autotools.bbclass @@ -25,7 +25,9 @@ inherit siteinfo # Space separated list of shell scripts with variables defined to supply test # results for autoconf tests we cannot run at build time. -export CONFIG_SITE = "${@siteinfo_get_files(d)}" +# The value of this variable is filled in in a prefunc because it depends on +# the contents of the sysroot. 
+export CONFIG_SITE acpaths ?= "default" EXTRA_AUTORECONF = "--exclude=autopoint" @@ -132,6 +134,8 @@ EXTRACONFFUNCS ??= "" EXTRA_OECONF_append = " ${PACKAGECONFIG_CONFARGS}" do_configure[prefuncs] += "autotools_preconfigure autotools_aclocals ${EXTRACONFFUNCS}" +do_compile[prefuncs] += "autotools_aclocals" +do_install[prefuncs] += "autotools_aclocals" do_configure[postfuncs] += "autotools_postconfigure" ACLOCALDIR = "${STAGING_DATADIR}/aclocal" @@ -140,7 +144,6 @@ ACLOCALEXTRAPATH_class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" ACLOCALEXTRAPATH_class-nativesdk = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" python autotools_aclocals () { - # Refresh variable with cache files d.setVar("CONFIG_SITE", siteinfo_get_files(d, sysrootcache=True)) } diff --git a/poky/meta/classes/base.bbclass b/poky/meta/classes/base.bbclass index 0c8a4b286..d3184ecf7 100644 --- a/poky/meta/classes/base.bbclass +++ b/poky/meta/classes/base.bbclass @@ -32,9 +32,11 @@ def oe_import(d): import oe.data for toimport in oe.data.typed_value("OE_IMPORTS", d): - imported = __import__(toimport) - inject(toimport.split(".", 1)[0], imported) - + try: + imported = __import__(toimport) + inject(toimport.split(".", 1)[0], imported) + except AttributeError as e: + bb.error("Error importing OE modules: %s" % str(e)) return "" # We need the oe module name space early (before INHERITs get added) diff --git a/poky/meta/classes/bluetooth.bbclass b/poky/meta/classes/bluetooth.bbclass deleted file mode 100644 index f88b4ae5b..000000000 --- a/poky/meta/classes/bluetooth.bbclass +++ /dev/null @@ -1,14 +0,0 @@ -# Avoid code duplication in bluetooth-dependent recipes. - -# Define a variable that expands to the recipe (package) providing core -# bluetooth support on the platform: -# "" if bluetooth is not in DISTRO_FEATURES -# else "bluez5" if bluez5 is in DISTRO_FEATURES -# else "bluez4" - -# Use this with: -# inherit bluetooth -# PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', '${BLUEZ}', '', d)} -# PACKAGECONFIG[bluez4] = "--enable-bluez4,--disable-bluez4,bluez4" - -BLUEZ ?= "${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', bb.utils.contains('DISTRO_FEATURES', 'bluez5', 'bluez5', 'bluez4', d), '', d)}" diff --git a/poky/meta/classes/buildhistory.bbclass b/poky/meta/classes/buildhistory.bbclass index 2e501df24..f986f7c79 100644 --- a/poky/meta/classes/buildhistory.bbclass +++ b/poky/meta/classes/buildhistory.bbclass @@ -60,15 +60,34 @@ SSTATEPOSTUNPACKFUNCS[vardepvalueexclude] .= "| buildhistory_emit_outputsigs" # When extending build history, derive your class from buildhistory.bbclass # and extend this list here with the additional files created by the derived # class. 
-BUILDHISTORY_PRESERVE = "latest latest_srcrev" +BUILDHISTORY_PRESERVE = "latest latest_srcrev sysroot" PATCH_GIT_USER_EMAIL ?= "buildhistory@oe" PATCH_GIT_USER_NAME ?= "OpenEmbedded" # +# Write out the contents of the sysroot +# +buildhistory_emit_sysroot() { + mkdir --parents ${BUILDHISTORY_DIR_PACKAGE} + case ${CLASSOVERRIDE} in + class-native|class-cross|class-crosssdk) + BASE=${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE} + ;; + *) + BASE=${SYSROOT_DESTDIR} + ;; + esac + buildhistory_list_files_no_owners $BASE ${BUILDHISTORY_DIR_PACKAGE}/sysroot +} + +# # Write out metadata about this package for comparison when writing future packages # python buildhistory_emit_pkghistory() { + if d.getVar('BB_CURRENTTASK') in ['populate_sysroot', 'populate_sysroot_setscene']: + bb.build.exec_func("buildhistory_emit_sysroot", d) + if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']: return 0 @@ -529,6 +548,20 @@ buildhistory_list_files() { fi | sort -k5 | sed 's/ * -> $//' > $2 ) } +buildhistory_list_files_no_owners() { + # List the files in the specified directory, but exclude date/time etc. + # Also don't output the ownership data, but instead output just - - so + # that the same parsing code as for _list_files works. + # This is somewhat messy, but handles where the size is not printed for device files under pseudo + ( cd $1 + find_cmd='find . ! -path . -printf "%M - - %10s %p -> %l\n"' + if [ "$3" = "fakeroot" ] ; then + eval ${FAKEROOTENV} ${FAKEROOTCMD} "$find_cmd" + else + eval "$find_cmd" + fi | sort -k5 | sed 's/ * -> $//' > $2 ) +} + buildhistory_list_pkg_files() { # Create individual files-in-package for each recipe's package for pkgdir in $(find ${PKGDEST}/* -maxdepth 0 -type d); do diff --git a/poky/meta/classes/chrpath.bbclass b/poky/meta/classes/chrpath.bbclass index ad3c3975a..2870c10d5 100644 --- a/poky/meta/classes/chrpath.bbclass +++ b/poky/meta/classes/chrpath.bbclass @@ -1,7 +1,7 @@ CHRPATH_BIN ?= "chrpath" PREPROCESS_RELOCATE_DIRS ?= "" -def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d): +def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = False): import subprocess as sub p = sub.Popen([cmd, '-l', fpath],stdout=sub.PIPE,stderr=sub.PIPE) @@ -39,6 +39,9 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d): # if we have modified some rpaths call chrpath to update the binary if modified: + if break_hardlinks: + bb.utils.break_hardlinks(fpath) + args = ":".join(new_rpaths) #bb.note("Setting rpath for %s to %s" %(fpath, args)) p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE) @@ -46,7 +49,7 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d): if p.returncode != 0: bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN'), p.returncode, out, err)) -def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d): +def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = False): import subprocess as sub p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', fpath],stdout=sub.PIPE,stderr=sub.PIPE) @@ -61,11 +64,14 @@ def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d): if baseprefix not in rpath: continue + if break_hardlinks: + bb.utils.break_hardlinks(fpath) + newpath = "@loader_path/" + os.path.relpath(rpath, os.path.dirname(fpath.replace(rootdir, "/"))) p = sub.Popen([d.expand("${HOST_PREFIX}install_name_tool"), '-change', rpath, newpath, fpath],stdout=sub.PIPE,stderr=sub.PIPE) out, err = 
p.communicate() -def process_dir (rootdir, directory, d): +def process_dir(rootdir, directory, d, break_hardlinks = False): import stat rootdir = os.path.normpath(rootdir) @@ -95,7 +101,7 @@ def process_dir (rootdir, directory, d): continue if os.path.isdir(fpath): - process_dir(rootdir, fpath, d) + process_dir(rootdir, fpath, d, break_hardlinks = break_hardlinks) else: #bb.note("Testing %s for relocatability" % fpath) @@ -108,8 +114,9 @@ def process_dir (rootdir, directory, d): else: # Temporarily make the file writeable so we can chrpath it os.chmod(fpath, perms|stat.S_IRWXU) - process_file(cmd, fpath, rootdir, baseprefix, tmpdir, d) - + + process_file(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = break_hardlinks) + if perms: os.chmod(fpath, perms) diff --git a/poky/meta/classes/core-image.bbclass b/poky/meta/classes/core-image.bbclass index a9a2cec68..94f112c39 100644 --- a/poky/meta/classes/core-image.bbclass +++ b/poky/meta/classes/core-image.bbclass @@ -30,6 +30,7 @@ # - dev-pkgs - development packages (headers, etc.) for all installed packages in the rootfs # - dbg-pkgs - debug symbol packages for all installed packages in the rootfs # - doc-pkgs - documentation packages for all installed packages in the rootfs +# - bash-completion-pkgs - bash-completion packages for recipes using bash-completion bbclass # - ptest-pkgs - ptest packages for all ptest-enabled recipes # - read-only-rootfs - tweaks an image to support read-only rootfs # - splash - bootup splash screen diff --git a/poky/meta/classes/cve-check.bbclass b/poky/meta/classes/cve-check.bbclass index 379f7121c..c00d2910b 100644 --- a/poky/meta/classes/cve-check.bbclass +++ b/poky/meta/classes/cve-check.bbclass @@ -26,7 +26,7 @@ CVE_PRODUCT ??= "${BPN}" CVE_VERSION ??= "${PV}" CVE_CHECK_DB_DIR ?= "${DL_DIR}/CVE_CHECK" -CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvd-json.db" +CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_1.0.db" CVE_CHECK_LOG ?= "${T}/cve.log" CVE_CHECK_TMP_FILE ?= "${TMPDIR}/cve_check" @@ -37,32 +37,33 @@ CVE_CHECK_COPY_FILES ??= "1" CVE_CHECK_CREATE_MANIFEST ??= "1" # Whitelist for packages (PN) -CVE_CHECK_PN_WHITELIST = "\ - glibc-locale \ -" +CVE_CHECK_PN_WHITELIST ?= "" -# Whitelist for CVE and version of package -CVE_CHECK_CVE_WHITELIST = "{\ - 'CVE-2014-2524': ('6.3','5.2',), \ -}" +# Whitelist for CVE. If a CVE is found, then it is considered patched. +# The value is a string containing space separated CVE values: +# +# CVE_CHECK_WHITELIST = 'CVE-2014-2524 CVE-2018-1234' +# +CVE_CHECK_WHITELIST ?= "" python do_cve_check () { """ Check recipe for patched and unpatched CVEs """ - if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")): + if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")): patched_cves = get_patches_cves(d) patched, unpatched = check_cves(d, patched_cves) if patched or unpatched: cve_data = get_cve_info(d, patched + unpatched) cve_write_data(d, patched, unpatched, cve_data) else: - bb.note("Failed to update CVE database, skipping CVE check") + bb.note("No CVE database found, skipping CVE check") + } addtask cve_check after do_unpack before do_build -do_cve_check[depends] = "cve-update-db:do_populate_cve_db" +do_cve_check[depends] = "cve-update-db-native:do_populate_cve_db" do_cve_check[nostamp] = "1" python cve_check_cleanup () { @@ -170,52 +171,87 @@ def check_cves(d, patched_cves): cves_unpatched = [] # CVE_PRODUCT can contain more than one product (eg. 
curl/libcurl) - bpn = d.getVar("CVE_PRODUCT").split() + products = d.getVar("CVE_PRODUCT").split() # If this has been unset then we're not scanning for CVEs here (for example, image recipes) - if len(bpn) == 0: + if not products: return ([], []) pv = d.getVar("CVE_VERSION").split("+git")[0] - cve_whitelist = ast.literal_eval(d.getVar("CVE_CHECK_CVE_WHITELIST")) # If the recipe has been whitlisted we return empty lists if d.getVar("PN") in d.getVar("CVE_CHECK_PN_WHITELIST").split(): bb.note("Recipe has been whitelisted, skipping check") return ([], []) + old_cve_whitelist = d.getVar("CVE_CHECK_CVE_WHITELIST") + if old_cve_whitelist: + bb.warn("CVE_CHECK_CVE_WHITELIST is deprecated, please use CVE_CHECK_WHITELIST.") + cve_whitelist = d.getVar("CVE_CHECK_WHITELIST").split() + import sqlite3 db_file = d.getVar("CVE_CHECK_DB_FILE") conn = sqlite3.connect(db_file) - c = conn.cursor() - query = """SELECT * FROM PRODUCTS WHERE - (PRODUCT IS '{0}' AND VERSION = '{1}' AND OPERATOR IS '=') OR - (PRODUCT IS '{0}' AND OPERATOR IS '<=');""" - for idx in range(len(bpn)): - for row in c.execute(query.format(bpn[idx],pv)): - cve = row[1] - version = row[4] + for product in products: + c = conn.cursor() + if ":" in product: + vendor, product = product.split(":", 1) + c.execute("SELECT * FROM PRODUCTS WHERE PRODUCT IS ? AND VENDOR IS ?", (product, vendor)) + else: + c.execute("SELECT * FROM PRODUCTS WHERE PRODUCT IS ?", (product,)) - try: - discardVersion = LooseVersion(version) < LooseVersion(pv) - except: - discardVersion = True + for row in c: + cve = row[0] + version_start = row[3] + operator_start = row[4] + version_end = row[5] + operator_end = row[6] - if pv in cve_whitelist.get(cve,[]): - bb.note("%s-%s has been whitelisted for %s" % (bpn[idx], pv, cve)) + if cve in cve_whitelist: + bb.note("%s-%s has been whitelisted for %s" % (product, pv, cve)) elif cve in patched_cves: bb.note("%s has been patched" % (cve)) - elif discardVersion: - bb.debug(2, "Do not consider version %s " % (version)) else: - cves_unpatched.append(cve) - bb.debug(2, "%s-%s is not patched for %s" % (bpn[idx], pv, cve)) + to_append = False + if (operator_start == '=' and pv == version_start): + cves_unpatched.append(cve) + else: + if operator_start: + try: + to_append_start = (operator_start == '>=' and LooseVersion(pv) >= LooseVersion(version_start)) + to_append_start |= (operator_start == '>' and LooseVersion(pv) > LooseVersion(version_start)) + except: + bb.note("%s: Failed to compare %s %s %s for %s" % + (product, pv, operator_start, version_start, cve)) + to_append_start = False + else: + to_append_start = False + + if operator_end: + try: + to_append_end = (operator_end == '<=' and LooseVersion(pv) <= LooseVersion(version_end)) + to_append_end |= (operator_end == '<' and LooseVersion(pv) < LooseVersion(version_end)) + except: + bb.note("%s: Failed to compare %s %s %s for %s" % + (product, pv, operator_end, version_end, cve)) + to_append_end = False + else: + to_append_end = False + + if operator_start and operator_end: + to_append = to_append_start and to_append_end + else: + to_append = to_append_start or to_append_end + + if to_append: + cves_unpatched.append(cve) + bb.debug(2, "%s-%s is not patched for %s" % (product, pv, cve)) conn.close() return (list(patched_cves), cves_unpatched) def get_cve_info(d, cves): """ - Get CVE information from the database used by cve-check-tool. + Get CVE information from the database. 
Unfortunately the only way to get CVE info is set the output to html (hard to parse) or query directly the database. diff --git a/poky/meta/classes/externalsrc.bbclass b/poky/meta/classes/externalsrc.bbclass index 3618b99a8..ea59d02ed 100644 --- a/poky/meta/classes/externalsrc.bbclass +++ b/poky/meta/classes/externalsrc.bbclass @@ -203,7 +203,7 @@ def srctree_hash_files(d, srcdir=None): ret = " " if git_dir is not None: - oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1') + oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1-%s' % d.getVar('PN')) with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index: # Clone index shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name) diff --git a/poky/meta/classes/gnome.bbclass b/poky/meta/classes/gnome.bbclass deleted file mode 100644 index c6202bbb7..000000000 --- a/poky/meta/classes/gnome.bbclass +++ /dev/null @@ -1 +0,0 @@ -inherit gnomebase gtk-icon-cache gconf mime diff --git a/poky/meta/classes/go-ptest.bbclass b/poky/meta/classes/go-ptest.bbclass new file mode 100644 index 000000000..e230a8058 --- /dev/null +++ b/poky/meta/classes/go-ptest.bbclass @@ -0,0 +1,54 @@ +inherit go ptest + +do_compile_ptest_base() { + export TMPDIR="${GOTMPDIR}" + rm -f ${B}/.go_compiled_tests.list + go_list_package_tests | while read pkg; do + cd ${B}/src/$pkg + ${GO} test ${GOPTESTBUILDFLAGS} $pkg + find . -mindepth 1 -maxdepth 1 -type f -name '*.test' -exec echo $pkg/{} \; | \ + sed -e's,/\./,/,'>> ${B}/.go_compiled_tests.list + done + do_compile_ptest +} + +do_compile_ptest_base[dirs] =+ "${GOTMPDIR}" + +go_make_ptest_wrapper() { + cat >${D}${PTEST_PATH}/run-ptest <<EOF +#!/bin/sh +RC=0 +run_test() ( + cd "\$1" + ((((./\$2 ${GOPTESTFLAGS}; echo \$? >&3) | sed -r -e"s,^(PASS|SKIP|FAIL)\$,\\1: \$1/\$2," >&4) 3>&1) | (read rc; exit \$rc)) 4>&1 + exit \$?) 
+EOF + +} + +do_install_ptest_base() { + test -f "${B}/.go_compiled_tests.list" || exit 0 + install -d ${D}${PTEST_PATH} + go_stage_testdata + go_make_ptest_wrapper + havetests="" + while read test; do + testdir=`dirname $test` + testprog=`basename $test` + install -d ${D}${PTEST_PATH}/$testdir + install -m 0755 ${B}/src/$test ${D}${PTEST_PATH}/$test + echo "run_test $testdir $testprog || RC=1" >> ${D}${PTEST_PATH}/run-ptest + havetests="yes" + done < ${B}/.go_compiled_tests.list + if [ -n "$havetests" ]; then + echo "exit \$RC" >> ${D}${PTEST_PATH}/run-ptest + chmod +x ${D}${PTEST_PATH}/run-ptest + else + rm -rf ${D}${PTEST_PATH} + fi + do_install_ptest + chown -R root:root ${D}${PTEST_PATH} +} + +INSANE_SKIP_${PN}-ptest += "ldflags" + diff --git a/poky/meta/classes/go.bbclass b/poky/meta/classes/go.bbclass index e05a5c641..e40e55689 100644 --- a/poky/meta/classes/go.bbclass +++ b/poky/meta/classes/go.bbclass @@ -1,4 +1,4 @@ -inherit goarch ptest +inherit goarch GO_PARALLEL_BUILD ?= "${@oe.utils.parallel_make_argument(d, '-p %d')}" @@ -71,17 +71,13 @@ python go_do_unpack() { if len(src_uri) == 0: return - try: - fetcher = bb.fetch2.Fetch(src_uri, d) - for url in fetcher.urls: - if fetcher.ud[url].type == 'git': - if fetcher.ud[url].parm.get('destsuffix') is None: - s_dirname = os.path.basename(d.getVar('S')) - fetcher.ud[url].parm['destsuffix'] = os.path.join(s_dirname, 'src', - d.getVar('GO_IMPORT')) + '/' - fetcher.unpack(d.getVar('WORKDIR')) - except bb.fetch2.BBFetchException as e: - raise bb.build.FuncFailed(e) + fetcher = bb.fetch2.Fetch(src_uri, d) + for url in fetcher.urls: + if fetcher.ud[url].type == 'git': + if fetcher.ud[url].parm.get('destsuffix') is None: + s_dirname = os.path.basename(d.getVar('S')) + fetcher.ud[url].parm['destsuffix'] = os.path.join(s_dirname, 'src', d.getVar('GO_IMPORT')) + '/' + fetcher.unpack(d.getVar('WORKDIR')) } go_list_packages() { @@ -114,19 +110,6 @@ go_do_compile() { do_compile[dirs] =+ "${GOTMPDIR}" do_compile[cleandirs] = "${B}/bin ${B}/pkg" -do_compile_ptest_base() { - export TMPDIR="${GOTMPDIR}" - rm -f ${B}/.go_compiled_tests.list - go_list_package_tests | while read pkg; do - cd ${B}/src/$pkg - ${GO} test ${GOPTESTBUILDFLAGS} $pkg - find . -mindepth 1 -maxdepth 1 -type f -name '*.test' -exec echo $pkg/{} \; | \ - sed -e's,/\./,/,'>> ${B}/.go_compiled_tests.list - done - do_compile_ptest -} -do_compile_ptest_base[dirs] =+ "${GOTMPDIR}" - go_do_install() { install -d ${D}${libdir}/go/src/${GO_IMPORT} tar -C ${S}/src/${GO_IMPORT} -cf - --exclude-vcs --exclude '*.test' --exclude 'testdata' . | \ @@ -139,18 +122,6 @@ go_do_install() { fi } -go_make_ptest_wrapper() { - cat >${D}${PTEST_PATH}/run-ptest <<EOF -#!/bin/sh -RC=0 -run_test() ( - cd "\$1" - ((((./\$2 ${GOPTESTFLAGS}; echo \$? >&3) | sed -r -e"s,^(PASS|SKIP|FAIL)\$,\\1: \$1/\$2," >&4) 3>&1) | (read rc; exit \$rc)) 4>&1 - exit \$?) 
-EOF - -} - go_stage_testdata() { oldwd="$PWD" cd ${S}/src @@ -165,37 +136,12 @@ go_stage_testdata() { cd "$oldwd" } -do_install_ptest_base() { - test -f "${B}/.go_compiled_tests.list" || exit 0 - install -d ${D}${PTEST_PATH} - go_stage_testdata - go_make_ptest_wrapper - havetests="" - while read test; do - testdir=`dirname $test` - testprog=`basename $test` - install -d ${D}${PTEST_PATH}/$testdir - install -m 0755 ${B}/src/$test ${D}${PTEST_PATH}/$test - echo "run_test $testdir $testprog || RC=1" >> ${D}${PTEST_PATH}/run-ptest - havetests="yes" - done < ${B}/.go_compiled_tests.list - if [ -n "$havetests" ]; then - echo "exit \$RC" >> ${D}${PTEST_PATH}/run-ptest - chmod +x ${D}${PTEST_PATH}/run-ptest - else - rm -rf ${D}${PTEST_PATH} - fi - do_install_ptest - chown -R root:root ${D}${PTEST_PATH} -} - EXPORT_FUNCTIONS do_unpack do_configure do_compile do_install FILES_${PN}-dev = "${libdir}/go/src" FILES_${PN}-staticdev = "${libdir}/go/pkg" INSANE_SKIP_${PN} += "ldflags" -INSANE_SKIP_${PN}-ptest += "ldflags" # Add -buildmode=pie to GOBUILDFLAGS to satisfy "textrel" QA checking, but mips # doesn't support -buildmode=pie, so skip the QA checking for mips and its diff --git a/poky/meta/classes/grub-efi-cfg.bbclass b/poky/meta/classes/grub-efi-cfg.bbclass index f661a69f8..8b5ff20c7 100644 --- a/poky/meta/classes/grub-efi-cfg.bbclass +++ b/poky/meta/classes/grub-efi-cfg.bbclass @@ -23,7 +23,6 @@ GRUB_TIMEOUT ?= "10" #FIXME: build this from the machine config GRUB_OPTS ?= "serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1" -EFIDIR = "/EFI/BOOT" GRUB_ROOT ?= "${ROOT}" APPEND ?= "" diff --git a/poky/meta/classes/grub-efi.bbclass b/poky/meta/classes/grub-efi.bbclass index 90badc03a..8fc6999e5 100644 --- a/poky/meta/classes/grub-efi.bbclass +++ b/poky/meta/classes/grub-efi.bbclass @@ -1,39 +1,8 @@ inherit grub-efi-cfg +require conf/image-uefi.conf efi_populate() { - # DEST must be the root of the image so that EFIDIR is not - # nested under a top level directory. 
- DEST=$1 - - install -d ${DEST}${EFIDIR} - - GRUB_IMAGE="grub-efi-bootia32.efi" - DEST_IMAGE="bootia32.efi" - if [ "${TARGET_ARCH}" = "x86_64" ]; then - GRUB_IMAGE="grub-efi-bootx64.efi" - DEST_IMAGE="bootx64.efi" - fi - install -m 0644 ${DEPLOY_DIR_IMAGE}/${GRUB_IMAGE} ${DEST}${EFIDIR}/${DEST_IMAGE} - EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') - printf 'fs0:%s\%s\n' "$EFIPATH" "$DEST_IMAGE" >${DEST}/startup.nsh + efi_populate_common "$1" grub-efi install -m 0644 ${GRUB_CFG} ${DEST}${EFIDIR}/grub.cfg } - -efi_iso_populate() { - iso_dir=$1 - efi_populate $iso_dir - # Build a EFI directory to create efi.img - mkdir -p ${EFIIMGDIR}/${EFIDIR} - cp $iso_dir/${EFIDIR}/* ${EFIIMGDIR}${EFIDIR} - cp $iso_dir/${KERNEL_IMAGETYPE} ${EFIIMGDIR} - EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') - printf 'fs0:%s\%s\n' "$EFIPATH" "$GRUB_IMAGE" > ${EFIIMGDIR}/startup.nsh - if [ -f "$iso_dir/initrd" ] ; then - cp $iso_dir/initrd ${EFIIMGDIR} - fi -} - -efi_hddimg_populate() { - efi_populate $1 -} diff --git a/poky/meta/classes/gtk-icon-cache.bbclass b/poky/meta/classes/gtk-icon-cache.bbclass index 66fe781bd..91cb4ad40 100644 --- a/poky/meta/classes/gtk-icon-cache.bbclass +++ b/poky/meta/classes/gtk-icon-cache.bbclass @@ -6,7 +6,7 @@ PACKAGE_WRITE_DEPS += "gtk+3-native gdk-pixbuf-native" gtk_icon_cache_postinst() { if [ "x$D" != "x" ]; then - $INTERCEPT_DIR/postinst_intercept update_icon_cache ${PKG} \ + $INTERCEPT_DIR/postinst_intercept update_gtk_icon_cache ${PKG} \ mlprefix=${MLPREFIX} \ libdir_native=${libdir_native} else @@ -24,7 +24,7 @@ fi gtk_icon_cache_postrm() { if [ "x$D" != "x" ]; then - $INTERCEPT_DIR/postinst_intercept update_icon_cache ${PKG} \ + $INTERCEPT_DIR/postinst_intercept update_gtk_icon_cache ${PKG} \ mlprefix=${MLPREFIX} \ libdir=${libdir} else diff --git a/poky/meta/classes/icecc.bbclass b/poky/meta/classes/icecc.bbclass index edb0e1043..4376aa37d 100644 --- a/poky/meta/classes/icecc.bbclass +++ b/poky/meta/classes/icecc.bbclass @@ -57,7 +57,7 @@ ICECC_ENV_VERSION = "2" # See: https://github.com/icecc/icecream/issues/190 export ICECC_CARET_WORKAROUND ??= "0" -export ICECC_REMOTE_CPP ??= "1" +export ICECC_REMOTE_CPP ??= "0" ICECC_CFLAGS = "" CFLAGS += "${ICECC_CFLAGS}" @@ -73,10 +73,16 @@ ICECC_ENV_DEBUG ??= "" # # libgcc-initial - fails with CPP sanity check error if host sysroot contains # cross gcc built for another target tune/variant +# pixman - prng_state: TLS reference mismatches non-TLS reference, possibly due to +# pragma omp threadprivate(prng_state) +# systemtap - _HelperSDT.c undefs macros and uses the identifiers in macros emitting +# inline assembly # target-sdk-provides-dummy - ${HOST_PREFIX} is empty which triggers the "NULL # prefix" error. 
ICECC_SYSTEM_PACKAGE_BL += "\ libgcc-initial \ + pixman \ + systemtap \ target-sdk-provides-dummy \ " @@ -132,6 +138,10 @@ def use_icecc(bb,d): if icecc_is_cross_canadian(bb, d): return "no" + if d.getVar('INHIBIT_DEFAULT_DEPS', False): + # We don't have a compiler, so no icecc + return "no" + pn = d.getVar('PN') bpn = d.getVar('BPN') @@ -243,7 +253,11 @@ def icecc_get_external_tool(bb, d, tool): def icecc_get_tool_link(tool, d): import subprocess - return subprocess.check_output("readlink -f %s" % tool, shell=True).decode("utf-8")[:-1] + try: + return subprocess.check_output("readlink -f %s" % tool, shell=True).decode("utf-8")[:-1] + except subprocess.CalledProcessError as e: + bb.note("icecc: one of the tools probably disappeared during recipe parsing, cmd readlink -f %s returned %d:\n%s" % (tool, e.returncode, e.output.decode("utf-8"))) + return tool def icecc_get_path_tool(tool, d): # This is a little ugly, but we want to make sure we add an actual diff --git a/poky/meta/classes/image-live.bbclass b/poky/meta/classes/image-live.bbclass index af71be509..54058b350 100644 --- a/poky/meta/classes/image-live.bbclass +++ b/poky/meta/classes/image-live.bbclass @@ -37,7 +37,7 @@ do_bootimg[depends] += "dosfstools-native:do_populate_sysroot \ LABELS_LIVE ?= "boot install" ROOT_LIVE ?= "root=/dev/ram0" INITRD_IMAGE_LIVE ?= "${MLPREFIX}core-image-minimal-initramfs" -INITRD_LIVE ?= "${DEPLOY_DIR_IMAGE}/${INITRD_IMAGE_LIVE}-${MACHINE}.cpio.gz" +INITRD_LIVE ?= "${DEPLOY_DIR_IMAGE}/${INITRD_IMAGE_LIVE}-${MACHINE}.${INITRAMFS_FSTYPES}" LIVE_ROOTFS_TYPE ?= "ext4" ROOTFS ?= "${IMGDEPLOYDIR}/${IMAGE_LINK_NAME}.${LIVE_ROOTFS_TYPE}" diff --git a/poky/meta/classes/image.bbclass b/poky/meta/classes/image.bbclass index d2b2fb979..f4633da3d 100644 --- a/poky/meta/classes/image.bbclass +++ b/poky/meta/classes/image.bbclass @@ -305,11 +305,8 @@ fakeroot python do_image_qa () { bb.build.exec_func(cmd, d) except oe.utils.ImageQAFailed as e: qamsg = qamsg + '\tImage QA function %s failed: %s\n' % (e.name, e.description) - except bb.build.FuncFailed as e: - qamsg = qamsg + '\tImage QA function %s failed' % e.name - if e.logfile: - qamsg = qamsg + ' (log file is located at %s)' % e.logfile - qamsg = qamsg + '\n' + except Exception as e: + qamsg = qamsg + '\tImage QA function %s failed\n' % cmd if qamsg: imgname = d.getVar('IMAGE_NAME') @@ -328,7 +325,8 @@ addtask do_image_qa_setscene def setup_debugfs_variables(d): d.appendVar('IMAGE_ROOTFS', '-dbg') - d.appendVar('IMAGE_LINK_NAME', '-dbg') + if d.getVar('IMAGE_LINK_NAME'): + d.appendVar('IMAGE_LINK_NAME', '-dbg') d.appendVar('IMAGE_NAME','-dbg') d.setVar('IMAGE_BUILDING_DEBUGFS', 'true') debugfs_image_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS') @@ -528,7 +526,7 @@ def get_rootfs_size(d): base_size = size_kb * overhead_factor bb.debug(1, '%f = %d * %f' % (base_size, size_kb, overhead_factor)) base_size2 = max(base_size, rootfs_req_size) + rootfs_extra_space - bb.debug(1, '%f = max(%f, %d)[%f] + %d' % (base_size2, base_size, rootfs_req_size, max(base_size, rootfs_req_size), overhead_factor)) + bb.debug(1, '%f = max(%f, %d)[%f] + %d' % (base_size2, base_size, rootfs_req_size, max(base_size, rootfs_req_size), rootfs_extra_space)) base_size = base_size2 if base_size != int(base_size): @@ -666,10 +664,11 @@ reproducible_final_image_task () { } systemd_preset_all () { - systemctl --root="${IMAGE_ROOTFS}" --preset-mode=enable-only preset-all + if [ -e ${IMAGE_ROOTFS}${root_prefix}/lib/systemd/systemd ]; then + systemctl --root="${IMAGE_ROOTFS}" --preset-mode=enable-only 
preset-all + fi } -IMAGE_EXTRADEPENDS += "${@ 'systemd-systemctl-native' if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and not bb.utils.contains('IMAGE_FEATURES', 'stateless-rootfs', True, False, d) else ''}" IMAGE_PREPROCESS_COMMAND_append = " ${@ 'systemd_preset_all;' if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and not bb.utils.contains('IMAGE_FEATURES', 'stateless-rootfs', True, False, d) else ''} reproducible_final_image_task; " CVE_PRODUCT = "" diff --git a/poky/meta/classes/image_types.bbclass b/poky/meta/classes/image_types.bbclass index fd98a7d1b..2eeffbb36 100644 --- a/poky/meta/classes/image_types.bbclass +++ b/poky/meta/classes/image_types.bbclass @@ -284,7 +284,7 @@ COMPRESSIONTYPES ?= "" CONVERSIONTYPES = "gz bz2 lzma xz lz4 lzo zip sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap u-boot vmdk vdi qcow2 base64 ${COMPRESSIONTYPES}" CONVERSION_CMD_lzma = "lzma -k -f -7 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}" -CONVERSION_CMD_gz = "gzip -f -9 -n -c ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.gz" +CONVERSION_CMD_gz = "gzip -f -9 -n -c --rsyncable ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.gz" CONVERSION_CMD_bz2 = "pbzip2 -f -k ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}" CONVERSION_CMD_xz = "xz -f -k -c ${XZ_COMPRESSION_LEVEL} ${XZ_DEFAULTS} --check=${XZ_INTEGRITY_CHECK} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.xz" CONVERSION_CMD_lz4 = "lz4 -9 -z -l ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.lz4" @@ -324,7 +324,7 @@ RUNNABLE_MACHINE_PATTERNS ?= "qemu" DEPLOYABLE_IMAGE_TYPES ?= "hddimg iso" # The IMAGE_TYPES_MASKED variable will be used to mask out from the IMAGE_FSTYPES, -# images that will not be built at do_rootfs time: vmdk, vdi, qcow2, hdddirect, hddimg, iso, etc. +# images that will not be built at do_rootfs time: vmdk, vdi, qcow2, hddimg, iso, etc. 
IMAGE_TYPES_MASKED ?= "" # bmap requires python3 to be in the PATH diff --git a/poky/meta/classes/image_types_wic.bbclass b/poky/meta/classes/image_types_wic.bbclass index 5b40a9e91..f350dc272 100644 --- a/poky/meta/classes/image_types_wic.bbclass +++ b/poky/meta/classes/image_types_wic.bbclass @@ -3,7 +3,10 @@ WICVARS ?= "\ BBLAYERS IMGDEPLOYDIR DEPLOY_DIR_IMAGE FAKEROOTCMD IMAGE_BASENAME IMAGE_BOOT_FILES \ IMAGE_LINK_NAME IMAGE_ROOTFS INITRAMFS_FSTYPES INITRD INITRD_LIVE ISODIR RECIPE_SYSROOT_NATIVE \ - ROOTFS_SIZE STAGING_DATADIR STAGING_DIR STAGING_LIBDIR TARGET_SYS" + ROOTFS_SIZE STAGING_DATADIR STAGING_DIR STAGING_LIBDIR TARGET_SYS \ + KERNEL_IMAGETYPE MACHINE INITRAMFS_IMAGE INITRAMFS_IMAGE_BUNDLE INITRAMFS_LINK_NAME" + +inherit ${@bb.utils.contains('INITRAMFS_IMAGE_BUNDLE', '1', 'kernel-artifact-names', '', d)} WKS_FILE ??= "${IMAGE_BASENAME}.${MACHINE}.wks" WKS_FILES ?= "${WKS_FILE} ${IMAGE_BASENAME}.wks" @@ -44,7 +47,8 @@ do_image_wic[depends] += "${@' '.join('%s-native:do_populate_sysroot' % r for r # We ensure all artfacts are deployed (e.g virtual/bootloader) do_image_wic[recrdeptask] += "do_deploy" -WKS_FILE_DEPENDS_DEFAULT = "syslinux-native bmap-tools-native cdrtools-native btrfs-tools-native squashfs-tools-native e2fsprogs-native" +WKS_FILE_DEPENDS_DEFAULT = '${@bb.utils.contains_any("BUILD_ARCH", [ 'x86_64', 'i686' ], "syslinux-native", "",d)}' +WKS_FILE_DEPENDS_DEFAULT += "bmap-tools-native cdrtools-native btrfs-tools-native squashfs-tools-native e2fsprogs-native" WKS_FILE_DEPENDS_BOOTLOADERS = "" WKS_FILE_DEPENDS_BOOTLOADERS_x86 = "syslinux grub-efi systemd-boot" WKS_FILE_DEPENDS_BOOTLOADERS_x86-64 = "syslinux grub-efi systemd-boot" @@ -73,6 +77,11 @@ python do_write_wks_template () { wks_file = d.getVar('WKS_FULL_PATH') with open(wks_file, 'w') as f: f.write(template_body) + f.close() + # Copy the finalized wks file to the deploy directory for later use + depdir = d.getVar('IMGDEPLOYDIR') + basename = d.getVar('IMAGE_BASENAME') + bb.utils.copyfile(wks_file, "%s/%s" % (depdir, basename + '-' + os.path.basename(wks_file))) } python () { @@ -101,7 +110,7 @@ python () { # file in process_wks_template as well, so just put it in # a variable and let the metadata deal with the deps. 
d.setVar('_WKS_TEMPLATE', body) - bb.build.addtask('do_write_wks_template', 'do_image_wic', None, d) + bb.build.addtask('do_write_wks_template', 'do_image_wic', 'do_image', d) bb.build.addtask('do_image_wic', 'do_image_complete', None, d) } @@ -123,6 +132,10 @@ python do_rootfs_wicenv () { value = d.getVar(var) if value: envf.write('%s="%s"\n' % (var, value.strip())) + envf.close() + # Copy .env file to deploy directory for later use with stand alone wic + depdir = d.getVar('IMGDEPLOYDIR') + bb.utils.copyfile(os.path.join(outdir, basename) + '.env', os.path.join(depdir, basename) + '.env') } addtask do_rootfs_wicenv after do_image before do_image_wic do_rootfs_wicenv[vardeps] += "${WICVARS}" diff --git a/poky/meta/classes/insane.bbclass b/poky/meta/classes/insane.bbclass index 0695a0443..9b886d138 100644 --- a/poky/meta/classes/insane.bbclass +++ b/poky/meta/classes/insane.bbclass @@ -722,25 +722,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): filerdepends[subkey] = key[13:] if filerdepends: - next = rdepends done = rdepends[:] - # Find all the rdepends on the dependency chain - while next: - new = [] - for rdep in next: - rdep_data = oe.packagedata.read_subpkgdata(rdep, d) - sub_rdeps = rdep_data.get("RDEPENDS_" + rdep) - if not sub_rdeps: - continue - for sub_rdep in bb.utils.explode_deps(sub_rdeps): - if sub_rdep in done: - continue - if oe.packagedata.has_subpkgdata(sub_rdep, d): - # It's a new rdep - done.append(sub_rdep) - new.append(sub_rdep) - next = new - # Add the rprovides of itself if pkg not in done: done.insert(0, pkg) @@ -874,15 +856,14 @@ def package_qa_check_host_user(path, name, d, elf, messages): if exc.errno != errno.ENOENT: raise else: - rootfs_path = path[len(dest):] check_uid = int(d.getVar('HOST_USER_UID')) if stat.st_uid == check_uid: - package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_uid)) + package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid)) return False check_gid = int(d.getVar('HOST_USER_GID')) if stat.st_gid == check_gid: - package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_gid)) + package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid)) return False return True @@ -1238,6 +1219,11 @@ python () { if prog.search(pn): package_qa_handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d) + # Some people mistakenly use DEPENDS_${PN} instead of DEPENDS and wonder + # why it doesn't work. 
+ if (d.getVar(d.expand('DEPENDS_${PN}'))): + package_qa_handle_error("pkgvarcheck", "recipe uses DEPENDS_${PN}, should use DEPENDS", d) + issues = [] if (d.getVar('PACKAGES') or "").split(): for dep in (d.getVar('QADEPENDS') or "").split(): diff --git a/poky/meta/classes/kernel-fitimage.bbclass b/poky/meta/classes/kernel-fitimage.bbclass index b51882dce..1bcb09c59 100644 --- a/poky/meta/classes/kernel-fitimage.bbclass +++ b/poky/meta/classes/kernel-fitimage.bbclass @@ -207,7 +207,6 @@ EOF fitimage_emit_section_ramdisk() { ramdisk_csum="${FIT_HASH_ALG}" - ramdisk_ctype="none" ramdisk_loadline="" ramdisk_entryline="" @@ -218,24 +217,6 @@ fitimage_emit_section_ramdisk() { ramdisk_entryline="entry = <${UBOOT_RD_ENTRYPOINT}>;" fi - case $3 in - *.gz) - ramdisk_ctype="gzip" - ;; - *.bz2) - ramdisk_ctype="bzip2" - ;; - *.lzma) - ramdisk_ctype="lzma" - ;; - *.lzo) - ramdisk_ctype="lzo" - ;; - *.lz4) - ramdisk_ctype="lz4" - ;; - esac - cat << EOF >> ${1} ramdisk@${2} { description = "${INITRAMFS_IMAGE}"; @@ -243,7 +224,7 @@ fitimage_emit_section_ramdisk() { type = "ramdisk"; arch = "${UBOOT_ARCH}"; os = "linux"; - compression = "${ramdisk_ctype}"; + compression = "none"; ${ramdisk_loadline} ${ramdisk_entryline} hash@1 { diff --git a/poky/meta/classes/kernel-uboot.bbclass b/poky/meta/classes/kernel-uboot.bbclass index 0457c5d1d..87f02654f 100644 --- a/poky/meta/classes/kernel-uboot.bbclass +++ b/poky/meta/classes/kernel-uboot.bbclass @@ -3,10 +3,6 @@ uboot_prep_kimage() { vmlinux_path="arch/${ARCH}/boot/compressed/vmlinux" linux_suffix="" linux_comp="none" - elif [ -e arch/${ARCH}/boot/Image ] ; then - vmlinux_path="vmlinux" - linux_suffix=".gz" - linux_comp="gzip" elif [ -e arch/${ARCH}/boot/vmlinuz.bin ]; then rm -f linux.bin cp -l arch/${ARCH}/boot/vmlinuz.bin linux.bin diff --git a/poky/meta/classes/kernel.bbclass b/poky/meta/classes/kernel.bbclass index a60e15b57..ebcb79a52 100644 --- a/poky/meta/classes/kernel.bbclass +++ b/poky/meta/classes/kernel.bbclass @@ -96,8 +96,10 @@ python __anonymous () { d.setVar('ALLOW_EMPTY_%s-image-%s' % (kname, typelower), '1') image = d.getVar('INITRAMFS_IMAGE') - image_bundle = d.getVar('INITRAMFS_IMAGE_BUNDLE') - if image and bb.utils.to_boolean(image_bundle, False): + # If the INTIRAMFS_IMAGE is set but the INITRAMFS_IMAGE_BUNDLE is set to 0, + # the do_bundle_initramfs does nothing, but the INITRAMFS_IMAGE is built + # standalone for use by wic and other tools. 
+ if image: d.appendVarFlag('do_bundle_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete') # NOTE: setting INITRAMFS_TASK is for backward compatibility @@ -454,7 +456,7 @@ do_shared_workdir () { cp .config $kerneldir/ mkdir -p $kerneldir/include/config cp include/config/kernel.release $kerneldir/include/config/kernel.release - if [ -e certs/signing_key.pem ]; then + if [ -e certs/signing_key.x509 ]; then # The signing_key.* files are stored in the certs/ dir in # newer Linux kernels mkdir -p $kerneldir/certs diff --git a/poky/meta/classes/libc-package.bbclass b/poky/meta/classes/libc-package.bbclass index 8859dad56..a66e54088 100644 --- a/poky/meta/classes/libc-package.bbclass +++ b/poky/meta/classes/libc-package.bbclass @@ -82,6 +82,9 @@ do_collect_bins_from_locale_tree() { parent=$(dirname ${localedir}) mkdir -p ${PKGD}/$parent tar -cf - -C $treedir/$parent -p $(basename ${localedir}) | tar -xf - -C ${PKGD}$parent + + # Finalize tree by chaning all duplicate files into hard links + cross-localedef-hardlink -c -v ${WORKDIR}/locale-tree } inherit qemu @@ -265,7 +268,7 @@ python package_do_split_gconvs () { bb.error("locale_arch_options not found for target_arch=" + target_arch) bb.fatal("unknown arch:" + target_arch + " for locale_arch_options") - localedef_opts += " --force --no-archive --prefix=%s \ + localedef_opts += " --force --no-hard-links --no-archive --prefix=%s \ --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \ % (treedir, treedir, datadir, locale, encoding, outputpath, name) @@ -273,7 +276,7 @@ python package_do_split_gconvs () { (path, i18npath, gconvpath, localedef_opts) else: # earlier slower qemu way qemu = qemu_target_binary(d) - localedef_opts = "--force --no-archive --prefix=%s \ + localedef_opts = "--force --no-hard-links --no-archive --prefix=%s \ --inputfile=%s/i18n/locales/%s --charmap=%s %s" \ % (treedir, datadir, locale, encoding, name) diff --git a/poky/meta/classes/license_image.bbclass b/poky/meta/classes/license_image.bbclass index 2cfda81c9..3f102d0fb 100644 --- a/poky/meta/classes/license_image.bbclass +++ b/poky/meta/classes/license_image.bbclass @@ -198,10 +198,6 @@ def get_deployed_dependencies(d): deploy = {} # Get all the dependencies for the current task (rootfs). - # Also get EXTRA_IMAGEDEPENDS because the bootloader is - # usually in this var and not listed in rootfs. - # At last, get the dependencies from boot classes because - # it might contain the bootloader. 
taskdata = d.getVar("BB_TASKDEPDATA", False) depends = list(set([dep[0] for dep in list(taskdata.values()) diff --git a/poky/meta/classes/linuxloader.bbclass b/poky/meta/classes/linuxloader.bbclass index b4c413494..c0fbf2683 100644 --- a/poky/meta/classes/linuxloader.bbclass +++ b/poky/meta/classes/linuxloader.bbclass @@ -19,6 +19,8 @@ def get_musl_loader(d): dynamic_loader = "${base_libdir}/ld-musl-arm${ARMPKGSFX_ENDIAN}${ARMPKGSFX_EABI}.so.1" elif targetarch.startswith("aarch64"): dynamic_loader = "${base_libdir}/ld-musl-aarch64${ARMPKGSFX_ENDIAN_64}.so.1" + elif targetarch.startswith("riscv64"): + dynamic_loader = "${base_libdir}/ld-musl-riscv64${@['', '-sf'][d.getVar('TARGET_FPU') == 'soft']}.so.1" return dynamic_loader def get_glibc_loader(d): @@ -42,6 +44,8 @@ def get_glibc_loader(d): dynamic_loader = "${base_libdir}/ld-linux.so.3" elif targetarch.startswith("aarch64"): dynamic_loader = "${base_libdir}/ld-linux-aarch64${ARMPKGSFX_ENDIAN_64}.so.1" + elif targetarch.startswith("riscv64"): + dynamic_loader = "${base_libdir}/ld-linux-riscv64-lp64${@['d', ''][d.getVar('TARGET_FPU') == 'soft']}.so.1" return dynamic_loader def get_linuxloader(d): diff --git a/poky/meta/classes/live-vm-common.bbclass b/poky/meta/classes/live-vm-common.bbclass index 68105d9b8..74e7074a5 100644 --- a/poky/meta/classes/live-vm-common.bbclass +++ b/poky/meta/classes/live-vm-common.bbclass @@ -29,6 +29,39 @@ def pcbios(d): PCBIOS = "${@pcbios(d)}" PCBIOS_CLASS = "${@['','syslinux'][d.getVar('PCBIOS') == '1']}" +# efi_populate_common DEST BOOTLOADER +efi_populate_common() { + # DEST must be the root of the image so that EFIDIR is not + # nested under a top level directory. + DEST=$1 + + install -d ${DEST}${EFIDIR} + + install -m 0644 ${DEPLOY_DIR_IMAGE}/$2-${EFI_BOOT_IMAGE} ${DEST}${EFIDIR}/${EFI_BOOT_IMAGE} + EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') + printf 'fs0:%s\%s\n' "$EFIPATH" "${EFI_BOOT_IMAGE}" >${DEST}/startup.nsh +} + +efi_iso_populate() { + iso_dir=$1 + efi_populate $iso_dir + # Build a EFI directory to create efi.img + mkdir -p ${EFIIMGDIR}/${EFIDIR} + cp $iso_dir/${EFIDIR}/* ${EFIIMGDIR}${EFIDIR} + cp $iso_dir/${KERNEL_IMAGETYPE} ${EFIIMGDIR} + + EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') + printf 'fs0:%s\%s\n' "$EFIPATH" "${EFI_BOOT_IMAGE}" >${EFIIMGDIR}/startup.nsh + + if [ -f "$iso_dir/initrd" ] ; then + cp $iso_dir/initrd ${EFIIMGDIR} + fi +} + +efi_hddimg_populate() { + efi_populate $1 +} + inherit ${EFI_CLASS} inherit ${PCBIOS_CLASS} diff --git a/poky/meta/classes/meson.bbclass b/poky/meta/classes/meson.bbclass index 0edbfc181..efa623407 100644 --- a/poky/meta/classes/meson.bbclass +++ b/poky/meta/classes/meson.bbclass @@ -24,16 +24,7 @@ MESONOPTS = " --prefix ${prefix} \ --infodir ${@noprefix('infodir', d)} \ --sysconfdir ${sysconfdir} \ --localstatedir ${localstatedir} \ - --sharedstatedir ${sharedstatedir} \ - -Dc_args='${BUILD_CPPFLAGS} ${BUILD_CFLAGS}' \ - -Dc_link_args='${BUILD_LDFLAGS}' \ - -Dcpp_args='${BUILD_CPPFLAGS} ${BUILD_CXXFLAGS}' \ - -Dcpp_link_args='${BUILD_LDFLAGS}'" - -MESON_TOOLCHAIN_ARGS = "${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS}" -MESON_C_ARGS = "${MESON_TOOLCHAIN_ARGS} ${CFLAGS}" -MESON_CPP_ARGS = "${MESON_TOOLCHAIN_ARGS} ${CXXFLAGS}" -MESON_LINK_ARGS = "${MESON_TOOLCHAIN_ARGS} ${LDFLAGS}" + --sharedstatedir ${sharedstatedir} " EXTRA_OEMESON_append = " ${PACKAGECONFIG_CONFARGS}" @@ -64,6 +55,8 @@ def meson_cpu_family(var, d): return 'mips64' elif re.match(r"i[3-6]86", arch): return "x86" + elif arch == "microblazeel" or arch == "microblazeeb": + return "microblaze" 
else: return arch @@ -78,7 +71,7 @@ def meson_endian(prefix, d): bb.fatal("Cannot determine endianism for %s-%s" % (arch, os)) addtask write_config before do_configure -do_write_config[vardeps] += "MESON_C_ARGS MESON_CPP_ARGS MESON_LINK_ARGS CC CXX LD AR NM STRIP READELF" +do_write_config[vardeps] += "CC CXX LD AR NM STRIP READELF CFLAGS CXXFLAGS LDFLAGS" do_write_config() { # This needs to be Py to split the args into single-element lists cat >${WORKDIR}/meson.cross <<EOF @@ -91,14 +84,14 @@ ld = ${@meson_array('LD', d)} strip = ${@meson_array('STRIP', d)} readelf = ${@meson_array('READELF', d)} pkgconfig = 'pkg-config' -llvm-config = 'llvm-config8.0.0' +llvm-config = 'llvm-config${LLVMVERSION}' [properties] needs_exe_wrapper = true -c_args = ${@meson_array('MESON_C_ARGS', d)} -c_link_args = ${@meson_array('MESON_LINK_ARGS', d)} -cpp_args = ${@meson_array('MESON_CPP_ARGS', d)} -cpp_link_args = ${@meson_array('MESON_LINK_ARGS', d)} +c_args = ${@meson_array('CFLAGS', d)} +c_link_args = ${@meson_array('LDFLAGS', d)} +cpp_args = ${@meson_array('CXXFLAGS', d)} +cpp_link_args = ${@meson_array('LDFLAGS', d)} gtkdoc_exe_wrapper = '${B}/gtkdoc-qemuwrapper' [host_machine] @@ -135,6 +128,7 @@ override_native_tools() { export CXX="${BUILD_CXX}" export LD="${BUILD_LD}" export AR="${BUILD_AR}" + export STRIP="${BUILD_STRIP}" # These contain *target* flags but will be used as *native* flags. The # correct native flags will be passed via -Dc_args and so on, unset them so # they don't interfere with tools invoked by Meson (such as g-ir-scanner) diff --git a/poky/meta/classes/multilib.bbclass b/poky/meta/classes/multilib.bbclass index 7750221f7..1a9295d36 100644 --- a/poky/meta/classes/multilib.bbclass +++ b/poky/meta/classes/multilib.bbclass @@ -125,8 +125,55 @@ python __anonymous () { clsextend.map_variable("USERADD_PACKAGES") clsextend.map_variable("SYSTEMD_PACKAGES") clsextend.map_variable("UPDATERCPN") + + reset_alternative_priority(d) } +def reset_alternative_priority(d): + if not bb.data.inherits_class('update-alternatives', d): + return + + # There might be multiple multilibs at the same time, e.g., lib32 and + # lib64, each of them should have a different priority. 
+ multilib_variants = d.getVar('MULTILIB_VARIANTS') + bbextendvariant = d.getVar('BBEXTENDVARIANT') + reset_gap = multilib_variants.split().index(bbextendvariant) + 1 + + # ALTERNATIVE_PRIORITY = priority + alt_priority_recipe = d.getVar('ALTERNATIVE_PRIORITY') + # Reset ALTERNATIVE_PRIORITY when found + if alt_priority_recipe: + reset_priority = int(alt_priority_recipe) - reset_gap + bb.debug(1, '%s: Setting ALTERNATIVE_PRIORITY to %s' % (d.getVar('PN'), reset_priority)) + d.setVar('ALTERNATIVE_PRIORITY', reset_priority) + + handled_pkgs = [] + for pkg in (d.getVar('PACKAGES') or "").split(): + # ALTERNATIVE_PRIORITY_pkg = priority + alt_priority_pkg = d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg) + # Reset ALTERNATIVE_PRIORITY_pkg when found + if alt_priority_pkg: + reset_priority = int(alt_priority_pkg) - reset_gap + if not pkg in handled_pkgs: + handled_pkgs.append(pkg) + bb.debug(1, '%s: Setting ALTERNATIVE_PRIORITY_%s to %s' % (pkg, pkg, reset_priority)) + d.setVar('ALTERNATIVE_PRIORITY_%s' % pkg, reset_priority) + + for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split(): + # ALTERNATIVE_PRIORITY_pkg[tool] = priority + alt_priority_pkg_name = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name) + # ALTERNATIVE_PRIORITY[tool] = priority + alt_priority_name = d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name) + + if alt_priority_pkg_name: + reset_priority = int(alt_priority_pkg_name) - reset_gap + bb.debug(1, '%s: Setting ALTERNATIVE_PRIORITY_%s[%s] to %s' % (pkg, pkg, alt_name, reset_priority)) + d.setVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name, reset_priority) + elif alt_priority_name: + reset_priority = int(alt_priority_name) - reset_gap + bb.debug(1, '%s: Setting ALTERNATIVE_PRIORITY[%s] to %s' % (pkg, alt_name, reset_priority)) + d.setVarFlag('ALTERNATIVE_PRIORITY', alt_name, reset_priority) + PACKAGEFUNCS_append = " do_package_qa_multilib" python do_package_qa_multilib() { @@ -140,7 +187,7 @@ python do_package_qa_multilib() { if (not i.startswith('kernel-module')) and (not i.startswith(mlprefix)) and \ (not 'cross-canadian' in i) and (not i.startswith("nativesdk-")) and \ (not i.startswith("rtld")) and (not i.startswith('kernel-vmlinux')) \ - and (not i.startswith("kernel-image")): + and (not i.startswith("kernel-image")) and (not i.startswith("/")): candidates.append(i) if len(candidates) > 0: msg = "%s package %s - suspicious values '%s' in %s" \ diff --git a/poky/meta/classes/multilib_global.bbclass b/poky/meta/classes/multilib_global.bbclass index 19ce1a509..98f65c8aa 100644 --- a/poky/meta/classes/multilib_global.bbclass +++ b/poky/meta/classes/multilib_global.bbclass @@ -172,21 +172,27 @@ python multilib_virtclass_handler_global () { if bb.data.inherits_class('kernel', e.data) or \ bb.data.inherits_class('module-base', e.data) or \ d.getVar('BPN') in non_ml_recipes: + + # We need to avoid expanding KERNEL_VERSION which we can do by deleting it + # from a copy of the datastore + localdata = bb.data.createCopy(d) + localdata.delVar("KERNEL_VERSION") + variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split() import oe.classextend clsextends = [] for variant in variants: - clsextends.append(oe.classextend.ClassExtender(variant, e.data)) + clsextends.append(oe.classextend.ClassExtender(variant, localdata)) # Process PROVIDES - origprovs = provs = e.data.getVar("PROVIDES") or "" + origprovs = provs = localdata.getVar("PROVIDES") or "" for clsextend in clsextends: provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False) e.data.setVar("PROVIDES", 
provs) # Process RPROVIDES - origrprovs = rprovs = e.data.getVar("RPROVIDES") or "" + origrprovs = rprovs = localdata.getVar("RPROVIDES") or "" for clsextend in clsextends: rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False) if rprovs.strip(): @@ -194,7 +200,7 @@ python multilib_virtclass_handler_global () { # Process RPROVIDES_${PN}... for pkg in (e.data.getVar("PACKAGES") or "").split(): - origrprovs = rprovs = e.data.getVar("RPROVIDES_%s" % pkg) or "" + origrprovs = rprovs = localdata.getVar("RPROVIDES_%s" % pkg) or "" for clsextend in clsextends: rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES_%s" % pkg, setvar=False) rprovs = rprovs + " " + clsextend.extname + "-" + pkg @@ -202,5 +208,4 @@ python multilib_virtclass_handler_global () { } addhandler multilib_virtclass_handler_global -multilib_virtclass_handler_global[eventmask] = "bb.event.RecipeParsed" - +multilib_virtclass_handler_global[eventmask] = "bb.event.RecipeTaskPreProcess" diff --git a/poky/meta/classes/multilib_script.bbclass b/poky/meta/classes/multilib_script.bbclass index dc166d06c..b11efc1ec 100644 --- a/poky/meta/classes/multilib_script.bbclass +++ b/poky/meta/classes/multilib_script.bbclass @@ -17,18 +17,18 @@ multilibscript_rename() { python () { # Do nothing if multilib isn't being used if not d.getVar("MULTILIB_VARIANTS"): - return + return # Do nothing for native/cross if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d): - return + return for entry in (d.getVar("MULTILIB_SCRIPTS", False) or "").split(): - pkg, script = entry.split(":") - epkg = d.expand(pkg) - scriptname = os.path.basename(script) - d.appendVar("ALTERNATIVE_" + epkg, " " + scriptname + " ") - d.setVarFlag("ALTERNATIVE_LINK_NAME", scriptname, script) - d.setVarFlag("ALTERNATIVE_TARGET", scriptname, script + "-${MULTILIB_SUFFIX}") - d.appendVar("multilibscript_rename", "\n mv ${PKGD}" + script + " ${PKGD}" + script + "-${MULTILIB_SUFFIX}") - d.appendVar("FILES_" + epkg, " " + script + "-${MULTILIB_SUFFIX}") + pkg, script = entry.split(":") + epkg = d.expand(pkg) + scriptname = os.path.basename(script) + d.appendVar("ALTERNATIVE_" + epkg, " " + scriptname + " ") + d.setVarFlag("ALTERNATIVE_LINK_NAME", scriptname, script) + d.setVarFlag("ALTERNATIVE_TARGET", scriptname, script + "-${MULTILIB_SUFFIX}") + d.appendVar("multilibscript_rename", "\n mv ${PKGD}" + script + " ${PKGD}" + script + "-${MULTILIB_SUFFIX}") + d.appendVar("FILES_" + epkg, " " + script + "-${MULTILIB_SUFFIX}") } diff --git a/poky/meta/classes/nativesdk.bbclass b/poky/meta/classes/nativesdk.bbclass index f25b0c31b..03135aced 100644 --- a/poky/meta/classes/nativesdk.bbclass +++ b/poky/meta/classes/nativesdk.bbclass @@ -100,6 +100,8 @@ python () { clsextend.map_packagevars() clsextend.map_variable("PROVIDES") clsextend.map_regexp_variable("PACKAGES_DYNAMIC") + d.setVar("LIBCEXTENSION", "") + d.setVar("ABIEXTENSION", "") } addhandler nativesdk_virtclass_handler diff --git a/poky/meta/classes/package.bbclass b/poky/meta/classes/package.bbclass index 20d72bba7..d8bef3afb 100644 --- a/poky/meta/classes/package.bbclass +++ b/poky/meta/classes/package.bbclass @@ -40,6 +40,7 @@ inherit packagedata inherit chrpath +inherit package_pkgdata # Need the package_qa_handle_error() in insane.bbclass inherit insane @@ -1216,7 +1217,8 @@ python populate_packages () { src = os.path.join(src, p) dest = os.path.join(dest, p) fstat = cpath.stat(src) - os.mkdir(dest, fstat.st_mode) + os.mkdir(dest) + os.chmod(dest, fstat.st_mode) os.chown(dest, 
fstat.st_uid, fstat.st_gid) if p not in seen: seen.append(p) @@ -1356,12 +1358,16 @@ python emit_pkgdata() { import json def process_postinst_on_target(pkg, mlprefix): + pkgval = d.getVar('PKG_%s' % pkg) + if pkgval is None: + pkgval = pkg + defer_fragment = """ if [ -n "$D" ]; then $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s exit 0 fi -""" % (pkg, mlprefix) +""" % (pkgval, mlprefix) postinst = d.getVar('pkg_postinst_%s' % pkg) postinst_ontarget = d.getVar('pkg_postinst_ontarget_%s' % pkg) @@ -1570,10 +1576,11 @@ python package_do_filedeps() { d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg])) } -SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs2" +SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2" SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2" python package_do_shlibs() { + import itertools import re, pipes import subprocess @@ -1640,7 +1647,8 @@ python package_do_shlibs() { prov = (this_soname, ldir, pkgver) if not prov in sonames: # if library is private (only used by package) then do not build shlib for it - if not private_libs or this_soname not in private_libs: + import fnmatch + if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0: sonames.add(prov) if libdir_re.match(os.path.dirname(file)): needs_ldconfig = True @@ -1728,10 +1736,7 @@ python package_do_shlibs() { needed = {} - # Take shared lock since we're only reading, not writing - lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True) shlib_provider = oe.package.read_shlib_providers(d) - bb.utils.unlockfile(lf) for pkg in shlib_pkgs: private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or "" @@ -1826,20 +1831,21 @@ python package_do_shlibs() { # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1 # but skipping it is still better alternative than providing own # version and then adding runtime dependency for the same system library - if private_libs and n[0] in private_libs: + import fnmatch + if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0: bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) continue if n[0] in shlib_provider.keys(): - shlib_provider_path = [] - for k in shlib_provider[n[0]].keys(): - shlib_provider_path.append(k) - match = None - for p in list(n[2]) + shlib_provider_path + libsearchpath: - if p in shlib_provider[n[0]]: - match = p - break - if match: - (dep_pkg, ver_needed) = shlib_provider[n[0]][match] + shlib_provider_map = shlib_provider[n[0]] + matches = set() + for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath): + if p in shlib_provider_map: + matches.add(p) + if len(matches) > 1: + matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches]) + bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1])) + elif len(matches) == 1: + (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()] bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1])) @@ -1917,14 +1923,11 @@ python package_do_pkgconfig () { f.write('%s\n' % p) f.close() - # Take shared lock since we're only reading, not writing - lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True) - # Go from least to most specific since the last one found wins for dir in reversed(shlibs_dirs): if not os.path.exists(dir): continue - for file in os.listdir(dir): + for file in sorted(os.listdir(dir)): m = 
re.match(r'^(.*)\.pclist$', file) if m: pkg = m.group(1) @@ -1935,8 +1938,6 @@ python package_do_pkgconfig () { for l in lines: pkgconfig_provided[pkg].append(l.rstrip()) - bb.utils.unlockfile(lf) - for pkg in packages.split(): deps = [] for n in pkgconfig_needed[pkg]: @@ -2133,6 +2134,7 @@ def gen_packagevar(d): PACKAGE_PREPROCESS_FUNCS ?= "" # Functions for setting up PKGD PACKAGEBUILDPKGD ?= " \ + package_prepare_pkgdata \ perform_packagecopy \ ${PACKAGE_PREPROCESS_FUNCS} \ split_and_strip_files \ @@ -2253,19 +2255,19 @@ python do_package_setscene () { } addtask do_package_setscene -do_packagedata () { - : +# Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both +# do_package_setscene and do_packagedata_setscene leading to races +python do_packagedata () { + src = d.expand("${PKGDESTWORK}") + dest = d.expand("${WORKDIR}/pkgdata-pdata-input") + oe.path.copyhardlinktree(src, dest) } addtask packagedata before do_build after do_package SSTATETASKS += "do_packagedata" -# PACKAGELOCK protects readers of PKGDATA_DIR against writes -# whilst code is reading in do_package -PACKAGELOCK = "${STAGING_DIR}/package-output.lock" -do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}" +do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input" do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}" -do_packagedata[sstate-lockfile] = "${PACKAGELOCK}" do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}" python do_packagedata_setscene () { diff --git a/poky/meta/classes/package_deb.bbclass b/poky/meta/classes/package_deb.bbclass index 6f8159165..790b26aef 100644 --- a/poky/meta/classes/package_deb.bbclass +++ b/poky/meta/classes/package_deb.bbclass @@ -6,6 +6,8 @@ inherit package IMAGE_PKGTYPE ?= "deb" +DPKG_BUILDCMD ??= "dpkg-deb" + DPKG_ARCH ?= "${@debian_arch_map(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'))}" DPKG_ARCH[vardepvalue] = "${DPKG_ARCH}" @@ -269,7 +271,8 @@ def deb_write_pkg(pkg, d): conffiles.close() os.chdir(basedir) - subprocess.check_output("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH"), root, pkgoutdir), + subprocess.check_output("PATH=\"%s\" %s -b %s %s" % (localdata.getVar("PATH"), localdata.getVar("DPKG_BUILDCMD"), + root, pkgoutdir), stderr=subprocess.STDOUT, shell=True) diff --git a/poky/meta/classes/package_ipk.bbclass b/poky/meta/classes/package_ipk.bbclass index d1b317b42..9f9da2f91 100644 --- a/poky/meta/classes/package_ipk.bbclass +++ b/poky/meta/classes/package_ipk.bbclass @@ -14,7 +14,7 @@ OPKG_ARGS += "--force_postinstall --prefer-arch-to-version" OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}" OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE') or "").split())][(d.getVar("PACKAGE_EXCLUDE") or "").strip() != ""]}" -OPKGLIBDIR = "${localstatedir}/lib" +OPKGLIBDIR ??= "${localstatedir}/lib" python do_package_ipk () { workdir = d.getVar('WORKDIR') diff --git a/poky/meta/classes/package_pkgdata.bbclass b/poky/meta/classes/package_pkgdata.bbclass new file mode 100644 index 000000000..18b7ed62e --- /dev/null +++ b/poky/meta/classes/package_pkgdata.bbclass @@ -0,0 +1,167 @@ +WORKDIR_PKGDATA = "${WORKDIR}/pkgdata-sysroot" + +def package_populate_pkgdata_dir(pkgdatadir, d): + import glob + + postinsts = [] + seendirs = set() + stagingdir = d.getVar("PKGDATA_DIR") + pkgarchs = ['${MACHINE_ARCH}'] + pkgarchs = pkgarchs + list(reversed(d.getVar("PACKAGE_EXTRA_ARCHS").split())) + pkgarchs.append('allarch') + + bb.utils.mkdirhier(pkgdatadir) + for pkgarch in 
pkgarchs: + for manifest in glob.glob(d.expand("${SSTATE_MANIFESTS}/manifest-%s-*.packagedata" % pkgarch)): + with open(manifest, "r") as f: + for l in f: + l = l.strip() + dest = l.replace(stagingdir, "") + if l.endswith("/"): + staging_copydir(l, pkgdatadir, dest, seendirs) + continue + try: + staging_copyfile(l, pkgdatadir, dest, postinsts, seendirs) + except FileExistsError: + continue + +python package_prepare_pkgdata() { + import copy + import glob + + taskdepdata = d.getVar("BB_TASKDEPDATA", False) + mytaskname = d.getVar("BB_RUNTASK") + if mytaskname.endswith("_setscene"): + mytaskname = mytaskname.replace("_setscene", "") + workdir = d.getVar("WORKDIR") + pn = d.getVar("PN") + stagingdir = d.getVar("PKGDATA_DIR") + pkgdatadir = d.getVar("WORKDIR_PKGDATA") + + # Detect bitbake -b usage + nodeps = d.getVar("BB_LIMITEDDEPS") or False + if nodeps: + staging_package_populate_pkgdata_dir(pkgdatadir, d) + return + + start = None + configuredeps = [] + for dep in taskdepdata: + data = taskdepdata[dep] + if data[1] == mytaskname and data[0] == pn: + start = dep + break + if start is None: + bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") + + # We need to figure out which sysroot files we need to expose to this task. + # This needs to match what would get restored from sstate, which is controlled + # ultimately by calls from bitbake to setscene_depvalid(). + # That function expects a setscene dependency tree. We build a dependency tree + # condensed to inter-sstate task dependencies, similar to that used by setscene + # tasks. We can then call into setscene_depvalid() and decide + # which dependencies we can "see" and should expose in the recipe specific sysroot. + setscenedeps = copy.deepcopy(taskdepdata) + + start = set([start]) + + sstatetasks = d.getVar("SSTATETASKS").split() + # Add recipe specific tasks referenced by setscene_depvalid() + sstatetasks.append("do_stash_locale") + + # If start is an sstate task (like do_package) we need to add in its direct dependencies + # else the code below won't recurse into them. + for dep in set(start): + for dep2 in setscenedeps[dep][3]: + start.add(dep2) + start.remove(dep) + + # Create collapsed do_populate_sysroot -> do_populate_sysroot tree + for dep in taskdepdata: + data = setscenedeps[dep] + if data[1] not in sstatetasks: + for dep2 in setscenedeps: + data2 = setscenedeps[dep2] + if dep in data2[3]: + data2[3].update(setscenedeps[dep][3]) + data2[3].remove(dep) + if dep in start: + start.update(setscenedeps[dep][3]) + start.remove(dep) + del setscenedeps[dep] + + # Remove circular references + for dep in setscenedeps: + if dep in setscenedeps[dep][3]: + setscenedeps[dep][3].remove(dep) + + # Direct dependencies should be present and can be depended upon + for dep in set(start): + if setscenedeps[dep][1] == "do_packagedata": + if dep not in configuredeps: + configuredeps.append(dep) + + msgbuf = [] + # Call into setscene_depvalid for each sub-dependency and only copy sysroot files + # for ones that would be restored from sstate. 
+ done = list(start) + next = list(start) + while next: + new = [] + for dep in next: + data = setscenedeps[dep] + for datadep in data[3]: + if datadep in done: + continue + taskdeps = {} + taskdeps[dep] = setscenedeps[dep][:2] + taskdeps[datadep] = setscenedeps[datadep][:2] + retval = setscene_depvalid(datadep, taskdeps, [], d, msgbuf) + done.append(datadep) + new.append(datadep) + if retval: + msgbuf.append("Skipping setscene dependency %s" % datadep) + continue + if datadep not in configuredeps and setscenedeps[datadep][1] == "do_packagedata": + configuredeps.append(datadep) + msgbuf.append("Adding dependency on %s" % setscenedeps[datadep][0]) + else: + msgbuf.append("Following dependency on %s" % setscenedeps[datadep][0]) + next = new + + # This logging is too verbose for day to day use sadly + #bb.debug(2, "\n".join(msgbuf)) + + seendirs = set() + postinsts = [] + multilibs = {} + manifests = {} + + msg_adding = [] + + for dep in configuredeps: + c = setscenedeps[dep][0] + msg_adding.append(c) + + manifest, d2 = oe.sstatesig.find_sstate_manifest(c, setscenedeps[dep][2], "packagedata", d, multilibs) + destsysroot = pkgdatadir + + if manifest: + targetdir = destsysroot + with open(manifest, "r") as f: + manifests[dep] = manifest + for l in f: + l = l.strip() + dest = targetdir + l.replace(stagingdir, "") + if l.endswith("/"): + staging_copydir(l, targetdir, dest, seendirs) + continue + staging_copyfile(l, targetdir, dest, postinsts, seendirs) + + bb.note("Installed into pkgdata-sysroot: %s" % str(msg_adding)) + +} +package_prepare_pkgdata[cleandirs] = "${WORKDIR_PKGDATA}" +package_prepare_pkgdata[vardepsexclude] += "MACHINE_ARCH PACKAGE_EXTRA_ARCHS SDK_ARCH BUILD_ARCH SDK_OS BB_TASKDEPDATA" + + diff --git a/poky/meta/classes/package_rpm.bbclass b/poky/meta/classes/package_rpm.bbclass index 1a64cb271..9145717f9 100644 --- a/poky/meta/classes/package_rpm.bbclass +++ b/poky/meta/classes/package_rpm.bbclass @@ -36,7 +36,7 @@ def write_rpm_perfiledata(srcname, d): pkgd = d.getVar('PKGD') def dump_filerdeps(varname, outfile, d): - outfile.write("#!/usr/bin/env python\n\n") + outfile.write("#!/usr/bin/env python3\n\n") outfile.write("# Dependency table\n") outfile.write('deps = {\n') for pkg in packages.split(): @@ -409,7 +409,6 @@ python write_specfile () { if not file_list and localdata.getVar('ALLOW_EMPTY', False) != "1": bb.note("Not creating empty RPM package for %s" % splitname) else: - bb.note("Creating RPM package for %s" % splitname) spec_files_top.append('%files') if extra_pkgdata: package_rpm_extra_pkgdata(splitname, spec_files_top, localdata) @@ -418,7 +417,7 @@ python write_specfile () { bb.note("Creating RPM package for %s" % splitname) spec_files_top.extend(file_list) else: - bb.note("Creating EMPTY RPM Package for %s" % splitname) + bb.note("Creating empty RPM package for %s" % splitname) spec_files_top.append('') continue @@ -510,7 +509,7 @@ python write_specfile () { bb.note("Creating RPM package for %s" % splitname) spec_files_bottom.extend(file_list) else: - bb.note("Creating EMPTY RPM Package for %s" % splitname) + bb.note("Creating empty RPM package for %s" % splitname) spec_files_bottom.append('') del localdata diff --git a/poky/meta/classes/packagegroup.bbclass b/poky/meta/classes/packagegroup.bbclass index 94a59e0c0..1541c8fbf 100644 --- a/poky/meta/classes/packagegroup.bbclass +++ b/poky/meta/classes/packagegroup.bbclass @@ -8,7 +8,7 @@ PACKAGES = "${PN}" # By default, packagegroup packages do not depend on a certain architecture. 
# Only if dependencies are modified by MACHINE_FEATURES, packages -# need to be set to MACHINE_ARCH after inheriting packagegroup.bbclass +# need to be set to MACHINE_ARCH before inheriting packagegroup.bbclass PACKAGE_ARCH ?= "all" # Fully expanded - so it applies the overrides as well diff --git a/poky/meta/classes/populate_sdk_base.bbclass b/poky/meta/classes/populate_sdk_base.bbclass index 59920a57a..d03465b6f 100644 --- a/poky/meta/classes/populate_sdk_base.bbclass +++ b/poky/meta/classes/populate_sdk_base.bbclass @@ -8,6 +8,7 @@ COMPLEMENTARY_GLOB[doc-pkgs] = '*-doc' COMPLEMENTARY_GLOB[dbg-pkgs] = '*-dbg' COMPLEMENTARY_GLOB[src-pkgs] = '*-src' COMPLEMENTARY_GLOB[ptest-pkgs] = '*-ptest' +COMPLEMENTARY_GLOB[bash-completion-pkgs] = '*-bash-completion' def complementary_globs(featurevar, d): all_globs = d.getVarFlags('COMPLEMENTARY_GLOB') @@ -63,7 +64,7 @@ python () { SDK_RDEPENDS = "${TOOLCHAIN_TARGET_TASK} ${TOOLCHAIN_HOST_TASK}" SDK_DEPENDS = "virtual/fakeroot-native ${SDK_ARCHIVE_DEPENDS} cross-localedef-native nativesdk-qemuwrapper-cross ${@' '.join(["%s-qemuwrapper-cross" % m for m in d.getVar("MULTILIB_VARIANTS").split()])} qemuwrapper-cross" PATH_prepend = "${STAGING_DIR_HOST}${SDKPATHNATIVE}${bindir}/crossscripts:${@":".join(all_multilib_tune_values(d, 'STAGING_BINDIR_CROSS').split())}:" -SDK_DEPENDS_append_libc-glibc = " nativesdk-glibc-locale" +SDK_DEPENDS += "nativesdk-glibc-locale" # We want the MULTIARCH_TARGET_SYS to point to the TUNE_PKGARCH, not PACKAGE_ARCH as it # could be set to the MACHINE_ARCH diff --git a/poky/meta/classes/populate_sdk_ext.bbclass b/poky/meta/classes/populate_sdk_ext.bbclass index 800e1175d..9fda1c9e7 100644 --- a/poky/meta/classes/populate_sdk_ext.bbclass +++ b/poky/meta/classes/populate_sdk_ext.bbclass @@ -20,6 +20,7 @@ SDK_EXT_task-populate-sdk-ext = "-ext" SDK_EXT_TYPE ?= "full" SDK_INCLUDE_PKGDATA ?= "0" SDK_INCLUDE_TOOLCHAIN ?= "${@'1' if d.getVar('SDK_EXT_TYPE') == 'full' else '0'}" +SDK_INCLUDE_NATIVESDK ?= "0" SDK_RECRDEP_TASKS ?= "" @@ -378,6 +379,11 @@ python copy_buildsystem () { f.write('require conf/locked-sigs.inc\n') f.write('require conf/unlocked-sigs.inc\n') + if os.path.exists(builddir + '/cache/bb_unihashes.dat'): + bb.parse.siggen.save_unitaskhashes() + bb.utils.mkdirhier(os.path.join(baseoutpath, 'cache')) + shutil.copyfile(builddir + '/cache/bb_unihashes.dat', baseoutpath + '/cache/bb_unihashes.dat') + # Write a templateconf.cfg with open(baseoutpath + '/conf/templateconf.cfg', 'w') as f: f.write('meta/conf\n') @@ -401,9 +407,27 @@ python copy_buildsystem () { excluded_targets = get_sdk_install_targets(d, images_only=True) sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc' lockedsigs_pruned = baseoutpath + '/conf/locked-sigs.inc' + #nativesdk-only sigfile to merge into locked-sigs.inc + sdk_include_nativesdk = (d.getVar("SDK_INCLUDE_NATIVESDK") == '1') + nativesigfile = d.getVar('WORKDIR') + '/locked-sigs_nativesdk.inc' + nativesigfile_pruned = d.getVar('WORKDIR') + '/locked-sigs_nativesdk_pruned.inc' + + if sdk_include_nativesdk: + oe.copy_buildsystem.prune_lockedsigs([], + excluded_targets.split(), + nativesigfile, + True, + nativesigfile_pruned) + + oe.copy_buildsystem.merge_lockedsigs([], + sigfile, + nativesigfile_pruned, + sigfile) + oe.copy_buildsystem.prune_lockedsigs([], excluded_targets.split(), sigfile, + False, lockedsigs_pruned) sstate_out = baseoutpath + '/sstate-cache' @@ -414,13 +438,18 @@ python copy_buildsystem () { sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1') sdk_ext_type = 
d.getVar('SDK_EXT_TYPE') - if sdk_ext_type != 'minimal' or sdk_include_toolchain or derivative: + if (sdk_ext_type != 'minimal' or sdk_include_toolchain or derivative) and not sdk_include_nativesdk: # Create the filtered task list used to generate the sstate cache shipped with the SDK tasklistfn = d.getVar('WORKDIR') + '/tasklist.txt' create_filtered_tasklist(d, baseoutpath, tasklistfn, conf_initpath) else: tasklistfn = None + if os.path.exists(builddir + '/cache/bb_unihashes.dat'): + bb.parse.siggen.save_unitaskhashes() + bb.utils.mkdirhier(os.path.join(baseoutpath, 'cache')) + shutil.copyfile(builddir + '/cache/bb_unihashes.dat', baseoutpath + '/cache/bb_unihashes.dat') + # Add packagedata if enabled if d.getVar('SDK_INCLUDE_PKGDATA') == '1': lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base.inc' @@ -657,9 +686,16 @@ fakeroot python do_populate_sdk_ext() { d.setVar('SDKDEPLOYDIR', '${SDKEXTDEPLOYDIR}') # ESDKs have a libc from the buildtools so ensure we don't ship linguas twice d.delVar('SDKIMAGE_LINGUAS') + if d.getVar("SDK_INCLUDE_NATIVESDK") == '1': + generate_nativesdk_lockedsigs(d) populate_sdk_common(d) } +def generate_nativesdk_lockedsigs(d): + import oe.copy_buildsystem + sigfile = d.getVar('WORKDIR') + '/locked-sigs_nativesdk.inc' + oe.copy_buildsystem.generate_locked_sigs(sigfile, d) + def get_ext_sdk_depends(d): # Note: the deps varflag is a list not a string, so we need to specify expand=False deps = d.getVarFlag('do_image_complete', 'deps', False) diff --git a/poky/meta/classes/python3native.bbclass b/poky/meta/classes/python3native.bbclass index a3acaf61b..d98fb4c75 100644 --- a/poky/meta/classes/python3native.bbclass +++ b/poky/meta/classes/python3native.bbclass @@ -9,6 +9,14 @@ DEPENDS_append = " python3-native " export STAGING_INCDIR export STAGING_LIBDIR +# Packages can use +# find_package(PythonInterp REQUIRED) +# find_package(PythonLibs REQUIRED) +# which ends up using libs/includes from build host +# Therefore pre-empt that effort +export PYTHON_LIBRARY="${STAGING_LIBDIR}/lib${PYTHON_DIR}${PYTHON_ABI}.so" +export PYTHON_INCLUDE_DIR="${STAGING_INCDIR}/${PYTHON_DIR}${PYTHON_ABI}" + export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata" # suppress host user's site-packages dirs. diff --git a/poky/meta/classes/pythonnative.bbclass b/poky/meta/classes/pythonnative.bbclass index ae6600cd1..0e9019d1e 100644 --- a/poky/meta/classes/pythonnative.bbclass +++ b/poky/meta/classes/pythonnative.bbclass @@ -12,6 +12,14 @@ DEPENDS_append = " python-native " export STAGING_INCDIR export STAGING_LIBDIR +# Packages can use +# find_package(PythonInterp REQUIRED) +# find_package(PythonLibs REQUIRED) +# which ends up using libs/includes from build host +# Therefore pre-empt that effort +export PYTHON_LIBRARY="${STAGING_LIBDIR}/lib${PYTHON_DIR}${PYTHON_ABI}.so" +export PYTHON_INCLUDE_DIR="${STAGING_INCDIR}/${PYTHON_DIR}${PYTHON_ABI}" + # suppress host user's site-packages dirs. 
export PYTHONNOUSERSITE = "1" diff --git a/poky/meta/classes/report-error.bbclass b/poky/meta/classes/report-error.bbclass index 1c55abfbf..ea043b23e 100644 --- a/poky/meta/classes/report-error.bbclass +++ b/poky/meta/classes/report-error.bbclass @@ -25,6 +25,19 @@ def errorreport_savedata(e, newdata, file): json.dump(newdata, f, indent=4, sort_keys=True) return datafile +def get_conf_data(e, filename): + builddir = e.data.getVar('TOPDIR') + filepath = os.path.join(builddir, "conf", filename) + jsonstring = "" + if os.path.exists(filepath): + with open(filepath, 'r') as f: + for line in f.readlines(): + if line.startswith("#") or len(line.strip()) == 0: + continue + else: + jsonstring=jsonstring + line + return jsonstring + python errorreport_handler () { import json import codecs @@ -51,6 +64,8 @@ python errorreport_handler () { data['failures'] = [] data['component'] = " ".join(e.getPkgs()) data['branch_commit'] = str(base_detect_branch(e.data)) + ": " + str(base_detect_revision(e.data)) + data['local_conf'] = get_conf_data(e, 'local.conf') + data['auto_conf'] = get_conf_data(e, 'auto.conf') lock = bb.utils.lockfile(datafile + '.lock') errorreport_savedata(e, data, "error-report.txt") bb.utils.unlockfile(lock) diff --git a/poky/meta/classes/reproducible_build.bbclass b/poky/meta/classes/reproducible_build.bbclass index 8788ad714..39b6e40ca 100644 --- a/poky/meta/classes/reproducible_build.bbclass +++ b/poky/meta/classes/reproducible_build.bbclass @@ -39,19 +39,28 @@ inherit ${@oe.utils.ifelse(d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1', 'repr SDE_DIR ="${WORKDIR}/source-date-epoch" SDE_FILE = "${SDE_DIR}/__source_date_epoch.txt" +SDE_DEPLOYDIR = "${WORKDIR}/deploy-source-date-epoch" SSTATETASKS += "do_deploy_source_date_epoch" do_deploy_source_date_epoch () { echo "Deploying SDE to ${SDE_DIR}." 
+ mkdir -p ${SDE_DEPLOYDIR} + if [ -e ${SDE_FILE} ]; then + cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt + fi } python do_deploy_source_date_epoch_setscene () { sstate_setscene(d) + bb.utils.mkdirhier(d.getVar('SDE_DIR')) + sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt') + if os.path.exists(sde_file): + os.rename(sde_file, d.getVar('SDE_FILE')) } -do_deploy_source_date_epoch[dirs] = "${SDE_DIR}" -do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DIR}" +do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}" +do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}" addtask do_deploy_source_date_epoch_setscene addtask do_deploy_source_date_epoch before do_configure after do_patch diff --git a/poky/meta/classes/rm_work.bbclass b/poky/meta/classes/rm_work.bbclass index c478f4a18..a6bd3f719 100644 --- a/poky/meta/classes/rm_work.bbclass +++ b/poky/meta/classes/rm_work.bbclass @@ -121,7 +121,7 @@ do_rm_work_all () { } do_rm_work_all[recrdeptask] = "do_rm_work" do_rm_work_all[noexec] = "1" -addtask rm_work_all after before do_build +addtask rm_work_all before do_build do_populate_sdk[postfuncs] += "rm_work_populatesdk" rm_work_populatesdk () { diff --git a/poky/meta/classes/rootfs-postcommands.bbclass b/poky/meta/classes/rootfs-postcommands.bbclass index 89f8efd32..2f171836f 100644 --- a/poky/meta/classes/rootfs-postcommands.bbclass +++ b/poky/meta/classes/rootfs-postcommands.bbclass @@ -29,7 +29,7 @@ APPEND_append = '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", " ro ROOTFS_POSTPROCESS_COMMAND += "write_image_test_data ; " # Write manifest -IMAGE_MANIFEST = "${IMGDEPLOYDIR}/${IMAGE_NAME}.rootfs.manifest" +IMAGE_MANIFEST = "${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.manifest" ROOTFS_POSTUNINSTALL_COMMAND =+ "write_image_manifest ; " # Set default postinst log file POSTINST_LOGFILE ?= "${localstatedir}/log/postinstall.log" @@ -95,6 +95,11 @@ read_only_rootfs_hook () { sed -i -e '/^[#[:space:]]*\/dev\/root/{s/defaults/ro/;s/\([[:space:]]*[[:digit:]]\)\([[:space:]]*\)[[:digit:]]$/\1\20/}' ${IMAGE_ROOTFS}/etc/fstab fi + # Tweak the "mount -o remount,rw /" command in busybox-inittab inittab + if [ -f ${IMAGE_ROOTFS}/etc/inittab ]; then + sed -i 's|/bin/mount -o remount,rw /|/bin/mount -o remount,ro /|' ${IMAGE_ROOTFS}/etc/inittab + fi + # If we're using openssh and the /etc/ssh directory has no pre-generated keys, # we should configure openssh to use the configuration file /etc/ssh/sshd_config_readonly # and the keys under /var/run/ssh. 
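For readers unfamiliar with the busybox-inittab layout, the read-only-rootfs tweak added in the hunk above only rewrites the remount entry from read-write to read-only. A minimal Python sketch of the equivalent substitution follows; the sample inittab line is illustrative and not taken from the patch:

    # Illustrative busybox inittab entry (hypothetical, not from the patch)
    line = "::sysinit:/bin/mount -o remount,rw /"

    # Same rewrite the added sed expression performs in read_only_rootfs_hook
    fixed = line.replace("/bin/mount -o remount,rw /", "/bin/mount -o remount,ro /")
    print(fixed)  # ::sysinit:/bin/mount -o remount,ro /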
@@ -260,7 +265,7 @@ python write_image_manifest () { with open(manifest_name, 'w+') as image_manifest: image_manifest.write(format_pkg_list(pkgs, "ver")) - if os.path.exists(manifest_name): + if os.path.exists(manifest_name) and link_name: manifest_link = deploy_dir + "/" + link_name + ".manifest" if os.path.lexists(manifest_link): os.remove(manifest_link) @@ -328,7 +333,7 @@ python write_image_test_data() { searchString = "%s/"%(d.getVar("TOPDIR")).replace("//","/") export2json(d, testdata_name, searchString=searchString, replaceString="") - if os.path.exists(testdata_name): + if os.path.exists(testdata_name) and link_name: testdata_link = os.path.join(deploy_dir, "%s.testdata.json" % link_name) if os.path.lexists(testdata_link): os.remove(testdata_link) @@ -356,7 +361,9 @@ rootfs_reproducible () { echo $sformatted > ${IMAGE_ROOTFS}/etc/version bbnote "rootfs_reproducible: set /etc/version to $sformatted" - find ${IMAGE_ROOTFS}/etc/gconf -name '%gconf.xml' -print0 | xargs -0r \ - sed -i -e 's@\bmtime="[0-9][0-9]*"@mtime="'${REPRODUCIBLE_TIMESTAMP_ROOTFS}'"@g' + if [ -d ${IMAGE_ROOTFS}${sysconfdir}/gconf ]; then + find ${IMAGE_ROOTFS}${sysconfdir}/gconf -name '%gconf.xml' -print0 | xargs -0r \ + sed -i -e 's@\bmtime="[0-9][0-9]*"@mtime="'${REPRODUCIBLE_TIMESTAMP_ROOTFS}'"@g' + fi fi } diff --git a/poky/meta/classes/rootfs_ipk.bbclass b/poky/meta/classes/rootfs_ipk.bbclass index aabc370cf..e73d2bfda 100644 --- a/poky/meta/classes/rootfs_ipk.bbclass +++ b/poky/meta/classes/rootfs_ipk.bbclass @@ -21,7 +21,7 @@ OPKG_PREPROCESS_COMMANDS = "" OPKG_POSTPROCESS_COMMANDS = "" -OPKGLIBDIR = "${localstatedir}/lib" +OPKGLIBDIR ??= "${localstatedir}/lib" MULTILIBRE_ALLOW_REP = "${OPKGLIBDIR}/opkg|/usr/lib/opkg" diff --git a/poky/meta/classes/sanity.bbclass b/poky/meta/classes/sanity.bbclass index 9429202dc..2d3f49eb1 100644 --- a/poky/meta/classes/sanity.bbclass +++ b/poky/meta/classes/sanity.bbclass @@ -573,7 +573,7 @@ def sanity_check_conffiles(d): if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \ d.getVar(current_version) != d.getVar(required_version): try: - bb.build.exec_func(func, d, pythonexception=True) + bb.build.exec_func(func, d) except NotImplementedError as e: bb.fatal(str(e)) d.setVar("BB_INVALIDCONF", True) diff --git a/poky/meta/classes/sstate.bbclass b/poky/meta/classes/sstate.bbclass index 424acfb15..b47b9c23b 100644 --- a/poky/meta/classes/sstate.bbclass +++ b/poky/meta/classes/sstate.bbclass @@ -89,11 +89,6 @@ SSTATE_HASHEQUIV_METHOD[doc] = "The fully-qualified function used to calculate \ the output hash for a task, which in turn is used to determine equivalency. \ " -SSTATE_HASHEQUIV_SERVER ?= "" -SSTATE_HASHEQUIV_SERVER[doc] = "The hash equivalence sever. For example, \ - 'http://192.168.0.1:5000'. Do not include a trailing slash \ - " - SSTATE_HASHEQUIV_REPORT_TASKDATA ?= "0" SSTATE_HASHEQUIV_REPORT_TASKDATA[doc] = "Report additional useful data to the \ hash equivalency server, such as PN, PV, taskname, etc. 
This information \ @@ -329,7 +324,7 @@ def sstate_installpkg(ss, d): pstaging_fetch(sstatefetch, d) if not os.path.isfile(sstatepkg): - bb.note("Staging package %s does not exist" % sstatepkg) + bb.note("Sstate package %s does not exist" % sstatepkg) return False sstate_clean(ss, d) @@ -340,7 +335,8 @@ def sstate_installpkg(ss, d): if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False): signer = get_signer(d, 'local') if not signer.verify(sstatepkg + '.sig'): - bb.warn("Cannot verify signature on sstate package %s" % sstatepkg) + bb.warn("Cannot verify signature on sstate package %s, skipping acceleration..." % sstatepkg) + return False # Empty sstateinst directory, ensure its clean if os.path.exists(sstateinst): @@ -658,8 +654,12 @@ def sstate_package(ss, d): if d.getVar('SSTATE_SKIP_CREATION') == '1': return + sstate_create_package = ['sstate_report_unihash', 'sstate_create_package'] + if d.getVar('SSTATE_SIG_KEY'): + sstate_create_package.append('sstate_sign_package') + for f in (d.getVar('SSTATECREATEFUNCS') or '').split() + \ - ['sstate_report_unihash', 'sstate_create_package', 'sstate_sign_package'] + \ + sstate_create_package + \ (d.getVar('SSTATEPOSTCREATEFUNCS') or '').split(): # All hooks should run in SSTATE_BUILDDIR. bb.build.exec_func(f, d, (sstatebuild,)) @@ -750,6 +750,11 @@ sstate_task_postfunc[dirs] = "${WORKDIR}" sstate_create_package () { TFILE=`mktemp ${SSTATE_PKG}.XXXXXXXX` + # Exit early if it already exists + if [ -e ${SSTATE_PKG} ]; then + return + fi + # Use pigz if available OPT="-czS" if [ -x "$(command -v pigz)" ]; then @@ -769,19 +774,24 @@ sstate_create_package () { tar $OPT --file=$TFILE --files-from=/dev/null fi chmod 0664 $TFILE - mv -f $TFILE ${SSTATE_PKG} + # Skip if it was already created by some other process + if [ ! 
-e ${SSTATE_PKG} ]; then + mv -f $TFILE ${SSTATE_PKG} + else + rm $TFILE + fi } python sstate_sign_package () { from oe.gpg_sign import get_signer - if d.getVar('SSTATE_SIG_KEY'): - signer = get_signer(d, 'local') - sstate_pkg = d.getVar('SSTATE_PKG') - if os.path.exists(sstate_pkg + '.sig'): - os.unlink(sstate_pkg + '.sig') - signer.detach_sign(sstate_pkg, d.getVar('SSTATE_SIG_KEY', False), None, - d.getVar('SSTATE_SIG_PASSPHRASE'), armor=False) + + signer = get_signer(d, 'local') + sstate_pkg = d.getVar('SSTATE_PKG') + if os.path.exists(sstate_pkg + '.sig'): + os.unlink(sstate_pkg + '.sig') + signer.detach_sign(sstate_pkg, d.getVar('SSTATE_SIG_KEY', False), None, + d.getVar('SSTATE_SIG_PASSPHRASE'), armor=False) } python sstate_report_unihash() { @@ -808,29 +818,26 @@ sstate_unpack_package () { BB_HASHCHECK_FUNCTION = "sstate_checkhashes" -def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False, *, sq_unihash=None): - - ret = [] - missed = [] +def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, **kwargs): + found = set() + missed = set() extension = ".tgz" if siginfo: extension = extension + ".siginfo" def gethash(task): - if sq_unihash is not None: - return sq_unihash[task] - return sq_hash[task] + return sq_data['unihash'][task] def getpathcomponents(task, d): # Magic data from BB_HASHFILENAME - splithashfn = sq_hashfn[task].split(" ") + splithashfn = sq_data['hashfn'][task].split(" ") spec = splithashfn[1] if splithashfn[0] == "True": extrapath = d.getVar("NATIVELSBSTRING") + "/" else: extrapath = "" - - tname = sq_task[task][3:] + + tname = bb.runqueue.taskname_from_tid(task)[3:] if tname in ["fetch", "unpack", "patch", "populate_lic", "preconfigure"] and splithashfn[2]: spec = splithashfn[2] @@ -839,18 +846,18 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False, *, return spec, extrapath, tname - for task in range(len(sq_fn)): + for tid in sq_data['hash']: - spec, extrapath, tname = getpathcomponents(task, d) + spec, extrapath, tname = getpathcomponents(tid, d) - sstatefile = d.expand("${SSTATE_DIR}/" + extrapath + generate_sstatefn(spec, gethash(task), d) + "_" + tname + extension) + sstatefile = d.expand("${SSTATE_DIR}/" + extrapath + generate_sstatefn(spec, gethash(tid), d) + "_" + tname + extension) if os.path.exists(sstatefile): bb.debug(2, "SState: Found valid sstate file %s" % sstatefile) - ret.append(task) + found.add(tid) continue else: - missed.append(task) + missed.add(tid) bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile) mirrors = d.getVar("SSTATE_MIRRORS") @@ -880,7 +887,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False, *, thread_worker.connection_cache.close_connections() def checkstatus(thread_worker, arg): - (task, sstatefile) = arg + (tid, sstatefile) = arg localdata2 = bb.data.createCopy(localdata) srcuri = "file://" + sstatefile @@ -892,22 +899,22 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False, *, connection_cache=thread_worker.connection_cache) fetcher.checkstatus() bb.debug(2, "SState: Successful fetch test for %s" % srcuri) - ret.append(task) - if task in missed: - missed.remove(task) + found.add(tid) + if tid in missed: + missed.remove(tid) except: - missed.append(task) + missed.add(tid) bb.debug(2, "SState: Unsuccessful fetch test for %s" % srcuri) pass bb.event.fire(bb.event.ProcessProgress(msg, len(tasklist) - thread_worker.tasks.qsize()), d) tasklist = [] - for task in range(len(sq_fn)): - if task in ret: + for 
tid in sq_data['hash']: + if tid in found: continue - spec, extrapath, tname = getpathcomponents(task, d) - sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(task), d) + "_" + tname + extension) - tasklist.append((task, sstatefile)) + spec, extrapath, tname = getpathcomponents(tid, d) + sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(tid), d) + "_" + tname + extension) + tasklist.append((tid, sstatefile)) if tasklist: msg = "Checking sstate mirror object availability" @@ -927,35 +934,38 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False, *, bb.event.fire(bb.event.ProcessFinished(msg), d) + # Likely checking an individual task hash again for multiconfig sharing of sstate tasks so skip reporting + if len(sq_data['hash']) == 1: + return found + inheritlist = d.getVar("INHERIT") if "toaster" in inheritlist: evdata = {'missed': [], 'found': []}; - for task in missed: - spec, extrapath, tname = getpathcomponents(task, d) - sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(task), d) + "_" + tname + ".tgz") - evdata['missed'].append( (sq_fn[task], sq_task[task], gethash(task), sstatefile ) ) - for task in ret: - spec, extrapath, tname = getpathcomponents(task, d) - sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(task), d) + "_" + tname + ".tgz") - evdata['found'].append( (sq_fn[task], sq_task[task], gethash(task), sstatefile ) ) + for tid in missed: + spec, extrapath, tname = getpathcomponents(tid, d) + sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(tid), d) + "_" + tname + ".tgz") + evdata['missed'].append((bb.runqueue.fn_from_tid(tid), bb.runqueue.taskname_from_tid(tid), gethash(tid), sstatefile ) ) + for tid in found: + spec, extrapath, tname = getpathcomponents(tid, d) + sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(tid), d) + "_" + tname + ".tgz") + evdata['found'].append((bb.runqueue.fn_from_tid(tid), bb.runqueue.taskname_from_tid(tid), gethash(tid), sstatefile ) ) bb.event.fire(bb.event.MetadataEvent("MissedSstate", evdata), d) # Print some summary statistics about the current task completion and how much sstate # reuse there was. Avoid divide by zero errors. 
- total = len(sq_fn) - currentcount = d.getVar("BB_SETSCENE_STAMPCURRENT_COUNT") or 0 + total = len(sq_data['hash']) complete = 0 if currentcount: - complete = (len(ret) + currentcount) / (total + currentcount) * 100 + complete = (len(found) + currentcount) / (total + currentcount) * 100 match = 0 if total: - match = len(ret) / total * 100 - bb.plain("Sstate summary: Wanted %d Found %d Missed %d Current %d (%d%% match, %d%% complete)" % (total, len(ret), len(missed), currentcount, match, complete)) + match = len(found) / total * 100 + bb.plain("Sstate summary: Wanted %d Found %d Missed %d Current %d (%d%% match, %d%% complete)" % (total, len(found), len(missed), currentcount, match, complete)) if hasattr(bb.parse.siggen, "checkhashes"): - bb.parse.siggen.checkhashes(missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d) + bb.parse.siggen.checkhashes(sq_data, missed, found, d) - return ret + return found BB_SETSCENE_DEPVALID = "setscene_depvalid" diff --git a/poky/meta/classes/staging.bbclass b/poky/meta/classes/staging.bbclass index 920706022..55a9b52ed 100644 --- a/poky/meta/classes/staging.bbclass +++ b/poky/meta/classes/staging.bbclass @@ -197,7 +197,7 @@ def staging_populate_sysroot_dir(targetsysroot, nativesysroot, native, d): for pkgarch in pkgarchs: for manifest in glob.glob(d.expand("${SSTATE_MANIFESTS}/manifest-%s-*.populate_sysroot" % pkgarch)): if manifest.endswith("-initial.populate_sysroot"): - # skip glibc-initial and libgcc-initial due to file overlap + # skip libgcc-initial due to file overlap continue if not native and (manifest.endswith("-native.populate_sysroot") or "nativesdk-" in manifest): continue @@ -261,12 +261,10 @@ python extend_recipe_sysroot() { workdir = d.getVar("WORKDIR") #bb.warn(str(taskdepdata)) pn = d.getVar("PN") - mc = d.getVar("BB_CURRENT_MC") stagingdir = d.getVar("STAGING_DIR") sharedmanifests = d.getVar("COMPONENTS_DIR") + "/manifests" recipesysroot = d.getVar("RECIPE_SYSROOT") recipesysrootnative = d.getVar("RECIPE_SYSROOT_NATIVE") - current_variant = d.getVar("BBEXTENDVARIANT") # Detect bitbake -b usage nodeps = d.getVar("BB_LIMITEDDEPS") or False @@ -452,11 +450,6 @@ python extend_recipe_sysroot() { msg_adding = [] for dep in configuredeps: - if mc != 'default': - # We should not care about other multiconfigs - depmc = dep.split(':')[1] - if depmc != mc: - continue c = setscenedeps[dep][0] if c not in installed: continue @@ -584,17 +577,6 @@ python do_prepare_recipe_sysroot () { } addtask do_prepare_recipe_sysroot before do_configure after do_fetch -# Clean out the recipe specific sysroots before do_fetch -# (use a prefunc so we can order before extend_recipe_sysroot if it gets added) -python clean_recipe_sysroot() { - # We remove these stamps since we're removing any content they'd have added with - # cleandirs. 
This removes the sigdata too, likely not a big deal, - oe.path.remove(d.getVar("STAMP") + "*addto_recipe_sysroot*") - return -} -clean_recipe_sysroot[cleandirs] += "${RECIPE_SYSROOT} ${RECIPE_SYSROOT_NATIVE}" -do_fetch[prefuncs] += "clean_recipe_sysroot" - python staging_taskhandler() { bbtasks = e.tasklist for task in bbtasks: diff --git a/poky/meta/classes/syslinux.bbclass b/poky/meta/classes/syslinux.bbclass index 031dacbf7..894f6b371 100644 --- a/poky/meta/classes/syslinux.bbclass +++ b/poky/meta/classes/syslinux.bbclass @@ -75,11 +75,6 @@ syslinux_hddimg_install() { syslinux ${IMGDEPLOYDIR}/${IMAGE_NAME}.hddimg } -syslinux_hdddirect_install() { - DEST=$1 - syslinux $DEST -} - python build_syslinux_cfg () { import copy import sys diff --git a/poky/meta/classes/systemd-boot.bbclass b/poky/meta/classes/systemd-boot.bbclass index 3cd6811a6..336c4c2ff 100644 --- a/poky/meta/classes/systemd-boot.bbclass +++ b/poky/meta/classes/systemd-boot.bbclass @@ -11,50 +11,25 @@ do_bootimg[depends] += "${MLPREFIX}systemd-boot:do_deploy" -EFIDIR = "/EFI/BOOT" +require conf/image-uefi.conf # Need UUID utility code. inherit fs-uuid efi_populate() { - DEST=$1 + efi_populate_common "$1" systemd - EFI_IMAGE="systemd-bootia32.efi" - DEST_EFI_IMAGE="bootia32.efi" - if [ "${TARGET_ARCH}" = "x86_64" ]; then - EFI_IMAGE="systemd-bootx64.efi" - DEST_EFI_IMAGE="bootx64.efi" - fi - - install -d ${DEST}${EFIDIR} # systemd-boot requires these paths for configuration files # they are not customizable so no point in new vars install -d ${DEST}/loader install -d ${DEST}/loader/entries - install -m 0644 ${DEPLOY_DIR_IMAGE}/${EFI_IMAGE} ${DEST}${EFIDIR}/${DEST_EFI_IMAGE} - EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') - printf 'fs0:%s\%s\n' "$EFIPATH" "$DEST_EFI_IMAGE" >${DEST}/startup.nsh install -m 0644 ${SYSTEMD_BOOT_CFG} ${DEST}/loader/loader.conf for i in ${SYSTEMD_BOOT_ENTRIES}; do install -m 0644 ${i} ${DEST}/loader/entries done } -efi_iso_populate() { - iso_dir=$1 - efi_populate $iso_dir - mkdir -p ${EFIIMGDIR}/${EFIDIR} - cp $iso_dir/${EFIDIR}/* ${EFIIMGDIR}${EFIDIR} +efi_iso_populate_append() { cp -r $iso_dir/loader ${EFIIMGDIR} - cp $iso_dir/${KERNEL_IMAGETYPE} ${EFIIMGDIR} - EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') - echo "fs0:${EFIPATH}\\${DEST_EFI_IMAGE}" > ${EFIIMGDIR}/startup.nsh - if [ -f "$iso_dir/initrd" ] ; then - cp $iso_dir/initrd ${EFIIMGDIR} - fi -} - -efi_hddimg_populate() { - efi_populate $1 } inherit systemd-boot-cfg diff --git a/poky/meta/classes/systemd.bbclass b/poky/meta/classes/systemd.bbclass index d1cb17dc8..747055b8f 100644 --- a/poky/meta/classes/systemd.bbclass +++ b/poky/meta/classes/systemd.bbclass @@ -214,7 +214,6 @@ python rm_systemd_unitdir (){ if (os.path.exists(systemd_libdir) and not os.listdir(systemd_libdir)): os.rmdir(systemd_libdir) } -do_install[postfuncs] += "rm_systemd_unitdir " python rm_sysvinit_initddir (){ import shutil @@ -229,4 +228,8 @@ python rm_sysvinit_initddir (){ if (os.path.exists(systemd_system_unitdir) and os.listdir(systemd_system_unitdir)): shutil.rmtree(sysv_initddir) } -do_install[postfuncs] += "rm_sysvinit_initddir " + +do_install[postfuncs] += "${RMINITDIR} " +RMINITDIR_class-target = " rm_sysvinit_initddir rm_systemd_unitdir " +RMINITDIR = "" + diff --git a/poky/meta/classes/uboot-extlinux-config.bbclass b/poky/meta/classes/uboot-extlinux-config.bbclass index b5b1a81df..f4bf94be0 100644 --- a/poky/meta/classes/uboot-extlinux-config.bbclass +++ b/poky/meta/classes/uboot-extlinux-config.bbclass @@ -104,13 +104,16 @@ python 
do_create_extlinux_config() { if default: cfgfile.write('DEFAULT %s\n' % (default)) - for label in labels.split(): + # Need to deconflict the labels with existing overrides + label_overrides = labels.split() + default_overrides = localdata.getVar('OVERRIDES').split(':') + # We're keeping all the existing overrides that aren't used as a label + # an override for that label will be added back in while we're processing that label + keep_overrides = list(filter(lambda x: x not in label_overrides, default_overrides)) - overrides = localdata.getVar('OVERRIDES') - if not overrides: - bb.fatal('OVERRIDES not defined') + for label in labels.split(): - localdata.setVar('OVERRIDES', label + ':' + overrides) + localdata.setVar('OVERRIDES', ':'.join(keep_overrides + [label])) extlinux_console = localdata.getVar('UBOOT_EXTLINUX_CONSOLE') diff --git a/poky/meta/classes/uboot-sign.bbclass b/poky/meta/classes/uboot-sign.bbclass index de81ad1b3..982ed46d0 100644 --- a/poky/meta/classes/uboot-sign.bbclass +++ b/poky/meta/classes/uboot-sign.bbclass @@ -66,7 +66,7 @@ concat_dtb_helper() { install ${UBOOT_BINARY} ${DEPLOYDIR}/${UBOOT_IMAGE} elif [ -e "${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}" -a -e "$deployed_uboot_dtb_binary" ]; then cd ${DEPLOYDIR} - cat ${UBOOT_NODTB_IMAGE} $deployed_uboot_dtb_binary | tee ${UBOOT_BINARY} > ${UBOOT_IMAGE} + cat ${UBOOT_NODTB_IMAGE} $deployed_uboot_dtb_binary | tee ${B}/${CONFIG_B_PATH}/${UBOOT_BINARY} > ${UBOOT_IMAGE} else bbwarn "Failure while adding public key to u-boot binary. Verified boot won't be available." fi @@ -77,10 +77,12 @@ concat_dtb() { mkdir -p ${DEPLOYDIR} if [ -n "${UBOOT_CONFIG}" ]; then for config in ${UBOOT_MACHINE}; do + CONFIG_B_PATH="${config}" cd ${B}/${config} concat_dtb_helper done else + CONFIG_B_PATH="" cd ${B} concat_dtb_helper fi diff --git a/poky/meta/classes/update-alternatives.bbclass b/poky/meta/classes/update-alternatives.bbclass index b702e77ee..8c2b66e7f 100644 --- a/poky/meta/classes/update-alternatives.bbclass +++ b/poky/meta/classes/update-alternatives.bbclass @@ -284,8 +284,11 @@ python populate_packages_updatealternatives () { bb.note('adding update-alternatives calls to postinst/prerm for %s' % pkg) bb.note('%s' % alt_setup_links) - postinst = d.getVar('pkg_postinst_%s' % pkg) or '#!/bin/sh\n' - postinst += alt_setup_links + postinst = d.getVar('pkg_postinst_%s' % pkg) + if postinst: + postinst = alt_setup_links + postinst + else: + postinst = '#!/bin/sh\n' + alt_setup_links d.setVar('pkg_postinst_%s' % pkg, postinst) bb.note('%s' % alt_remove_links) diff --git a/poky/meta/classes/xmlcatalog.bbclass b/poky/meta/classes/xmlcatalog.bbclass index 075aef8c0..ae4811fde 100644 --- a/poky/meta/classes/xmlcatalog.bbclass +++ b/poky/meta/classes/xmlcatalog.bbclass @@ -1,3 +1,5 @@ +DEPENDS = "libxml2-native" + # A whitespace-separated list of XML catalogs to be registered, for example # "${sysconfdir}/xml/docbook-xml.xml". XMLCATALOGS ?= "" |
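To make the new OVERRIDES handling in uboot-extlinux-config.bbclass above easier to follow, here is a small stand-alone Python sketch of the same deconfliction logic; the override and label values are made-up examples, not values from the patch:

    # Hypothetical stand-ins for OVERRIDES and UBOOT_EXTLINUX_LABELS
    default_overrides = "arm:armv7a:qemuarm:linux:default".split(":")
    label_overrides = ["default", "fallback"]

    # Keep every existing override that does not clash with a label name...
    keep_overrides = [o for o in default_overrides if o not in label_overrides]

    # ...then re-add exactly one label override while that label is processed
    for label in label_overrides:
        print(label, "->", ":".join(keep_overrides + [label]))
    # default  -> arm:armv7a:qemuarm:linux:default
    # fallback -> arm:armv7a:qemuarm:linux:fallback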