path: root/poky/meta/lib/oe
author    Andrew Geissler <geissonator@yahoo.com>    2020-04-13 21:39:40 +0300
committer Andrew Geissler <geissonator@yahoo.com>    2020-05-05 16:30:44 +0300
commit    82c905dc58a36aeae40b1b273a12f63fb1973cf4 (patch)
tree      38caf00263451b5036435cdc36e035b25d32e623 /poky/meta/lib/oe
parent    83ecb75644b3d677c274188f9ac0b2374d6f6925 (diff)
download  openbmc-82c905dc58a36aeae40b1b273a12f63fb1973cf4.tar.xz
meta-openembedded and poky: subtree updates
Squash of the following due to dependencies among them and OpenBMC changes:

  meta-openembedded: subtree update:d0748372d2..9201611135
  meta-openembedded: subtree update:9201611135..17fd382f34
  poky: subtree update:9052e5b32a..2e11d97b6c
  poky: subtree update:2e11d97b6c..a8544811d7

The change log was too large for the Jenkins plugin to handle, so it has been removed. Here are the first and last commits of each subtree:

  meta-openembedded:d0748372d2 cppzmq: bump to version 4.6.0
  meta-openembedded:17fd382f34 mpv: Remove X11 dependency
  poky:9052e5b32a package_ipk: Remove pointless comment to trigger rebuild
  poky:a8544811d7 pbzip2: Fix license warning

Change-Id: If0fc6c37629642ee207a4ca2f7aa501a2c673cd6
Signed-off-by: Andrew Geissler <geissonator@yahoo.com>
Diffstat (limited to 'poky/meta/lib/oe')
-rw-r--r--  poky/meta/lib/oe/elf.py              6
-rw-r--r--  poky/meta/lib/oe/package.py         33
-rw-r--r--  poky/meta/lib/oe/package_manager.py 42
-rw-r--r--  poky/meta/lib/oe/packagegroup.py     8
-rw-r--r--  poky/meta/lib/oe/path.py            24
-rw-r--r--  poky/meta/lib/oe/prservice.py       69
-rw-r--r--  poky/meta/lib/oe/qa.py               4
-rw-r--r--  poky/meta/lib/oe/recipeutils.py      3
-rw-r--r--  poky/meta/lib/oe/rootfs.py           5
-rw-r--r--  poky/meta/lib/oe/sstatesig.py       50
-rw-r--r--  poky/meta/lib/oe/utils.py           49
11 files changed, 177 insertions(+), 116 deletions(-)
diff --git a/poky/meta/lib/oe/elf.py b/poky/meta/lib/oe/elf.py
index 2562cea1d..df0a4593f 100644
--- a/poky/meta/lib/oe/elf.py
+++ b/poky/meta/lib/oe/elf.py
@@ -15,13 +15,13 @@ def machine_dict(d):
"aarch64" : (183, 0, 0, True, 64),
"aarch64_be" :(183, 0, 0, False, 64),
"i586" : (3, 0, 0, True, 32),
+ "i686" : (3, 0, 0, True, 32),
"x86_64": (62, 0, 0, True, 64),
"epiphany": (4643, 0, 0, True, 32),
"lm32": (138, 0, 0, False, 32),
"mips": ( 8, 0, 0, False, 32),
"mipsel": ( 8, 0, 0, True, 32),
"microblaze": (189, 0, 0, False, 32),
- "microblazeeb":(189, 0, 0, False, 32),
"microblazeel":(189, 0, 0, True, 32),
"powerpc": (20, 0, 0, False, 32),
"riscv32": (243, 0, 0, True, 32),
@@ -34,6 +34,7 @@ def machine_dict(d):
"armeb": (40, 97, 0, False, 32),
"powerpc": (20, 0, 0, False, 32),
"powerpc64": (21, 0, 0, False, 64),
+ "powerpc64le": (21, 0, 0, True, 64),
"i386": ( 3, 0, 0, True, 32),
"i486": ( 3, 0, 0, True, 32),
"i586": ( 3, 0, 0, True, 32),
@@ -58,7 +59,6 @@ def machine_dict(d):
"sh4": (42, 0, 0, True, 32),
"sparc": ( 2, 0, 0, False, 32),
"microblaze": (189, 0, 0, False, 32),
- "microblazeeb":(189, 0, 0, False, 32),
"microblazeel":(189, 0, 0, True, 32),
},
"linux-musl" : {
@@ -68,6 +68,7 @@ def machine_dict(d):
"armeb": ( 40, 97, 0, False, 32),
"powerpc": ( 20, 0, 0, False, 32),
"powerpc64": ( 21, 0, 0, False, 64),
+ "powerpc64le": (21, 0, 0, True, 64),
"i386": ( 3, 0, 0, True, 32),
"i486": ( 3, 0, 0, True, 32),
"i586": ( 3, 0, 0, True, 32),
@@ -78,7 +79,6 @@ def machine_dict(d):
"mips64": ( 8, 0, 0, False, 64),
"mips64el": ( 8, 0, 0, True, 64),
"microblaze": (189, 0, 0, False, 32),
- "microblazeeb":(189, 0, 0, False, 32),
"microblazeel":(189, 0, 0, True, 32),
"riscv32": (243, 0, 0, True, 32),
"riscv64": (243, 0, 0, True, 64),
diff --git a/poky/meta/lib/oe/package.py b/poky/meta/lib/oe/package.py
index b8585d425..dd700cbb0 100644
--- a/poky/meta/lib/oe/package.py
+++ b/poky/meta/lib/oe/package.py
@@ -283,36 +283,3 @@ def read_shlib_providers(d):
shlib_provider[s[0]] = {}
shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
return shlib_provider
-
-
-def npm_split_package_dirs(pkgdir):
- """
- Work out the packages fetched and unpacked by BitBake's npm fetcher
- Returns a dict of packagename -> (relpath, package.json) ordered
- such that it is suitable for use in PACKAGES and FILES
- """
- from collections import OrderedDict
- import json
- packages = {}
- for root, dirs, files in os.walk(pkgdir):
- if os.path.basename(root) == 'node_modules':
- for dn in dirs:
- relpth = os.path.relpath(os.path.join(root, dn), pkgdir)
- pkgitems = ['${PN}']
- for pathitem in relpth.split('/'):
- if pathitem == 'node_modules':
- continue
- pkgitems.append(pathitem)
- pkgname = '-'.join(pkgitems).replace('_', '-')
- pkgname = pkgname.replace('@', '')
- pkgfile = os.path.join(root, dn, 'package.json')
- data = None
- if os.path.exists(pkgfile):
- with open(pkgfile, 'r') as f:
- data = json.loads(f.read())
- packages[pkgname] = (relpth, data)
- # We want the main package for a module sorted *after* its subpackages
- # (so that it doesn't otherwise steal the files for the subpackage), so
- # this is a cheap way to do that whilst still having an otherwise
- # alphabetical sort
- return OrderedDict((key, packages[key]) for key in sorted(packages, key=lambda pkg: pkg + '~'))
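
The removed npm_split_package_dirs() used a sort-key trick worth noting: appending '~', which compares greater than ASCII letters, digits, and '-', makes each parent package sort after its own subpackages (so it cannot steal their files) while keeping an otherwise alphabetical order. Standalone illustration, not from the commit:

    pkgs = ["app", "app-lodash", "app-lodash-isarray", "zlib"]
    print(sorted(pkgs))
    # ['app', 'app-lodash', 'app-lodash-isarray', 'zlib']
    print(sorted(pkgs, key=lambda pkg: pkg + '~'))
    # ['app-lodash-isarray', 'app-lodash', 'app', 'zlib']
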
diff --git a/poky/meta/lib/oe/package_manager.py b/poky/meta/lib/oe/package_manager.py
index 4ff19cf09..b0660411e 100644
--- a/poky/meta/lib/oe/package_manager.py
+++ b/poky/meta/lib/oe/package_manager.py
@@ -40,8 +40,9 @@ def opkg_query(cmd_output):
ver = ""
filename = ""
dep = []
+ prov = []
pkgarch = ""
- for line in cmd_output.splitlines():
+ for line in cmd_output.splitlines()+['']:
line = line.rstrip()
if ':' in line:
if line.startswith("Package: "):
@@ -64,6 +65,10 @@ def opkg_query(cmd_output):
dep.append("%s [REC]" % recommend)
elif line.startswith("PackageArch: "):
pkgarch = line.split(": ")[1]
+ elif line.startswith("Provides: "):
+ provides = verregex.sub('', line.split(": ")[1])
+ for provide in provides.split(", "):
+ prov.append(provide)
# When there is a blank line save the package information
elif not line:
@@ -72,20 +77,15 @@ def opkg_query(cmd_output):
filename = "%s_%s_%s.ipk" % (pkg, ver, arch)
if pkg:
output[pkg] = {"arch":arch, "ver":ver,
- "filename":filename, "deps": dep, "pkgarch":pkgarch }
+ "filename":filename, "deps": dep, "pkgarch":pkgarch, "provs": prov}
pkg = ""
arch = ""
ver = ""
filename = ""
dep = []
+ prov = []
pkgarch = ""
- if pkg:
- if not filename:
- filename = "%s_%s_%s.ipk" % (pkg, ver, arch)
- output[pkg] = {"arch":arch, "ver":ver,
- "filename":filename, "deps": dep }
-
return output
def failed_postinsts_abort(pkgs, log_path):
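
The `splitlines()+['']` change above appends a sentinel blank line so the `elif not line` branch also flushes the final stanza; that is why the duplicate flush block after the loop could be deleted. Reduced sketch of the pattern (simplified field handling, not the full opkg_query()):

    # Sentinel-terminated stanza parsing: the trailing '' guarantees the last
    # record is emitted even when the command output lacks a final blank line.
    def parse_stanzas(cmd_output):
        output, cur = {}, {}
        for line in cmd_output.splitlines() + ['']:
            line = line.rstrip()
            if ':' in line:
                key, _, value = line.partition(': ')
                cur[key] = value
            elif not line and cur:          # blank line or sentinel: flush stanza
                output[cur['Package']] = cur
                cur = {}
        return output

    print(parse_stanzas("Package: busybox\nVersion: 1.31.0\n\nPackage: zlib\nVersion: 1.2.11"))
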
@@ -107,6 +107,7 @@ def generate_locale_archive(d, rootfs, target_arch, localedir):
"sh4": ["--uint32-align=4", "--big-endian"],
"powerpc": ["--uint32-align=4", "--big-endian"],
"powerpc64": ["--uint32-align=4", "--big-endian"],
+ "powerpc64le": ["--uint32-align=4", "--little-endian"],
"mips": ["--uint32-align=4", "--big-endian"],
"mipsisa32r6": ["--uint32-align=4", "--big-endian"],
"mips64": ["--uint32-align=4", "--big-endian"],
@@ -131,7 +132,7 @@ def generate_locale_archive(d, rootfs, target_arch, localedir):
env = dict(os.environ)
env["LOCALEARCHIVE"] = oe.path.join(localedir, "locale-archive")
- for name in os.listdir(localedir):
+ for name in sorted(os.listdir(localedir)):
path = os.path.join(localedir, name)
if os.path.isdir(path):
cmd = ["cross-localedef", "--verbose"]
@@ -360,7 +361,7 @@ class DpkgPkgsList(PkgsList):
"--admindir=%s/var/lib/dpkg" % self.rootfs_dir,
"-W"]
- cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\n\n")
+ cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\nProvides: ${Provides}\n\n")
try:
cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8")
@@ -580,6 +581,11 @@ class PackageManager(object, metaclass=ABCMeta):
# oe-pkgdata-util reads it from a file
with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
pkgs = self.list_installed()
+
+ provided_pkgs = set()
+ for pkg in pkgs.values():
+ provided_pkgs |= set(pkg.get('provs', []))
+
output = oe.utils.format_pkg_list(pkgs, "arch")
installed_pkgs.write(output)
installed_pkgs.flush()
@@ -591,10 +597,15 @@ class PackageManager(object, metaclass=ABCMeta):
if exclude:
cmd.extend(['--exclude=' + '|'.join(exclude.split())])
try:
- bb.note("Installing complementary packages ...")
bb.note('Running %s' % cmd)
complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8")
- self.install(complementary_pkgs.split(), attempt_only=True)
+ complementary_pkgs = set(complementary_pkgs.split())
+ skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
+ install_pkgs = sorted(complementary_pkgs - provided_pkgs)
+ bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
+ ' '.join(install_pkgs),
+ ' '.join(skip_pkgs)))
+ self.install(install_pkgs, attempt_only=True)
except subprocess.CalledProcessError as e:
bb.fatal("Could not compute complementary packages list. Command "
"'%s' returned %d:\n%s" %
@@ -768,6 +779,8 @@ class RpmPM(PackageManager):
# This prevents accidental matching against libsolv's built-in policies
if len(archs) <= 1:
archs = archs + ["bogusarch"]
+ # This architecture needs to be upfront so that packages using it are properly prioritized
+ archs = ["sdk_provides_dummy_target"] + archs
confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/")
bb.utils.mkdirhier(confdir)
open(confdir + "arch", 'w').write(":".join(archs))
@@ -1621,7 +1634,7 @@ class DpkgPM(OpkgDpkgPM):
os.environ['APT_CONFIG'] = self.apt_conf_file
- cmd = "%s %s install --force-yes --allow-unauthenticated %s" % \
+ cmd = "%s %s install --force-yes --allow-unauthenticated --no-remove %s" % \
(self.apt_get_cmd, self.apt_args, ' '.join(pkgs))
try:
@@ -1783,8 +1796,7 @@ class DpkgPM(OpkgDpkgPM):
open(os.path.join(target_dpkg_dir, "available"), "w+").close()
def remove_packaging_data(self):
- bb.utils.remove(os.path.join(self.target_rootfs,
- self.d.getVar('opkglibdir')), True)
+ bb.utils.remove(self.target_rootfs + self.d.getVar('opkglibdir'), True)
bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)
def fix_broken_dependencies(self):
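
The remove_packaging_data() change swaps os.path.join() for string concatenation, matching the dpkg line below it. Assuming opkglibdir expands to an absolute path (e.g. /var/lib/opkg), os.path.join() would silently discard the rootfs prefix:

    import os.path
    # os.path.join() restarts at any absolute component:
    print(os.path.join("/rootfs", "/var/lib/opkg"))   # '/var/lib/opkg' - prefix lost
    print("/rootfs" + "/var/lib/opkg")                # '/rootfs/var/lib/opkg'
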
diff --git a/poky/meta/lib/oe/packagegroup.py b/poky/meta/lib/oe/packagegroup.py
index 2419cbb6d..8fcaecde8 100644
--- a/poky/meta/lib/oe/packagegroup.py
+++ b/poky/meta/lib/oe/packagegroup.py
@@ -5,17 +5,11 @@
import itertools
def is_optional(feature, d):
- packages = d.getVar("FEATURE_PACKAGES_%s" % feature)
- if packages:
- return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional"))
- else:
- return bool(d.getVarFlag("PACKAGE_GROUP_%s" % feature, "optional"))
+ return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional"))
def packages(features, d):
for feature in features:
packages = d.getVar("FEATURE_PACKAGES_%s" % feature)
- if not packages:
- packages = d.getVar("PACKAGE_GROUP_%s" % feature)
for pkg in (packages or "").split():
yield pkg
diff --git a/poky/meta/lib/oe/path.py b/poky/meta/lib/oe/path.py
index fa209b979..082972457 100644
--- a/poky/meta/lib/oe/path.py
+++ b/poky/meta/lib/oe/path.py
@@ -99,7 +99,22 @@ def copyhardlinktree(src, dst):
if os.path.isdir(src) and not len(os.listdir(src)):
return
- if (os.stat(src).st_dev == os.stat(dst).st_dev):
+ canhard = False
+ testfile = None
+ for root, dirs, files in os.walk(src):
+ if len(files):
+ testfile = os.path.join(root, files[0])
+ break
+
+ if testfile is not None:
+ try:
+ os.link(testfile, os.path.join(dst, 'testfile'))
+ os.unlink(os.path.join(dst, 'testfile'))
+ canhard = True
+ except Exception as e:
+ bb.debug(2, "Hardlink test failed with " + str(e))
+
+ if (canhard):
# Need to copy directories only with tar first since cp will error if two
# writers try and create a directory at the same time
cmd = "cd %s; find . -type d -print | tar --xattrs --xattrs-include='*' -cf - -S -C %s -p --no-recursion --files-from - | tar --xattrs --xattrs-include='*' -xhf - -C %s" % (src, src, dst)
@@ -121,12 +136,9 @@ def copyhardlinktree(src, dst):
def copyhardlink(src, dst):
"""Make a hard link when possible, otherwise copy."""
- # We need to stat the destination directory as the destination file probably
- # doesn't exist yet.
- dstdir = os.path.dirname(dst)
- if os.stat(src).st_dev == os.stat(dstdir).st_dev:
+ try:
os.link(src, dst)
- else:
+ except OSError:
shutil.copy(src, dst)
def remove(path, recurse=True):
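
Both path.py changes replace an st_dev heuristic with an actual link attempt: sharing a device number does not guarantee hard links will succeed, so trying the operation (EAFP) is the reliable test. A standalone sketch, under that assumption, of the probe copyhardlinktree() now performs:

    import os, tempfile

    def can_hardlink(src_file, dst_dir):
        """Probe whether dst_dir accepts a hard link to src_file."""
        probe = os.path.join(dst_dir, 'testfile')
        try:
            os.link(src_file, probe)
            os.unlink(probe)
            return True
        except OSError:                 # e.g. EXDEV (cross-device) or EPERM
            return False

    with tempfile.NamedTemporaryFile() as f, tempfile.TemporaryDirectory() as d:
        print(can_hardlink(f.name, d))  # True when both live on one filesystem
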
diff --git a/poky/meta/lib/oe/prservice.py b/poky/meta/lib/oe/prservice.py
index b1132ccb1..2d3c9c7e5 100644
--- a/poky/meta/lib/oe/prservice.py
+++ b/poky/meta/lib/oe/prservice.py
@@ -3,6 +3,10 @@
#
def prserv_make_conn(d, check = False):
+ # Otherwise this fails when called from recipes which e.g. inherit python3native (which sets _PYTHON_SYSCONFIGDATA_NAME) with:
+ # No module named '_sysconfigdata'
+ if '_PYTHON_SYSCONFIGDATA_NAME' in os.environ:
+ del os.environ['_PYTHON_SYSCONFIGDATA_NAME']
import prserv.serv
host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
try:
@@ -79,41 +83,40 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
df = d.getVar('PRSERV_DUMPFILE')
#write data
lf = bb.utils.lockfile("%s.lock" % df)
- f = open(df, "a")
- if metainfo:
- #dump column info
- f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']);
- f.write("#Table: %s\n" % metainfo['tbl_name'])
- f.write("#Columns:\n")
- f.write("#name \t type \t notn \t dflt \t pk\n")
- f.write("#----------\t --------\t --------\t --------\t ----\n")
- for i in range(len(metainfo['col_info'])):
- f.write("#%10s\t %8s\t %8s\t %8s\t %4s\n" %
- (metainfo['col_info'][i]['name'],
- metainfo['col_info'][i]['type'],
- metainfo['col_info'][i]['notnull'],
- metainfo['col_info'][i]['dflt_value'],
- metainfo['col_info'][i]['pk']))
- f.write("\n")
+ with open(df, "a") as f:
+ if metainfo:
+ #dump column info
+ f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']);
+ f.write("#Table: %s\n" % metainfo['tbl_name'])
+ f.write("#Columns:\n")
+ f.write("#name \t type \t notn \t dflt \t pk\n")
+ f.write("#----------\t --------\t --------\t --------\t ----\n")
+ for i in range(len(metainfo['col_info'])):
+ f.write("#%10s\t %8s\t %8s\t %8s\t %4s\n" %
+ (metainfo['col_info'][i]['name'],
+ metainfo['col_info'][i]['type'],
+ metainfo['col_info'][i]['notnull'],
+ metainfo['col_info'][i]['dflt_value'],
+ metainfo['col_info'][i]['pk']))
+ f.write("\n")
- if lockdown:
- f.write("PRSERV_LOCKDOWN = \"1\"\n\n")
+ if lockdown:
+ f.write("PRSERV_LOCKDOWN = \"1\"\n\n")
- if datainfo:
- idx = {}
- for i in range(len(datainfo)):
- pkgarch = datainfo[i]['pkgarch']
- value = datainfo[i]['value']
- if pkgarch not in idx:
- idx[pkgarch] = i
- elif value > datainfo[idx[pkgarch]]['value']:
- idx[pkgarch] = i
- f.write("PRAUTO$%s$%s$%s = \"%s\"\n" %
- (str(datainfo[i]['version']), pkgarch, str(datainfo[i]['checksum']), str(value)))
- if not nomax:
- for i in idx:
- f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
- f.close()
+ if datainfo:
+ idx = {}
+ for i in range(len(datainfo)):
+ pkgarch = datainfo[i]['pkgarch']
+ value = datainfo[i]['value']
+ if pkgarch not in idx:
+ idx[pkgarch] = i
+ elif value > datainfo[idx[pkgarch]]['value']:
+ idx[pkgarch] = i
+ f.write("PRAUTO$%s$%s$%s = \"%s\"\n" %
+ (str(datainfo[i]['version']), pkgarch, str(datainfo[i]['checksum']), str(value)))
+ if not nomax:
+ for i in idx:
+ f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
bb.utils.unlockfile(lf)
def prserv_check_avail(d):
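
The prserv_export_tofile() rewrite is purely structural: moving the writes into a with-block guarantees the dump file is closed even if a write raises, whereas the old explicit f.close() would be skipped on an exception while the caller still held the lock file. Generic shape of the fix (my reduction, not the real function):

    # Before: f = open(path, "a"); ...writes...; f.close()  - close skipped on error
    # After: the context manager closes the file on success and on exception alike.
    def append_dump(path, pairs):
        with open(path, "a") as f:
            for name, value in pairs:
                f.write('%s = "%s"\n' % (name, value))

    append_dump("prserv.dump", [("PRAUTO_x86-64", "7")])   # hypothetical names
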
diff --git a/poky/meta/lib/oe/qa.py b/poky/meta/lib/oe/qa.py
index 21066c4dc..ea831b930 100644
--- a/poky/meta/lib/oe/qa.py
+++ b/poky/meta/lib/oe/qa.py
@@ -41,13 +41,15 @@ class ELFFile:
def __init__(self, name):
self.name = name
self.objdump_output = {}
+ self.data = None
# Context Manager functions to close the mmap explicitly
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
- self.data.close()
+ if self.data:
+ self.data.close()
def open(self):
with open(self.name, "rb") as f:
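
The ELFFile fix initializes self.data to None in __init__ and guards __exit__, so the context manager no longer fails when open() was never called or raised before the mmap existed. The general pattern:

    class Resource:
        def __init__(self):
            self.data = None                 # nothing opened yet

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            if self.data:                    # close only what was actually opened
                self.data.close()

    with Resource():                         # previously: AttributeError in __exit__
        pass
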
diff --git a/poky/meta/lib/oe/recipeutils.py b/poky/meta/lib/oe/recipeutils.py
index 630ae967a..732420e46 100644
--- a/poky/meta/lib/oe/recipeutils.py
+++ b/poky/meta/lib/oe/recipeutils.py
@@ -421,6 +421,8 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True, all_variants=F
# Ensure we handle class-target if we're dealing with one of the variants
variants.append('target')
for variant in variants:
+ if variant.startswith("devupstream"):
+ localdata.setVar('SRCPV', 'git')
localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant)
fetch_urls(localdata)
@@ -1059,7 +1061,6 @@ def get_recipe_upgrade_status(recipes=None):
data_copy_list = []
copy_vars = ('SRC_URI',
'PV',
- 'GITDIR',
'DL_DIR',
'PN',
'CACHE',
diff --git a/poky/meta/lib/oe/rootfs.py b/poky/meta/lib/oe/rootfs.py
index c62fa5f54..cd65e6203 100644
--- a/poky/meta/lib/oe/rootfs.py
+++ b/poky/meta/lib/oe/rootfs.py
@@ -126,17 +126,16 @@ class Rootfs(object, metaclass=ABCMeta):
bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(dir))
shutil.copytree(self.image_rootfs + '-orig' + dir, self.image_rootfs + dir, symlinks=True)
- cpath = oe.cachedpath.CachedPath()
# Copy files located in /usr/lib/debug or /usr/src/debug
for dir in ["/usr/lib/debug", "/usr/src/debug"]:
src = self.image_rootfs + '-orig' + dir
- if cpath.exists(src):
+ if os.path.exists(src):
dst = self.image_rootfs + dir
bb.utils.mkdirhier(os.path.dirname(dst))
shutil.copytree(src, dst)
# Copy files with suffix '.debug' or located in '.debug' dir.
- for root, dirs, files in cpath.walk(self.image_rootfs + '-orig'):
+ for root, dirs, files in os.walk(self.image_rootfs + '-orig'):
relative_dir = root[len(self.image_rootfs + '-orig'):]
for f in files:
if f.endswith('.debug') or '/.debug' in relative_dir:
diff --git a/poky/meta/lib/oe/sstatesig.py b/poky/meta/lib/oe/sstatesig.py
index c566ce5a0..d24e3738a 100644
--- a/poky/meta/lib/oe/sstatesig.py
+++ b/poky/meta/lib/oe/sstatesig.py
@@ -103,6 +103,8 @@ class SignatureGeneratorOEBasicHashMixIn(object):
self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
"").split()
self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
+ self.buildarch = data.getVar('BUILD_ARCH')
+ self._internal = False
pass
def tasks_resolved(self, virtmap, virtpnmap, dataCache):
@@ -140,8 +142,27 @@ class SignatureGeneratorOEBasicHashMixIn(object):
self.dump_lockedsigs(sigfile)
return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
+ def prep_taskhash(self, tid, deps, dataCache):
+ super().prep_taskhash(tid, deps, dataCache)
+ if hasattr(self, "extramethod"):
+ (_, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
+ inherits = " ".join(dataCache.inherits[fn])
+ if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
+ self.extramethod[tid] = ":" + self.buildarch
+
def get_taskhash(self, tid, deps, dataCache):
- h = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskhash(tid, deps, dataCache)
+ if tid in self.lockedhashes:
+ if self.lockedhashes[tid]:
+ return self.lockedhashes[tid]
+ else:
+ return super().get_taskhash(tid, deps, dataCache)
+
+ # get_taskhash will call get_unihash internally in the parent class, we
+ # need to disable our filter of it whilst this runs else
+ # incorrect hashes can be calculated.
+ self._internal = True
+ h = super().get_taskhash(tid, deps, dataCache)
+ self._internal = False
(mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
@@ -169,8 +190,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
h_locked = self.lockedsigs[recipename][task][0]
var = self.lockedsigs[recipename][task][1]
self.lockedhashes[tid] = h_locked
- unihash = super().get_unihash(tid)
- self.taskhash[tid] = h_locked
+ self._internal = True
+ unihash = self.get_unihash(tid)
+ self._internal = False
#bb.warn("Using %s %s %s" % (recipename, task, h))
if h != h_locked and h_locked != unihash:
@@ -178,17 +200,24 @@ class SignatureGeneratorOEBasicHashMixIn(object):
% (recipename, task, h, h_locked, var))
return h_locked
+
+ self.lockedhashes[tid] = False
#bb.warn("%s %s %s" % (recipename, task, h))
return h
+ def get_stampfile_hash(self, tid):
+ if tid in self.lockedhashes and self.lockedhashes[tid]:
+ return self.lockedhashes[tid]
+ return super().get_stampfile_hash(tid)
+
def get_unihash(self, tid):
- if tid in self.lockedhashes:
+ if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
return self.lockedhashes[tid]
return super().get_unihash(tid)
def dump_sigtask(self, fn, task, stampbase, runtime):
tid = fn + ":" + task
- if tid in self.lockedhashes:
+ if tid in self.lockedhashes and self.lockedhashes[tid]:
return
super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)
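
The _internal flag introduced above is a reentrancy guard: the parent class's get_taskhash() calls get_unihash() internally, and during that inner call the locked-signature override must be bypassed or incorrect hashes are computed. Reduced illustration of the pattern (stub hash logic, not the real signature generator):

    class LockedHashGen:
        def __init__(self, locked):
            self.lockedhashes = locked       # tid -> locked hash, or False
            self._internal = False

        def _compute_unihash(self, tid):     # stand-in for super().get_unihash()
            return "computed:" + tid

        def get_unihash(self, tid):
            if self.lockedhashes.get(tid) and not self._internal:
                return self.lockedhashes[tid]
            return self._compute_unihash(tid)

        def get_taskhash(self, tid):
            self._internal = True            # bypass the override for inner calls
            try:
                return self.get_unihash(tid) # parent's get_taskhash lands here
            finally:
                self._internal = False

    gen = LockedHashGen({"recipe:do_compile": "abc123"})
    print(gen.get_unihash("recipe:do_compile"))   # 'abc123' (locked, external call)
    print(gen.get_taskhash("recipe:do_compile"))  # 'computed:...' (internal bypass)
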
@@ -448,11 +477,14 @@ def OEOuthashBasic(path, sigfile, task, d):
h = hashlib.sha256()
prev_dir = os.getcwd()
include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
+ extra_content = d.getVar('HASHEQUIV_HASH_VERSION')
try:
os.chdir(path)
update_hash("OEOuthashBasic\n")
+ if extra_content:
+ update_hash(extra_content + "\n")
# It is only currently useful to get equivalent hashes for things that
# can be restored from sstate. Since the sstate object is named using
@@ -512,8 +544,12 @@ def OEOuthashBasic(path, sigfile, task, d):
add_perm(stat.S_IXOTH, 'x')
if include_owners:
- update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
- update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
+ try:
+ update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
+ update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
+ except KeyError:
+ bb.warn("KeyError in %s" % path)
+ raise
update_hash(" ")
if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
diff --git a/poky/meta/lib/oe/utils.py b/poky/meta/lib/oe/utils.py
index 652b2be14..13f4271da 100644
--- a/poky/meta/lib/oe/utils.py
+++ b/poky/meta/lib/oe/utils.py
@@ -169,7 +169,7 @@ def any_distro_features(d, features, truevalue="1", falsevalue=""):
"""
return bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d)
-def parallel_make(d):
+def parallel_make(d, makeinst=False):
"""
Return the integer value for the number of parallel threads to use when
building, scraped out of PARALLEL_MAKE. If no parallelization option is
@@ -177,7 +177,10 @@ def parallel_make(d):
e.g. if PARALLEL_MAKE = "-j 10", this will return 10 as an integer.
"""
- pm = (d.getVar('PARALLEL_MAKE') or '').split()
+ if makeinst:
+ pm = (d.getVar('PARALLEL_MAKEINST') or '').split()
+ else:
+ pm = (d.getVar('PARALLEL_MAKE') or '').split()
# look for '-j' and throw other options (e.g. '-l') away
while pm:
opt = pm.pop(0)
@@ -192,7 +195,7 @@ def parallel_make(d):
return None
-def parallel_make_argument(d, fmt, limit=None):
+def parallel_make_argument(d, fmt, limit=None, makeinst=False):
"""
Helper utility to construct a parallel make argument from the number of
parallel threads specified in PARALLEL_MAKE.
@@ -205,7 +208,7 @@ def parallel_make_argument(d, fmt, limit=None):
e.g. if PARALLEL_MAKE = "-j 10", parallel_make_argument(d, "-n %d") will return
"-n 10"
"""
- v = parallel_make(d)
+ v = parallel_make(d, makeinst)
if v:
if limit:
v = min(limit, v)
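
parallel_make() (and now, with makeinst=True, its PARALLEL_MAKEINST variant) extracts only the '-j' value and discards other options such as '-l'. Standalone sketch of that parsing loop:

    def parse_jobs(flags):
        """Return the -j value from a make-flags string, or None."""
        pm = flags.split()
        while pm:
            opt = pm.pop(0)
            if opt == '-j':                  # separate token: '-j 10'
                return int(pm.pop(0))
            if opt.startswith('-j'):         # fused token: '-j10'
                return int(opt[2:])
        return None

    print(parse_jobs("-l 4 -j 10"))          # 10
    print(parse_jobs("-l 4"))                # None
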
@@ -245,9 +248,10 @@ def trim_version(version, num_parts=2):
trimmed = ".".join(parts[:num_parts])
return trimmed
-def cpu_count():
+def cpu_count(at_least=1):
import multiprocessing
- return multiprocessing.cpu_count()
+ cpus = multiprocessing.cpu_count()
+ return max(cpus, at_least)
def execute_pre_post_process(d, cmds):
if cmds is None:
@@ -369,6 +373,37 @@ def format_pkg_list(pkg_dict, ret_format=None):
return output_str
+
+# Helper function to get the host compiler version
+# Do not assume the compiler is gcc
+def get_host_compiler_version(d, taskcontextonly=False):
+ import re, subprocess
+
+ if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
+ return
+
+ compiler = d.getVar("BUILD_CC")
+ # Get rid of ccache since it is not present when parsing.
+ if compiler.startswith('ccache '):
+ compiler = compiler[7:]
+ try:
+ env = os.environ.copy()
+ # datastore PATH does not contain session PATH as set by environment-setup-...
+ # this breaks the install-buildtools use-case
+ # env["PATH"] = d.getVar("PATH")
+ output = subprocess.check_output("%s --version" % compiler, \
+ shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
+ except subprocess.CalledProcessError as e:
+ bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))
+
+ match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
+ if not match:
+ bb.fatal("Can't get compiler version from %s --version output" % compiler)
+
+ version = match.group(1)
+ return compiler, version
+
+
def host_gcc_version(d, taskcontextonly=False):
import re, subprocess
@@ -387,7 +422,7 @@ def host_gcc_version(d, taskcontextonly=False):
except subprocess.CalledProcessError as e:
bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))
- match = re.match(r".* (\d\.\d)\.\d.*", output.split('\n')[0])
+ match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
if not match:
bb.fatal("Can't get compiler version from %s --version output" % compiler)
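
The widened regex is the substantive fix in host_gcc_version(), and get_host_compiler_version() above uses the same pattern: (\d\.\d)\.\d only matched single-digit components, so GCC 10 and later broke version detection. A quick demonstration:

    import re
    line = "gcc (GCC) 10.2.0"
    print(re.match(r".* (\d\.\d)\.\d.*", line))              # None: '10' is 2 digits
    print(re.match(r".* (\d+\.\d+)\.\d+.*", line).group(1))  # '10.2'
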