Diffstat (limited to 'poky/meta/lib')
-rw-r--r--  poky/meta/lib/buildstats.py | 4
-rw-r--r--  poky/meta/lib/oe/maketype.py | 7
-rw-r--r--  poky/meta/lib/oe/packagedata.py | 12
-rw-r--r--  poky/meta/lib/oe/rootfs.py | 2
-rw-r--r--  poky/meta/lib/oe/sbom.py | 74
-rw-r--r--  poky/meta/lib/oe/spdx.py | 335
-rw-r--r--  poky/meta/lib/oe/sstatesig.py | 52
-rw-r--r--  poky/meta/lib/oeqa/core/target/ssh.py | 4
-rw-r--r--  poky/meta/lib/oeqa/runtime/cases/parselogs.py | 2
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/bblogging.py | 104
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/bbtests.py | 4
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/devtool.py | 2
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/gotoolchain.py | 6
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/oescripts.py | 2
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/recipetool.py | 2
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/recipeutils.py | 2
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/tinfoil.py | 6
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/wic.py | 40
-rw-r--r--  poky/meta/lib/oeqa/utils/buildproject.py | 3
-rw-r--r--  poky/meta/lib/oeqa/utils/qemurunner.py | 2
-rw-r--r--  poky/meta/lib/oeqa/utils/targetbuild.py | 4
21 files changed, 636 insertions, 33 deletions
diff --git a/poky/meta/lib/buildstats.py b/poky/meta/lib/buildstats.py
index 8627ed3c3..c52b6c3b7 100644
--- a/poky/meta/lib/buildstats.py
+++ b/poky/meta/lib/buildstats.py
@@ -43,8 +43,8 @@ class SystemStats:
# depends on the heartbeat event, which fires less often.
self.min_seconds = 1
- self.meminfo_regex = re.compile(b'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)')
- self.diskstats_regex = re.compile(b'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$')
+ self.meminfo_regex = re.compile(rb'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)')
+ self.diskstats_regex = re.compile(rb'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$')
self.diskstats_ltime = None
self.diskstats_data = None
self.stat_ltimes = None
diff --git a/poky/meta/lib/oe/maketype.py b/poky/meta/lib/oe/maketype.py
index d929c8b3e..d36082c53 100644
--- a/poky/meta/lib/oe/maketype.py
+++ b/poky/meta/lib/oe/maketype.py
@@ -10,12 +10,7 @@ the arguments of the type's factory for details.
import inspect
import oe.types as types
-try:
- # Python 3.7+
- from collections.abc import Callable
-except ImportError:
- # Python < 3.7
- from collections import Callable
+from collections.abc import Callable
available_types = {}
diff --git a/poky/meta/lib/oe/packagedata.py b/poky/meta/lib/oe/packagedata.py
index 0b17897e4..02c81e5a5 100644
--- a/poky/meta/lib/oe/packagedata.py
+++ b/poky/meta/lib/oe/packagedata.py
@@ -57,6 +57,18 @@ def read_subpkgdata_dict(pkg, d):
ret[newvar] = subd[var]
return ret
+def read_subpkgdata_extended(pkg, d):
+ import json
+ import bb.compress.zstd
+
+ fn = d.expand("${PKGDATA_DIR}/extended/%s.json.zstd" % pkg)
+ try:
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+ with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return None
+
def _pkgmap(d):
"""Return a dictionary mapping package to recipe name."""
diff --git a/poky/meta/lib/oe/rootfs.py b/poky/meta/lib/oe/rootfs.py
index 4ea5adb9f..b0dd62553 100644
--- a/poky/meta/lib/oe/rootfs.py
+++ b/poky/meta/lib/oe/rootfs.py
@@ -253,7 +253,7 @@ class Rootfs(object, metaclass=ABCMeta):
# Remove the run-postinsts package if no delayed postinsts are found
delayed_postinsts = self._get_delayed_postinsts()
if delayed_postinsts is None:
- if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")) or os.path.exists(self.d.expand("${IMAGE_ROOTFS}${systemd_unitdir}/system/run-postinsts.service")):
+ if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")) or os.path.exists(self.d.expand("${IMAGE_ROOTFS}${systemd_system_unitdir}/run-postinsts.service")):
self.pm.remove(["run-postinsts"])
image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
diff --git a/poky/meta/lib/oe/sbom.py b/poky/meta/lib/oe/sbom.py
new file mode 100644
index 000000000..848812c0b
--- /dev/null
+++ b/poky/meta/lib/oe/sbom.py
@@ -0,0 +1,74 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import collections
+import os
+
+DepRecipe = collections.namedtuple("DepRecipe", ("doc", "doc_sha1", "recipe"))
+DepSource = collections.namedtuple("DepSource", ("doc", "doc_sha1", "recipe", "file"))
+
+
+def get_recipe_spdxid(d):
+ return "SPDXRef-%s-%s" % ("Recipe", d.getVar("PN"))
+
+
+def get_package_spdxid(pkg):
+ return "SPDXRef-Package-%s" % pkg
+
+
+def get_source_file_spdxid(d, idx):
+ return "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), idx)
+
+
+def get_packaged_file_spdxid(pkg, idx):
+ return "SPDXRef-PackagedFile-%s-%d" % (pkg, idx)
+
+
+def get_image_spdxid(img):
+ return "SPDXRef-Image-%s" % img
+
+
+def write_doc(d, spdx_doc, subdir, spdx_deploy=None):
+ from pathlib import Path
+
+ if spdx_deploy is None:
+ spdx_deploy = Path(d.getVar("SPDXDEPLOY"))
+
+ dest = spdx_deploy / subdir / (spdx_doc.name + ".spdx.json")
+ dest.parent.mkdir(exist_ok=True, parents=True)
+ with dest.open("wb") as f:
+ doc_sha1 = spdx_doc.to_json(f, sort_keys=True)
+
+ l = spdx_deploy / "by-namespace" / spdx_doc.documentNamespace.replace("/", "_")
+ l.parent.mkdir(exist_ok=True, parents=True)
+ l.symlink_to(os.path.relpath(dest, l.parent))
+
+ return doc_sha1
+
+
+def read_doc(fn):
+ import hashlib
+ import oe.spdx
+ import io
+ import contextlib
+
+ @contextlib.contextmanager
+ def get_file():
+ if isinstance(fn, io.IOBase):
+ yield fn
+ else:
+ with fn.open("rb") as f:
+ yield f
+
+ with get_file() as f:
+ sha1 = hashlib.sha1()
+ while True:
+ chunk = f.read(4096)
+ if not chunk:
+ break
+ sha1.update(chunk)
+
+ f.seek(0)
+ doc = oe.spdx.SPDXDocument.from_json(f)
+
+ return (doc, sha1.hexdigest())
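[Editor's note: a small, hedged sketch of how the helpers above fit together. The get_*_spdxid() functions only build SPDXRef identifier strings, and read_doc() returns the parsed document together with the SHA-1 of the bytes it read. The JSON path below is hypothetical, and the snippet assumes the oe.sbom and oe.spdx modules added by this patch are importable.]

    # Illustrative sketch only; the JSON path is an invented example.
    from pathlib import Path
    import oe.sbom

    print(oe.sbom.get_package_spdxid("zlib"))              # SPDXRef-Package-zlib
    print(oe.sbom.get_image_spdxid("core-image-minimal"))  # SPDXRef-Image-core-image-minimal

    # read_doc() accepts a Path (or an already-open binary file object)
    doc, doc_sha1 = oe.sbom.read_doc(Path("deploy/spdx/recipes/recipe-zlib.spdx.json"))
    print(doc.name, doc_sha1)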
diff --git a/poky/meta/lib/oe/spdx.py b/poky/meta/lib/oe/spdx.py
new file mode 100644
index 000000000..4416194e0
--- /dev/null
+++ b/poky/meta/lib/oe/spdx.py
@@ -0,0 +1,335 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# This library is intended to capture the JSON SPDX specification in a
+# type-safe manner. It is not intended to encode any OE-specific behaviors;
+# see sbom.py for that.
+#
+# The SPDX specification document doesn't cover the JSON syntax for a
+# particular configuration, which can make it hard to determine what the JSON
+# should look like. I've found it is much simpler to read the official SPDX
+# JSON schema, which can be found at https://github.com/spdx/spdx-spec
+# in schemas/spdx-schema.json
+#
+
+import hashlib
+import itertools
+import json
+
+SPDX_VERSION = "2.2"
+
+
+#
+# The following are the support classes that are used to implement SPDX objects
+#
+
+class _Property(object):
+ """
+ A generic SPDX object property. The different types will derive from this
+ class
+ """
+
+ def __init__(self, *, default=None):
+ self.default = default
+
+ def setdefault(self, dest, name):
+ if self.default is not None:
+ dest.setdefault(name, self.default)
+
+
+class _String(_Property):
+ """
+ A scalar string property for an SPDX object
+ """
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ def set_property(self, attrs, name):
+ def get_helper(obj):
+ return obj._spdx[name]
+
+ def set_helper(obj, value):
+ obj._spdx[name] = value
+
+ def del_helper(obj):
+ del obj._spdx[name]
+
+ attrs[name] = property(get_helper, set_helper, del_helper)
+
+ def init(self, source):
+ return source
+
+
+class _Object(_Property):
+ """
+ A scalar SPDX object property of a SPDX object
+ """
+
+ def __init__(self, cls, **kwargs):
+ super().__init__(**kwargs)
+ self.cls = cls
+
+ def set_property(self, attrs, name):
+ def get_helper(obj):
+            if name not in obj._spdx:
+ obj._spdx[name] = self.cls()
+ return obj._spdx[name]
+
+ def set_helper(obj, value):
+ obj._spdx[name] = value
+
+ def del_helper(obj):
+ del obj._spdx[name]
+
+        attrs[name] = property(get_helper, set_helper, del_helper)
+
+ def init(self, source):
+ return self.cls(**source)
+
+
+class _ListProperty(_Property):
+ """
+ A list of SPDX properties
+ """
+
+ def __init__(self, prop, **kwargs):
+ super().__init__(**kwargs)
+ self.prop = prop
+
+ def set_property(self, attrs, name):
+ def get_helper(obj):
+            if name not in obj._spdx:
+ obj._spdx[name] = []
+ return obj._spdx[name]
+
+ def del_helper(obj):
+ del obj._spdx[name]
+
+ attrs[name] = property(get_helper, None, del_helper)
+
+ def init(self, source):
+ return [self.prop.init(o) for o in source]
+
+
+class _StringList(_ListProperty):
+ """
+ A list of strings as a property for an SPDX object
+ """
+
+ def __init__(self, **kwargs):
+ super().__init__(_String(), **kwargs)
+
+
+class _ObjectList(_ListProperty):
+ """
+ A list of SPDX objects as a property for an SPDX object
+ """
+
+ def __init__(self, cls, **kwargs):
+ super().__init__(_Object(cls), **kwargs)
+
+
+class MetaSPDXObject(type):
+ """
+ A metaclass that allows properties (anything derived from a _Property
+ class) to be defined for a SPDX object
+ """
+ def __new__(mcls, name, bases, attrs):
+ attrs["_properties"] = {}
+
+ for key in attrs.keys():
+ if isinstance(attrs[key], _Property):
+ prop = attrs[key]
+ attrs["_properties"][key] = prop
+ prop.set_property(attrs, key)
+
+ return super().__new__(mcls, name, bases, attrs)
+
+
+class SPDXObject(metaclass=MetaSPDXObject):
+ """
+ The base SPDX object; all SPDX spec classes must derive from this class
+ """
+ def __init__(self, **d):
+ self._spdx = {}
+
+ for name, prop in self._properties.items():
+ prop.setdefault(self._spdx, name)
+ if name in d:
+ self._spdx[name] = prop.init(d[name])
+
+ def serializer(self):
+ return self._spdx
+
+ def __setattr__(self, name, value):
+ if name in self._properties or name == "_spdx":
+ super().__setattr__(name, value)
+ return
+ raise KeyError("%r is not a valid SPDX property" % name)
+
+#
+# These are the SPDX objects implemented from the spec. The *only* properties
+# that can be added to these objects are ones directly specified in the SPDX
+# spec, however you may add helper functions to make operations easier.
+#
+# Defaults should *only* be specified if the SPDX spec says there is a certain
+# required value for a field (e.g. dataLicense), or if the field is mandatory
+# and has some sane "this field is unknown" value (e.g. "NOASSERTION")
+#
+
+class SPDXAnnotation(SPDXObject):
+ annotationDate = _String()
+ annotationType = _String()
+ annotator = _String()
+ comment = _String()
+
+class SPDXChecksum(SPDXObject):
+ algorithm = _String()
+ checksumValue = _String()
+
+
+class SPDXRelationship(SPDXObject):
+ spdxElementId = _String()
+ relatedSpdxElement = _String()
+ relationshipType = _String()
+ comment = _String()
+
+
+class SPDXExternalReference(SPDXObject):
+ referenceCategory = _String()
+ referenceType = _String()
+ referenceLocator = _String()
+
+
+class SPDXPackageVerificationCode(SPDXObject):
+ packageVerificationCodeValue = _String()
+ packageVerificationCodeExcludedFiles = _StringList()
+
+
+class SPDXPackage(SPDXObject):
+ name = _String()
+ SPDXID = _String()
+ versionInfo = _String()
+ downloadLocation = _String(default="NOASSERTION")
+ packageSupplier = _String(default="NOASSERTION")
+ homepage = _String()
+ licenseConcluded = _String(default="NOASSERTION")
+ licenseDeclared = _String(default="NOASSERTION")
+ summary = _String()
+ description = _String()
+ sourceInfo = _String()
+ copyrightText = _String(default="NOASSERTION")
+ licenseInfoFromFiles = _StringList(default=["NOASSERTION"])
+ externalRefs = _ObjectList(SPDXExternalReference)
+ packageVerificationCode = _Object(SPDXPackageVerificationCode)
+ hasFiles = _StringList()
+ packageFileName = _String()
+ annotations = _ObjectList(SPDXAnnotation)
+
+
+class SPDXFile(SPDXObject):
+ SPDXID = _String()
+ fileName = _String()
+ licenseConcluded = _String(default="NOASSERTION")
+ copyrightText = _String(default="NOASSERTION")
+ licenseInfoInFiles = _StringList(default=["NOASSERTION"])
+ checksums = _ObjectList(SPDXChecksum)
+ fileTypes = _StringList()
+
+
+class SPDXCreationInfo(SPDXObject):
+ created = _String()
+ licenseListVersion = _String()
+ comment = _String()
+ creators = _StringList()
+
+
+class SPDXExternalDocumentRef(SPDXObject):
+ externalDocumentId = _String()
+ spdxDocument = _String()
+ checksum = _Object(SPDXChecksum)
+
+
+class SPDXExtractedLicensingInfo(SPDXObject):
+ name = _String()
+ comment = _String()
+ licenseId = _String()
+ extractedText = _String()
+
+
+class SPDXDocument(SPDXObject):
+ spdxVersion = _String(default="SPDX-" + SPDX_VERSION)
+ dataLicense = _String(default="CC0-1.0")
+ SPDXID = _String(default="SPDXRef-DOCUMENT")
+ name = _String()
+ documentNamespace = _String()
+ creationInfo = _Object(SPDXCreationInfo)
+ packages = _ObjectList(SPDXPackage)
+ files = _ObjectList(SPDXFile)
+ relationships = _ObjectList(SPDXRelationship)
+ externalDocumentRefs = _ObjectList(SPDXExternalDocumentRef)
+ hasExtractedLicensingInfos = _ObjectList(SPDXExtractedLicensingInfo)
+
+ def __init__(self, **d):
+ super().__init__(**d)
+
+ def to_json(self, f, *, sort_keys=False, indent=None, separators=None):
+ class Encoder(json.JSONEncoder):
+ def default(self, o):
+ if isinstance(o, SPDXObject):
+ return o.serializer()
+
+ return super().default(o)
+
+ sha1 = hashlib.sha1()
+ for chunk in Encoder(
+ sort_keys=sort_keys,
+ indent=indent,
+ separators=separators,
+ ).iterencode(self):
+ chunk = chunk.encode("utf-8")
+ f.write(chunk)
+ sha1.update(chunk)
+
+ return sha1.hexdigest()
+
+ @classmethod
+ def from_json(cls, f):
+ return cls(**json.load(f))
+
+ def add_relationship(self, _from, relationship, _to, *, comment=None):
+ if isinstance(_from, SPDXObject):
+ from_spdxid = _from.SPDXID
+ else:
+ from_spdxid = _from
+
+ if isinstance(_to, SPDXObject):
+ to_spdxid = _to.SPDXID
+ else:
+ to_spdxid = _to
+
+ r = SPDXRelationship(
+ spdxElementId=from_spdxid,
+ relatedSpdxElement=to_spdxid,
+ relationshipType=relationship,
+ )
+
+ if comment is not None:
+ r.comment = comment
+
+ self.relationships.append(r)
+
+ def find_by_spdxid(self, spdxid):
+ for o in itertools.chain(self.packages, self.files):
+ if o.SPDXID == spdxid:
+ return o
+ return None
+
+ def find_external_document_ref(self, namespace):
+ for r in self.externalDocumentRefs:
+ if r.spdxDocument == namespace:
+ return r
+ return None
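[Editor's note: to make the property machinery above concrete, here is a minimal, hedged usage sketch; all field values are invented. It builds a document with one package, records a DESCRIBES relationship, and serializes it with to_json(). Defaults such as dataLicense = "CC0-1.0" and the "NOASSERTION" strings come from the _String(default=...) declarations above. It assumes oe.spdx (added by this patch) is importable.]

    # Standalone sketch; field values are invented for illustration.
    import io
    import oe.spdx

    doc = oe.spdx.SPDXDocument()
    doc.name = "recipe-example"
    doc.documentNamespace = "http://spdx.org/spdxdoc/recipe-example-0000"
    doc.creationInfo.created = "2021-01-01T00:00:00Z"
    doc.creationInfo.creators.append("Tool: example-sketch")

    pkg = oe.spdx.SPDXPackage()
    pkg.name = "example"
    pkg.SPDXID = "SPDXRef-Package-example"
    doc.packages.append(pkg)

    # DESCRIBES relationship from the document to the package
    doc.add_relationship(doc, "DESCRIBES", pkg)

    buf = io.BytesIO()
    doc_sha1 = doc.to_json(buf, sort_keys=True)  # writes UTF-8 JSON, returns its SHA-1
    print(doc_sha1)
    print(doc.find_by_spdxid("SPDXRef-Package-example").downloadLocation)  # "NOASSERTION"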
diff --git a/poky/meta/lib/oe/sstatesig.py b/poky/meta/lib/oe/sstatesig.py
index 78cdf878f..0c3b4589c 100644
--- a/poky/meta/lib/oe/sstatesig.py
+++ b/poky/meta/lib/oe/sstatesig.py
@@ -108,7 +108,6 @@ class SignatureGeneratorOEBasicHashMixIn(object):
self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
"").split()
self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
- self.buildarch = data.getVar('BUILD_ARCH')
self._internal = False
pass
@@ -147,13 +146,6 @@ class SignatureGeneratorOEBasicHashMixIn(object):
self.dump_lockedsigs(sigfile)
return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
- def prep_taskhash(self, tid, deps, dataCaches):
- super().prep_taskhash(tid, deps, dataCaches)
- if hasattr(self, "extramethod"):
- (mc, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
- inherits = " ".join(dataCaches[mc].inherits[fn])
- if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
- self.extramethod[tid] = ":" + self.buildarch
def get_taskhash(self, tid, deps, dataCaches):
if tid in self.lockedhashes:
@@ -478,6 +470,8 @@ def OEOuthashBasic(path, sigfile, task, d):
import stat
import pwd
import grp
+ import re
+ import fnmatch
def update_hash(s):
s = s.encode('utf-8')
@@ -487,16 +481,29 @@ def OEOuthashBasic(path, sigfile, task, d):
h = hashlib.sha256()
prev_dir = os.getcwd()
+ corebase = d.getVar("COREBASE")
+ tmpdir = d.getVar("TMPDIR")
include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
if "package_write_" in task or task == "package_qa":
include_owners = False
include_timestamps = False
+ include_root = True
if task == "package":
include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1'
+ include_root = False
extra_content = d.getVar('HASHEQUIV_HASH_VERSION')
+ filemaps = {}
+ for m in (d.getVar('SSTATE_HASHEQUIV_FILEMAP') or '').split():
+ entry = m.split(":")
+ if len(entry) != 3 or entry[0] != task:
+ continue
+ filemaps.setdefault(entry[1], [])
+ filemaps[entry[1]].append(entry[2])
+
try:
os.chdir(path)
+ basepath = os.path.normpath(path)
update_hash("OEOuthashBasic\n")
if extra_content:
@@ -578,8 +585,13 @@ def OEOuthashBasic(path, sigfile, task, d):
else:
update_hash(" " * 9)
+ filterfile = False
+ for entry in filemaps:
+ if fnmatch.fnmatch(path, entry):
+ filterfile = True
+
update_hash(" ")
- if stat.S_ISREG(s.st_mode):
+ if stat.S_ISREG(s.st_mode) and not filterfile:
update_hash("%10d" % s.st_size)
else:
update_hash(" " * 10)
@@ -588,9 +600,24 @@ def OEOuthashBasic(path, sigfile, task, d):
fh = hashlib.sha256()
if stat.S_ISREG(s.st_mode):
# Hash file contents
- with open(path, 'rb') as d:
- for chunk in iter(lambda: d.read(4096), b""):
+ if filterfile:
+ # Need to ignore paths in crossscripts and postinst-useradd files.
+ with open(path, 'rb') as d:
+ chunk = d.read()
+ chunk = chunk.replace(bytes(basepath, encoding='utf8'), b'')
+ for entry in filemaps:
+ if not fnmatch.fnmatch(path, entry):
+ continue
+ for r in filemaps[entry]:
+ if r.startswith("regex-"):
+ chunk = re.sub(bytes(r[6:], encoding='utf8'), b'', chunk)
+ else:
+ chunk = chunk.replace(bytes(r, encoding='utf8'), b'')
fh.update(chunk)
+ else:
+ with open(path, 'rb') as d:
+ for chunk in iter(lambda: d.read(4096), b""):
+ fh.update(chunk)
update_hash(fh.hexdigest())
else:
update_hash(" " * len(fh.hexdigest()))
@@ -603,7 +630,8 @@ def OEOuthashBasic(path, sigfile, task, d):
update_hash("\n")
# Process this directory and all its child files
- process(root)
+ if include_root or root != ".":
+ process(root)
for f in files:
if f == 'fixmepath':
continue
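[Editor's note: the SSTATE_HASHEQUIV_FILEMAP entries consumed above have the form task:file-glob:replacement, where a replacement prefixed with "regex-" is applied via re.sub() and anything else is removed as a plain string, after the output tree's own path has been stripped. Below is a standalone, hedged sketch of just that filtering step; the variable value, paths, and file content are invented for illustration and are not the defaults shipped with the patch.]

    # Standalone illustration of the filemap substitution logic in OEOuthashBasic;
    # the FILEMAP value, paths and file content below are invented examples.
    import fnmatch
    import re

    def filter_chunk(task, path, chunk, filemap_var, basepath):
        filemaps = {}
        for m in filemap_var.split():
            entry = m.split(":")
            if len(entry) != 3 or entry[0] != task:
                continue
            filemaps.setdefault(entry[1], []).append(entry[2])

        # The build output path is stripped first, then per-glob rules are applied
        chunk = chunk.replace(basepath.encode("utf-8"), b"")
        for pattern, rules in filemaps.items():
            if not fnmatch.fnmatch(path, pattern):
                continue
            for r in rules:
                if r.startswith("regex-"):
                    chunk = re.sub(r[len("regex-"):].encode("utf-8"), b"", chunk)
                else:
                    chunk = chunk.replace(r.encode("utf-8"), b"")
        return chunk

    print(filter_chunk(
        "populate_sysroot",
        "./usr/bin/crossscripts/example",
        b"#!/bin/sh\nexport PATH=/work/tmp/sysroots/recipe/usr/bin\n",
        "populate_sysroot:*/crossscripts/*:regex-PATH=\\S*",
        "/work/tmp/sysroots"))
    # -> b'#!/bin/sh\nexport \n'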
diff --git a/poky/meta/lib/oeqa/core/target/ssh.py b/poky/meta/lib/oeqa/core/target/ssh.py
index 923a223b2..f956a7744 100644
--- a/poky/meta/lib/oeqa/core/target/ssh.py
+++ b/poky/meta/lib/oeqa/core/target/ssh.py
@@ -44,6 +44,7 @@ class OESSHTarget(OETarget):
self.ssh = self.ssh + [ '-p', port ]
self.scp = self.scp + [ '-P', port ]
self._monitor_dumper = None
+ self.target_dumper = None
def start(self, **kwargs):
pass
@@ -102,7 +103,8 @@ class OESSHTarget(OETarget):
if self.monitor_dumper:
self.monitor_dumper.dump_monitor()
if status == 255:
- self.target_dumper.dump_target()
+ if self.target_dumper:
+ self.target_dumper.dump_target()
if self.monitor_dumper:
self.monitor_dumper.dump_monitor()
return (status, output)
diff --git a/poky/meta/lib/oeqa/runtime/cases/parselogs.py b/poky/meta/lib/oeqa/runtime/cases/parselogs.py
index 5db021659..2b8893d84 100644
--- a/poky/meta/lib/oeqa/runtime/cases/parselogs.py
+++ b/poky/meta/lib/oeqa/runtime/cases/parselogs.py
@@ -91,6 +91,7 @@ qemux86_common = [
"glamor initialization failed",
"blk_update_request: I/O error, dev fd0, sector 0 op 0x0:(READ)",
"floppy: error",
+ 'failed to IDENTIFY (I/O error, err_mask=0x4)',
] + common_errors
ignore_errors = {
@@ -98,7 +99,6 @@ ignore_errors = {
'qemux86' : [
'Failed to access perfctr msr (MSR',
'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
- 'failed to IDENTIFY (I/O error, err_mask=0x4)',
] + qemux86_common,
'qemux86-64' : qemux86_common,
'qemumips' : [
diff --git a/poky/meta/lib/oeqa/selftest/cases/bblogging.py b/poky/meta/lib/oeqa/selftest/cases/bblogging.py
new file mode 100644
index 000000000..ea6c3c8c7
--- /dev/null
+++ b/poky/meta/lib/oeqa/selftest/cases/bblogging.py
@@ -0,0 +1,104 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class BitBakeLogging(OESelftestTestCase):
+
+ def assertCount(self, item, entry, count):
+ self.assertEqual(item.count(entry), count, msg="Output:\n'''\n%s\n'''\ndoesn't contain %d copies of:\n'''\n%s\n'''\n" % (item, count, entry))
+
+ def test_shell_logging(self):
+ # no logs, no verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c shelltest -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertNotIn("This is shell stdout", result.output)
+ self.assertNotIn("This is shell stderr", result.output)
+
+ # logs, no verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c shelltest -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertCount(result.output, "This is shell stdout", 1)
+ self.assertCount(result.output, "This is shell stderr", 1)
+
+ # no logs, verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # two copies due to set +x
+ self.assertCount(result.output, "This is shell stdout", 2)
+ self.assertCount(result.output, "This is shell stderr", 2)
+
+ # logs, verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # two copies due to set +x
+ self.assertCount(result.output, "This is shell stdout", 2)
+ self.assertCount(result.output, "This is shell stderr", 2)
+
+ def test_python_exit_logging(self):
+ # no logs, no verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertNotIn("This is python stdout", result.output)
+
+ # logs, no verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # A sys.exit() should include the output
+ self.assertCount(result.output, "This is python stdout", 1)
+
+ # no logs, verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # python tasks don't log output with -v currently
+ #self.assertCount(result.output, "This is python stdout", 1)
+
+ # logs, verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # python tasks don't log output with -v currently
+ #self.assertCount(result.output, "This is python stdout", 1)
+
+ def test_python_fatal_logging(self):
+ # no logs, no verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ self.assertNotIn("This is python fatal test stdout", result.output)
+ self.assertCount(result.output, "This is a fatal error", 1)
+
+ # logs, no verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # A bb.fatal() should not include the output
+ self.assertNotIn("This is python fatal test stdout", result.output)
+ self.assertCount(result.output, "This is a fatal error", 1)
+
+ # no logs, verbose
+ self.write_config('BBINCLUDELOGS = ""')
+ result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # python tasks don't log output with -v currently
+ #self.assertCount(result.output, "This is python fatal test stdout", 1)
+ self.assertCount(result.output, "This is a fatal error", 1)
+
+ # logs, verbose
+ self.write_config('BBINCLUDELOGS = "yes"')
+ result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
+ self.assertIn("ERROR: Logfile of failure stored in:", result.output)
+ # python tasks don't log output with -v currently
+ #self.assertCount(result.output, "This is python fatal test stdout", 1)
+ self.assertCount(result.output, "This is a fatal error", 1)
+
diff --git a/poky/meta/lib/oeqa/selftest/cases/bbtests.py b/poky/meta/lib/oeqa/selftest/cases/bbtests.py
index 8831de606..656236407 100644
--- a/poky/meta/lib/oeqa/selftest/cases/bbtests.py
+++ b/poky/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -83,8 +83,10 @@ class BitbakeTests(OESelftestTestCase):
def test_force_task_1(self):
# test 1 from bug 5875
+ import uuid
test_recipe = 'zlib'
- test_data = "Microsoft Made No Profit From Anyone's Zunes Yo"
+        # Need to use uuid otherwise hash equivalence would change the workflow
+ test_data = "Microsoft Made No Profit From Anyone's Zunes Yo %s" % uuid.uuid1()
bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe)
image_dir = bb_vars['D']
pkgsplit_dir = bb_vars['PKGDEST']
diff --git a/poky/meta/lib/oeqa/selftest/cases/devtool.py b/poky/meta/lib/oeqa/selftest/cases/devtool.py
index 6d9cd46bf..f495e84c7 100644
--- a/poky/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/poky/meta/lib/oeqa/selftest/cases/devtool.py
@@ -649,7 +649,7 @@ class DevtoolModifyTests(DevtoolBase):
self.track_for_cleanup(self.workspacedir)
self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
- testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk meta-ide-support'.split()
+ testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk'.split()
# Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose
result = runCmd('bitbake-layers show-recipes gcc-source*')
for line in result.output.splitlines():
diff --git a/poky/meta/lib/oeqa/selftest/cases/gotoolchain.py b/poky/meta/lib/oeqa/selftest/cases/gotoolchain.py
index 4fc3605f4..c809d7c9b 100644
--- a/poky/meta/lib/oeqa/selftest/cases/gotoolchain.py
+++ b/poky/meta/lib/oeqa/selftest/cases/gotoolchain.py
@@ -43,6 +43,12 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
@classmethod
def tearDownClass(cls):
+        # Go creates files which are read-only
+ for dirpath, dirnames, filenames in os.walk(cls.tmpdir_SDKQA):
+ for filename in filenames + dirnames:
+ f = os.path.join(dirpath, filename)
+ if not os.path.islink(f):
+ os.chmod(f, 0o775)
shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True)
super(oeGoToolchainSelfTest, cls).tearDownClass()
diff --git a/poky/meta/lib/oeqa/selftest/cases/oescripts.py b/poky/meta/lib/oeqa/selftest/cases/oescripts.py
index 8a10ff357..1decce39e 100644
--- a/poky/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/poky/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -150,7 +150,7 @@ class OEListPackageconfigTests(OEScriptTests):
expected_endlines = []
expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
expected_endlines.append("pinentry gtk2 libcap ncurses qt secret")
- expected_endlines.append("tar acl")
+ expected_endlines.append("tar acl selinux")
self.check_endlines(results, expected_endlines)
diff --git a/poky/meta/lib/oeqa/selftest/cases/recipetool.py b/poky/meta/lib/oeqa/selftest/cases/recipetool.py
index 6f531dfa3..c2a53815d 100644
--- a/poky/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/poky/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -481,7 +481,7 @@ class RecipetoolTests(RecipetoolBase):
result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
self.assertTrue(os.path.isfile(recipefile))
checkvars = {}
- checkvars['LICENSE'] = set(['PSF', '&', 'BSD', 'GPL'])
+ checkvars['LICENSE'] = set(['PSF', '&', 'BSD-3-Clause', 'GPL'])
checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING.txt;md5=35a23d42b615470583563132872c97d6'
checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-${PV}.tar.gz'
checkvars['SRC_URI[md5sum]'] = 'c53768d63db3873b7d452833553469de'
diff --git a/poky/meta/lib/oeqa/selftest/cases/recipeutils.py b/poky/meta/lib/oeqa/selftest/cases/recipeutils.py
index 97edad88b..f1dd63f65 100644
--- a/poky/meta/lib/oeqa/selftest/cases/recipeutils.py
+++ b/poky/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -40,7 +40,7 @@ class RecipeUtilsTests(OESelftestTestCase):
SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
HOMEPAGE = "http://github.com/gitpython-developers/async"
SECTION = "devel/python"
--LICENSE = "BSD"
+-LICENSE = "BSD-3-Clause"
+LICENSE = "something"
LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
diff --git a/poky/meta/lib/oeqa/selftest/cases/tinfoil.py b/poky/meta/lib/oeqa/selftest/cases/tinfoil.py
index 51092805d..8fd48bb05 100644
--- a/poky/meta/lib/oeqa/selftest/cases/tinfoil.py
+++ b/poky/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -94,14 +94,13 @@ class TinfoilTests(OESelftestTestCase):
pass
pattern = 'conf'
- res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine')
+ res = tinfoil.run_command('testCookerCommandEvent', pattern)
self.assertTrue(res)
eventreceived = False
commandcomplete = False
start = time.time()
# Wait for maximum 60s in total so we'd detect spurious heartbeat events for example
- # The test is IO load sensitive too
while (not (eventreceived == True and commandcomplete == True)
and (time.time() - start < 60)):
# if we received both events (on let's say a good day), we are done
@@ -111,7 +110,8 @@ class TinfoilTests(OESelftestTestCase):
commandcomplete = True
elif isinstance(event, bb.event.FilesMatchingFound):
self.assertEqual(pattern, event._pattern)
- self.assertIn('qemuarm.conf', event._matches)
+ self.assertIn('A', event._matches)
+ self.assertIn('B', event._matches)
eventreceived = True
elif isinstance(event, logging.LogRecord):
continue
diff --git a/poky/meta/lib/oeqa/selftest/cases/wic.py b/poky/meta/lib/oeqa/selftest/cases/wic.py
index 3b4143414..5fc8e6514 100644
--- a/poky/meta/lib/oeqa/selftest/cases/wic.py
+++ b/poky/meta/lib/oeqa/selftest/cases/wic.py
@@ -744,6 +744,17 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
% (wks_file, self.resultdir), ignore_status=True).status)
os.remove(wks_file)
+ def test_extra_space(self):
+ """Test --extra-space wks option."""
+ extraspace = 1024**3
+ runCmd("wic create wictestdisk "
+ "--image-name core-image-minimal "
+                   "--extra-space %i -o %s" % (extraspace, self.resultdir))
+ wicout = glob(self.resultdir + "wictestdisk-*.direct")
+ self.assertEqual(1, len(wicout))
+ size = os.path.getsize(wicout[0])
+ self.assertTrue(size > extraspace)
+
class Wic2(WicTestCase):
def test_bmap_short(self):
@@ -1147,6 +1158,35 @@ class Wic2(WicTestCase):
out = glob(self.resultdir + "%s-*.direct" % wksname)
self.assertEqual(1, len(out))
+ @only_for_arch(['i586', 'i686', 'x86_64'])
+ def test_efi_plugin_unified_kernel_image_qemu(self):
+ """Test efi plugin's Unified Kernel Image feature in qemu"""
+ config = 'IMAGE_FSTYPES = "wic"\n'\
+ 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
+ 'WKS_FILE = "test_efi_plugin.wks"\n'\
+ 'MACHINE_FEATURES:append = " efi"\n'
+ self.append_config(config)
+ self.assertEqual(0, bitbake('core-image-minimal core-image-minimal-initramfs ovmf').status)
+ self.remove_config(config)
+
+ with runqemu('core-image-minimal', ssh=False,
+ runqemuparams='ovmf', image_fstype='wic') as qemu:
+ # Check that /boot has EFI bootx64.efi (required for EFI)
+ cmd = "ls /boot/EFI/BOOT/bootx64.efi | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '1')
+ # Check that /boot has EFI/Linux/linux.efi (required for Unified Kernel Images auto detection)
+ cmd = "ls /boot/EFI/Linux/linux.efi | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '1')
+ # Check that /boot doesn't have loader/entries/boot.conf (Unified Kernel Images are auto detected by the bootloader)
+            cmd = "ls /boot/loader/entries/boot.conf 2>/dev/null | wc -l"
+ status, output = qemu.run_serial(cmd)
+ self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
+ self.assertEqual(output, '0')
+
def test_fs_types(self):
"""Test filesystem types for empty and not empty partitions"""
img = 'core-image-minimal'
diff --git a/poky/meta/lib/oeqa/utils/buildproject.py b/poky/meta/lib/oeqa/utils/buildproject.py
index e6d80cc8d..dfb966186 100644
--- a/poky/meta/lib/oeqa/utils/buildproject.py
+++ b/poky/meta/lib/oeqa/utils/buildproject.py
@@ -18,6 +18,7 @@ class BuildProject(metaclass=ABCMeta):
def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None):
self.uri = uri
self.archive = os.path.basename(uri)
+ self.tempdirobj = None
if not tmpdir:
self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-')
tmpdir = self.tempdirobj.name
@@ -57,6 +58,8 @@ class BuildProject(metaclass=ABCMeta):
return self._run('cd %s; make install %s' % (self.targetdir, install_args))
def clean(self):
+ if self.tempdirobj:
+ self.tempdirobj.cleanup()
if not self.needclean:
return
self._run('rm -rf %s' % self.targetdir)
diff --git a/poky/meta/lib/oeqa/utils/qemurunner.py b/poky/meta/lib/oeqa/utils/qemurunner.py
index d55248c49..d961a9a21 100644
--- a/poky/meta/lib/oeqa/utils/qemurunner.py
+++ b/poky/meta/lib/oeqa/utils/qemurunner.py
@@ -265,7 +265,7 @@ class QemuRunner:
r = os.fdopen(r)
x = r.read()
os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM)
- sys.exit(0)
+ os._exit(0)
self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid)
self.logger.debug("waiting at most %s seconds for qemu pid (%s)" %
diff --git a/poky/meta/lib/oeqa/utils/targetbuild.py b/poky/meta/lib/oeqa/utils/targetbuild.py
index 1055810ca..09738add1 100644
--- a/poky/meta/lib/oeqa/utils/targetbuild.py
+++ b/poky/meta/lib/oeqa/utils/targetbuild.py
@@ -19,6 +19,7 @@ class BuildProject(metaclass=ABCMeta):
self.d = d
self.uri = uri
self.archive = os.path.basename(uri)
+ self.tempdirobj = None
if not tmpdir:
tmpdir = self.d.getVar('WORKDIR')
if not tmpdir:
@@ -71,9 +72,10 @@ class BuildProject(metaclass=ABCMeta):
return self._run('cd %s; make install %s' % (self.targetdir, install_args))
def clean(self):
+ if self.tempdirobj:
+ self.tempdirobj.cleanup()
self._run('rm -rf %s' % self.targetdir)
subprocess.check_call('rm -f %s' % self.localarchive, shell=True)
- pass
class TargetBuildProject(BuildProject):