Diffstat (limited to 'poky/meta/lib')
-rw-r--r--  poky/meta/lib/oe/gpg_sign.py | 27
-rw-r--r--  poky/meta/lib/oe/license.py | 6
-rw-r--r--  poky/meta/lib/oe/package_manager/__init__.py | 2
-rw-r--r--  poky/meta/lib/oe/packagedata.py | 2
-rw-r--r--  poky/meta/lib/oe/qa.py | 34
-rw-r--r--  poky/meta/lib/oe/reproducible.py | 84
-rw-r--r--  poky/meta/lib/oe/spdx.py | 6
-rw-r--r--  poky/meta/lib/oe/sstatesig.py | 29
-rw-r--r--  poky/meta/lib/oe/utils.py | 3
-rw-r--r--  poky/meta/lib/oeqa/manual/eclipse-plugin.json | 6
-rw-r--r--  poky/meta/lib/oeqa/runtime/cases/parselogs.py | 2
-rw-r--r--  poky/meta/lib/oeqa/runtime/cases/rpm.py | 6
-rw-r--r--  poky/meta/lib/oeqa/runtime/context.py | 29
-rw-r--r--  poky/meta/lib/oeqa/sdk/buildtools-cases/build.py | 2
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/bbtests.py | 3
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/eSDK.py | 2
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/fitimage.py | 8
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/oescripts.py | 2
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/reproducible.py | 1
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/runtime_test.py | 10
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/signing.py | 8
-rw-r--r--  poky/meta/lib/oeqa/selftest/cases/sstatetests.py | 36
-rw-r--r--  poky/meta/lib/oeqa/selftest/context.py | 2
-rw-r--r--  poky/meta/lib/oeqa/utils/qemurunner.py | 2
24 files changed, 227 insertions, 85 deletions
diff --git a/poky/meta/lib/oe/gpg_sign.py b/poky/meta/lib/oe/gpg_sign.py
index 492f096eaa..1bce6cb792 100644
--- a/poky/meta/lib/oe/gpg_sign.py
+++ b/poky/meta/lib/oe/gpg_sign.py
@@ -109,16 +109,33 @@ class LocalSigner(object):
bb.fatal("Could not get gpg version: %s" % e)
- def verify(self, sig_file):
+ def verify(self, sig_file, valid_sigs = ''):
"""Verify signature"""
- cmd = self.gpg_cmd + ["--verify", "--no-permission-warning"]
+ cmd = self.gpg_cmd + ["--verify", "--no-permission-warning", "--status-fd", "1"]
if self.gpg_path:
cmd += ["--homedir", self.gpg_path]
cmd += [sig_file]
- status = subprocess.call(cmd)
- ret = False if status else True
- return ret
+ status = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ # If no valid signatures were specified, any good signature is accepted
+ if not valid_sigs:
+ ret = False if status.returncode else True
+ return ret
+
+ import re
+ goodsigs = []
+ sigre = re.compile(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$')
+ for l in status.stdout.decode("utf-8").splitlines():
+ s = sigre.match(l)
+ if s:
+ goodsigs += [s.group(1)]
+
+ for sig in valid_sigs.split():
+ if sig in goodsigs:
+ return True
+ if len(goodsigs):
+ bb.warn('No accepted signatures found. Good signatures found: %s.' % ' '.join(goodsigs))
+ return False
def get_signer(d, backend):
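
The reworked verify() switches from the process exit code to gpg's machine-readable status output. A minimal standalone sketch of the same idea outside the LocalSigner class (the signature path and fingerprint in the usage note are made up, and --homedir handling is omitted):

    import re
    import subprocess

    def verify_detached(sig_file, valid_sigs=""):
        # --status-fd 1 makes gpg print machine-readable lines such as
        # "[GNUPG:] GOODSIG <keyid> <user id>" on stdout.
        cmd = ["gpg", "--verify", "--no-permission-warning", "--status-fd", "1", sig_file]
        status = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if not valid_sigs:
            # No allow-list given: any signature gpg accepts is good enough.
            return status.returncode == 0
        sigre = re.compile(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$')
        goodsigs = [m.group(1) for m in map(sigre.match,
                    status.stdout.decode("utf-8").splitlines()) if m]
        return any(sig in goodsigs for sig in valid_sigs.split())

    # e.g. verify_detached("image.manifest.sig", valid_sigs="0123456789ABCDEF")
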
diff --git a/poky/meta/lib/oe/license.py b/poky/meta/lib/oe/license.py
index 665d32ecbb..b5d378a549 100644
--- a/poky/meta/lib/oe/license.py
+++ b/poky/meta/lib/oe/license.py
@@ -74,6 +74,9 @@ class FlattenVisitor(LicenseVisitor):
def visit_Str(self, node):
self.licenses.append(node.s)
+ def visit_Constant(self, node):
+ self.licenses.append(node.value)
+
def visit_BinOp(self, node):
if isinstance(node.op, ast.BitOr):
left = FlattenVisitor(self.choose_licenses)
@@ -227,6 +230,9 @@ class ListVisitor(LicenseVisitor):
def visit_Str(self, node):
self.licenses.add(node.s)
+ def visit_Constant(self, node):
+ self.licenses.add(node.value)
+
def list_licenses(licensestr):
"""Simply get a list of all licenses mentioned in a license string.
Binary operators are not applied or taken into account in any way"""
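
visit_Constant is needed because ast.Str has been deprecated since Python 3.8 (string literals now parse as ast.Constant), so a visitor that only implements visit_Str silently stops collecting licenses on newer interpreters. A small self-contained sketch of the same pattern; the quoted license names are illustrative, and oe.license itself preprocesses bare license names before parsing:

    import ast

    class Flatten(ast.NodeVisitor):
        """Collect every license name mentioned in a parsed expression."""
        def __init__(self):
            self.licenses = []
        def visit_Str(self, node):        # Python < 3.8 string literal nodes
            self.licenses.append(node.s)
        def visit_Constant(self, node):   # Python >= 3.8 string literal nodes
            self.licenses.append(node.value)

    v = Flatten()
    v.visit(ast.parse('"GPL-2.0-only" | "MIT"', mode='eval'))
    print(v.licenses)   # ['GPL-2.0-only', 'MIT'] on Python 3.8 and later
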
diff --git a/poky/meta/lib/oe/package_manager/__init__.py b/poky/meta/lib/oe/package_manager/__init__.py
index 8f7b60e077..80bc1a6bc6 100644
--- a/poky/meta/lib/oe/package_manager/__init__.py
+++ b/poky/meta/lib/oe/package_manager/__init__.py
@@ -321,7 +321,7 @@ class PackageManager(object, metaclass=ABCMeta):
# TODO don't have sdk here but have a property on the superclass
# (and respect in install_complementary)
if sdk:
- pkgdatadir = self.d.expand("${TMPDIR}/pkgdata/${SDK_SYS}")
+ pkgdatadir = self.d.getVar("PKGDATA_DIR_SDK")
else:
pkgdatadir = self.d.getVar("PKGDATA_DIR")
diff --git a/poky/meta/lib/oe/packagedata.py b/poky/meta/lib/oe/packagedata.py
index 02c81e5a52..212f048bc6 100644
--- a/poky/meta/lib/oe/packagedata.py
+++ b/poky/meta/lib/oe/packagedata.py
@@ -19,7 +19,7 @@ def read_pkgdatafile(fn):
import re
with open(fn, 'r') as f:
lines = f.readlines()
- r = re.compile("(^.+?):\s+(.*)")
+ r = re.compile(r"(^.+?):\s+(.*)")
for l in lines:
m = r.match(l)
if m:
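
The only change here is turning the pattern into a raw string, which silences Python's DeprecationWarning about the \s escape in a normal string literal without altering what the regex matches. For illustration, the same pattern applied to a couple of made-up pkgdata-style lines:

    import re

    r = re.compile(r"(^.+?):\s+(.*)")
    for l in ["PN: busybox", "PKGSIZE:busybox: 530407"]:
        m = r.match(l)
        if m:
            print(m.group(1), "=>", m.group(2))
    # PN => busybox
    # PKGSIZE:busybox => 530407
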
diff --git a/poky/meta/lib/oe/qa.py b/poky/meta/lib/oe/qa.py
index e8a854a302..efab7e8564 100644
--- a/poky/meta/lib/oe/qa.py
+++ b/poky/meta/lib/oe/qa.py
@@ -171,6 +171,40 @@ def elf_machine_to_string(machine):
except:
return "Unknown (%s)" % repr(machine)
+def write_error(type, error, d):
+ logfile = d.getVar('QA_LOGFILE')
+ if logfile:
+ p = d.getVar('P')
+ with open(logfile, "a+") as f:
+ f.write("%s: %s [%s]\n" % (p, error, type))
+
+def handle_error(error_class, error_msg, d):
+ if error_class in (d.getVar("ERROR_QA") or "").split():
+ write_error(error_class, error_msg, d)
+ bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
+ d.setVar("QA_ERRORS_FOUND", "True")
+ return False
+ elif error_class in (d.getVar("WARN_QA") or "").split():
+ write_error(error_class, error_msg, d)
+ bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
+ else:
+ bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
+ return True
+
+def add_message(messages, section, new_msg):
+ if section not in messages:
+ messages[section] = new_msg
+ else:
+ messages[section] = messages[section] + "\n" + new_msg
+
+def exit_with_message_if_errors(message, d):
+ qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False)
+ if qa_fatal_errors:
+ bb.fatal(message)
+
+def exit_if_errors(d):
+ exit_with_message_if_errors("Fatal QA errors were found, failing task.", d)
+
if __name__ == "__main__":
import sys
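
A hypothetical caller of the new helpers might look like the sketch below; "missing-summary" is an illustrative QA class name rather than one defined by poky, and d stands for the usual bitbake datastore:

    import oe.qa

    def check_summary(d):
        # Routed to bb.error / bb.warn / bb.note depending on whether the
        # class appears in ERROR_QA or WARN_QA; errors also set QA_ERRORS_FOUND.
        if not d.getVar("SUMMARY"):
            oe.qa.handle_error("missing-summary", "Recipe has no SUMMARY set", d)

    def finish(d):
        # Fails the task if any handle_error() call hit an ERROR_QA class.
        oe.qa.exit_if_errors(d)
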
diff --git a/poky/meta/lib/oe/reproducible.py b/poky/meta/lib/oe/reproducible.py
index 204b9bd734..4fb99d963c 100644
--- a/poky/meta/lib/oe/reproducible.py
+++ b/poky/meta/lib/oe/reproducible.py
@@ -5,6 +5,57 @@ import os
import subprocess
import bb
+# For reproducible builds, this code sets the default SOURCE_DATE_EPOCH in each
+# component's build environment. The format is number of seconds since the
+# system epoch.
+#
+# Upstream components (generally) respect this environment variable,
+# using it in place of the "current" date and time.
+# See https://reproducible-builds.org/specs/source-date-epoch/
+#
+# The default value of SOURCE_DATE_EPOCH comes from the function
+# get_source_date_epoch_value which reads from the SDE_FILE, or if the file
+# is not available will use the fallback of SOURCE_DATE_EPOCH_FALLBACK.
+#
+# The SDE_FILE is normally constructed from the function
+# create_source_date_epoch_stamp which is typically added as a postfunc to
+# the do_unpack task. If a recipe does NOT have do_unpack, it should be added
+# to a task that runs after the source is available and before the
+# do_deploy_source_date_epoch task is executed.
+#
+# If a recipe wishes to override the default behavior it should set its own
+# SOURCE_DATE_EPOCH or override the do_deploy_source_date_epoch_stamp task
+# with recipe-specific functionality to write the appropriate
+# SOURCE_DATE_EPOCH into the SDE_FILE.
+#
+# SOURCE_DATE_EPOCH is intended to be a reproducible value. This value should
+# be reproducible for anyone who builds the same revision from the same
+# sources.
+#
+# There are 5 ways the create_source_date_epoch_stamp function determines what
+# becomes SOURCE_DATE_EPOCH:
+#
+# 1. Use the value from __source_date_epoch.txt file if this file exists.
+# This file was most likely created in the previous build by one of the
+# following methods 2,3,4.
+# Alternatively, it can be provided by a recipe via SRC_URI.
+#
+# If the file does not exist:
+#
+# 2. If there is a git checkout, use the last git commit timestamp.
+# Git does not preserve file timestamps on checkout.
+#
+# 3. Use the mtime of "known" files such as NEWS, CHANGELOG, ...
+# This works for well-kept repositories distributed via tarball.
+#
+# 4. Use the modification time of the youngest file in the source tree, if
+# there is one.
+# This will be the newest file from the distribution tarball, if any.
+#
+# 5. Fall back to a fixed timestamp (SOURCE_DATE_EPOCH_FALLBACK).
+#
+# Once the value is determined, it is stored in the recipe's SDE_FILE.
+
def get_source_date_epoch_from_known_files(d, sourcedir):
source_date_epoch = None
newest_file = None
@@ -106,3 +157,36 @@ def get_source_date_epoch(d, sourcedir):
fixed_source_date_epoch(d) # Last resort
)
+def epochfile_read(epochfile, d):
+ cached, efile = d.getVar('__CACHED_SOURCE_DATE_EPOCH') or (None, None)
+ if cached and efile == epochfile:
+ return cached
+
+ if cached and epochfile != efile:
+ bb.debug(1, "Epoch file changed from %s to %s" % (efile, epochfile))
+
+ source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
+ try:
+ with open(epochfile, 'r') as f:
+ s = f.read()
+ try:
+ source_date_epoch = int(s)
+ except ValueError:
+ bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK" % s)
+ source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
+ bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
+ except FileNotFoundError:
+ bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, source_date_epoch))
+
+ d.setVar('__CACHED_SOURCE_DATE_EPOCH', (str(source_date_epoch), epochfile))
+ return str(source_date_epoch)
+
+def epochfile_write(source_date_epoch, epochfile, d):
+
+ bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
+ bb.utils.mkdirhier(os.path.dirname(epochfile))
+
+ tmp_file = "%s.new" % epochfile
+ with open(tmp_file, 'w') as f:
+ f.write(str(source_date_epoch))
+ os.rename(tmp_file, epochfile)
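
The two new helpers give a simple file-backed round trip for SOURCE_DATE_EPOCH. A sketch of how they fit together, assuming a bitbake environment where the bb and oe modules are importable; FakeData is a minimal stand-in for the datastore and the fallback value is arbitrary:

    import oe.reproducible

    class FakeData:
        """Stub datastore exposing only what epochfile_read/_write need."""
        def __init__(self):
            self._vars = {'SOURCE_DATE_EPOCH_FALLBACK': '1302044400'}
        def getVar(self, name):
            return self._vars.get(name)
        def setVar(self, name, value):
            self._vars[name] = value

    d = FakeData()
    oe.reproducible.epochfile_write(1609459200, '/tmp/__source_date_epoch.txt', d)
    print(oe.reproducible.epochfile_read('/tmp/__source_date_epoch.txt', d))  # '1609459200'
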
diff --git a/poky/meta/lib/oe/spdx.py b/poky/meta/lib/oe/spdx.py
index 4416194e06..9e7ced5a15 100644
--- a/poky/meta/lib/oe/spdx.py
+++ b/poky/meta/lib/oe/spdx.py
@@ -196,6 +196,7 @@ class SPDXRelationship(SPDXObject):
relatedSpdxElement = _String()
relationshipType = _String()
comment = _String()
+ annotations = _ObjectList(SPDXAnnotation)
class SPDXExternalReference(SPDXObject):
@@ -300,7 +301,7 @@ class SPDXDocument(SPDXObject):
def from_json(cls, f):
return cls(**json.load(f))
- def add_relationship(self, _from, relationship, _to, *, comment=None):
+ def add_relationship(self, _from, relationship, _to, *, comment=None, annotation=None):
if isinstance(_from, SPDXObject):
from_spdxid = _from.SPDXID
else:
@@ -320,6 +321,9 @@ class SPDXDocument(SPDXObject):
if comment is not None:
r.comment = comment
+ if annotation is not None:
+ r.annotations.append(annotation)
+
self.relationships.append(r)
def find_by_spdxid(self, spdxid):
diff --git a/poky/meta/lib/oe/sstatesig.py b/poky/meta/lib/oe/sstatesig.py
index 0c3b4589c5..038404e377 100644
--- a/poky/meta/lib/oe/sstatesig.py
+++ b/poky/meta/lib/oe/sstatesig.py
@@ -489,7 +489,7 @@ def OEOuthashBasic(path, sigfile, task, d):
include_timestamps = False
include_root = True
if task == "package":
- include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1'
+ include_timestamps = True
include_root = False
extra_content = d.getVar('HASHEQUIV_HASH_VERSION')
@@ -552,21 +552,22 @@ def OEOuthashBasic(path, sigfile, task, d):
else:
add_perm(stat.S_IXUSR, 'x')
- add_perm(stat.S_IRGRP, 'r')
- add_perm(stat.S_IWGRP, 'w')
- if stat.S_ISGID & s.st_mode:
- add_perm(stat.S_IXGRP, 's', 'S')
- else:
- add_perm(stat.S_IXGRP, 'x')
+ if include_owners:
+ # Group/other permissions are only relevant in pseudo context
+ add_perm(stat.S_IRGRP, 'r')
+ add_perm(stat.S_IWGRP, 'w')
+ if stat.S_ISGID & s.st_mode:
+ add_perm(stat.S_IXGRP, 's', 'S')
+ else:
+ add_perm(stat.S_IXGRP, 'x')
- add_perm(stat.S_IROTH, 'r')
- add_perm(stat.S_IWOTH, 'w')
- if stat.S_ISVTX & s.st_mode:
- update_hash('t')
- else:
- add_perm(stat.S_IXOTH, 'x')
+ add_perm(stat.S_IROTH, 'r')
+ add_perm(stat.S_IWOTH, 'w')
+ if stat.S_ISVTX & s.st_mode:
+ update_hash('t')
+ else:
+ add_perm(stat.S_IXOTH, 'x')
- if include_owners:
try:
update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
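
The reshuffled block only hashes group/other permission characters when include_owners is set, since outside of pseudo those bits (like uid/gid) reflect the build host rather than the packaged content. A reduced sketch of the mode-to-string mapping, leaving out the setuid/setgid/sticky handling of the real function:

    import stat

    def perm_string(mode, include_owners=True):
        chars = []
        def add_perm(mask, on, off='-'):
            chars.append(on if mode & mask else off)
        # User bits always contribute to the hash input
        add_perm(stat.S_IRUSR, 'r')
        add_perm(stat.S_IWUSR, 'w')
        add_perm(stat.S_IXUSR, 'x')
        if include_owners:
            # Group/other bits are only meaningful under pseudo
            add_perm(stat.S_IRGRP, 'r')
            add_perm(stat.S_IWGRP, 'w')
            add_perm(stat.S_IXGRP, 'x')
            add_perm(stat.S_IROTH, 'r')
            add_perm(stat.S_IWOTH, 'w')
            add_perm(stat.S_IXOTH, 'x')
        return ''.join(chars)

    print(perm_string(0o755))         # rwxr-xr-x
    print(perm_string(0o755, False))  # rwx
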
diff --git a/poky/meta/lib/oe/utils.py b/poky/meta/lib/oe/utils.py
index 238af314d1..cf65639647 100644
--- a/poky/meta/lib/oe/utils.py
+++ b/poky/meta/lib/oe/utils.py
@@ -508,7 +508,8 @@ class ThreadedWorker(Thread):
try:
func(self, *args, **kargs)
except Exception as e:
- print(e)
+ # Eat all exceptions
+ bb.mainlogger.debug("Worker task raised %s" % e, exc_info=e)
finally:
self.tasks.task_done()
diff --git a/poky/meta/lib/oeqa/manual/eclipse-plugin.json b/poky/meta/lib/oeqa/manual/eclipse-plugin.json
index d77d0e673b..6c110d0656 100644
--- a/poky/meta/lib/oeqa/manual/eclipse-plugin.json
+++ b/poky/meta/lib/oeqa/manual/eclipse-plugin.json
@@ -44,7 +44,7 @@
"expected_results": ""
},
"2": {
- "action": "wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/qemu (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n",
+ "action": "wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/ (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n",
"expected_results": " Qemu can be lauched normally."
},
"3": {
@@ -60,7 +60,7 @@
"expected_results": ""
},
"6": {
- "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n",
+ "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n",
"expected_results": ""
},
"7": {
@@ -247,7 +247,7 @@
"execution": {
"1": {
"action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n",
- "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on http://autobuilder.yoctoproject.org/pub/releases/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n"
+ "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will find on https://downloads.yoctoproject.org/releases/yocto/yocto-2.0.1/eclipse-plugin/mars/ an archive with documentation like org.yocto.doc-development-$date.zip \n \n"
},
"2": {
"action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n",
diff --git a/poky/meta/lib/oeqa/runtime/cases/parselogs.py b/poky/meta/lib/oeqa/runtime/cases/parselogs.py
index 2b8893d842..50101b7851 100644
--- a/poky/meta/lib/oeqa/runtime/cases/parselogs.py
+++ b/poky/meta/lib/oeqa/runtime/cases/parselogs.py
@@ -32,7 +32,7 @@ common_errors = [
"Failed to load module \"fbdev\"",
"Failed to load module fbdev",
"Failed to load module glx",
- "[drm] Cannot find any crtc or sizes - going 1024x768",
+ "[drm] Cannot find any crtc or sizes",
"_OSC failed (AE_NOT_FOUND); disabling ASPM",
"Open ACPI failed (/var/run/acpid.socket) (No such file or directory)",
"NX (Execute Disable) protection cannot be enabled: non-PAE kernel!",
diff --git a/poky/meta/lib/oeqa/runtime/cases/rpm.py b/poky/meta/lib/oeqa/runtime/cases/rpm.py
index 7a9d62c003..a4339116bf 100644
--- a/poky/meta/lib/oeqa/runtime/cases/rpm.py
+++ b/poky/meta/lib/oeqa/runtime/cases/rpm.py
@@ -116,12 +116,12 @@ class RpmInstallRemoveTest(OERuntimeTestCase):
Author: Alexander Kanavin <alex.kanavin@gmail.com>
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
"""
- db_files_cmd = 'ls /var/lib/rpm/__db.*'
+ db_files_cmd = 'ls /var/lib/rpm/rpmdb.sqlite*'
check_log_cmd = "grep RPM /var/log/messages | wc -l"
- # Make sure that some database files are under /var/lib/rpm as '__db.xxx'
+ # Make sure that some database files are under /var/lib/rpm as 'rpmdb.sqlite'
status, output = self.target.run(db_files_cmd)
- msg = 'Failed to find database files under /var/lib/rpm/ as __db.xxx'
+ msg = 'Failed to find database files under /var/lib/rpm/ as rpmdb.sqlite'
self.assertEqual(0, status, msg=msg)
self.tc.target.copyTo(self.test_file, self.dst)
diff --git a/poky/meta/lib/oeqa/runtime/context.py b/poky/meta/lib/oeqa/runtime/context.py
index 3826f27642..d707ab263a 100644
--- a/poky/meta/lib/oeqa/runtime/context.py
+++ b/poky/meta/lib/oeqa/runtime/context.py
@@ -5,6 +5,7 @@
#
import os
+import sys
from oeqa.core.context import OETestContext, OETestContextExecutor
from oeqa.core.target.ssh import OESSHTarget
@@ -119,8 +120,7 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
# XXX: Don't base your targets on this code it will be refactored
# in the near future.
# Custom target module loading
- target_modules_path = kwargs.get('target_modules_path', '')
- controller = OERuntimeTestContextExecutor.getControllerModule(target_type, target_modules_path)
+ controller = OERuntimeTestContextExecutor.getControllerModule(target_type)
target = controller(logger, target_ip, server_ip, **kwargs)
return target
@@ -130,15 +130,15 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
# AttributeError raised if not found.
# ImportError raised if a provided module can not be imported.
@staticmethod
- def getControllerModule(target, target_modules_path):
- controllerslist = OERuntimeTestContextExecutor._getControllerModulenames(target_modules_path)
+ def getControllerModule(target):
+ controllerslist = OERuntimeTestContextExecutor._getControllerModulenames()
controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist)
return controller
# Return a list of all python modules in lib/oeqa/controllers for each
# layer in bbpath
@staticmethod
- def _getControllerModulenames(target_modules_path):
+ def _getControllerModulenames():
controllerslist = []
@@ -153,9 +153,8 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
else:
raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module)
- extpath = target_modules_path.split(':')
- for p in extpath:
- controllerpath = os.path.join(p, 'lib', 'oeqa', 'controllers')
+ for p in sys.path:
+ controllerpath = os.path.join(p, 'oeqa', 'controllers')
if os.path.exists(controllerpath):
add_controller_list(controllerpath)
return controllerslist
@@ -175,16 +174,12 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
# Search for and return a controller or None from given module name
@staticmethod
def _loadControllerFromModule(target, modulename):
- obj = None
- # import module, allowing it to raise import exception
- module = __import__(modulename, globals(), locals(), [target])
- # look for target class in the module, catching any exceptions as it
- # is valid that a module may not have the target class.
try:
- obj = getattr(module, target)
- except:
- obj = None
- return obj
+ import importlib
+ module = importlib.import_module(modulename)
+ return getattr(module, target)
+ except AttributeError:
+ return None
@staticmethod
def readPackagesManifest(manifest):
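
With the custom target_modules_path gone, controller modules are simply expected under oeqa/controllers on sys.path and resolved by name through importlib. A small sketch of that lookup; the module and class names in the usage note are made up:

    import importlib

    def load_controller(class_name, module_name):
        # Let a broken module raise ImportError; a module merely lacking the
        # class is a normal "not found here" case and returns None.
        module = importlib.import_module(module_name)
        try:
            return getattr(module, class_name)
        except AttributeError:
            return None

    # controller = load_controller("MyTargetController", "oeqa.controllers.mycontroller")
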
diff --git a/poky/meta/lib/oeqa/sdk/buildtools-cases/build.py b/poky/meta/lib/oeqa/sdk/buildtools-cases/build.py
index 9c9a84bf8a..aee2e5a8c0 100644
--- a/poky/meta/lib/oeqa/sdk/buildtools-cases/build.py
+++ b/poky/meta/lib/oeqa/sdk/buildtools-cases/build.py
@@ -25,6 +25,6 @@ class BuildTests(OESDKTestCase):
self._run('. %s/oe-init-build-env %s && bitbake virtual/libc' % (corebase, testdir))
finally:
delay = 10
- while delay and os.path.exists(testdir + "/bitbake.lock"):
+ while delay and (os.path.exists(testdir + "/bitbake.lock") or os.path.exists(testdir + "/cache/hashserv.db-wal")):
time.sleep(1)
delay = delay - 1
diff --git a/poky/meta/lib/oeqa/selftest/cases/bbtests.py b/poky/meta/lib/oeqa/selftest/cases/bbtests.py
index 6562364074..6779e62103 100644
--- a/poky/meta/lib/oeqa/selftest/cases/bbtests.py
+++ b/poky/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -151,9 +151,6 @@ INHERIT:remove = \"report-error\"
self.delete_recipeinc('man-db')
self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output)
self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output)
- line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.')
- self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \
-doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
def test_rename_downloaded_file(self):
# TODO unique dldir instead of using cleanall
diff --git a/poky/meta/lib/oeqa/selftest/cases/eSDK.py b/poky/meta/lib/oeqa/selftest/cases/eSDK.py
index 862849af35..d0c402ba8a 100644
--- a/poky/meta/lib/oeqa/selftest/cases/eSDK.py
+++ b/poky/meta/lib/oeqa/selftest/cases/eSDK.py
@@ -100,7 +100,7 @@ SSTATE_MIRRORS = "file://.* file://%s/PATH"
@classmethod
def tearDownClass(cls):
for i in range(0, 10):
- if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')):
+ if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')) or os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'cache/hashserv.db-wal')):
time.sleep(1)
else:
break
diff --git a/poky/meta/lib/oeqa/selftest/cases/fitimage.py b/poky/meta/lib/oeqa/selftest/cases/fitimage.py
index 184c8778d2..f6f6a8e795 100644
--- a/poky/meta/lib/oeqa/selftest/cases/fitimage.py
+++ b/poky/meta/lib/oeqa/selftest/cases/fitimage.py
@@ -742,6 +742,7 @@ UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
UBOOT_EXTLINUX = "0"
FIT_GENERATE_KEYS = "1"
KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
+FIT_KERNEL_COMP_ALG = "none"
FIT_HASH_ALG = "sha256"
"""
self.write_config(config)
@@ -763,9 +764,8 @@ FIT_HASH_ALG = "sha256"
kernel_load = str(get_bb_var('UBOOT_LOADADDRESS'))
kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT'))
- initramfs_bundle_format = str(get_bb_var('KERNEL_IMAGETYPE_REPLACEMENT'))
+ kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG'))
uboot_arch = str(get_bb_var('UBOOT_ARCH'))
- initramfs_bundle = "arch/" + uboot_arch + "/boot/" + initramfs_bundle_format + ".initramfs"
fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
its_file = open(fitimage_its_path)
@@ -775,11 +775,11 @@ FIT_HASH_ALG = "sha256"
exp_node_lines = [
'kernel-1 {',
'description = "Linux kernel";',
- 'data = /incbin/("' + initramfs_bundle + '");',
+ 'data = /incbin/("linux.bin");',
'type = "kernel";',
'arch = "' + uboot_arch + '";',
'os = "linux";',
- 'compression = "none";',
+ 'compression = "' + kernel_compression + '";',
'load = <' + kernel_load + '>;',
'entry = <' + kernel_entry + '>;',
'hash-1 {',
diff --git a/poky/meta/lib/oeqa/selftest/cases/oescripts.py b/poky/meta/lib/oeqa/selftest/cases/oescripts.py
index 1decce39e9..91abf9654a 100644
--- a/poky/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/poky/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -167,7 +167,7 @@ class OEListPackageconfigTests(OEScriptTests):
def test_packageconfig_flags_option_all(self):
results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
expected_endlines = []
- expected_endlines.append("pinentry-1.1.1")
+ expected_endlines.append("pinentry-1.2.0")
expected_endlines.append("PACKAGECONFIG ncurses libcap")
expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
diff --git a/poky/meta/lib/oeqa/selftest/cases/reproducible.py b/poky/meta/lib/oeqa/selftest/cases/reproducible.py
index e4582cb82a..2e983d2f17 100644
--- a/poky/meta/lib/oeqa/selftest/cases/reproducible.py
+++ b/poky/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -219,7 +219,6 @@ class ReproducibleTests(OESelftestTestCase):
bb.utils.remove(tmpdir, recurse=True)
config = textwrap.dedent('''\
- INHERIT += "reproducible_build"
PACKAGE_CLASSES = "{package_classes}"
INHIBIT_PACKAGE_STRIP = "1"
TMPDIR = "{tmpdir}"
diff --git a/poky/meta/lib/oeqa/selftest/cases/runtime_test.py b/poky/meta/lib/oeqa/selftest/cases/runtime_test.py
index 129503de63..a90f62bfe1 100644
--- a/poky/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/poky/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -213,17 +213,21 @@ class TestImage(OESelftestTestCase):
"""
import subprocess, os
+ distro = oe.lsb.distro_identifier()
+ if distro and distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04']:
+ self.skipTest('virgl headless cannot be tested with %s' %(distro))
+
render_hint = """If /dev/dri/renderD* is absent due to lack of suitable GPU, 'modprobe vgem' will create one sutable for mesa llvmpipe sofware renderer."""
try:
content = os.listdir("/dev/dri")
if len([i for i in content if i.startswith('render')]) == 0:
- self.skipTest("No render nodes found in /dev/dri: %s. %s" %(content, render_hint))
+ self.fail("No render nodes found in /dev/dri: %s. %s" %(content, render_hint))
except FileNotFoundError:
- self.skipTest("/dev/dri directory does not exist; no render nodes available on this machine. %s" %(render_hint))
+ self.fail("/dev/dri directory does not exist; no render nodes available on this machine. %s" %(render_hint))
try:
dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True)
except subprocess.CalledProcessError as e:
- self.skipTest("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
+ self.fail("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
features = 'INHERIT += "testimage"\n'
if 'opengl' not in qemu_distrofeatures:
diff --git a/poky/meta/lib/oeqa/selftest/cases/signing.py b/poky/meta/lib/oeqa/selftest/cases/signing.py
index af7a0b8b45..6f3d4aeae9 100644
--- a/poky/meta/lib/oeqa/selftest/cases/signing.py
+++ b/poky/meta/lib/oeqa/selftest/cases/signing.py
@@ -159,13 +159,13 @@ class Signing(OESelftestTestCase):
bitbake('-c clean %s' % test_recipe)
bitbake('-c populate_lic %s' % test_recipe)
- recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz.sig')
- recipe_tgz = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz')
+ recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst.sig')
+ recipe_archive = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst')
self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.')
- self.assertEqual(len(recipe_tgz), 1, 'Failed to find .tgz file.')
+ self.assertEqual(len(recipe_archive), 1, 'Failed to find .tar.zst file.')
- ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_tgz[0]))
+ ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_archive[0]))
# gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30
# gpg: Good signature from "testuser (nocomment) <testuser@email.com>"
self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.')
diff --git a/poky/meta/lib/oeqa/selftest/cases/sstatetests.py b/poky/meta/lib/oeqa/selftest/cases/sstatetests.py
index 17a1545506..3dab607eeb 100644
--- a/poky/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/poky/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -68,7 +68,7 @@ class SStateTests(SStateBase):
results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific)
if distro_nonspecific:
for r in results:
- if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo", "_fetch.tgz.siginfo", "_unpack.tgz.siginfo", "_patch.tgz.siginfo")):
+ if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo", "_fetch.tar.zst.siginfo", "_unpack.tar.zst.siginfo", "_patch.tar.zst.siginfo")):
continue
file_tracker.append(r)
else:
@@ -98,15 +98,15 @@ class SStateTests(SStateBase):
bitbake(['-ccleansstate'] + targets)
bitbake(targets)
- tgz_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
- self.assertTrue(tgz_created, msg="Could not find sstate .tgz files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_created)))
+ archives_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
+ self.assertTrue(archives_created, msg="Could not find sstate .tar.zst files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_created)))
siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created)))
bitbake(['-ccleansstate'] + targets)
- tgz_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
- self.assertTrue(not tgz_removed, msg="do_cleansstate didn't remove .tgz sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_removed)))
+ archives_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
+ self.assertTrue(not archives_removed, msg="do_cleansstate didn't remove .tar.zst sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_removed)))
def test_cleansstate_task_distro_specific_nonspecific(self):
targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
@@ -129,15 +129,15 @@ class SStateTests(SStateBase):
bitbake(['-ccleansstate'] + targets)
bitbake(targets)
- results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=False, distro_nonspecific=True)
+ results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=False, distro_nonspecific=True)
filtered_results = []
for r in results:
- if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo")):
+ if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo")):
continue
filtered_results.append(r)
self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results)))
- file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
- self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
+ file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
+ self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
self.track_for_cleanup(self.distro_specific_sstate + "_old")
shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
@@ -145,14 +145,14 @@ class SStateTests(SStateBase):
bitbake(['-cclean'] + targets)
bitbake(targets)
- file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
- self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets)))
+ file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
+ self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
- self.assertTrue(not_recreated == [], msg="The following sstate files ware not recreated: %s" % ', '.join(map(str, not_recreated)))
+ self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated)))
created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
- self.assertTrue(created_once == [], msg="The following sstate files ware created only in the second run: %s" % ', '.join(map(str, created_once)))
+ self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
def test_rebuild_distro_specific_sstate_cross_native_targets(self):
self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True)
@@ -188,23 +188,23 @@ class SStateTests(SStateBase):
if not sstate_arch in sstate_archs_list:
sstate_archs_list.append(sstate_arch)
if target_config[idx] == target_config[-1]:
- target_sstate_before_build = self.search_sstate(target + r'.*?\.tgz$')
+ target_sstate_before_build = self.search_sstate(target + r'.*?\.tar.zst$')
bitbake("-cclean %s" % target)
result = bitbake(target, ignore_status=True)
if target_config[idx] == target_config[-1]:
- target_sstate_after_build = self.search_sstate(target + r'.*?\.tgz$')
+ target_sstate_after_build = self.search_sstate(target + r'.*?\.tar.zst$')
expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)]
self.remove_config(global_config[idx])
self.remove_recipeinc(target, target_config[idx])
self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output))
runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list))))
- actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)]
+ actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tar.zst$') if not any(pattern in x for pattern in ignore_patterns)]
actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
- self.assertFalse(actual_not_expected, msg="Files should have been removed but ware not: %s" % ', '.join(map(str, actual_not_expected)))
+ self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected)))
expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate]
- self.assertFalse(expected_not_actual, msg="Extra files ware removed: %s" ', '.join(map(str, expected_not_actual)))
+ self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" % ', '.join(map(str, expected_not_actual)))
def test_sstate_cache_management_script_using_pr_1(self):
global_config = []
diff --git a/poky/meta/lib/oeqa/selftest/context.py b/poky/meta/lib/oeqa/selftest/context.py
index 1659926975..78c7a467e2 100644
--- a/poky/meta/lib/oeqa/selftest/context.py
+++ b/poky/meta/lib/oeqa/selftest/context.py
@@ -39,7 +39,7 @@ class NonConcurrentTestSuite(unittest.TestSuite):
def removebuilddir(d):
delay = 5
- while delay and os.path.exists(d + "/bitbake.lock"):
+ while delay and (os.path.exists(d + "/bitbake.lock") or os.path.exists(d + "/cache/hashserv.db-wal")):
time.sleep(1)
delay = delay - 1
# Deleting these directories takes a lot of time, use autobuilder
diff --git a/poky/meta/lib/oeqa/utils/qemurunner.py b/poky/meta/lib/oeqa/utils/qemurunner.py
index d961a9a218..0397148082 100644
--- a/poky/meta/lib/oeqa/utils/qemurunner.py
+++ b/poky/meta/lib/oeqa/utils/qemurunner.py
@@ -183,7 +183,7 @@ class QemuRunner:
# then add in the site-packages path components and add that
# to the python sys.path so qmp.py can be found.
python_path = os.path.dirname(os.path.dirname(self.logfile))
- python_path += "/recipe-sysroot-native/usr/lib/python3.9/site-packages"
+ python_path += "/recipe-sysroot-native/usr/lib/qemu-python"
sys.path.append(python_path)
importlib.invalidate_caches()
try: