From 08902b01500fb82ac050ec2dce9b6c4358075a17 Mon Sep 17 00:00:00 2001
From: Brad Bishop
Date: Tue, 20 Aug 2019 09:16:51 -0400
Subject: poky: subtree update:835f7eac06..20946c63c2

Aaron Chan (1):
      python3-dbus: Add native and nativesdk variants

Adrian Bunk (8):
      gnome: Remove the gnome class
      bind: Remove RECIPE_NO_UPDATE_REASON and follow the ESV releases
      webkitgtk: Reenable on mips
      mtd-utils: Upgrade to 2.1.1
      Change ftp:// URIs to http(s)://
      webkitgtk: Stop disabling gold on aarch64 and mips
      grub/libmpc/gdb: Use GNU_MIRROR in more recipes
      screen: Backport fix for an implicit function declaration

Alexander Kanavin (28):
      btrfs-tools: update 5.1.1 -> 5.2.1
      libmodulemd: update to 2.6.0
      libwebp: upgrade 1.0.2 -> 1.0.3
      createrepo-c: upgrade 0.14.2 -> 0.14.3
      webkitgtk: upgrade 2.24.2 -> 2.24.3
      bzip2: fix upstream version check
      stress-ng: add a recipe that replaces the original stress
      meson: update 0.50.1 -> 0.51.1
      meson.bbclass: do not pass native compiler/linker flags via command line
      meson: add a backported patch to address vala cross-compilation errors
      libedit: fix upstream verison check
      maintainers.inc: assign acpica to Ross
      stress-ng: add a patch to remove unneeded bash dependency
      elfutils: use PRIVATE_LIBS for the ptest package
      apt: add a missing perl runtime dependency
      attr: add a missing perl runtime dependency
      ofono: correct the python3 runtime dependency
      bluez5: correct the python3 runtime dependency
      local.conf.sample: do not add sdl to nativesdk qemu config
      maintainers.inc: give python recipes to Oleksandr Kravchuk
      python-numpy: remove the python 2.x version of the recipe
      python-scons: remove the python 2.x version of the recipe
      python-nose: remove the python 2.x version of the recipe
      lib/oeqa/utils/qemurunner.py: add runqemuparams after kvm/nographic/snapshot/slirp
      mesa: enable glx-tls option in native and nativesdk builds
      insane.bbclass: in file-rdeps do not look into RDEPENDS recursively
      sudo: correct SRC_URI
      ovmf: fix upstream version check

Andreas Obergschwandtner (1):
      bzip2: set the autoconf package version to the recipe version

Anuj Mittal (11):
      mpg123: upgrade 1.25.10 -> 1.25.11
      libsdl: remove
      pulseaudio: don't include consolekit when systemd is enabled
      libsdl2: upgrade 2.0.9 -> 2.0.10
      grub: upgrade 2.02 -> 2.04
      patch: fix CVE-2019-13636
      python: fix CVE-2018-20852
      python: CVE-2019-9947 is same as CVE-2019-9740
      libtasn1: upgrade 4.13 -> 4.14
      pango: upgrade 1.42.4 -> 1.44.3
      harfbuzz: upgrade 2.4.0 -> 2.5.3

Bartosz Golaszewski (1):
      qemu: add a patch fixing the native build on newer kernels

Bedel, Alban (3):
      rng-tools: start rngd early in the boot process again
      kernel-uboot: remove useless special casing of arm64 Image
      boost: Fix build and enable context and coroutines on aarch64

Bruce Ashfield (2):
      linux-yocto/4.19: update to v4.19.61
      linux-yocto-dev: bump to 5.3-rcX

Changqing Li (6):
      runqemu: add lockfile for port used when slirp enabled
      runqemu: fix get portlock fail for multi users
      qemuboot-x86: move QB_SYSTEM_NAME to corresponding conf
      genericx86-64.conf/genericx86.conf: add QB_SYSTEM_NAME
      grub/grub-efi: fix conflict for aach64
      go-runtime: remove conflict files from -dev packages

Chen Qi (1):
      sudo: use nonarch_libdir instead of libdir for tmpfiles.d

Chin Huat Ang (1):
      cve-update-db-native: fix https proxy issues

Chris Laplante via bitbake-devel (1):
      bitbake: fetch2/wget: avoid 'maximum recursion depth' RuntimeErrors when handling 403 codes

Daniel Ammann (2):
      image_types: Remove remnants of hdddirect
      bitbake: toaster: Sync list of fs_types with oe-core

Denys Dmytriyenko (2):
      wayland-protocols: upgrade 1.17 -> 1.18
      weston: upgrade 6.0.0 -> 6.0.1

Diego Rondini (1):
      image_types.bbclass: make gzipped images rsyncable

Dmitry Eremin-Solenikov (1):
      kernel.bbclass: fix installation of modules signing certificates

Frederic Ouellet (1):
      systemd: Add partial support of drop-in configuration files to systemd-systemctl-native

Hongxu Jia (1):
      grub: add grub-native

Jason Wessel (6):
      sqlite3: Fix zlib determinism problem
      pseudo: Fix openat() with a symlink pointing to a directory
      image_types_wic.bbclass: Copy the .wks and .env files to deploy image dir
      wic: Add partition type for msdos partition tables
      wic: Make disk partition size consistently computed
      dpkg: Provide update-alternative for start-stop-daemon

Johann Fridriksson (1):
      ruby: Adding zlib-native to native dependencies

Joshua Lock via Openembedded-core (3):
      sstate: fix log message
      classes/sstate: don't use unsigned sstate when verification enabled
      classes/sstate: regenerate sstate when signing enabled

Joshua Watt (1):
      bitbake: hashserv: SQL Optimizations

Kai Kang (3):
      subversion: add packageconfig boost
      epiphany: set imcompatible with tune mips
      e2fsprogs: 1.44.5 -> 1.45.3

Khem Raj (23):
      strace: Upgrade to 5.2
      linux-libc-header: Fix ptrace.h and prctl.h conflict on aarch64
      libnss-nis: Fix build with glibc 2.30
      lttng-ust: Check for gettid libc API
      ltp: Fix build with glibc 2.30
      lttng-tools: Fix build with glibc 2.30
      xserver-xorg: Backport patch to remove using sys/io.h
      Apache-2.0-with-LLVM-exception: Add new license file
      libedit: Move from meta-oe
      groff: Fix math.h inclusion from system headers issue
      webkitgtk: Fix compile failures with clang
      glibc: Update to glibc 2.30
      virglrender: Fix endianness check on musl
      syslinux: Override hardcoded toolnames in Makefile
      systemd-boot: Add option to specify cross objcopy and use it
      mesa,llvm,meson: Update llvm to 8.0.1 plus define and use LLVM version globally
      musl: Update to master tip
      oeqa/buildgalculator.py: Add dependency on gtk+3
      oeqa/parselogs: grep for exact errors list keywords
      gcc-runtime: Move content from gcclibdir into libdir
      gdb: Do not set musl specific CFLAGS
      linuxloader: Add entries for riscv64
      musl: Delete GLIBC_LDSO before creating symlink with lnr

Luca Boccassi (1):
      python3-pygobject: remove python3-setuptools from RDEPENDS

Mads Andreasen (1):
      bitbake: fetch2/npm: Use npm pack to download node modules instead of wget

Mark Hatle (2):
      glibc-package.inc: Add linux-libc-headers-dev to glibc-dev
      bitbake: layerindexlib: Fix parsing of recursive layer dependencies

Martin Jansa (3):
      icecc.bbclass: catch subprocess.CalledProcessError
      powertop: import a fix from buildroot
      meson: backport fix for builds with -Werror=return-type

Ming Liu (5):
      libx11-compose-data: add recipe
      libxkbcommon: RDEPENDS on libx11 compose data
      weston: change to use meson build system
      license_image.bbclass: drop invalid comments
      opensbi: handle deploy task under sstate

Naveen Saini (2):
      gdk-pixbuf: enable x11 PACKAGECONFIG option
      image_types_wic: add syslinux-native dependency conditional

Oleksandr Kravchuk (17):
      python3-pip: update to 19.2.1
      python3-git: update to 2.1.12
      ethtool: update to 5.2
      python3-git: update to 2.1.13
      xorgproto: update to 2019.1
      xserver-xorg: update to 1.20.5
      ell: update to 0.21
      libinput: update to 1.14.0
      wpa-supplicant: update to 2.9
      aspell: update to 0.60.7
      linux-firmware: add PE back
      xf86-input-libinput: update to 0.29.0
      git: update to 2.22.1
      xrandr: update to 1.5.1
      python3-git: update to 3.0.0
      librepo: update to 1.10.5
      libevent: update to 2.1.11

Pascal Bach (2):
      cmake: 3.14.5 -> 3.15.1
      cmake: 3.15.1 -> 3.15.2

Paul Eggleton (2):
      scripts/create-pull-request: improve handling of non-SSH remote URLs
      scripts/create-pull-request: fix putting subject containing / into cover letter

Piotr Tworek (2):
      pulseaudio: Backport upstream fix new alsa compatibility.
      libdrm: Move amdgpu.ids file into libdrm-amdgpu package.

Randy MacLeod (1):
      ptest-runner: update from 2.3.1 to 2.3.2

Rasmus Villemoes (1):
      iproute2: drop pointless configure-cross.patch

Ricardo Neri (5):
      ovmf: Update to version edk2-stable201905
      ovmf: Set PV
      ovmf: Use HOSTTOOLS' python3
      ovmf: Generate test Platform key and first Key Exchange Key
      runqemu: Add support to handle EnrollDefaultKeys PK/KEK1 certificate

Ricardo Ribalda Delgado (2):
      packagegroup-core-base-utils: Make it machine specific
      inetutils: Fix abort on invalid files

Richard Purdie (50):
      package: Improve determinism
      sstate: Reduce race windows
      bitbake: siggen: Import unihash code from OE-Core
      bitbake: cache: Add SimpleCache class
      bitbake: runqueue: Improve scenequeue processing logic
      bitbake: siggen: Add new unitaskhashes data variable which is cached
      bitbake: siggen: Convert to use self.unitaskhashes
      bitbake: runqueue: Enable dynamic task adjustment to hash equivalency
      bitbake: runqueue: Improve determinism
      bitbake: cooker/hashserv: Allow autostarting of a local hash server using BB_HASHSERVE
      bitbake: hashserv: Turn off sqlite synchronous mode
      bitbake: prserv: Use a memory journal
      bitbake: hashserv: Use separate threads for answering requests and handling them
      bitbake: hashserv: Switch from threads to multiprocessing
      bitbake: runqueue: Clean up BB_HASHCHECK_FUNCTION API
      bitbake: siggen: Clean up task reference formats
      bitbake: build/utils: Drop bb.build.FuncFailed
      bitbake: tests/runqueue: Add hashserv+runqueue test
      bitbake: bitbake: Bump version to 1.43.1 for API changes
      sanity.conf: Require bitbake 1.43.1
      classes/lib: Remove bb.build.FuncFailed
      sstatesig: Move unihash siggen code to bitbake
      sstatesig: Add debug for incorrect hash server settings
      sstatesig: Adpat to recent bitbake hash equiv runqueue changes
      sstatesig: Update to handle BB_HASHSERVE
      sstate/sstatesig: Update to new form of BB_HASHCHECK_FUNCTION
      sstatesig: Updates to match bitbake siggen changes
      gstreamer: Add fix for glibc 2.30
      sstatesig: Fix leftover splitting issue from siggen change
      python3-pygobject: Add missing pkgutil RDEPENDS
      bitbake: runqueue: Fix corruption issue
      bitbake: runqueue: Improve setscene task handling logic
      bitbake: tests/runqueue: Add further hash equivalence tests
      bitbake: cooker: Improve hash server startup code to avoid exit tracebacks
      bitbake: runqueue: Wait for covered tasks to complete before trying setscene
      bitbake: runqueue: Fix next_buildable_task performance problem
      bitbake: runqueue: Improve scenequeue debugging
      bitbake: runqueue: Recompute holdoff tasks from scratch
      bitbake: runqueue: Fix event timing race
      bitbake: runqueue: Drop debug statement causing performance issues
      bitbake: runqueue: Add further debug information
      bitbake: runqueue: Add missing setscene task corner case
      bitbake: runqueue: Ensure we clear the stamp cache
      poky: Retire opensuse 42.3 from SANITY_TESTED_DISTROS
      gcc-cross-canadian: Drop obsolete shlibs exclusion
      bitbake: tests/runqueue: Fix tests
      bitbake: runqueue: Fix data corruption problem
      bitbake: runqueue: Ensure data is handled correctly
      bitbake: hashserv: Ensure we don't accumulate sockets in TIME_WAIT state
      bitbake: runqueue: Ensure target_tids is filtered

Robert Yang (3):
      bitbake: cooker: Cleanup the queue before call process.join()
      bitbake: knotty: Fix for the Second Keyboard Interrupt
      bitbake: bitbake: server/process: Handle BBHandledException to avoid unexpected exceptions

Ross Burton (23):
      libidn2: remove build paths from libidn2.pc
      gnutls: don't use HOSTTOOLS_DIR/bash as a shell on target
      libical: upgrade to 3.0.5
      perl: fix whitespace
      perl: add PACKAGECONFIG for db
      fortran-helloworld: neaten recipe
      python3: remove empty python3-distutils-staticdev
      python3: support recommends in manifest
      python3: split out the Windows distutils installer stubs
      insane: check if the recipe incorrectly uses DEPENDS_${PN}
      libxx86misc: remove this now redundant library
      xserver-xorg: clean up xorgproto dependencies
      xserver-xorg: add PACKAGECONFIG for DGA
      xdpyinfo: don't depend on DGA
      libxx86dga: remove obsolete client libary
      xserver-xorg: remove embedded build path in the source
      libx11: update to 1.6.8
      sanity: update for new bb.build.exec_func() behaviour
      libx11-diet: remove
      qemu: fix patch Upstream-Status
      xserver-xorg: refresh build path removal patch
      waffle: upgrade 1.5.2 -> 1.6.0
      libx11: replace libtool patch with upstreamed patch

Tim Blechmann (1):
      deb: allow custom dpkg command

Trevor Gamblin (2):
      gzip: update ptest package dependencies
      patch: fix CVE-2019-13638

Wenlin Kang (1):
      db: add switch for building database verification

Will Page (1):
      uboot: fixes to uboot-extlinux-config attribute values

William Bourque (1):
      meta/lib/oeqa: Remove ext4 for bootimg-biosplusefi

Yi Zhao (1):
      libx11-compose-data: upgrade 1.6.7 -> 1.6.8

Yuan Chao (4):
      glib-2.0:upgrade 2.60.5 -> 2.60.6
      nettle:upgrade 3.4.1 -> 3.5.1
      python3-pbr:upgrade 5.4.1 -> 5.4.2
      gpgme:upgrade 1.13.0 -> 1.13.1

Zang Ruochen (8):
      msmtp: upgrade 1.8.4 -> 1.8.5
      curl: upgrade 7.65.2 -> 7.65.3
      iso-codes: upgrade 4.2 -> 4.3
      python-scons:upgrade 3.0.5 -> 3.1.0
      libgudev:upgrade 232 -> 233
      libglu:upgrade 9.0.0 -> 9.0.1
      man-db:upgrade 2.8.5 -> 2.8.6.1
      libnewt:upgrade 0.52.20 -> 0.52.21

Zheng Ruoqin (1):
      python3-mako: 1.0.14 -> 1.1.0

Zoltan Kuscsik (1):
      kmscube: update to latest revision

Change-Id: I2cd1a0d59da46725b1aba5a79b63eb6121b3c79e
Signed-off-by: Brad Bishop
---
 poky/meta/lib/oe/copy_buildsystem.py |   2 +-
 poky/meta/lib/oe/gpg_sign.py         |   5 +-
 poky/meta/lib/oe/package.py          |   2 +-
 poky/meta/lib/oe/sstatesig.py        | 244 ++++++-----------------------------
 poky/meta/lib/oe/useradd.py          |   2 +-
 poky/meta/lib/oe/utils.py            |   2 +-
 6 files changed, 46 insertions(+), 211 deletions(-)

(limited to 'poky/meta/lib/oe')

diff --git a/poky/meta/lib/oe/copy_buildsystem.py b/poky/meta/lib/oe/copy_buildsystem.py
index 246ff5825..cb663b21c 100644
--- a/poky/meta/lib/oe/copy_buildsystem.py
+++ b/poky/meta/lib/oe/copy_buildsystem.py
@@ -174,7 +174,7 @@ class BuildSystem(object):
 def generate_locked_sigs(sigfile, d):
     bb.utils.mkdirhier(os.path.dirname(sigfile))
     depd = d.getVar('BB_TASKDEPDATA', False)
-    tasks = ['%s.%s' % (v[2], v[1]) for v in depd.values()]
+    tasks = ['%s:%s' % (v[2], v[1]) for v in depd.values()]
     bb.parse.siggen.dump_lockedsigs(sigfile, tasks)
 
 def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, pruned_output):
diff --git a/poky/meta/lib/oe/gpg_sign.py b/poky/meta/lib/oe/gpg_sign.py
index 2fd8c3b1a..d7624804d 100644
--- a/poky/meta/lib/oe/gpg_sign.py
+++ b/poky/meta/lib/oe/gpg_sign.py
@@ -89,8 +89,7 @@ class LocalSigner(object):
             (_, stderr) = job.communicate(passphrase.encode("utf-8"))
 
             if job.returncode:
-                raise bb.build.FuncFailed("GPG exited with code %d: %s" %
-                                          (job.returncode, stderr.decode("utf-8")))
+                bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
 
         except IOError as e:
             bb.error("IO error (%s): %s" % (e.errno, e.strerror))
@@ -108,7 +107,7 @@ class LocalSigner(object):
             ver_str = subprocess.check_output(cmd).split()[2].decode("utf-8")
             return tuple([int(i) for i in ver_str.split("-")[0].split('.')])
         except subprocess.CalledProcessError as e:
-            raise bb.build.FuncFailed("Could not get gpg version: %s" % e)
+            bb.fatal("Could not get gpg version: %s" % e)
 
 
     def verify(self, sig_file):
diff --git a/poky/meta/lib/oe/package.py b/poky/meta/lib/oe/package.py
index b59513227..b8585d425 100644
--- a/poky/meta/lib/oe/package.py
+++ b/poky/meta/lib/oe/package.py
@@ -265,7 +265,7 @@ def read_shlib_providers(d):
         bb.debug(2, "Reading shlib providers in %s" % (dir))
         if not os.path.exists(dir):
             continue
-        for file in os.listdir(dir):
+        for file in sorted(os.listdir(dir)):
             m = list_re.match(file)
             if m:
                 dep_pkg = m.group(1)
diff --git a/poky/meta/lib/oe/sstatesig.py b/poky/meta/lib/oe/sstatesig.py
index 13af16e47..0c7a6f5ed 100644
--- a/poky/meta/lib/oe/sstatesig.py
+++ b/poky/meta/lib/oe/sstatesig.py
@@ -59,7 +59,7 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
     # is machine specific.
     # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
     # and we reccomend a kernel-module, we exclude the dependency.
-    depfn = dep.rsplit(".", 1)[0]
+    depfn = dep.rsplit(":", 1)[0]
     if dataCache and isKernel(depfn) and not isKernel(fn):
         for pkg in dataCache.runrecs[fn]:
             if " ".join(dataCache.runrecs[fn][pkg]).find("kernel-module-") != -1:
@@ -142,8 +142,10 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
             self.dump_lockedsigs(sigfile)
         return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
 
-    def get_taskhash(self, fn, task, deps, dataCache):
-        h = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskhash(fn, task, deps, dataCache)
+    def get_taskhash(self, tid, deps, dataCache):
+        h = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskhash(tid, deps, dataCache)
+
+        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
 
         recipename = dataCache.pkg_fn[fn]
         self.lockedpnmap[fn] = recipename
@@ -153,34 +155,23 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
         if recipename in self.unlockedrecipes:
             unlocked = True
         else:
-            def get_mc(tid):
-                tid = tid.rsplit('.', 1)[0]
-                if tid.startswith('mc:'):
-                    elems = tid.split(':')
-                    return elems[1]
             def recipename_from_dep(dep):
-                # The dep entry will look something like
-                # /path/path/recipename.bb.task, virtual:native:/p/foo.bb.task,
-                # ...
-
-                fn = dep.rsplit('.', 1)[0]
+                fn = bb.runqueue.fn_from_tid(dep)
                 return dataCache.pkg_fn[fn]
 
-            mc = get_mc(fn)
             # If any unlocked recipe is in the direct dependencies then the
             # current recipe should be unlocked as well.
-            depnames = [ recipename_from_dep(x) for x in deps if mc == get_mc(x)]
+            depnames = [ recipename_from_dep(x) for x in deps if mc == bb.runqueue.mc_from_tid(x)]
             if any(x in y for y in depnames for x in self.unlockedrecipes):
                 self.unlockedrecipes[recipename] = ''
                 unlocked = True
 
         if not unlocked and recipename in self.lockedsigs:
             if task in self.lockedsigs[recipename]:
-                k = fn + "." + task
                 h_locked = self.lockedsigs[recipename][task][0]
                 var = self.lockedsigs[recipename][task][1]
-                self.lockedhashes[k] = h_locked
-                self.taskhash[k] = h_locked
+                self.lockedhashes[tid] = h_locked
+                self.taskhash[tid] = h_locked
 
                 #bb.warn("Using %s %s %s" % (recipename, task, h))
                 if h != h_locked:
@@ -192,36 +183,35 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
         return h
 
     def dump_sigtask(self, fn, task, stampbase, runtime):
-        k = fn + "." + task
-        if k in self.lockedhashes:
+        tid = fn + ":" + task
+        if tid in self.lockedhashes:
             return
         super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)
 
     def dump_lockedsigs(self, sigfile, taskfilter=None):
         types = {}
-        for k in self.runtaskdeps:
+        for tid in self.runtaskdeps:
             if taskfilter:
-                if not k in taskfilter:
+                if not tid in taskfilter:
                     continue
-            fn = k.rsplit(".",1)[0]
+            fn = bb.runqueue.fn_from_tid(tid)
             t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
             t = 't-' + t.replace('_', '-')
             if t not in types:
                 types[t] = []
-            types[t].append(k)
+            types[t].append(tid)
 
         with open(sigfile, "w") as f:
             l = sorted(types)
             for t in l:
                 f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
                 types[t].sort()
-                sortedk = sorted(types[t], key=lambda k: self.lockedpnmap[k.rsplit(".",1)[0]])
-                for k in sortedk:
-                    fn = k.rsplit(".",1)[0]
-                    task = k.rsplit(".",1)[1]
-                    if k not in self.taskhash:
+                sortedtid = sorted(types[t], key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
+                for tid in sortedtid:
+                    (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
+                    if tid not in self.taskhash:
                         continue
-                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n")
+                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[tid] + " \\\n")
                 f.write('    "\n')
             f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))
 
@@ -229,25 +219,26 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
         with open(sigfile, "w") as f:
             tasks = []
             for taskitem in self.taskhash:
-                (fn, task) = taskitem.rsplit(".", 1)
+                (fn, task) = taskitem.rsplit(":", 1)
                 pn = self.lockedpnmap[fn]
                 tasks.append((pn, task, fn, self.taskhash[taskitem]))
 
             for (pn, task, fn, taskhash) in sorted(tasks):
-                f.write('%s.%s %s %s\n' % (pn, task, fn, taskhash))
+                f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))
 
-    def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d):
+    def checkhashes(self, sq_data, missed, found, d):
         warn_msgs = []
         error_msgs = []
         sstate_missing_msgs = []
-        for task in range(len(sq_fn)):
-            if task not in ret:
+        for tid in sq_data['hash']:
+            if tid not in found:
                 for pn in self.lockedsigs:
-                    if sq_hash[task] in iter(self.lockedsigs[pn].values()):
-                        if sq_task[task] == 'do_shared_workdir':
+                    taskname = bb.runqueue.taskname_from_tid(tid)
+                    if sq_data['hash'][tid] in iter(self.lockedsigs[pn].values()):
+                        if taskname == 'do_shared_workdir':
                             continue
                         sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
-                                               % (pn, sq_task[task], sq_hash[task]))
+                                               % (pn, taskname, sq_data['hash'][tid]))
 
         checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
         if checklevel == 'warn':
@@ -266,176 +257,21 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
         if error_msgs:
             bb.fatal("\n".join(error_msgs))
 
-class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHash):
+class SignatureGeneratorOEEquivHash(bb.siggen.SignatureGeneratorUniHashMixIn, SignatureGeneratorOEBasicHash):
     name = "OEEquivHash"
 
     def init_rundepcheck(self, data):
         super().init_rundepcheck(data)
-        self.server = data.getVar('SSTATE_HASHEQUIV_SERVER')
+        autostart = data.getVar('BB_HASHSERVE')
+        if autostart:
+            self.server = "http://" + autostart
+        else:
+            self.server = data.getVar('SSTATE_HASHEQUIV_SERVER')
+        if not self.server:
+            bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_SERVER or BB_HASHSERVE to be set")
         self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
-        self.unihashes = bb.persist_data.persist('SSTATESIG_UNIHASH_CACHE_v1_' + self.method.replace('.', '_'), data)
-
-    def get_taskdata(self):
-        return (self.server, self.method) + super().get_taskdata()
-
-    def set_taskdata(self, data):
-        self.server, self.method = data[:2]
-        super().set_taskdata(data[2:])
-
-    def __get_task_unihash_key(self, task):
-        # TODO: The key only *needs* to be the taskhash, the task is just
-        # convenient
-        return '%s:%s' % (task, self.taskhash[task])
-
-    def get_stampfile_hash(self, task):
-        if task in self.taskhash:
-            # If a unique hash is reported, use it as the stampfile hash. This
-            # ensures that if a task won't be re-run if the taskhash changes,
-            # but it would result in the same output hash
-            unihash = self.unihashes.get(self.__get_task_unihash_key(task))
-            if unihash is not None:
-                return unihash
-
-        return super().get_stampfile_hash(task)
-
-    def get_unihash(self, task):
-        import urllib
-        import json
-
-        taskhash = self.taskhash[task]
-
-        key = self.__get_task_unihash_key(task)
-
-        # TODO: This cache can grow unbounded. It probably only needs to keep
-        # for each task
-        unihash = self.unihashes.get(key)
-        if unihash is not None:
-            return unihash
-
-        # In the absence of being able to discover a unique hash from the
-        # server, make it be equivalent to the taskhash. The unique "hash" only
-        # really needs to be a unique string (not even necessarily a hash), but
-        # making it match the taskhash has a few advantages:
-        #
-        # 1) All of the sstate code that assumes hashes can be the same
-        # 2) It provides maximal compatibility with builders that don't use
-        #    an equivalency server
-        # 3) The value is easy for multiple independent builders to derive the
-        #    same unique hash from the same input. This means that if the
-        #    independent builders find the same taskhash, but it isn't reported
-        #    to the server, there is a better chance that they will agree on
-        #    the unique hash.
-        unihash = taskhash
-
-        try:
-            url = '%s/v1/equivalent?%s' % (self.server,
-                    urllib.parse.urlencode({'method': self.method, 'taskhash': self.taskhash[task]}))
-
-            request = urllib.request.Request(url)
-            response = urllib.request.urlopen(request)
-            data = response.read().decode('utf-8')
-
-            json_data = json.loads(data)
-
-            if json_data:
-                unihash = json_data['unihash']
-                # A unique hash equal to the taskhash is not very interesting,
-                # so it is reported it at debug level 2. If they differ, that
-                # is much more interesting, so it is reported at debug level 1
-                bb.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, task, self.server))
-            else:
-                bb.debug(2, 'No reported unihash for %s:%s from %s' % (task, taskhash, self.server))
-        except urllib.error.URLError as e:
-            bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
-        except (KeyError, json.JSONDecodeError) as e:
-            bb.warn('Poorly formatted response from %s: %s' % (self.server, str(e)))
-
-        self.unihashes[key] = unihash
-        return unihash
-
-    def report_unihash(self, path, task, d):
-        import urllib
-        import json
-        import tempfile
-        import base64
-        import importlib
-
-        taskhash = d.getVar('BB_TASKHASH')
-        unihash = d.getVar('BB_UNIHASH')
-        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
-        tempdir = d.getVar('T')
-        fn = d.getVar('BB_FILENAME')
-        key = fn + '.do_' + task + ':' + taskhash
-
-        # Sanity checks
-        cache_unihash = self.unihashes.get(key)
-        if cache_unihash is None:
-            bb.fatal('%s not in unihash cache. Please report this error' % key)
-
-        if cache_unihash != unihash:
-            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))
-
-        sigfile = None
-        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
-        sigfile_link = "depsig.do_%s" % task
-
-        try:
-            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')
-
-            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}
-
-            (module, method) = self.method.rsplit('.', 1)
-            locs['method'] = getattr(importlib.import_module(module), method)
-
-            outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)
-
-            try:
-                url = '%s/v1/equivalent' % self.server
-                task_data = {
-                    'taskhash': taskhash,
-                    'method': self.method,
-                    'outhash': outhash,
-                    'unihash': unihash,
-                    'owner': d.getVar('SSTATE_HASHEQUIV_OWNER')
-                    }
-
-                if report_taskdata:
-                    sigfile.seek(0)
-
-                    task_data['PN'] = d.getVar('PN')
-                    task_data['PV'] = d.getVar('PV')
-                    task_data['PR'] = d.getVar('PR')
-                    task_data['task'] = task
-                    task_data['outhash_siginfo'] = sigfile.read().decode('utf-8')
-
-                headers = {'content-type': 'application/json'}
-
-                request = urllib.request.Request(url, json.dumps(task_data).encode('utf-8'), headers)
-                response = urllib.request.urlopen(request)
-                data = response.read().decode('utf-8')
-
-                json_data = json.loads(data)
-                new_unihash = json_data['unihash']
-
-                if new_unihash != unihash:
-                    bb.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
-                else:
-                    bb.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
-            except urllib.error.URLError as e:
-                bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
-            except (KeyError, json.JSONDecodeError) as e:
-                bb.warn('Poorly formatted response from %s: %s' % (self.server, str(e)))
-        finally:
-            if sigfile:
-                sigfile.close()
-
-                sigfile_link_path = os.path.join(tempdir, sigfile_link)
-                bb.utils.remove(sigfile_link_path)
-
-                try:
-                    os.symlink(sigfile_name, sigfile_link_path)
-                except OSError:
-                    pass
+        if not self.method:
+            bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
 
 # Insert these classes into siggen's namespace so it can see and select them
 bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
@@ -452,7 +288,7 @@ def find_siginfo(pn, taskname, taskhashlist, d):
         if not taskname:
             # We have to derive pn and taskname
             key = pn
-            splitit = key.split('.bb.')
+            splitit = key.split('.bb:')
             taskname = splitit[1]
             pn = os.path.basename(splitit[0]).split('_')[0]
             if key.startswith('virtual:native:'):
diff --git a/poky/meta/lib/oe/useradd.py b/poky/meta/lib/oe/useradd.py
index bedfe0ecb..8fc77568f 100644
--- a/poky/meta/lib/oe/useradd.py
+++ b/poky/meta/lib/oe/useradd.py
@@ -14,7 +14,7 @@ class myArgumentParser(argparse.ArgumentParser):
         error(message)
 
     def error(self, message):
-        raise bb.build.FuncFailed(message)
+        bb.fatal(message)
 
 def split_commands(params):
     params = re.split('''[ \t]*;[ \t]*(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', params.strip())
diff --git a/poky/meta/lib/oe/utils.py b/poky/meta/lib/oe/utils.py
index d686ce1bf..652b2be14 100644
--- a/poky/meta/lib/oe/utils.py
+++ b/poky/meta/lib/oe/utils.py
@@ -486,7 +486,7 @@ def write_ld_so_conf(d):
         f.write(d.getVar("base_libdir") + '\n')
         f.write(d.getVar("libdir") + '\n')
 
-class ImageQAFailed(bb.build.FuncFailed):
+class ImageQAFailed(Exception):
     def __init__(self, description, name=None, logfile=None):
         self.description = description
         self.name = name

--
cgit v1.2.3