path: root/poky/bitbake/lib
author     Andrew Geissler <geissonator@yahoo.com>   2020-12-01 04:58:47 +0300
committer  Andrew Geissler <geissonator@yahoo.com>   2020-12-01 18:27:18 +0300
commit     6ce62a20847b1bd500386c842cf8b801b678bd1c (patch)
tree       69d169c5d109b03251c4300f39cce5a575194e6f /poky/bitbake/lib
parent     f31b8bdb5991e0570aeaf04a9bc50f41d55bccbe (diff)
download   openbmc-6ce62a20847b1bd500386c842cf8b801b678bd1c.tar.xz
poky: subtree update:7231c10430..0ac99625bf
Alban Bedel (1): systemd: Fix systemd when used with busybox less Alejandro Hernandez Samaniego (3): poky-tiny: Reduce busybox size by 13% poky-tiny: Enable size optimization by default python3: Update manifest Alexander Kamensky (1): kexec: arm64: disabled check if kaslr-seed dtb property was wiped Alexander Kanavin (128): systemd-boot: upgrade 246.2 -> 246.6 glib-2.0: upgrade 2.64.5 -> 2.66.1 cmake: update 3.18.2 -> 3.18.4 python3-pygobject: upgrade 3.36.1 -> 3.38.0 libdazzle: upgrade 3.36.0 -> 3.38.0 gobject-introspection: upgrade 1.64.1 -> 1.66.1 json-glib: upgrade 1.4.4 -> 1.6.0 ovmf: update edk2-stable202005 -> edk2-stable202008 gnu-config: update to latest revision file: enable all built-in compression checkers rpm: update 4.15.1 -> 4.16.0 elfutils: update 0.180 -> 0.181 ghostscript: update 9.52 -> 9.53.3 ltp: update 20200515 -> 20200930 gsettings-desktop-schemas: update 3.36.1 -> 3.38.0 libsecret: update 0.20.3 -> 0.20.4 mesa: update 20.1.8 -> 20.2.1 xf86-video-vesa: update 2.4.0 -> 2.5.0 lttng-modules: update 2.12.2 -> 2.12.3 webkitgtk: update 2.28.4 -> 2.30.1 dos2unix: update 7.4.1 -> 7.4.2 gnutls: update 3.16.4 -> 3.16.5 libcap: update 2.43 -> 2.44 vte: update 0.60.3 -> 0.62.1 libhandy: upgrade 0.0.13 -> 1.0.0 libportal: add a recipe epiphany: upgrade 3.36.4 -> 3.38.1 gtk-doc: upgrade 1.32 -> 1.33.0 rpm: adjust MIPS64 N32 support apt: remove host contamination with gtest opkg-utils: correct priority matching in update-alternatives libxml2: add a patch to fix python 3.9 support python: update 3.8.5 -> 3.9.0 glib-2.0: update 2.66.1 -> 2.66.2 json-glib: fix reproducibility spirv-tools: correctly set PV spirv-tools: upgrade 2019.5 -> 2020.5 glslang: fix upstream version check glslang: upgrade 8.13.3559 -> 8.13.3743 glslang: bump to a newer commit shaderc: upgrade 2019.0 -> 2020.3 vulkan: update 1.2.135 -> 1.2.154 vulkan-samples: replace vulkan-demos piglit: upgrade to latest revision acpica: upgrade 20200717 -> 20200925 adwaita-icon-theme: upgrade 3.36.1 -> 3.38.0 at-spi2-atk: upgrade 2.34.2 -> 2.38.0 at-spi2-core: upgrade 2.36.1 -> 2.38.0 bison: upgrade 3.7.2 -> 3.7.3 createrepo-c: upgrade 0.16.0 -> 0.16.1 curl: upgrade 7.72.0 -> 7.73.0 debianutils: upgrade 4.11.1 -> 4.11.2 dhcpcd: upgrade 9.2.0 -> 9.3.1 dmidecode: upgrade 3.2 -> 3.3 dnf: upgrade 4.2.23 -> 4.4.0 ethtool: upgrade 5.8 -> 5.9 expat: upgrade 2.2.9 -> 2.2.10 gcr: upgrade 3.36.0 -> 3.38.0 glib-networking: upgrade 2.64.3 -> 2.66.0 gtk+3: upgrade 3.24.22 -> 3.24.23 help2man: upgrade 1.47.15 -> 1.47.16 i2c-tools: upgrade 4.1 -> 4.2 iw: upgrade 5.8 -> 5.9 kmscube: upgrade to latest revision less: upgrade 562 -> 563 libdnf: upgrade 0.48.0 -> 0.54.2 libgudev: upgrade 233 -> 234 libinput: upgrade 1.16.1 -> 1.16.2 libuv: upgrade 1.39.0 -> 1.40.0 libva: upgrade 2.8.0 -> 2.9.0 libva-utils: update 2.8.0 -> 2.9.1 libwpe: upgrade 1.7.1 -> 1.8.0 libxkbcommon: upgrade 0.10.0 -> 1.0.1 openssh: upgrade 8.3p1 -> 8.4p1 openssl: upgrade 1.1.1g -> 1.1.1h strace: upgrade 5.8 -> 5.9 sudo: upgrade 1.9.3 -> 1.9.3p1 vala: upgrade 0.48.9 -> 0.50.1 wpebackend-fdo: upgrade 1.7.1 -> 1.8.0 xkeyboard-config: upgrade 2.30 -> 2.31 u-boot: upgrade 2020.07 -> 2020.10 usbutils: upgrade 012 -> 013 nfs-utils: upgrade 2.5.1 -> 2.5.2 dropbear: upgrade 2020.80 -> 2020.81 btrfs-tools: upgrade 5.7 -> 5.9 git: upgrade 2.28.0 -> 2.29.2 go: upgrade 1.15.2 -> 1.15.3 mtools: upgrade 4.0.24 -> 4.0.25 python3-numpy: upgrade 1.19.1 -> 1.19.3 python3-git: upgrade 3.1.7 -> 3.1.11 python3-pyelftools: upgrade 0.26 -> 0.27 python3-pygments: upgrade 2.6.1 -> 2.7.2 
python3-setuptools: upgrade 49.6.0 -> 50.3.2 asciidoc: upgrade 9.0.2 -> 9.0.4 iptables: upgrade 1.8.5 -> 1.8.6 libsolv: upgrade 0.7.14 -> 0.7.16 stress-ng: upgrade 0.11.21 -> 0.11.23 libhandy: upgrade 1.0.0 -> 1.0.1 freetype: upgrade 2.10.2 -> 2.10.4 linux-firmware: upgrade 20200817 -> 20201022 alsa: upgrade 1.2.3 -> 1.2.4 gstreamer1.0: upgrade 1.18.0 -> 1.18.1 x264: upgrade to latest revision rt-tests/hwlatdetect: upgrade 1.8 -> 1.9 webkitgtk: upgrade 2.30.1 -> 2.30.2 diffoscope: upgrade 160 -> 161 enchant2: upgrade 2.2.9 -> 2.2.12 libassuan: upgrade 2.5.3 -> 2.5.4 libcap-ng: upgrade 0.7.11 -> 0.8 libevdev: upgrade 1.9.1 -> 1.10.0 libgcrypt: upgrade 1.8.6 -> 1.8.7 libmpc: upgrade 1.2.0 -> 1.2.1 libsoup-2.4: upgrade 2.70.0 -> 2.72.0 numactl: upgrade 2.0.13 -> 2.0.14 kea: use odd-even version scheme for updates mesa: fix a build race clutter-gst-3.0: do not call out to host gstreamer plugin scanner conf-notes.txt: mention more important images than just sato weston-init: correctly start under systemd weston-init: fall back to fbdev under x32 wayland-utils: introduce a recipe poky/conf-notes.txt: mention more important images than just sato python3: split python target configuration into own class python3-pycairo: use python3targetconfig distutils3-base.bbclass: use python3targetconfig meta: drop _PYTHON_SYSCONFIGDATA_NAME hacks gpgme: use python3targetconfig bitbake: lib/bb/fetch2/__init__.py: drop _PYTHON_SYSCONFIGDATA_NAME unsetting Alexander Vickberg (1): socat: make building with OpenSSL support optional Alistair (1): weston-init: Fix incorrect idle-time setting Andrej Valek (1): autotools: CONFIG_SHELL defaults Andrey Zhizhikin (1): insane: add GitLab /archive/ tests Anibal Limon (1): recipes-graphics: libxkbcommon disable build of libxkbregistry Anuj Mittal (2): glib-2.0: RDEPEND on dbusmock only when GI_DATA_ENABLED is True distutils-common-base: fix LINKSHARED expansion Bruce Ashfield (17): kernel: provide module.lds for out of tree builds in v5.10+ linux-yocto/5.8: update to v5.8.15 linux-yocto/5.4: update to v5.4.71 linux-yocto/5.8: update to v5.8.16 linux-yocto/5.4: update to v5.4.72 linux-yocto/5.8: update to v5.8.17 linux-yocto/5.4: update to v5.4.73 linux-yocto-dev: move to v5.10-rc linux-yocto/5.4: config cleanup / warnings linux-yocto/5.8: config cleanup / warnings linux-yocto/5.8: update to v5.8.18 linux-yocto/5.4: update to v5.4.75 kernel: relocate copy of module.lds to module compilation task linux-yocto/5.4: perf: Alias SYS_futex with SYS_futex_time64 on 32-bit arches with 64bit time_t linux-yocto/5.8: perf: Alias SYS_futex with SYS_futex_time64 on 32-bit arches with 64bit time_t linux-yocto/5.8: ext4/tipc warning fixups linux-yocto/5.4: update to v5.4.78 Chaitanya Vadrevu (1): isoimage-isohybrid.py: Support adding files/dirs Changqing Li (2): timezone: upgrade to 2020d vulkan-samples: fix do_compile failure Chee Yang Lee (2): bluez5: update to 5.55 ruby: update to 2.7.2 Chris Laplante (4): bitbake: main: extract creation of argument parser into function so it can be utilized externally, e.g. 
by unit tests bitbake: bb.ui: delete __init__.py to make bb.ui a namespace package bitbake: cookerdata: tweak to avoid mutable default argument cases/bbtests.py: ensure PACKAGE_CLASSES is set to RPM for bbtests.BitbakeTests.test_force_task_1 Dan Callaghan (1): gdb: add PACKAGECONFIG for xz (lzma) compression support Denys Dmytriyenko (1): grep: upgrade 3.4 -> 3.5 Denys Zagorui (1): binutils: reproducibility: reuse debug-prefix-map for stabs Federico Pellegrin (1): openssl: Add c_rehash to misc package and add perl runtime dependency Fedor Ross (2): sysvinit: remove bashism to be compatible with dash eudev: remove bashism to be compatible with dash Fredrik Gustafsson (1): package management: Allow dynamic loading of PM Gratian Crisan (1): kernel-module-split.bbclass: identify kernel modconf files as configuration files He Zhe (1): lttng-modules: Backport a patch to fix btrfs build failure Hombourger, Cedric (1): bitbake: fetch2: use relative symlinks for anything pulled from PREMIRRORS Hongxu Jia (1): bitbake: Revert "bb.ui: delete __init__.py to make bb.ui a namespace package" INC@Cisco) (1): kernel-devsrc: improve reproducibility for arm64 Jason Wessel (2): base-files/profile: Add universal resize function systemd-serialgetty: Switch to TERM=linux Jose Quaresma (31): spirv-tools: import from meta-oe to OE core spirv-tools: enable native build and install more header files glslang: add receipe shaderc: add receipe spirv-tools: fix identation and cleanup install append maintainers.inc: Add Jose Quaresma gstreamer1.0: Fix reproducibility issue around libcap gstreamer1.0: upgrade to version 1.18.0 gstreamer1.0-plugins-base: upgrade to version 1.18.0 gstreamer1.0-plugins-base: add new meson option as PACKAGECONFIG gstreamer1.0-plugins-good: upgrade to version 1.18.0 gstreamer1.0-plugins-good: disable new meson options gstreamer1.0-plugins-good: add new meson option as PACKAGECONFIG gstreamer1.0-plugins-bad: upgrade to version 1.18.0 gstreamer1.0-plugins-bad: disable new meson options gstreamer1.0-plugins-bad: add new meson options as PACKAGECONFIG gstreamer1.0-plugins-ugly: upgrade to version 1.18.0 gstreamer1.0-python: upgrade to version 1.18.0 gstreamer1.0-python: install append is not need any more gstreamer1.0-rtsp-server: upgrade to version 1.18.0 gstreamer1.0-vaapi: upgrade to version 1.18.0 gst-examples: upgrade to version 1.18.0 gstreamer1.0-omx: upgrade to version 1.18.0 gstreamer1.0-libav: upgrade to version 1.18.0 gst-devtools: add version 1.18.0 (gst-validate -> gst-devtools) orc: Upgrade 0.4.31 -> 0.4.32 gstreamer1.0-plugins-good: on wayland qt5 needs qtwayland gstreamer1.0-libav: add comercial license flags as ffmpeg needs this gstreamer1.0-plugins-bad: add srt package config knob ffmpeg: add srt package config knob gstreamer1.0-plugins-good: add package config knob for the Raspberry Pi Joseph Reynolds (1): add new extrausers command passwd-expire Joshua Watt (8): documentation: Add Pipenv support systemd: Re-enable chvt as non-root user without polkit python3-pycryptodomex: upgrade 3.9.8 -> 3.9.9 weston-init: Stop running weston as root python3-pycryptodome: upgrade 3.9.8 -> 3.9.9 bitbake: bitbake: hashserve: Add async client bitbake: bitbake: hashserve: Add support for readonly upstream bitbake: bitbake: cache: Remove bad keys() function Kai Kang (1): sudo: fix multilib conflict Khasim Mohammed (1): grub: add grub-nativesdk Khem Raj (34): webkitgtk: Disable gold linker and JIT on riscv init-ifupdown: Define interfaces file for riscv emulators init-ifupdown: Merge all interface 
files for differnet qemus musl: Update to latest master qemuboot.bbclass: Fix a typo musl: Add .file directive in crt assembly files musl: Update to latest rpm: Fix error.h handing properly on musl gdb: Update to 10.x release numactl: Link with libatomic on rv64/rv32 gstreamer: Fix build on 32bit arches with 64bit time_t rt-tests: Enable only for x86/ppc64 architectures lto: Add global LTO distro policy file python3: Enable lto if its in DISTRO_FEATURES lto.inc: Add -ffat-lto-objects and -fuse-linker-plugin lto: Introduce LTOEXTRA variable libaio: Disable LTO weston: Fix linking with LTO lto.inc: Disable LTO for xserver-xorg gcc: Do no parameterize LTO configuration flags puzzles: Check for excessive constant arguments lto.inc: Disable LTO for perf gcc: Handle duplicate names for variables musl: Update to latest master lrzsz: Use Cross AR during compile gawk: Avoid using host ar during cross compile lto.inc: Disable LTO for webkit python-numpy: Add support for riscv32 arch-riscv: Enable qemu-usermode on rv32 python3targetconfig.bbclass: Make py3 dep and tasks only for target recipes go: Update to 1.15.5 binutils: Fix linker errors on chromium/ffmpeg on aarch64 python3-numpy: Upgrade to 1.19.4 python3-numpy: Add ptest Konrad Weihmann (3): oeqa/core/context: expose results as variable oeqa/core/context: initialize _run_end_time testimage: print results for interrupted runs Lee Chee Yang (5): bitbake: BBHandler: prompt error when task name contain expression libproxy: fix CVE-2020-26154 python3: fix CVE-2020-27619 python3: whitelist CVE-2020-15523 qemu: fix CVE-2020-24352 Loic Domaigne (1): roofs_*.bbclass: fix missing vardeps for do_rootfs Luca Boccassi (1): dbus: split -common and -tools out of main package Mark Jonas (4): libsdl2: Fix directfb syntax error libsdl2: Fix directfb SDL_RenderFillRect libbsd: Remove BSD-4-Clause from main package libsdl2: Add directfb to PACKAGECONFIG rdepends Martin Jansa (5): tune-arm9tdmi.inc: include arm9tdmi in PACKAGE_ARCHS gnutls: explicitly set --with-librt-prefix webkitgtk: fix opengl PACKAGECONFIG webkitgtk: fix build with x11 enabled weston: add pam to REQUIRED_DISTRO_FEATURES Matt Madison (1): layer.conf: fix syntax error in PATH setting Max Krummenacher (1): linux-firmware: rdepend on license for all nvidia packages Maxime Roussin-BĂ©langer (3): meta: fix some unresponsive homepages and bugtracker links bitbake: cache: remove unused variables. 
bitbake: monitordisk: remove unused function parameter Mert Kirpici (2): bitbake: fetch2: add zstd support to unpack bitbake: doc/conf.py: add missing import sys Mingli Yu (2): bitbake.conf: Exclude ${CCACHE_DIR} from pseudo database update_udev_hwdb: clean hwdb.bin Nathan Rossi (4): vim: add nativesdk to BBCLASSEXTEND rsync: add nativesdk to BBCLASSEXTEND diffstat: add nativesdk to BBCLASSEXTEND cml1.bbclass: Handle ncurses-native being available via pkg-config Nicolas Dechesne (17): conf: update for release 3.2 poky.yaml: remove unused variables poky.yaml: updates for 3.2 sphinx: releases: add link to 3.1.3 what-i-wish-id-known: replace labels with references to section title sdk-manual: replace labels with references to section title ref-manual: replace labels with references to section title dev-manual: replace labels with references to section title kernel-dev: replace labels with references to section title test-manual: remove unused labels bsp-guide: remove unused labels kernel-dev: remove unused labels profile-manual: remove unused labels sdk-manual: remove unused labels toaster-manual: remove unused labels Makefile: enable parallel build bitbake: docs: Makefile: enable parallel build Norbert Kaminski (1): grub: Add support for RISC-V Paul Barker (11): conf.py: Improve TOC and Outline depth in PDF output conf.py: Add oe_git directive documentation/README: Refer to top-level README for contributions dev-manual-common-tasks: Fix refs to testing branches dev-manual-common-tasks: Update & move patchwork reference dev-manual-common-tasks: Tidy up patch submission process dev-manual-common-tasks: Describe git-send-email accurately dev-manual-common-tasks: Describe how to handle patch feedback dev-manual-common-tasks: Describe how to propose changes to stable branches dev-manual-common-tasks: Re-order patch submission instructions poky.yaml: Define DISTRO_NAME_NO_CAP_LTS Paul Eggleton (10): ref-manual: add reference anchors for each QA check ref-manual: fix for features_check class change ref-manual: QA check updates ref-manual: add PSEUDO_IGNORE_PATHS ref-manual: add IMAGE_VERSION_SUFFIX variable ref-manual: add IMAGE_NAME_SUFFIX variable ref-manual: add migration section for 3.2 ref-manual: add IMAGE_LINK_NAME ref-manual: add migration info for image-artifact-names ref-manual: add migration info about MLPREFIX changes Peter Bergin (2): rt-tests: backport patch that enable build for all archs Revert "rt-tests: Enable only for x86/ppc64 architectures" Purushottam choudhary (1): systemd: selinux hook handling to enumerate nexthop Randy MacLeod (1): libsdl2: Disable video-rpi Randy Witt (4): numactl: Add the recipe for numactl numactl: Remove COMPATIBLE_HOST restrictions numactl: Skip the ptests when numa is not supported rt-tests: Update recipes to use 1.8 Ricardo Salveti (1): dosfstools: add mkfs.vfat to ALTERNATIVE Richard Leitner (4): deb: replace deprecated apt force-yes argument xcb-proto: update to 1.14.1 deb: export INTERCEPT_DIR for remove actions weston-init: introduce WESTON_GROUP Richard Purdie (21): ref-manual/faq: Add entry for why binaries are changed in images dev-manual: Add a note about prelink changing prebuild binaries sstatesig: Log timestamps for hashequiv in reprodubile builds for do_package netbase: Add whitespace to purge bogus hash equivalence from autobuilder scripts/buildhistory_analysis: Avoid tracebacks from file comparision code maintainers: Add myself as numactl maintainer to avoid QA errors bitbake: bitbake: Post release version bump poky.conf: Post release 
version bump libxcb: Fix install file owner/group bitbake: siggen: Remove broken optimisation bitbake: fetch2/git: Document that we won't support passwords in git urls sstatesig: Remove workaround for bitbake taskhash bug ptest-runner: Fix license as it contains 'or later' clause libdnf: Fix license as it contains 'or later' clause alsa-utils: Fix license to GPLv2 only overview-manual-concepts: Fix the compiler bootstrap process bitbake: Add missing documentation Makefile oeqa/commands: Fix compatibility with python 3.9 fs-perms: Ensure /usr/src/debug/ file modes are correct e2fsprogs: Fix a ptest permissions determinism issue uninative: Don't use single sstate for pseudo-native Robert P. J. Day (3): ref-manual/ref-variables: "PACKAGE_FEEDS_ARCHS" -> "PACKAGE_FEED_ARCHS" README: "yocto-project-qs" -> "brief-yoctoprojectqs" adt-manual: delete obsolete ADT manual, and related content Ross Burton (13): rpm: use libgcrypt instead of OpenSSL for cryptography syslinux: add link to upstream discussion in patch json-glib: use PACKAGECONFIG for tests json-glib: update patch status libical: backport a patch to fix build with ICU 68.1 webkitgtk: fix build with ICU 68.1 cve-check: show real PN/PV python3: add CVE-2007-4559 to whitelist sqlite3: add CVE-2015-3717 to whitelist gstreamer1.0-rtsp-server: set CVE_PRODUCT gstreamer1.0-plugins-base: set CVE_PRODUCT bitbake: providers: selected version not available should be a warning cve-update-db-native: handle all-wildcard versions Saul Wold (1): classes/buildhistory: record LICENSE Sinan Kaya (2): volatile-binds: add /srv to mount and install kernel-uboot: allow compression option to be configurable Stacy Gaikovaia (1): valgrind: helgrind: Intercept libc functions Steve Sakoman (3): netbase: update SRC_URI to reflect new file name openssh: whitelist CVE-2014-9278 cups: whitelist CVE-2018-6553 Tim Orling (22): python3-atomicwrites: move from meta-python python3-attrs: move from meta-python python3-iniconfig: move from meta-python python3-more-itertools: move from meta-python python3-pathlib2: move from meta-python python3-toml: move from meta-python python3-py: move from meta-python python3-setuptools-scm: move from meta-python python3-packaging: move from meta-python python3-wcwidth: move from meta-python python3-zipp: move from meta-python python3-importlib-metadata: move from meta-python python3-pluggy: move from meta-python python3-pytest: move from meta-python maintainers.inc: add self for new pytest packages python3-more-itertools: upgrade 8.5.0 -> 8.6.0 python3-importlib-metadata: upgrade 2.0.0 to 3.1.0 python3-pytest: RDEPENDS on python3-toml python3-hypothesis: move from meta-python python3-sortedcontainers: move from meta-python maintainers.inc: add self for new python recipes python3-hypothesis: upgrade 5.41.3 -> 5.41.4 Tom Hochstein (1): mesa: Add xcb-fixes to loader when using x11 and dri3 Vyacheslav Yurkov (1): license_image.bbclass: use canonical name for license files Wonmin Jung (1): kernel: Set proper LD in KERNEL_KCONFIG_COMMAND Yann Dirson (6): systemtap: split examples and python scripts out of main package systemtap: remove extra dependencies systemtap: clarify the relation between exporter and python3-probes feature systemtap: fix install when python3-probes is disabled in PACKAGECONFIG systemtap: split runtime material in its own package systemtap: avoid RDEPENDS on python3-core when not using python3 Yann E. 
MORIN (2): common-licenses: add bzip2-1.0.4 recipes-core/busybox: fixup licensing information Yi Zhao (5): resolvconf: do not install dhclient hooks connman: set service to conflict with systemd-networkd pulseaudio: unify volatiles file name dhcpcd: install dhcpcd to /sbin rather than /usr/sbin dhcpcd: upgrade 9.3.1 -> 9.3.2 Yongxin Liu (2): grub: fix several CVEs in grub 2.04 grub: clean up CVE patches zangrc (18): python3-pycairo: upgrade 1.19.1 -> 1.20.0 iproute2: upgrade 5.8.0 -> 5.9.0 icu: upgrade 67.1 -> 68.1 libdnf: upgrade 0.54.2 -> 0.55.0 libinput: upgrade 1.16.2 -> 1.16.3 enchant2: upgrade 2.2.12 -> 2.2.13 libdrm: upgrade 2.4.102 -> 2.4.103 gmp: upgrade 6.2.0 -> 6.2.1 gpgme: upgrade 1.14.0 -> 1.15.0 libunwind: upgrade 1.4.0 -> 1.5.0 msmtp: upgrade 1.8.12 -> 1.8.13 gtk-doc: upgrade 1.33.0 -> 1.33.1 hdparm: upgrade 9.58 -> 9.60 libcap-ng: upgrade 0.8 -> 0.8.1 libjpeg-turbo: upgrade 2.0.5 -> 2.0.6 libxkbcommon: upgrade 1.0.1 -> 1.0.3 pulseaudio: upgrade 13.0 -> 14.0 wireless-regdb: upgrade 2020.04.29 -> 2020.11.20 Signed-off-by: Andrew Geissler <geissonator@yahoo.com> Change-Id: I22fa6c7160be5ff2105113cc63acc25f8977ae4e
Diffstat (limited to 'poky/bitbake/lib')
-rw-r--r--  poky/bitbake/lib/bb/__init__.py                    2
-rw-r--r--  poky/bitbake/lib/bb/cache.py                       6
-rw-r--r--  poky/bitbake/lib/bb/cookerdata.py                  6
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py            16
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py                  5
-rwxr-xr-x  poky/bitbake/lib/bb/main.py                      333
-rw-r--r--  poky/bitbake/lib/bb/monitordisk.py                 4
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/BBHandler.py    6
-rw-r--r--  poky/bitbake/lib/bb/providers.py                   4
-rw-r--r--  poky/bitbake/lib/bb/siggen.py                      8
-rw-r--r--  poky/bitbake/lib/hashserv/__init__.py             52
-rw-r--r--  poky/bitbake/lib/hashserv/client.py              243
-rw-r--r--  poky/bitbake/lib/hashserv/server.py              149
-rw-r--r--  poky/bitbake/lib/hashserv/tests.py               147
14 files changed, 600 insertions, 381 deletions
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index 09e161fef..b21773734 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "1.48.0"
+__version__ = "1.49.0"
import sys
if sys.version_info < (3, 5, 0):
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 9e0c931a0..b8054e028 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -449,9 +449,7 @@ class Cache(NoCache):
return cachesize
def load_cachefile(self, progress):
- cachesize = self.cachesize()
previous_progress = 0
- previous_percent = 0
for cache_class in self.caches_array:
cachefile = self.getCacheFile(cache_class.cachefile)
@@ -816,10 +814,6 @@ class MulticonfigCache(Mapping):
for k in self.__caches:
yield k
- def keys(self):
- return self.__caches[key]
-
-
def init(cooker):
"""
The Objective: Cache the minimum amount of data possible yet get to the
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index 91cc4347f..c39b56813 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -23,8 +23,8 @@ logger = logging.getLogger("BitBake")
parselog = logging.getLogger("BitBake.Parsing")
class ConfigParameters(object):
- def __init__(self, argv=sys.argv):
- self.options, targets = self.parseCommandLine(argv)
+ def __init__(self, argv=None):
+ self.options, targets = self.parseCommandLine(argv or sys.argv)
self.environment = self.parseEnvironment()
self.options.pkgs_to_build = targets or []
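
The hunk above stops evaluating sys.argv when the function is defined; with argv=None the current sys.argv is read at call time instead. A minimal sketch of the difference (illustrative only, not part of the patch):

    import sys

    def parse_definition_time(argv=sys.argv):
        # the default is bound to whatever object sys.argv referenced at
        # definition time and is never re-evaluated
        return argv

    def parse_call_time(argv=None):
        # falls back to the *current* sys.argv on every call
        return argv or sys.argv

    sys.argv = ["bitbake", "core-image-minimal"]
    print(parse_call_time())         # ['bitbake', 'core-image-minimal']
    print(parse_definition_time())   # whatever sys.argv held at definition time
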
@@ -209,7 +209,7 @@ def findConfigFile(configfile, data):
return None
#
-# We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working
+# We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working
# up to /. If that fails, we search for a conf/bitbake.conf in BBPATH.
#
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 551bfb70f..290773072 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -853,11 +853,6 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
if val:
cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
- # Ensure that a _PYTHON_SYSCONFIGDATA_NAME value set by a recipe
- # (for example via python3native.bbclass since warrior) is not set for
- # host Python (otherwise tools like git-make-shallow will fail)
- cmd = 'unset _PYTHON_SYSCONFIGDATA_NAME; ' + cmd
-
# Disable pseudo as it may affect ssh, potentially causing it to hang.
cmd = 'export PSEUDO_DISABLED=1; ' + cmd
@@ -1026,7 +1021,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
origud.method.build_mirror_data(origud, ld)
return origud.localpath
# Otherwise the result is a local file:// and we symlink to it
- ensure_symlink(ud.localpath, origud.localpath)
+ ensure_symlink(ud.localpath, origud.localpath, relative=True)
+
update_stamp(origud, ld)
return ud.localpath
@@ -1060,7 +1056,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
bb.utils.unlockfile(lf)
-def ensure_symlink(target, link_name):
+def ensure_symlink(target, link_name, relative=False):
if not os.path.exists(link_name):
if os.path.islink(link_name):
# Broken symbolic link
@@ -1071,6 +1067,8 @@ def ensure_symlink(target, link_name):
# same time, in which case we do not want the second task to
# fail when the link has already been created by the first task.
try:
+ if relative is True:
+ target = os.path.relpath(target, os.path.dirname(link_name))
os.symlink(target, link_name)
except FileExistsError:
pass
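
With relative=True, ensure_symlink() rewrites the absolute target into a path relative to the symlink's own directory before calling os.symlink(), so mirror symlinks keep working if the download tree is relocated. A small sketch of the computation (the paths below are hypothetical, not taken from the patch):

    import os

    target = "/srv/mirror/downloads/foo-1.0.tar.gz"
    link_name = "/home/builder/build/downloads/foo-1.0.tar.gz"

    rel = os.path.relpath(target, os.path.dirname(link_name))
    print(rel)  # ../../../../srv/mirror/downloads/foo-1.0.tar.gz
    # os.symlink(rel, link_name) then creates a relative link instead of an
    # absolute one.
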
@@ -1461,6 +1459,10 @@ class FetchMethod(object):
cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
elif file.endswith('.7z'):
cmd = '7za x -y %s 1>/dev/null' % file
+ elif file.endswith('.tzst') or file.endswith('.tar.zst'):
+ cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file
+ elif file.endswith('.zst'):
+ cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
elif file.endswith('.zip') or file.endswith('.jar'):
try:
dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index b97967b48..490d57fbb 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -595,6 +595,11 @@ class Git(FetchMethod):
"""
Return the repository URL
"""
+ # Note that we do not support passwords directly in the git urls. There are several
+ # reasons. SRC_URI can be written out to things like buildhistory and people don't
+ # want to leak passwords like that. Its also all too easy to share metadata without
+ # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as
+ # alternatives so we will not take patches adding password support here.
if ud.user:
username = ud.user + '@'
else:
diff --git a/poky/bitbake/lib/bb/main.py b/poky/bitbake/lib/bb/main.py
index e92e409f0..06bad495a 100755
--- a/poky/bitbake/lib/bb/main.py
+++ b/poky/bitbake/lib/bb/main.py
@@ -119,178 +119,181 @@ warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
-class BitBakeConfigParameters(cookerdata.ConfigParameters):
- def parseCommandLine(self, argv=sys.argv):
- parser = optparse.OptionParser(
- formatter=BitbakeHelpFormatter(),
- version="BitBake Build Tool Core version %s" % bb.__version__,
- usage="""%prog [options] [recipename/target recipe:do_task ...]
+def create_bitbake_parser():
+ parser = optparse.OptionParser(
+ formatter=BitbakeHelpFormatter(),
+ version="BitBake Build Tool Core version %s" % bb.__version__,
+ usage="""%prog [options] [recipename/target recipe:do_task ...]
Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
will provide the layer, BBFILES and other configuration information.""")
- parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None,
- help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
- "not handle any dependencies from other recipes.")
-
- parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True,
- help="Continue as much as possible after an error. While the target that "
- "failed and anything depending on it cannot be built, as much as "
- "possible will be built before stopping.")
-
- parser.add_option("-f", "--force", action="store_true", dest="force", default=False,
- help="Force the specified targets/task to run (invalidating any "
- "existing stamp file).")
-
- parser.add_option("-c", "--cmd", action="store", dest="cmd",
- help="Specify the task to execute. The exact options available "
- "depend on the metadata. Some examples might be 'compile'"
- " or 'populate_sysroot' or 'listtasks' may give a list of "
- "the tasks available.")
-
- parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp",
- help="Invalidate the stamp for the specified task such as 'compile' "
- "and then run the default task for the specified target(s).")
-
- parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
- help="Read the specified file before bitbake.conf.")
-
- parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
- help="Read the specified file after bitbake.conf.")
-
- parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
- help="Enable tracing of shell tasks (with 'set -x'). "
- "Also print bb.note(...) messages to stdout (in "
- "addition to writing them to ${T}/log.do_<task>).")
-
- parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
- help="Increase the debug level. You can specify this "
- "more than once. -D sets the debug level to 1, "
- "where only bb.debug(1, ...) messages are printed "
- "to stdout; -DD sets the debug level to 2, where "
- "both bb.debug(1, ...) and bb.debug(2, ...) "
- "messages are printed; etc. Without -D, no debug "
- "messages are printed. Note that -D only affects "
- "output to stdout. All debug messages are written "
- "to ${T}/log.do_taskname, regardless of the debug "
- "level.")
-
- parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0,
- help="Output less log message data to the terminal. You can specify this more than once.")
-
- parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
- help="Don't execute, just go through the motions.")
-
- parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
- default=[], metavar="SIGNATURE_HANDLER",
- help="Dump out the signature construction information, with no task "
- "execution. The SIGNATURE_HANDLER parameter is passed to the "
- "handler. Two common values are none and printdiff but the handler "
- "may define more/less. none means only dump the signature, printdiff"
- " means compare the dumped signature with the cached one.")
-
- parser.add_option("-p", "--parse-only", action="store_true",
- dest="parse_only", default=False,
- help="Quit after parsing the BB recipes.")
-
- parser.add_option("-s", "--show-versions", action="store_true",
- dest="show_versions", default=False,
- help="Show current and preferred versions of all recipes.")
-
- parser.add_option("-e", "--environment", action="store_true",
- dest="show_environment", default=False,
- help="Show the global or per-recipe environment complete with information"
- " about where variables were set/changed.")
-
- parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
- help="Save dependency tree information for the specified "
- "targets in the dot syntax.")
-
- parser.add_option("-I", "--ignore-deps", action="append",
- dest="extra_assume_provided", default=[],
- help="Assume these dependencies don't exist and are already provided "
- "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
- "graphs more appealing")
-
- parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
- help="Show debug logging for the specified logging domains")
-
- parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
- help="Profile the command and save reports.")
-
- # @CHOICES@ is substituted out by BitbakeHelpFormatter above
- parser.add_option("-u", "--ui", action="store", dest="ui",
- default=os.environ.get('BITBAKE_UI', 'knotty'),
- help="The user interface to use (@CHOICES@ - default %default).")
-
- parser.add_option("", "--token", action="store", dest="xmlrpctoken",
- default=os.environ.get("BBTOKEN"),
- help="Specify the connection token to be used when connecting "
- "to a remote server.")
-
- parser.add_option("", "--revisions-changed", action="store_true",
- dest="revisions_changed", default=False,
- help="Set the exit code depending on whether upstream floating "
- "revisions have changed or not.")
-
- parser.add_option("", "--server-only", action="store_true",
- dest="server_only", default=False,
- help="Run bitbake without a UI, only starting a server "
- "(cooker) process.")
-
- parser.add_option("-B", "--bind", action="store", dest="bind", default=False,
- help="The name/address for the bitbake xmlrpc server to bind to.")
-
- parser.add_option("-T", "--idle-timeout", type=float, dest="server_timeout",
- default=os.getenv("BB_SERVER_TIMEOUT"),
- help="Set timeout to unload bitbake server due to inactivity, "
- "set to -1 means no unload, "
- "default: Environment variable BB_SERVER_TIMEOUT.")
-
- parser.add_option("", "--no-setscene", action="store_true",
- dest="nosetscene", default=False,
- help="Do not run any setscene tasks. sstate will be ignored and "
- "everything needed, built.")
-
- parser.add_option("", "--skip-setscene", action="store_true",
- dest="skipsetscene", default=False,
- help="Skip setscene tasks if they would be executed. Tasks previously "
- "restored from sstate will be kept, unlike --no-setscene")
-
- parser.add_option("", "--setscene-only", action="store_true",
- dest="setsceneonly", default=False,
- help="Only run setscene tasks, don't run any real tasks.")
-
- parser.add_option("", "--remote-server", action="store", dest="remote_server",
- default=os.environ.get("BBSERVER"),
- help="Connect to the specified server.")
-
- parser.add_option("-m", "--kill-server", action="store_true",
- dest="kill_server", default=False,
- help="Terminate any running bitbake server.")
-
- parser.add_option("", "--observe-only", action="store_true",
- dest="observe_only", default=False,
- help="Connect to a server as an observing-only client.")
-
- parser.add_option("", "--status-only", action="store_true",
- dest="status_only", default=False,
- help="Check the status of the remote bitbake server.")
-
- parser.add_option("-w", "--write-log", action="store", dest="writeeventlog",
- default=os.environ.get("BBEVENTLOG"),
- help="Writes the event log of the build to a bitbake event json file. "
- "Use '' (empty string) to assign the name automatically.")
-
- parser.add_option("", "--runall", action="append", dest="runall",
- help="Run the specified task for any recipe in the taskgraph of the specified target (even if it wouldn't otherwise have run).")
-
- parser.add_option("", "--runonly", action="append", dest="runonly",
- help="Run only the specified task within the taskgraph of the specified targets (and any task dependencies those tasks may have).")
+ parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None,
+ help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
+ "not handle any dependencies from other recipes.")
+
+ parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True,
+ help="Continue as much as possible after an error. While the target that "
+ "failed and anything depending on it cannot be built, as much as "
+ "possible will be built before stopping.")
+
+ parser.add_option("-f", "--force", action="store_true", dest="force", default=False,
+ help="Force the specified targets/task to run (invalidating any "
+ "existing stamp file).")
+
+ parser.add_option("-c", "--cmd", action="store", dest="cmd",
+ help="Specify the task to execute. The exact options available "
+ "depend on the metadata. Some examples might be 'compile'"
+ " or 'populate_sysroot' or 'listtasks' may give a list of "
+ "the tasks available.")
+
+ parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp",
+ help="Invalidate the stamp for the specified task such as 'compile' "
+ "and then run the default task for the specified target(s).")
+
+ parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
+ help="Read the specified file before bitbake.conf.")
+
+ parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
+ help="Read the specified file after bitbake.conf.")
+
+ parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
+ help="Enable tracing of shell tasks (with 'set -x'). "
+ "Also print bb.note(...) messages to stdout (in "
+ "addition to writing them to ${T}/log.do_<task>).")
+
+ parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
+ help="Increase the debug level. You can specify this "
+ "more than once. -D sets the debug level to 1, "
+ "where only bb.debug(1, ...) messages are printed "
+ "to stdout; -DD sets the debug level to 2, where "
+ "both bb.debug(1, ...) and bb.debug(2, ...) "
+ "messages are printed; etc. Without -D, no debug "
+ "messages are printed. Note that -D only affects "
+ "output to stdout. All debug messages are written "
+ "to ${T}/log.do_taskname, regardless of the debug "
+ "level.")
+
+ parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0,
+ help="Output less log message data to the terminal. You can specify this more than once.")
+
+ parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
+ help="Don't execute, just go through the motions.")
+
+ parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
+ default=[], metavar="SIGNATURE_HANDLER",
+ help="Dump out the signature construction information, with no task "
+ "execution. The SIGNATURE_HANDLER parameter is passed to the "
+ "handler. Two common values are none and printdiff but the handler "
+ "may define more/less. none means only dump the signature, printdiff"
+ " means compare the dumped signature with the cached one.")
+
+ parser.add_option("-p", "--parse-only", action="store_true",
+ dest="parse_only", default=False,
+ help="Quit after parsing the BB recipes.")
+
+ parser.add_option("-s", "--show-versions", action="store_true",
+ dest="show_versions", default=False,
+ help="Show current and preferred versions of all recipes.")
+
+ parser.add_option("-e", "--environment", action="store_true",
+ dest="show_environment", default=False,
+ help="Show the global or per-recipe environment complete with information"
+ " about where variables were set/changed.")
+
+ parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
+ help="Save dependency tree information for the specified "
+ "targets in the dot syntax.")
+
+ parser.add_option("-I", "--ignore-deps", action="append",
+ dest="extra_assume_provided", default=[],
+ help="Assume these dependencies don't exist and are already provided "
+ "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
+ "graphs more appealing")
+
+ parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
+ help="Show debug logging for the specified logging domains")
+
+ parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
+ help="Profile the command and save reports.")
+
+ # @CHOICES@ is substituted out by BitbakeHelpFormatter above
+ parser.add_option("-u", "--ui", action="store", dest="ui",
+ default=os.environ.get('BITBAKE_UI', 'knotty'),
+ help="The user interface to use (@CHOICES@ - default %default).")
+
+ parser.add_option("", "--token", action="store", dest="xmlrpctoken",
+ default=os.environ.get("BBTOKEN"),
+ help="Specify the connection token to be used when connecting "
+ "to a remote server.")
+
+ parser.add_option("", "--revisions-changed", action="store_true",
+ dest="revisions_changed", default=False,
+ help="Set the exit code depending on whether upstream floating "
+ "revisions have changed or not.")
+
+ parser.add_option("", "--server-only", action="store_true",
+ dest="server_only", default=False,
+ help="Run bitbake without a UI, only starting a server "
+ "(cooker) process.")
+
+ parser.add_option("-B", "--bind", action="store", dest="bind", default=False,
+ help="The name/address for the bitbake xmlrpc server to bind to.")
+
+ parser.add_option("-T", "--idle-timeout", type=float, dest="server_timeout",
+ default=os.getenv("BB_SERVER_TIMEOUT"),
+ help="Set timeout to unload bitbake server due to inactivity, "
+ "set to -1 means no unload, "
+ "default: Environment variable BB_SERVER_TIMEOUT.")
+
+ parser.add_option("", "--no-setscene", action="store_true",
+ dest="nosetscene", default=False,
+ help="Do not run any setscene tasks. sstate will be ignored and "
+ "everything needed, built.")
+
+ parser.add_option("", "--skip-setscene", action="store_true",
+ dest="skipsetscene", default=False,
+ help="Skip setscene tasks if they would be executed. Tasks previously "
+ "restored from sstate will be kept, unlike --no-setscene")
+
+ parser.add_option("", "--setscene-only", action="store_true",
+ dest="setsceneonly", default=False,
+ help="Only run setscene tasks, don't run any real tasks.")
+
+ parser.add_option("", "--remote-server", action="store", dest="remote_server",
+ default=os.environ.get("BBSERVER"),
+ help="Connect to the specified server.")
+
+ parser.add_option("-m", "--kill-server", action="store_true",
+ dest="kill_server", default=False,
+ help="Terminate any running bitbake server.")
+
+ parser.add_option("", "--observe-only", action="store_true",
+ dest="observe_only", default=False,
+ help="Connect to a server as an observing-only client.")
+
+ parser.add_option("", "--status-only", action="store_true",
+ dest="status_only", default=False,
+ help="Check the status of the remote bitbake server.")
+
+ parser.add_option("-w", "--write-log", action="store", dest="writeeventlog",
+ default=os.environ.get("BBEVENTLOG"),
+ help="Writes the event log of the build to a bitbake event json file. "
+ "Use '' (empty string) to assign the name automatically.")
+
+ parser.add_option("", "--runall", action="append", dest="runall",
+ help="Run the specified task for any recipe in the taskgraph of the specified target (even if it wouldn't otherwise have run).")
+
+ parser.add_option("", "--runonly", action="append", dest="runonly",
+ help="Run only the specified task within the taskgraph of the specified targets (and any task dependencies those tasks may have).")
+ return parser
+class BitBakeConfigParameters(cookerdata.ConfigParameters):
+ def parseCommandLine(self, argv=sys.argv):
+ parser = create_bitbake_parser()
options, targets = parser.parse_args(argv)
if options.quiet and options.verbose:
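
Pulling the option definitions into create_bitbake_parser() lets other code (unit tests, external tooling) obtain the parser without instantiating BitBakeConfigParameters. A hypothetical usage sketch, assuming bitbake's lib directory is on sys.path:

    from bb.main import create_bitbake_parser

    parser = create_bitbake_parser()
    options, targets = parser.parse_args(["-k", "core-image-minimal"])
    assert options.abort is False            # -k/--continue stores False into 'abort'
    assert targets == ["core-image-minimal"]
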
@@ -466,7 +469,7 @@ def setup_bitbake(configParams, extrafeatures=None):
logger.info("Retrying server connection (#%d)..." % tryno)
else:
logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc()))
-
+
if not retries:
bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).")
bb.event.print_ui_queue()
diff --git a/poky/bitbake/lib/bb/monitordisk.py b/poky/bitbake/lib/bb/monitordisk.py
index e7c07264a..98f2109ed 100644
--- a/poky/bitbake/lib/bb/monitordisk.py
+++ b/poky/bitbake/lib/bb/monitordisk.py
@@ -59,7 +59,7 @@ def getMountedDev(path):
pass
return None
-def getDiskData(BBDirs, configuration):
+def getDiskData(BBDirs):
"""Prepare disk data for disk space monitor"""
@@ -168,7 +168,7 @@ class diskMonitor:
BBDirs = configuration.getVar("BB_DISKMON_DIRS") or None
if BBDirs:
- self.devDict = getDiskData(BBDirs, configuration)
+ self.devDict = getDiskData(BBDirs)
if self.devDict:
self.spaceInterval, self.inodeInterval = getInterval(configuration)
if self.spaceInterval and self.inodeInterval:
diff --git a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 215f940b6..8a520e307 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -13,7 +13,7 @@
#
import re, bb, os
-import bb.build, bb.utils
+import bb.build, bb.utils, bb.data_smart
from . import ConfHandler
from .. import resolve_file, ast, logger, ParseError
@@ -233,6 +233,10 @@ def feeder(lineno, s, fn, root, statements, eof=False):
if taskexpression.count(word) > 1:
logger.warning("addtask contained multiple '%s' keywords, only one is supported" % word)
+ # Check and warn for having task with exprssion as part of task name
+ for te in taskexpression:
+ if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ):
+ raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn)
ast.handleAddTask(statements, fn, lineno, m)
return
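
The new check rejects task names that embed one of the variable-override keywords from bb.data_smart.__setvar_keyword__ (assumed here to be _append/_prepend/_remove). A standalone sketch of the same test, with the keyword list as an assumption rather than an import:

    SETVAR_KEYWORDS = ("_append", "_prepend", "_remove")

    def contains_setvar_keyword(task_name):
        # mirrors the substring test added above: keyword plus trailing underscore
        return any(("%s_" % kw) in task_name for kw in SETVAR_KEYWORDS)

    print(contains_setvar_keyword("do_compile"))          # False
    print(contains_setvar_keyword("do_deploy_append_x"))  # True -> ParseError in BBHandler
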
diff --git a/poky/bitbake/lib/bb/providers.py b/poky/bitbake/lib/bb/providers.py
index 81459c36d..3f66a3d99 100644
--- a/poky/bitbake/lib/bb/providers.py
+++ b/poky/bitbake/lib/bb/providers.py
@@ -151,7 +151,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
if item:
itemstr = " (for item %s)" % item
if preferred_file is None:
- logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
+ logger.warn("preferred version %s of %s not available%s", pv_str, pn, itemstr)
available_vers = []
for file_set in pkg_pn:
for f in file_set:
@@ -163,7 +163,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
available_vers.append(ver_str)
if available_vers:
available_vers.sort()
- logger.info("versions of %s available: %s", pn, ' '.join(available_vers))
+ logger.warn("versions of %s available: %s", pn, ' '.join(available_vers))
else:
logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 86e0e16f3..0ac395246 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -311,13 +311,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
data = self.basehash[tid]
for dep in self.runtaskdeps[tid]:
- if dep in self.unihash:
- if self.unihash[dep] is None:
- data = data + self.taskhash[dep]
- else:
- data = data + self.unihash[dep]
- else:
- data = data + self.get_unihash(dep)
+ data = data + self.get_unihash(dep)
for (f, cs) in self.file_checksum_values[tid]:
if cs:
diff --git a/poky/bitbake/lib/hashserv/__init__.py b/poky/bitbake/lib/hashserv/__init__.py
index f95e8f43f..55f48410d 100644
--- a/poky/bitbake/lib/hashserv/__init__.py
+++ b/poky/bitbake/lib/hashserv/__init__.py
@@ -3,6 +3,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import asyncio
from contextlib import closing
import re
import sqlite3
@@ -21,6 +22,24 @@ ADDR_TYPE_TCP = 1
# is necessary
DEFAULT_MAX_CHUNK = 32 * 1024
+TABLE_DEFINITION = (
+ ("method", "TEXT NOT NULL"),
+ ("outhash", "TEXT NOT NULL"),
+ ("taskhash", "TEXT NOT NULL"),
+ ("unihash", "TEXT NOT NULL"),
+ ("created", "DATETIME"),
+
+ # Optional fields
+ ("owner", "TEXT"),
+ ("PN", "TEXT"),
+ ("PV", "TEXT"),
+ ("PR", "TEXT"),
+ ("task", "TEXT"),
+ ("outhash_siginfo", "TEXT"),
+)
+
+TABLE_COLUMNS = tuple(name for name, _ in TABLE_DEFINITION)
+
def setup_database(database, sync=True):
db = sqlite3.connect(database)
db.row_factory = sqlite3.Row
@@ -29,23 +48,10 @@ def setup_database(database, sync=True):
cursor.execute('''
CREATE TABLE IF NOT EXISTS tasks_v2 (
id INTEGER PRIMARY KEY AUTOINCREMENT,
- method TEXT NOT NULL,
- outhash TEXT NOT NULL,
- taskhash TEXT NOT NULL,
- unihash TEXT NOT NULL,
- created DATETIME,
-
- -- Optional fields
- owner TEXT,
- PN TEXT,
- PV TEXT,
- PR TEXT,
- task TEXT,
- outhash_siginfo TEXT,
-
+ %s
UNIQUE(method, outhash, taskhash)
)
- ''')
+ ''' % " ".join("%s %s," % (name, typ) for name, typ in TABLE_DEFINITION))
cursor.execute('PRAGMA journal_mode = WAL')
cursor.execute('PRAGMA synchronous = %s' % ('NORMAL' if sync else 'OFF'))
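
Factoring the column list into TABLE_DEFINITION keeps the schema and the column filter (TABLE_COLUMNS) in one place; the interpolation above expands to the same CREATE TABLE statement as before. A quick sketch of what the join produces (illustrative, using the tuple from this patch):

    TABLE_DEFINITION = (
        ("method", "TEXT NOT NULL"),
        ("outhash", "TEXT NOT NULL"),
        ("taskhash", "TEXT NOT NULL"),
        ("unihash", "TEXT NOT NULL"),
        ("created", "DATETIME"),
        ("owner", "TEXT"),
        ("PN", "TEXT"),
        ("PV", "TEXT"),
        ("PR", "TEXT"),
        ("task", "TEXT"),
        ("outhash_siginfo", "TEXT"),
    )

    print(" ".join("%s %s," % (name, typ) for name, typ in TABLE_DEFINITION))
    # method TEXT NOT NULL, outhash TEXT NOT NULL, ... outhash_siginfo TEXT,
    # The trailing comma is immediately followed by the UNIQUE(...) constraint
    # in the final statement, so the generated SQL stays valid.
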
@@ -88,10 +94,10 @@ def chunkify(msg, max_chunk):
yield "\n"
-def create_server(addr, dbname, *, sync=True):
+def create_server(addr, dbname, *, sync=True, upstream=None):
from . import server
db = setup_database(dbname, sync=sync)
- s = server.Server(db)
+ s = server.Server(db, upstream=upstream)
(typ, a) = parse_address(addr)
if typ == ADDR_TYPE_UNIX:
@@ -113,3 +119,15 @@ def create_client(addr):
c.connect_tcp(*a)
return c
+
+async def create_async_client(addr):
+ from . import client
+ c = client.AsyncClient()
+
+ (typ, a) = parse_address(addr)
+ if typ == ADDR_TYPE_UNIX:
+ await c.connect_unix(*a)
+ else:
+ await c.connect_tcp(*a)
+
+ return c
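
create_async_client() gives callers such as the new async server code a connected AsyncClient without the synchronous wrapper. A hypothetical usage sketch (server address and hash are made up; the address syntax follows this module's parse_address() convention, typically "host:port" or "unix://<path>"):

    import asyncio
    from hashserv import create_async_client

    async def main():
        client = await create_async_client("localhost:8686")
        unihash = await client.get_unihash("TestMethod", "00" * 32)
        print(unihash)        # None if the server knows no equivalent hash
        await client.close()

    asyncio.run(main())
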
diff --git a/poky/bitbake/lib/hashserv/client.py b/poky/bitbake/lib/hashserv/client.py
index a29af836d..ae5875d1b 100644
--- a/poky/bitbake/lib/hashserv/client.py
+++ b/poky/bitbake/lib/hashserv/client.py
@@ -3,189 +3,216 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import asyncio
import json
import logging
import socket
import os
-from . import chunkify, DEFAULT_MAX_CHUNK
+from . import chunkify, DEFAULT_MAX_CHUNK, create_async_client
-logger = logging.getLogger('hashserv.client')
+logger = logging.getLogger("hashserv.client")
class HashConnectionError(Exception):
pass
-class Client(object):
+class AsyncClient(object):
MODE_NORMAL = 0
MODE_GET_STREAM = 1
def __init__(self):
- self._socket = None
self.reader = None
self.writer = None
self.mode = self.MODE_NORMAL
self.max_chunk = DEFAULT_MAX_CHUNK
- def connect_tcp(self, address, port):
- def connect_sock():
- s = socket.create_connection((address, port))
-
- s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
- s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
- s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
- return s
+ async def connect_tcp(self, address, port):
+ async def connect_sock():
+ return await asyncio.open_connection(address, port)
self._connect_sock = connect_sock
- def connect_unix(self, path):
- def connect_sock():
- s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- # AF_UNIX has path length issues so chdir here to workaround
- cwd = os.getcwd()
- try:
- os.chdir(os.path.dirname(path))
- s.connect(os.path.basename(path))
- finally:
- os.chdir(cwd)
- return s
+ async def connect_unix(self, path):
+ async def connect_sock():
+ return await asyncio.open_unix_connection(path)
self._connect_sock = connect_sock
- def connect(self):
- if self._socket is None:
- self._socket = self._connect_sock()
-
- self.reader = self._socket.makefile('r', encoding='utf-8')
- self.writer = self._socket.makefile('w', encoding='utf-8')
+ async def _connect(self):
+ if self.reader is None or self.writer is None:
+ (self.reader, self.writer) = await self._connect_sock()
- self.writer.write('OEHASHEQUIV 1.1\n\n')
- self.writer.flush()
+ self.writer.write("OEHASHEQUIV 1.1\n\n".encode("utf-8"))
+ await self.writer.drain()
- # Restore mode if the socket is being re-created
cur_mode = self.mode
self.mode = self.MODE_NORMAL
- self._set_mode(cur_mode)
+ await self._set_mode(cur_mode)
- return self._socket
+ async def close(self):
+ self.reader = None
- def close(self):
- if self._socket is not None:
- self._socket.close()
- self._socket = None
- self.reader = None
+ if self.writer is not None:
+ self.writer.close()
self.writer = None
- def _send_wrapper(self, proc):
+ async def _send_wrapper(self, proc):
count = 0
while True:
try:
- self.connect()
- return proc()
- except (OSError, HashConnectionError, json.JSONDecodeError, UnicodeDecodeError) as e:
- logger.warning('Error talking to server: %s' % e)
+ await self._connect()
+ return await proc()
+ except (
+ OSError,
+ HashConnectionError,
+ json.JSONDecodeError,
+ UnicodeDecodeError,
+ ) as e:
+ logger.warning("Error talking to server: %s" % e)
if count >= 3:
if not isinstance(e, HashConnectionError):
raise HashConnectionError(str(e))
raise e
- self.close()
+ await self.close()
count += 1
- def send_message(self, msg):
- def get_line():
- line = self.reader.readline()
+ async def send_message(self, msg):
+ async def get_line():
+ line = await self.reader.readline()
if not line:
- raise HashConnectionError('Connection closed')
+ raise HashConnectionError("Connection closed")
+
+ line = line.decode("utf-8")
- if not line.endswith('\n'):
- raise HashConnectionError('Bad message %r' % message)
+ if not line.endswith("\n"):
+ raise HashConnectionError("Bad message %r" % message)
return line
- def proc():
+ async def proc():
for c in chunkify(json.dumps(msg), self.max_chunk):
- self.writer.write(c)
- self.writer.flush()
+ self.writer.write(c.encode("utf-8"))
+ await self.writer.drain()
- l = get_line()
+ l = await get_line()
m = json.loads(l)
- if 'chunk-stream' in m:
+ if "chunk-stream" in m:
lines = []
while True:
- l = get_line().rstrip('\n')
+ l = (await get_line()).rstrip("\n")
if not l:
break
lines.append(l)
- m = json.loads(''.join(lines))
+ m = json.loads("".join(lines))
return m
- return self._send_wrapper(proc)
+ return await self._send_wrapper(proc)
- def send_stream(self, msg):
- def proc():
- self.writer.write("%s\n" % msg)
- self.writer.flush()
- l = self.reader.readline()
+ async def send_stream(self, msg):
+ async def proc():
+ self.writer.write(("%s\n" % msg).encode("utf-8"))
+ await self.writer.drain()
+ l = await self.reader.readline()
if not l:
- raise HashConnectionError('Connection closed')
- return l.rstrip()
+ raise HashConnectionError("Connection closed")
+ return l.decode("utf-8").rstrip()
- return self._send_wrapper(proc)
+ return await self._send_wrapper(proc)
- def _set_mode(self, new_mode):
+ async def _set_mode(self, new_mode):
if new_mode == self.MODE_NORMAL and self.mode == self.MODE_GET_STREAM:
- r = self.send_stream('END')
- if r != 'ok':
- raise HashConnectionError('Bad response from server %r' % r)
+ r = await self.send_stream("END")
+ if r != "ok":
+ raise HashConnectionError("Bad response from server %r" % r)
elif new_mode == self.MODE_GET_STREAM and self.mode == self.MODE_NORMAL:
- r = self.send_message({'get-stream': None})
- if r != 'ok':
- raise HashConnectionError('Bad response from server %r' % r)
+ r = await self.send_message({"get-stream": None})
+ if r != "ok":
+ raise HashConnectionError("Bad response from server %r" % r)
elif new_mode != self.mode:
- raise Exception('Undefined mode transition %r -> %r' % (self.mode, new_mode))
+ raise Exception(
+ "Undefined mode transition %r -> %r" % (self.mode, new_mode)
+ )
self.mode = new_mode
- def get_unihash(self, method, taskhash):
- self._set_mode(self.MODE_GET_STREAM)
- r = self.send_stream('%s %s' % (method, taskhash))
+ async def get_unihash(self, method, taskhash):
+ await self._set_mode(self.MODE_GET_STREAM)
+ r = await self.send_stream("%s %s" % (method, taskhash))
if not r:
return None
return r
- def report_unihash(self, taskhash, method, outhash, unihash, extra={}):
- self._set_mode(self.MODE_NORMAL)
+ async def report_unihash(self, taskhash, method, outhash, unihash, extra={}):
+ await self._set_mode(self.MODE_NORMAL)
m = extra.copy()
- m['taskhash'] = taskhash
- m['method'] = method
- m['outhash'] = outhash
- m['unihash'] = unihash
- return self.send_message({'report': m})
-
- def report_unihash_equiv(self, taskhash, method, unihash, extra={}):
- self._set_mode(self.MODE_NORMAL)
+ m["taskhash"] = taskhash
+ m["method"] = method
+ m["outhash"] = outhash
+ m["unihash"] = unihash
+ return await self.send_message({"report": m})
+
+ async def report_unihash_equiv(self, taskhash, method, unihash, extra={}):
+ await self._set_mode(self.MODE_NORMAL)
m = extra.copy()
- m['taskhash'] = taskhash
- m['method'] = method
- m['unihash'] = unihash
- return self.send_message({'report-equiv': m})
-
- def get_taskhash(self, method, taskhash, all_properties=False):
- self._set_mode(self.MODE_NORMAL)
- return self.send_message({'get': {
- 'taskhash': taskhash,
- 'method': method,
- 'all': all_properties
- }})
-
- def get_stats(self):
- self._set_mode(self.MODE_NORMAL)
- return self.send_message({'get-stats': None})
-
- def reset_stats(self):
- self._set_mode(self.MODE_NORMAL)
- return self.send_message({'reset-stats': None})
+ m["taskhash"] = taskhash
+ m["method"] = method
+ m["unihash"] = unihash
+ return await self.send_message({"report-equiv": m})
+
+ async def get_taskhash(self, method, taskhash, all_properties=False):
+ await self._set_mode(self.MODE_NORMAL)
+ return await self.send_message(
+ {"get": {"taskhash": taskhash, "method": method, "all": all_properties}}
+ )
+
+ async def get_stats(self):
+ await self._set_mode(self.MODE_NORMAL)
+ return await self.send_message({"get-stats": None})
+
+ async def reset_stats(self):
+ await self._set_mode(self.MODE_NORMAL)
+ return await self.send_message({"reset-stats": None})
+
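+ # Ask the server to block until its backfill queue has been fully processed;
+ # returns the number of tasks that were queued when the request was made.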
+ async def backfill_wait(self):
+ await self._set_mode(self.MODE_NORMAL)
+ return (await self.send_message({"backfill-wait": None}))["tasks"]
+
+
+class Client(object):
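+ # Synchronous wrapper around AsyncClient: each call listed below is run to
+ # completion on a private event loop owned by this object.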
+ def __init__(self):
+ self.client = AsyncClient()
+ self.loop = asyncio.new_event_loop()
+
+ for call in (
+ "connect_tcp",
+ "connect_unix",
+ "close",
+ "get_unihash",
+ "report_unihash",
+ "report_unihash_equiv",
+ "get_taskhash",
+ "get_stats",
+ "reset_stats",
+ "backfill_wait",
+ ):
+ downcall = getattr(self.client, call)
+ setattr(self, call, self._get_downcall_wrapper(downcall))
+
+ def _get_downcall_wrapper(self, downcall):
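+ # Wrap an AsyncClient coroutine method so that calling it blocks on this
+ # client's event loop until the coroutine completes.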
+ def wrapper(*args, **kwargs):
+ return self.loop.run_until_complete(downcall(*args, **kwargs))
+
+ return wrapper
+
+ @property
+ def max_chunk(self):
+ return self.client.max_chunk
+
+ @max_chunk.setter
+ def max_chunk(self, value):
+ self.client.max_chunk = value
diff --git a/poky/bitbake/lib/hashserv/server.py b/poky/bitbake/lib/hashserv/server.py
index 81050715e..3ff4c51cc 100644
--- a/poky/bitbake/lib/hashserv/server.py
+++ b/poky/bitbake/lib/hashserv/server.py
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from contextlib import closing
+from contextlib import closing, contextmanager
from datetime import datetime
import asyncio
import json
@@ -12,8 +12,9 @@ import math
import os
import signal
import socket
+import sys
import time
-from . import chunkify, DEFAULT_MAX_CHUNK
+from . import chunkify, DEFAULT_MAX_CHUNK, create_async_client, TABLE_COLUMNS
logger = logging.getLogger('hashserv.server')
@@ -111,16 +112,40 @@ class Stats(object):
class ClientError(Exception):
pass
+def insert_task(cursor, data, ignore=False):
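+ # Build a parameterized INSERT into tasks_v2 from the data keys; with
+ # ignore=True, conflicting rows are skipped (INSERT OR IGNORE).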
+ keys = sorted(data.keys())
+ query = '''INSERT%s INTO tasks_v2 (%s) VALUES (%s)''' % (
+ " OR IGNORE" if ignore else "",
+ ', '.join(keys),
+ ', '.join(':' + k for k in keys))
+ cursor.execute(query, data)
+
+async def copy_from_upstream(client, db, method, taskhash):
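+ # Fetch the full record for a taskhash from the upstream server and mirror
+ # it into the local database; returns None if the upstream has no entry.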
+ d = await client.get_taskhash(method, taskhash, True)
+ if d is not None:
+ # Filter out unknown columns
+ d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
+
+ with closing(db.cursor()) as cursor:
+ insert_task(cursor, d)
+ db.commit()
+
+ return d
+
class ServerClient(object):
FAST_QUERY = 'SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
ALL_QUERY = 'SELECT * FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
- def __init__(self, reader, writer, db, request_stats):
+ def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream):
self.reader = reader
self.writer = writer
self.db = db
self.request_stats = request_stats
self.max_chunk = DEFAULT_MAX_CHUNK
+ self.backfill_queue = backfill_queue
+ self.upstream = upstream
self.handlers = {
'get': self.handle_get,
@@ -130,10 +155,18 @@ class ServerClient(object):
'get-stats': self.handle_get_stats,
'reset-stats': self.handle_reset_stats,
'chunk-stream': self.handle_chunk,
+ 'backfill-wait': self.handle_backfill_wait,
}
async def process_requests(self):
+ if self.upstream is not None:
+ self.upstream_client = await create_async_client(self.upstream)
+ else:
+ self.upstream_client = None
+
try:
self.addr = self.writer.get_extra_info('peername')
logger.debug('Client %r connected' % (self.addr,))
@@ -171,6 +204,9 @@ class ServerClient(object):
except ClientError as e:
logger.error(str(e))
finally:
+ if self.upstream_client is not None:
+ await self.upstream_client.close()
+
self.writer.close()
async def dispatch_message(self, msg):
@@ -239,15 +275,19 @@ class ServerClient(object):
if row is not None:
logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
d = {k: row[k] for k in row.keys()}
-
- self.write_message(d)
+ elif self.upstream_client is not None:
+ d = await copy_from_upstream(self.upstream_client, self.db, method, taskhash)
else:
- self.write_message(None)
+ d = None
+
+ self.write_message(d)
async def handle_get_stream(self, request):
self.write_message('ok')
while True:
+ upstream = None
+
l = await self.reader.readline()
if not l:
return
@@ -272,6 +312,12 @@ class ServerClient(object):
if row is not None:
msg = ('%s\n' % row['unihash']).encode('utf-8')
#logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
+ elif self.upstream_client is not None:
+ upstream = await self.upstream_client.get_unihash(method, taskhash)
+ if upstream:
+ msg = ("%s\n" % upstream).encode("utf-8")
+ else:
+ msg = "\n".encode("utf-8")
else:
msg = '\n'.encode('utf-8')
@@ -282,6 +328,11 @@ class ServerClient(object):
await self.writer.drain()
+ # Post to the backfill queue after writing the result to minimize
+ # the turnaround time on a request
+ if upstream is not None:
+ await self.backfill_queue.put((method, taskhash))
+
async def handle_report(self, data):
with closing(self.db.cursor()) as cursor:
cursor.execute('''
@@ -324,11 +375,7 @@ class ServerClient(object):
if k in data:
insert_data[k] = data[k]
- cursor.execute('''INSERT INTO tasks_v2 (%s) VALUES (%s)''' % (
- ', '.join(sorted(insert_data.keys())),
- ', '.join(':' + k for k in sorted(insert_data.keys()))),
- insert_data)
-
+ insert_task(cursor, insert_data)
self.db.commit()
logger.info('Adding taskhash %s with unihash %s',
@@ -358,11 +405,7 @@ class ServerClient(object):
if k in data:
insert_data[k] = data[k]
- cursor.execute('''INSERT OR IGNORE INTO tasks_v2 (%s) VALUES (%s)''' % (
- ', '.join(sorted(insert_data.keys())),
- ', '.join(':' + k for k in sorted(insert_data.keys()))),
- insert_data)
-
+ insert_task(cursor, insert_data, ignore=True)
self.db.commit()
# Fetch the unihash that will be reported for the taskhash. If the
@@ -394,6 +437,13 @@ class ServerClient(object):
self.request_stats.reset()
self.write_message(d)
+ async def handle_backfill_wait(self, request):
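+ # Report how many backfill items are currently queued, then block until
+ # the queue has been drained before replying.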
+ d = {
+ 'tasks': self.backfill_queue.qsize(),
+ }
+ await self.backfill_queue.join()
+ self.write_message(d)
+
def query_equivalent(self, method, taskhash, query):
# This is part of the inner loop and must be as fast as possible
try:
@@ -405,7 +455,7 @@ class ServerClient(object):
class Server(object):
- def __init__(self, db, loop=None):
+ def __init__(self, db, loop=None, upstream=None):
self.request_stats = Stats()
self.db = db
@@ -416,6 +466,8 @@ class Server(object):
self.loop = loop
self.close_loop = False
+ self.upstream = upstream
+
self._cleanup_socket = None
def start_tcp_server(self, host, port):
@@ -458,7 +510,7 @@ class Server(object):
async def handle_client(self, reader, writer):
# writer.transport.set_write_buffer_limits(0)
try:
- client = ServerClient(reader, writer, self.db, self.request_stats)
+ client = ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream)
await client.process_requests()
except Exception as e:
import traceback
@@ -467,23 +519,60 @@ class Server(object):
writer.close()
logger.info('Client disconnected')
+ @contextmanager
+ def _backfill_worker(self):
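+ # Context manager that, when an upstream is configured, runs a worker task
+ # copying queued (method, taskhash) pairs from the upstream server into the
+ # local database; a None item on the queue is the shutdown sentinel.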
+ async def backfill_worker_task():
+ client = await create_async_client(self.upstream)
+ try:
+ while True:
+ item = await self.backfill_queue.get()
+ if item is None:
+ self.backfill_queue.task_done()
+ break
+ method, taskhash = item
+ await copy_from_upstream(client, self.db, method, taskhash)
+ self.backfill_queue.task_done()
+ finally:
+ await client.close()
+
+ async def join_worker(worker):
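+ # Push the shutdown sentinel and wait for the worker to drain the queue and exit.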
+ await self.backfill_queue.put(None)
+ await worker
+
+ if self.upstream is not None:
+ worker = asyncio.ensure_future(backfill_worker_task())
+ try:
+ yield
+ finally:
+ self.loop.run_until_complete(join_worker(worker))
+ else:
+ yield
+
def serve_forever(self):
def signal_handler():
self.loop.stop()
- self.loop.add_signal_handler(signal.SIGTERM, signal_handler)
-
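+ # Make self.loop the current event loop; the backfill queue and worker
+ # task created below use the current loop.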
+ asyncio.set_event_loop(self.loop)
try:
- self.loop.run_forever()
- except KeyboardInterrupt:
- pass
+ self.backfill_queue = asyncio.Queue()
+
+ self.loop.add_signal_handler(signal.SIGTERM, signal_handler)
- self.server.close()
- self.loop.run_until_complete(self.server.wait_closed())
- logger.info('Server shutting down')
+ with self._backfill_worker():
+ try:
+ self.loop.run_forever()
+ except KeyboardInterrupt:
+ pass
- if self.close_loop:
- self.loop.close()
+ self.server.close()
+
+ self.loop.run_until_complete(self.server.wait_closed())
+ logger.info('Server shutting down')
+ finally:
+ if self.close_loop:
+ if sys.version_info >= (3, 6):
+ self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+ self.loop.close()
- if self._cleanup_socket is not None:
- self._cleanup_socket()
+ if self._cleanup_socket is not None:
+ self._cleanup_socket()
diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py
index 4566f2473..3dd9a31be 100644
--- a/poky/bitbake/lib/hashserv/tests.py
+++ b/poky/bitbake/lib/hashserv/tests.py
@@ -16,35 +16,54 @@ import threading
import unittest
import socket
+def _run_server(server, idx):
+ # logging.basicConfig(level=logging.DEBUG, filename='bbhashserv.log', filemode='w',
+ # format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
+ sys.stdout = open('bbhashserv-%d.log' % idx, 'w')
+ sys.stderr = sys.stdout
+ server.serve_forever()
class TestHashEquivalenceServer(object):
METHOD = 'TestMethod'
- def _run_server(self):
- # logging.basicConfig(level=logging.DEBUG, filename='bbhashserv.log', filemode='w',
- # format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
- self.server.serve_forever()
+ server_index = 0
+
+ def start_server(self, dbpath=None, upstream=None):
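+ # Start a hash equivalence server in a separate process, with its own
+ # database and socket unless overridden, and return a connected client
+ # together with the server object.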
+ self.server_index += 1
+ if dbpath is None:
+ dbpath = os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+
+ def cleanup_thread(thread):
+ thread.terminate()
+ thread.join()
+
+ server = create_server(self.get_server_addr(self.server_index), dbpath, upstream=upstream)
+ server.dbpath = dbpath
+
+ server.thread = multiprocessing.Process(target=_run_server, args=(server, self.server_index))
+ server.thread.start()
+ self.addCleanup(cleanup_thread, server.thread)
+
+ def cleanup_client(client):
+ client.close()
+
+ client = create_client(server.address)
+ self.addCleanup(cleanup_client, client)
+
+ return (client, server)
def setUp(self):
if sys.version_info < (3, 5, 0):
self.skipTest('Python 3.5 or later required')
self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv')
- self.dbfile = os.path.join(self.temp_dir.name, 'db.sqlite')
-
- self.server = create_server(self.get_server_addr(), self.dbfile)
- self.server_thread = multiprocessing.Process(target=self._run_server)
- self.server_thread.start()
- self.client = create_client(self.server.address)
-
- def tearDown(self):
- # Shutdown server
- s = getattr(self, 'server', None)
- if s is not None:
- self.server_thread.terminate()
- self.server_thread.join()
- self.client.close()
- self.temp_dir.cleanup()
+ self.addCleanup(self.temp_dir.cleanup)
+
+ (self.client, self.server) = self.start_server()
+
+ def assertClientGetHash(self, client, taskhash, unihash):
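+ # Assert that the client resolves taskhash to the expected unihash
+ # (or None if it should be unknown).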
+ result = client.get_unihash(self.METHOD, taskhash)
+ self.assertEqual(result, unihash)
def test_create_hash(self):
# Simple test that hashes can be created
@@ -52,8 +71,7 @@ class TestHashEquivalenceServer(object):
outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
- result = self.client.get_unihash(self.METHOD, taskhash)
- self.assertIsNone(result, msg='Found unexpected task, %r' % result)
+ self.assertClientGetHash(self.client, taskhash, None)
result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
@@ -84,22 +102,19 @@ class TestHashEquivalenceServer(object):
unihash = '218e57509998197d570e2c98512d0105985dffc9'
self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
- result = self.client.get_unihash(self.METHOD, taskhash)
- self.assertEqual(result, unihash)
+ self.assertClientGetHash(self.client, taskhash, unihash)
outhash2 = '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d'
unihash2 = 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'
self.client.report_unihash(taskhash, self.METHOD, outhash2, unihash2)
- result = self.client.get_unihash(self.METHOD, taskhash)
- self.assertEqual(result, unihash)
+ self.assertClientGetHash(self.client, taskhash, unihash)
outhash3 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
unihash3 = '9217a7d6398518e5dc002ed58f2cbbbc78696603'
self.client.report_unihash(taskhash, self.METHOD, outhash3, unihash3)
- result = self.client.get_unihash(self.METHOD, taskhash)
- self.assertEqual(result, unihash)
+ self.assertClientGetHash(self.client, taskhash, unihash)
def test_huge_message(self):
# Simple test that hashes can be created
@@ -107,8 +122,7 @@ class TestHashEquivalenceServer(object):
outhash = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44'
unihash = '90e9bc1d1f094c51824adca7f8ea79a048d68824'
- result = self.client.get_unihash(self.METHOD, taskhash)
- self.assertIsNone(result, msg='Found unexpected task, %r' % result)
+ self.assertClientGetHash(self.client, taskhash, None)
siginfo = "0" * (self.client.max_chunk * 4)
@@ -156,14 +170,83 @@ class TestHashEquivalenceServer(object):
self.assertFalse(failures)
+ def test_upstream_server(self):
+ # Tests upstream server support. This is done by creating two servers
+ # that share a database file. The downstream server has its upstream
+ # set to the test server, whereas the side server doesn't. This lets the
+ # test verify that hash requests are proxied to the upstream server: they
+ # appear on the downstream client but not on the side client. It also
+ # verifies that the results are pulled into the downstream database by
+ # checking that the downstream and side servers match after the downstream
+ # is done waiting for all backfill tasks.
+ (down_client, down_server) = self.start_server(upstream=self.server.address)
+ (side_client, side_server) = self.start_server(dbpath=down_server.dbpath)
+
+ def check_hash(taskhash, unihash, old_sidehash):
+ nonlocal down_client
+ nonlocal side_client
+
+ # Check the upstream server
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ # Hash should *not* be present on the side server
+ self.assertClientGetHash(side_client, taskhash, old_sidehash)
+
+ # Hash should be present on the downstream server, since it
+ # will defer to the upstream server. This will trigger
+ # the backfill in the downstream server
+ self.assertClientGetHash(down_client, taskhash, unihash)
+
+ # After waiting for the downstream client to finish backfilling the
+ # task from the upstream server, it should appear in the side server
+ # since the database is populated
+ down_client.backfill_wait()
+ self.assertClientGetHash(side_client, taskhash, unihash)
+
+ # Basic report
+ taskhash = '8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a'
+ outhash = 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e'
+ unihash = '218e57509998197d570e2c98512d0105985dffc9'
+ self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+
+ check_hash(taskhash, unihash, None)
+
+ # Duplicated taskhash with multiple output hashes and unihashes.
+ # All servers should agree with the originally reported hash
+ outhash2 = '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d'
+ unihash2 = 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'
+ self.client.report_unihash(taskhash, self.METHOD, outhash2, unihash2)
+
+ check_hash(taskhash, unihash, unihash)
+
+ # Report an equivalent task. The side server will initially report
+ # no unihash until the entry has been backfilled
+ taskhash3 = "044c2ec8aaf480685a00ff6ff49e6162e6ad34e1"
+ unihash3 = "def64766090d28f627e816454ed46894bb3aab36"
+ self.client.report_unihash(taskhash3, self.METHOD, outhash, unihash3)
+
+ check_hash(taskhash3, unihash, None)
+
+ # Test that reporting a unihash in the downstream client isn't
+ # propagating to the upstream server
+ taskhash4 = "e3da00593d6a7fb435c7e2114976c59c5fd6d561"
+ outhash4 = "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a"
+ unihash4 = "3b5d3d83f07f259e9086fcb422c855286e18a57d"
+ down_client.report_unihash(taskhash4, self.METHOD, outhash4, unihash4)
+ down_client.backfill_wait()
+
+ self.assertClientGetHash(down_client, taskhash4, unihash4)
+ self.assertClientGetHash(side_client, taskhash4, unihash4)
+ self.assertClientGetHash(self.client, taskhash4, None)
+
class TestHashEquivalenceUnixServer(TestHashEquivalenceServer, unittest.TestCase):
- def get_server_addr(self):
- return "unix://" + os.path.join(self.temp_dir.name, 'sock')
+ def get_server_addr(self, server_idx):
+ return "unix://" + os.path.join(self.temp_dir.name, 'sock%d' % server_idx)
class TestHashEquivalenceTCPServer(TestHashEquivalenceServer, unittest.TestCase):
- def get_server_addr(self):
+ def get_server_addr(self, server_idx):
# Some hosts cause asyncio module to misbehave, when IPv6 is not enabled.
# If IPv6 is enabled, it should be safe to use localhost directly, in general
# case it is more reliable to resolve the IP address explicitly.