author      Patrick Williams <patrick@stwcx.xyz>    2024-02-20 17:07:48 +0300
committer   Patrick Williams <patrick@stwcx.xyz>    2024-03-01 00:30:06 +0300
commit      73bd93f1d0a338767f36fd1acb54c52ad057db39 (patch)
tree        f5c80db4a466b90aacce702b852945785bdd51a3 /poky/bitbake/lib
parent      f5510eca70b356348a25198e2d9c38039f68365b (diff)
download    openbmc-73bd93f1d0a338767f36fd1acb54c52ad057db39.tar.xz
subtree updates

poky: 348d9aba33..fc8e5d7c13:

  Adithya Balakumar (1): wic: implement reproducible Disk GUID
  Adrian Freihofer (20): cmake.bbclass: use --install devtool: support plugins with plugins devtool: refactor exec_fakeroot devtool: refactor deploy to use exec_fakeroot_no_d devtool: refactor deploy-target recipetool: cleanup imports oeqa: replace deprecated assertEquals oeqa/selftest/recipetool: fix for python 3.12 oeqa/selftest/oelib/buildhistory: git default branch scripts: python 3.12 regex feature-microblaze-versions.inc: python 3.12 regex meta/lib/oeqa: python 3.12 regex meta/lib/patchtest: python 3.12 regex meta/recipes: python 3.12 regex bitbake: bitbake/lib/bs4/tests/test_tree.py: python 3.12 regex devtool: new ide-sdk plugin oe-selftest devtool: ide-sdk tests devtool: ide-sdk make deploy-target quicker vscode: drop .vscode folder oe-init-build-env: generate .vscode from template
  Aleksey Smirnov (2): conf/machine: Add Power8 tune to PowerPC architecture busybox: Explicitly specify tty device for serial consoles
  Alex Kiernan (1): wireless-regdb: Upgrade 2023.09.01 -> 2024.01.23
  Alex Stewart (3): opkg: upgrade to 0.6.3 opkg: add deprecation warning for internal solver opkg-arch-config: update recipe HOMEPAGE
  Alexander Kanavin (26): sysroot user management postinsts: run with /bin/sh -e to report errors when they happen classes/multilib: expand PACKAGE_WRITE_DEPS in addition to DEPENDS classes/staging: capture output of sysroot postinsts into logs classes/package_rpm: write file permissions and ownership explicitly into .spec classes/package_rpm: use weak user/group dependencies classes/package_rpm: set bogus locations for passwd/group files oeqa/runtime/rpm: fail tests if test rpm file cannot be found rpm: update 4.18.1 -> 4.19.1 classes/package_rpm: correctly escape percent characters setftest/cdn tests: check for exceptions also in fetcher diagnostics rpm: override curl executable search with just 'curl' classes/package_rpm: additionally escape \ and " in filenames classes/package_rpm: streamline the logic in one of the condition blocks lzlib: add a recipe file: enable additional internal compressor support selftest/SStateCacheManagement: do not manipulate ERROR_QA selftest/SStateCacheManagement: pre-populate the cache shadow: add a packageconfig for logind support meta/conf/templates/default/conf-notes.txt: remove scripts/oe-setup-layers: write a list of layer paths into the checkout's top dir meta/conf/templates/default/conf-summary.txt: add a template summary meta/lib/bblayers/buildconf.py: add support for configuration summaries scripts/oe-setup-builddir: add support for configuration summaries oe-setup-build: add a tool for discovering config templates and setting up builds meta-poky/conf/templates/default/conf-summary.txt: add a template summary bitbake: Revert "bitbake: wget.py: always use the custom user agent"
  Alexis Lothoré (3): patchtest-send-results: remove unused variable patchtest-send-results: properly parse test status testimage: retrieve ptests directory when ptests fail
  André Draszik (4): sstate-cache-management: fix regex for 'sigdata' stamp files bitbake: fetch/git2: support git's safe.bareRepository bitbake: tests/fetch: support git's safe.bareRepository bitbake: git-make-shallow: support git's safe.bareRepository
  Anibal Limon (1): ptest-runner: Bump to 2.4.3 (92c1b97)
  Anuj Mittal (8): enchant2: upgrade 2.6.5 -> 2.6.7 libproxy: upgrade 0.5.3 -> 0.5.4 sqlite3: upgrade 3.44.2 -> 3.45.1 orc: upgrade 0.4.36 -> 0.4.37 stress-ng: upgrade 0.17.04 -> 0.17.05 libcap-ng: fix build with swig 4.2.0 gstreamer1.0: upgrade 1.22.9 -> 1.22.10 swig: upgrade 4.1.1 -> 4.2.0
  Bruce Ashfield (13): lttng-modules: fix v6.8+ build linux-yocto-dev: update to v6.8 linux-yocto/6.6: features/qat/qat.cfg: enable CONFIG_PCIEAER linux-yocto/6.6: beaglebone: drop nonassignable kernel options linux-yocto/6.6: update to v6.6.13 linux-yocto/6.6: update CVE exclusions linux-yocto/6.6: can: drop obsolete CONFIG_PCH_CAN linux-yocto/6.6: update to v6.6.15 linux-yocto/6.6: update CVE exclusions yocto-bsp: update reference boards to v6.6.15 linux-yocto/6.6: update to v6.6.16 linux-yocto/6.6: update CVE exclusions linux-yocto/6.6: qemuriscv: enable goldfish RTC
  Chen Qi (5): multilib_global.bbclass: fix parsing error with no kernel module split gnupg: disable tests to avoid running target binaries at build time bitbake: fetch2/git.py: fix a corner case in try_premirror bitbake: tests/fetch.py: add test case for using premirror in restricted network bitbake: fetch2/git.py: add comment in try_premirrors
  Chi Xu (1): xz: Add ptest support
  Claus Stovgaard (2): kernel-devsrc: fix RDEPENDS for make kernel-devsrc: RDEPENDS on gawk
  Clément Péron (1): libpcap: extend with nativesdk
  Colin McAllister (1): initscripts: Add custom mount args for /var/lib
  David Reyna (1): bitbake: taskexp_ncurses: ncurses version of taskexp.py
  Denys Dmytriyenko (3): lttng-modules: upgrade 2.13.10 -> 2.13.11 zlib: upgrade 1.3 -> 1.3.1 xz: upgrade 5.4.5 -> 5.4.6
  Enguerrand de Ribaucourt (3): devtool: ide_sdk: Use bitbake's python3 for generated scripts devtool: ide: vscode: Configure read-only files meson: use absolute cross-compiler paths
  Enrico Jörns (1): rootfs-postcommands: remove make_zimage_symlink_relative()
  Etienne Cordonnier (1): dropbear: remove unnecessary line
  Fabien Mahot (1): ldconfig-native: Fix to point correctly on the DT_NEEDED entries in an ELF file
  Fabio Estevam (3): piglit: Update to latest revision mesa: Upgrade 23.3.3 -> 23.3.4 mesa: Upgrade 23.3.4 -> 23.3.5
  Jamin Lin (3): uboot-sign: set load address and entrypoint uboot-sign: Fix to install nonexistent dtb file u-boot-sign:uboot-config: support to verify signed FIT image
  Jermain Horsman (2): bitbake-layers: Add ability to update the reference of repositories bitbake-layers: Add test case layers setup for custom references
  Joe Slater (1): eudev: allow for predictable network interface names
  Johannes Schneider (2): initramfs-framework: overlayroot: fix kernel commandline clash initramfs-framework: overlayroot: align bootparams with module name
  Jon Mason (2): tunes/sve: Add support for sve2 instructions arm/armv*: add all the Arm tunes in GCC 13.2.0
  Jonathan GUILLOT (3): lib/oe/package: replace in place PN-locale-* packages in PACKAGES lib/oe/package: add LOCALE_PATHS to add define all locations for locales cups: use LOCALE_PATHS to split localized HTML templates
  Jose Quaresma (3): go: update 1.20.12 -> 1.20.13 systemd: pack pre-defined pcrlock files installed with tpm2 qemu: disbale AF_XDP network backend support
  Joshua Watt (8): bitbake: hashserv: Add Unihash Garbage Collection bitbake: hashserv: sqlalchemy: Use _execute() helper bitbake: hashserv: Add unihash-exists API bitbake: asyncrpc: Add Client Pool object bitbake: hashserv: Add Client Pool bitbake: siggen: Add parallel query API bitbake: siggen: Add parallel unihash exist API sstatesig: Implement new siggen API
  Kai Kang (2): rpm: fix dependency for package config imaevm ghostscript: correct LICENSE with AGPLv3
  Khem Raj (27): elfutils: Fix build with gcc trunk python3: Initialize struct termios before calling tcgetattr() qemu: Replace the basename patch with backport xwayland: Upgrade 23.2.3 -> 23.2.4 armv8/armv9: Avoid using -march when -mcpu is chosen kexec-tools: Fix build with gas 2.42 systemtap: Backport GCC-14 related calloc fixes sdk/assimp.py: Fix build on 32bit arches with 64bit time_t binutils: Upgrade to binutils 2.42 qemu-native: Use inherit_defer for including native class syslinux: Disable error on implicit-function-declaration glibc: Upgrade to 2.39 strace: Upgrade to 6.7 rust/cargo: Build fixes to rust for rv32 target buildcpio.py: Switch to using cpio-2.15 ptest.bbclass: Handle the case when Makefile does not exist in do_install_ptest_base kernel-devsrc: Add needed fixes for 6.1+ kernel build on target on RISCV python3: Fix ptests with expat 2.6+ expat: Upgrade to 2.6.0 gcc-runtime: Move gdb pretty printer file to auto-load location core-image-ptest: Increase disk size to 1.5G for strace ptest image tcmode-default: Do not define LLVMVERSION glibc: Update to latest on 2.39 glibc: Update to bring mips32/clone3 fix piglit: Fix build with musl llvm: Upgrade to LLVM-18 RC2 binutils: Update to tip of 2.42 release branch
  Konrad Weihmann (1): python3-yamllint: add missing dependency
  Lee Chee Yang (1): migration-guide: add release notes for 4.0.16
  Maanya Goenka (2): toolchain-shar-relocate: allow 'find' access to libraries in symlinked directories bash: nativesdk-bash does not provide /bin/bash so don't claim to
  Marek Vasut (1): Revert "lzop: remove recipe from oe-core"
  Mark Hatle (5): qemu: Allow native and nativesdk versions on Linux older then 4.17 tune-cortexa78.inc: Add cortexa78 tune, based on cortexa77 feature-arm-vfp.inc: Allow hard-float on newer simd targets tune-cortexr5: Add hard-float variant tune-cortexr52: Add hard-float variant
  Markus Volk (6): gtk4: update 4.12.4 -> 4.12.5 mesa: update 23.3.5 -> 24.0.0 mesa: update 24.0.0 -> 24.0.1 libadwaita: update 1.4.2 -> 1.4.3 wayland-protocols: update 1.32 -> 1.33 ell: update 0.61 -> 0.62
  Martin Jansa (5): qemu: fix target build with ccache enabled package_manager: ipk: add OPKG_MAKE_INDEX_EXTRA_PARAMS variable package_rpm: add RPMBUILD_EXTRA_PARAMS variable bitbake: bitbake-diffsigs: fix walking the task dependencies and show better error bitbake: tests: fetch.py: use real subversion repository
  Michael Opdenacker (9): dev-manual: start: remove idle line docs: remove support for mickledore (4.2) release release-notes-4.3: fix spacing alsa-lib: upgrade 1.2.10 -> 1.2.11 alsa-tools: upgrade 1.2.5 -> 1.2.11 alsa-ucm-conf: upgrade 1.2.10 -> 1.2.11 alsa-utils: upgrade 1.2.10 -> 1.2.11 oeqa/runtime/cases: fix typo in information message bitbake: doc: README: simpler link to contributor guide
  Michal Sieron (1): sanity.bbclass: raise_sanity_error if /tmp is noexec
  Nick Owens (1): systemd: recommend libelf, libdw for elfutils flag
  Ola x Nilsson (1): python3-numpy: Use Large File Support version of fallocate
  Paul Gortmaker (1): bitbake: hashserv: improve the loglevel error message to be more helpful
  Pavel Zhukov (3): systemd.bbclass: Check for existence of the symlink too bitbake: fetch2/git.py: Fetch mirror into HEAD bitbake: tests/fetch.py: add multiple fetches test
  Peter Kjellerstedt (12): devtool: modify: Correct appending of type=git-dependency to URIs devtool: standard: Add some missing whitespace devtool: _extract_source: Correct the removal of an old backup directory bitbake: tests/fetch: Make test_git_latest_versionstring support a max version bitbake: fetch2/git: A bit of clean-up of latest_versionstring() bitbake: fetch2/git: Make latest_versionstring extract tags with slashes correctly lib/oe/patch: Make extractPatches() not extract ignored commits lib/oe/patch: Add GitApplyTree.commitIgnored() devtool: Make use of oe.patch.GitApplyTree.commitIgnored() patch.bbclass: Make use of oe.patch.GitApplyTree.commitIgnored() lib/oe/patch: Use git notes to store the filenames for the patches insane.bbclass: Allow the warning about virtual/ to be disabled
  Peter Marko (2): openssl: Upgrade 3.2.0 -> 3.2.1 util-linux: add alternative link for scriptreplay
  Petr Vorel (1): ltp: Update to 20240129
  Philip Lorenz (1): ipk: Remove temporary package lists during SDK creation
  Priyal Doshi (1): tzdata : Upgrade to 2024a
  Quentin Schulz (1): u-boot: add missing dependency on pyelftools-native
  Randolph Sapp (1): mirrors.bbclass: add infraroot as an https mirror
  Randy MacLeod (4): valgrind: make ptest depend on all components valgrind: update from 3.21.0 to 3.22.0 valgrind: skip 14 ptests in 3.22 valgrind: Skip 22 arm64 ptests
  Richard Purdie (34): oeqa/qemurunner: Handle rare shutdown race pseudo: Update to pull in gcc14 fix and missing statvfs64 intercept numactl: upgrade 2.0.16 -> 2.0.17 conf: Move selftest config to dedicated inc file oeqa/selftest/bbtests: Tweak to use no-gplv3 inc file python3-markupsafe: upgrade 2.1.3 -> 2.1.5 python3-markupsafe: Switch to python_setuptools_build_meta qemu: Upgrade 8.2.0 -> 8.2.1 ltp: Enable extra test groups ltp: Try re-enabling problematic test meta-yocto-bsp: Remove accidentally added files oeqa/runtime: Move files from oe-core to bsp layer mirrors: Allow shallow glibc to work correctly ptest-packagelists: Mark python3 as problematic on riscv64 kernel-devsrc: Clean up whitespace selftest/recipetool: Factor tomllib test to a function selftest/recipetool: Improve test failure output layer.conf: Update for the scarthgap release series layer.conf: Update for the scarthgap release series bitbake: process: Add profile logging for main loop bitbake: process/server: Fix typo kernel-arch: Simplify strip support insane: Clarify runtime/ warning bitbake: runqueue: Improve performance for executing tasks bitbake: runqueue: Optimise taskname lookups in next_buildable_task bitbake: runqueue: Improve setcene performance when encoutering many 'hard' dependencies openssh: Add a work around for ICE on mips/mips64 kernel-devsrc: Improve vdso-offsets handling for qemuriscv64 u-boot: Pass in prefix mapping variables to the compiler testsdk: Avoid PATH contamination oeqa/selftest/rust: Exclude failing riscv tests bitbake: bitbake: Bump version to 2.7.3 for hashserv changes sanity.conf: Require bitbake 2.7.3 python: Drop ${PYTHON_PN}
  Robert Joslyn (2): curl: Update to 8.6.0 gtk: Set CVE_PRODUCT
  Robert Yang (1): gnu-config: Update to latest version
  Ross Burton (13): grub2: ignore CVE-2023-4001, this is Red Hat-specific openssl: backport fix for CVE-2023-6129 lib/oeqa: rename assertRaisesRegexp to assertRaisesRegex oeqa/selftest/recipetool: downgrade meson version to not use pyproject.toml recipetool: don't dump stack traces if a toml parser can't be found xz: remove redundant PTEST_ENABLED conditional libpam: remove redundant PTEST_ENABLED conditional glib-2.0: backport memory monitor test fixes python3: move dataclasses to python3-core python3-unittest-automake-output: upgrade to 0.2 meson: remove TMPDIR workaround meson: set the sysroot in the cross files libffi: upgrade to 3.4.5
  Simone Weiß (12): gnutls: Upgrade 3.8.2 -> 3.8.3 maintainers.inc: Add self for libseccomp and gnutls bsp-guide: correct formfactor recipe name dev-manual: gen-tapdevs need iptables installed gnutls: print log if ptest fails patchtest: log errors and failures at end grub2: ignore CVE-2024-1048, Redhat only issue libgit2: update 1.7.1 -> 1.7.2 libuv: Upgrade 1.47.0 -> 1.48.0 qemu: Set CVE_STATUS for wrong CVEs patchtest: Add selftest for test cve_check_ignore patchtest: add stronger indication for failed tests
  Siong W.LIM (1): useradd.bbclass: Fix missing space when appending vardeps.
  Thomas Perrot (2): opensbi: append LDFLAGS to TARGET_CC_ARCH bitbake: wget.py: always use the custom user agent
  Tim Orling (13): libxml-parser-perl: upgrade 2.46 -> 2.47 python3-pyyaml: add PACKAGECONFIG for libyaml python3-pyyaml: enable ptest python3-cryptography: upgrade 41.0.7 to 42.0.2 openssh: upgrade 9.5p1 -> 9.6p1 python3-poetry-core: upgrade 1.8.1 -> 1.9.0 python3-attrs: skip test failing with pytest-8 vim: upgrade from 9.0.2130 -> 9.1.0114 python3-pyproject-metadata: move from meta-python python3-pyproject-metadata: HOMEPAGE; DESCRIPTION python3-meson-python: move from meta-python python_mesonpy.bbclass: move from meta-python recipetool; add support for python_mesonpy class
  Tobias Hagelborn (2): sstate.bbclass: Only sign packages at the time of their creation bitbake: bitbake: hashserv: Postgres adaptations for ignoring duplicate inserts
  Toni Lammi (1): bitbake: support temporary AWS credentials
  Trevor Gamblin (7): patchtest.README: update mailing list cmake: upgrade 3.27.7 -> 3.28.3 python3-numpy: upgrade 1.26.3 -> 1.26.4 patchtest-send-results: Add 'References' header patchtest-send-results: use Message-ID directly patchtest: Fix grammar in log output patchtest-send-results: add --debug option
  Valek Andrej (1): glibc: Refresh CVE status w.r.t 2.39 release
  Vikas Katariya (1): bmap-tools: Add missing runtime dependency
  Wang Mingyu (36): at-spi2-core: upgrade 2.50.0 -> 2.50.1 cpio: upgrade 2.14 -> 2.15 ethtool: upgrade 6.6 -> 6.7 iso-codes: upgrade 4.15.0 -> 4.16.0 libinput: upgrade 1.24.0 -> 1.25.0 libtest-warnings-perl: upgrade 0.032 -> 0.033 libwpe: upgrade 1.14.1 -> 1.14.2 lzip: upgrade 1.23 -> 1.24 createrepo-c: upgrade 1.0.2 -> 1.0.3 diffstat: upgrade 1.65 -> 1.66 dos2unix: upgrade 7.5.1 -> 7.5.2 ed: upgrade 1.19 -> 1.20 gnupg: upgrade 2.4.3 -> 2.4.4 gstreamer: upgrade 1.22.8 -> 1.22.9 libidn2: upgrade 2.3.4 -> 2.3.7 libpng: upgrade 1.6.40 -> 1.6.41 libsolv: upgrade 0.7.27 -> 0.7.28 liburi-perl: upgrade 5.21 -> 5.25 nghttp2: upgrade 1.58.0 -> 1.59.0 repo: upgrade 2.40 -> 2.41 orc: upgrade 0.4.34 -> 0.4.36 pkgconf: upgrade 2.0.3 -> 2.1.0 python3-sphinxcontrib-applehelp: upgrade 1.0.7 -> 1.0.8 python3-sphinxcontrib-devhelp: upgrade 1.0.5 -> 1.0.6 python3-sphinxcontrib-htmlhelp: upgrade 2.0.4 -> 2.0.5 python3-sphinxcontrib-qthelp: upgrade 1.0.6 -> 1.0.7 python3-sphinxcontrib-serializinghtml: upgrade 1.1.9 -> 1.1.10 python3-beartype: upgrade 0.16.4 -> 0.17.0 python3-mako: upgrade 1.3.0 -> 1.3.2 python3-hatchling: upgrade 1.21.0 -> 1.21.1 python3-hypothesis: upgrade 6.92.9 -> 6.97.3 python3-pluggy: upgrade 1.3.0 -> 1.4.0 python3-psutil: upgrade 5.9.7 -> 5.9.8 python3-pyopenssl: upgrade 23.3.0 -> 24.0.0 python3-pytz: upgrade 2023.3 -> 2023.4 python3-pytest: upgrade 7.4.4 -> 8.0.0
  Xiangyu Chen (1): bash: rebase the patch to fix ptest failure
  Yi Zhao (2): rpm: add missing dependencies for packageconfig libsdl2: upgrade 2.28.5 -> 2.30.0
  Yoann Congal (2): kexec-tools: Replace a submitted patch by the backported one waf.bbclass: Print waf output on unparsable version
  Yogita Urade (1): tiff: fix CVE-2023-52355 and CVE-2023-52356
  baruch@tkos.co.il (3): contributor-guide: fix lore URL overlayfs: add missing closing parenthesis in selftest overlayfs-etc: add option to skip creation of mount dirs

meta-arm: 6bb1fc8d8c..025f76a14f:

  Ali Can Ozaslan (1): arm-bsp/u-boot:corstone1000: Fix deployment of capsule files
  Drew Reed (4): bsp: Move Corstone-1000 U-Boot configuration entries bsp: Move machine settings bsp,ci: Switch to poky distro bsp: Rename corstone1000-image
  Harsimran Singh Tungal (2): n1sdp:arm arm-bsp: fix tftf tests for n1sdp arm-bsp/optee: upgrade optee to 4.1.0 for N1SDP
  Jon Mason (3): arm/opencsd: update to v1.5.1 arm/optee: update to 4.1 arm-bsp/optee: remove unused v3.22.0 recipes
  Khem Raj (1): layer.conf: Update for the scarthgap release series
  Ross Burton (5): CI: support extra kas files from environment CI/cve.yml: add a CVE-checking Kas fragment CI: add explanatory comments to variables CI: allow the runner to set a NVD API key CI: use https: to fetch meta-virtualization
  Vincent Stehlé (1): arm-bsp/documentation: corstone1000: fix typo

meta-security: b2e1511338..30e755c592:

  Armin Kuster (3): python3-pyinotify: do not rely on smtpd module python3-fail2ban: remove unused distutils dependency scap-security-guide: update to 0.1.71
  BELOUARGA Mohamed (2): checksec: Add more runtime dependencies to checksec tool lynis: Add missing runtime dependencies
  Leon Anavi (2): linux-yocto%.bbappend: Add audit.cfg integrity-image-minimal: Fix IMAGE_INSTALL
  Mikko Rapeli (1): parsec-tool: fix serialNumber check
  Yi Zhao (1): openscap: fix build with python 3.12
  Yushi Sun (1): meta-security: libhoth: SRCREV bump e520f8f...e482716

meta-raspberrypi: 9c901bf170..dbf1113a82:

  Kevin Hao (1): rpidistro-ffmpeg: Fix old override syntax
  Khem Raj (3): linux-raspberrypi_6.1.bb: Upgrade to 6.1.74 linux-raspberrypi: Upgrade to 6.1.77 layer.conf: Update for the scarthgap release series
  Martin Jansa (1): libcamera-apps: fix build with libcamera-0.2.0
  Matthew Draws (1): rpi-eeprom_git: v.2024.01.05-2712 Update recipe to latest rpi-eeprom repo This follows the current latest release of rpi-eeprom: https://github.com/raspberrypi/rpi-eeprom
  Pascal Huerst (1): rpi-base: Add missing hifiberry overlay

meta-openembedded: 9953ca1ac0..528f273006:

  Alex Kiernan (3): mdns: Fix SIGSEGV during DumpStateLog() mdns: Upgrade 2200.60.25.0.4 -> 2200.80.16 c-ares: Upgrade 1.24.0 -> 1.26.0
  Angelo Ribeiro (1): flatcc: Add tool recipe
  Angelo.Ribeiro (1): e2tools: Add tool recipe
  Archana Polampalli (1): nodejs: update to latest v20 version 20.11.0
  Beniamin Sandu (3): mbedtls: upgrade 3.5.1 -> 3.5.2 mbedtls: upgrade 2.28.4 -> 2.28.7 opencv: upgrade 4.8.0 -> 4.9.0
  Changqing Li (1): cpuid: fix do_install
  Chirag Shilwant (1): kernel-selftest: Add few more testcases
  Christophe Vu-Brugier (4): dropwatch: add new recipe switchtec-user: upgrade 4.1 -> 4.2 libnvme: upgrade 1.7.1 -> 1.8 nvme-cli: upgrade 2.7.1 -> 2.8
  Clément Péron (2): proj: extend class to native and nativesdk proj: upgrade 9.3.0 -> 9.3.1
  Denys Dmytriyenko (1): libcamera: update 0.1.0 -> 0.2.0
  Derek Straka (36): python3-bandit: update to version 1.7.7 python3-web3: update to version 6.15.0 python3-argcomplete: update to version 3.2.2 python3-cytoolz: update to version 0.12.3 python3-pdm: update to version 2.12.2 python3-google-api-python-client: update to version 2.115.0 python3-coverage: update to version 7.4.1 python3-gmqtt: update to version 0.6.14 python3-colorlog: update to version 6.8.2 python3-argh: update to version 0.31.2 python3-luma-core: update to version 2.4.2 python-pdm: update to version 2.12.3 python3-parse: update to version 1.20.1 python3-grpcio: update to version 1.60.1 python3-dill: update to version 0.3.8 python3-types-setuptools: update to version 69.0.0.20240125 python3-pymisp: update to version 2.4.184 python3-cbor2: update to version 5.6.1 python3-sentry-sdk: update to version 1.40.0 python3-pytest-asyncio: update to version 0.23.4 python3-google-api-core: update to version 2.16.1 python3-google-api-python-client: update to version 2.116.0 python3-google-auth: update to version 2.27.0 python3-jsonrpcclient: update to version 4.0.3 python3-dnspython: update to version 2.5.0 python3-eventlet: update to version 0.35.1 python3-platformdirs: update to version 4.2.0 python3-ipython: update to version 8.21.0 python3-grpcio-tools: update to version 1.60.1 python3-cachecontrol: update to version 0.14.0 python3-binwalk: update the regex version for upstream checks python3-pymodbus: update to version 3.6.3 python3-pyyaml-include: add initial recipe for version 1.3.2 python3-appdirs: add ptest into PTESTS_FAST_META_PYTHON items python3-yarl: add ptest into PTESTS_FAST_META_PYTHON items python3-ujson: add ptest into PTESTS_FAST_META_PYTHON items
  Emil Kronborg (1): php-fpm: fix systemd
  Etienne Cordonnier (2): uutils-coreutils: upgrade 0.0.23 -> 0.0.24 uutils_coreutils: merge .inc and .bb
  Fathi Boudra (4): whitenoise: add a new recipe python3-django: upgrade to Django 4.2.10 LTS release libtinyxml2: fix the homepage URL libtinyxml2: allow to build both shared and static libraries
  Geoff Parker (2): python3-aiodns python3-pycares: Add native & nativesdk support python3-aiohappyeyeballs: Add native & nativesdk support
  Jean-Marc BOUCHE (1): rtkit: missing files/directories in package
  Jose Quaresma (1): ostree: Upgrade 2023.8 -> 2024.1
  Jörg Sommer (1): bonnie++: New recipe for version 2.0
  Khem Raj (18): uftrace: Upgrade to 0.15.2 i2cdev: Set PV correctly minicoredumper: Fix build with clang python3-pytest-mock: Fix ptest failures with python 3.12 ndctl: Update to v78 vk-gl-cts: Disable Werror on amber external module vulkan-cts: Upgrade to 1.3.7.3 uftrace: Adjust the summary to reflect rust and python support libcamera: Fix build with clang-18 breakpad: Upgrade to 2023.06.01 release bpftool: Add missing dep on elfutils-native flatcc: Fix build warnings found with clang-18 Revert "lzop: add (from oe-core)" can-isotp: Update to latest and skip it openflow: Switch SRC_URI to github mirror ot-br-posix: upgrade to latest trunk libcereal: Disable c++11-narrowing-const-reference warning as error ot-br-posix: Limit vla-cxx-extension option to clang >= 18
  Li Wang (1): radvd: add '--shell /sbin/nologin' to /etc/passwd
  Mark Hatle (1): opencv: Fix python3 package generation
  Markus Volk (9): luajit: allow to build on supported platforms pipewire: fix build with libcamera-0.2 system-config-printer: fix runtime for system-config-printer iwd: update 2.8 -> 2.13 pipewire: update 1.0.1 -> 1.0.3 flatpak: remove unneeded RDEPENDS libosinfo: use hwdata for ids files libnfs: update 5.0.2 -> 5.0.3 hwdata: update 0.378 -> 0.379
  Martin Jansa (18): libtalloc, libtevent, libtdb, libldb: set PYTHONARCHDIR for waf to respect python libdir jack: fix build with python3 on host redis: restore Upstream-Status libvpx: restore Upstream-Status python-jsonref: add missing Upstream-Status flatcc: respect baselib flatcc: drop 'r' from gitr and ${SRCPV} recipes: drop ${SRCPV} usage recipes: drop remaining +gitr cases gitpkgv.bbclass: adjust the example in comment a bit ne10: append +git instead of gitr+ evemu-tools: use better PV nana: upgrade to latest commit from github xfstests: upgrade to latest 2024.01.14 xfstests: add gawk to RDEPENDS xfstests: use master branch instead of 'for-next' xfstests: drop the upstream rejected install-sh hack xfstests: fix make install race condition
  Max Krummenacher (2): libusbgx: fix usbgx.service stop / restart libusbgx: uprev to the latest commit
  Maxime Roussin-Belanger (1): xdg-desktop-portal: add missing glib-2.0-native dependency
  Maxime Roussin-Bélanger (1): polkit: fix rules.d permissions
  Ming Liu (1): plymouth: uprev to 24.004.60
  Niko Mauno (4): python3-pybind11: Amend HOMEPAGE python3-pybind11: Prune redundant inherit python3-pybind11: Fix LICENSE python3-pybind11: Cosmetic fixes
  Pavel Zhukov (1): python3-tzlocal: Add zoneinfo dependency
  Peter Kjellerstedt (1): xfstests: Only specify the main SRCREV once
  Peter Marko (2): syslog-ng: ignore CVE-2022-38725 libqmi: correct PV
  Pratik Manvar (1): python3-pybind11: Remove the Boost dependency
  Richard Leitner (7): python3-janus: add recipe for v1.0.0 python3-moteus: add recipe for v0.3.67 python3-socksio: add recipe for v1.0.0 python3-anyio: add recipe for v4.2.0 python3-sniffio: add recipe for v1.3.0 python3-httpcore: add recipe for v1.0.2 python3-httpx: add recipe for v0.26.0
  Sascha Hauer (1): signing.bbclass: make it work with eliptic curve keys
  Simone Weiß (1): scapy: Add difftools and logutils in RDEPENDS
  Thomas Perrot (3): dvb-apps: no longer skip ldflags QA etcd-cpp-apiv3: no longer skip ldflags QA kernel-selftest: no longer skip ldflags QA
  Tim Orling (60): python3-uritemplate: switch to pytest --automake python3-unidiff: switch to pytest --automake python3-ujson: switch to pytest --automake python3-pytest-lazy-fixture: switch to pytest --automake python3-fastjsonschema: switch to pytest --automake python3-tomlkit: switch to pytest --automake python3-inotify: switch to pytest --automake python3-requests-file: switch to pytest --automake python3-covdefaults: switch to pytest --automake python3-dominate: switch to pytest --automake python3-scrypt: switch to pytest --automake python3-u-msgpack-python: switch to pytest --automake python3-iso3166: switch to pytest --automake python3-trustme: switch to pytest --automake python3-asgiref: switch to pytest --automake python3-html2text: switch to pytest --automake python3-pyasn1-modules: switch to pytest --automake python3-intervals: switch to pytest --automake python3-py-cpuinfo: switch to pytest --automake python3-backports-functools-lru-cache: drop folder python3-whoosh: switch to pytest --automake python3-xlrd: switch to pytest --automake python3-dnspython: switch to pytest --automake python3-prettytable: switch to pytest --automake python3-ptyprocess: switch to pytest --automake python3-gunicorn: switch to pytest --automake python3-pytest-mock: switch to pytest --automake python3-pyroute2: switch to pytest --automake python3-smpplib: switch to pytest --automake python3-pyzmq: switch to pytest --automake python3-multidict: switch to pytest --automake python3-geojson: switch to pytest --automake python3-serpent: switch to pytest --automake python3-soupsieve: switch to pytest --automake python3-requests-toolbelt: switch to pytest --automake python3-yarl: switch to pytest --automake python3-cbor2: switch to pytest --automake python3-ansicolors: switch to pytest --automake python3-ipy: switch to pytest --automake python3-sqlparse: switch to pytest --automake python3-precise-runner: switch to pytest --automake python3-parse-type: switch to pytest --automake python3-inflection: switch to pytest --automake python3-blinker: switch to pytest --automake python3-service-identity: switch to pytest --automake python3-cachetools: switch to pytest --automake python3-simpleeval: switch to pytest --automake python3-appdirs: switch to pytest --automake python3-pillow: switch to pytest --automake python3-semver: switch to pytest --automake python3-platformdirs: switch to pytest --automake python3-polyline: switch to pytest --automake python3-betamax: switch to pytest --automake python3-pytoml: switch to pytest --automake python3-pyserial: switch to pytest --automake python3-typeguard: switch to pytest --automake python3-execnet: switch to pytest --automake python3-pyyaml-include: switch to pytest --automake python3-xxhash: switch to pytest --automake python3-pylint: switch to pytest --automake
  Tom Geelen (1): python3-pychromecast: add missing RDEPENDS, and add initial recipe for dependency.
  Wang Mingyu (90): btop: upgrade 1.2.13 -> 1.3.0 ccid: upgrade 1.5.4 -> 1.5.5 ctags: upgrade 6.1.20231231.0 -> 6.1.20240114.0 gcr3: upgrade 3.41.1 -> 3.41.2 htop: upgrade 3.2.2 -> 3.3.0 hwdata: upgrade 0.377 -> 0.378 libdecor: upgrade 0.2.1 -> 0.2.2 libvpx: upgrade 1.13.1 -> 1.14.0 lldpd: upgrade 1.0.17 -> 1.0.18 gjs: upgrade 1.78.2 -> 1.78.3 wireshark: upgrade 4.2.0 -> 4.2.2 capnproto: upgrade 1.0.1.1 -> 1.0.2 dnfdragora: upgrade 2.1.5 -> 2.1.6 libyang: upgrade 2.1.128 -> 2.1.148 lshw: upgrade 02.19.2 -> 02.20 md4c: upgrade 0.4.8 -> 0.5.0 python3-apscheduler: add new recipe redis: upgrade 7.2.3 -> 7.2.4 sanlock: upgrade 3.8.5 -> 3.9.0 python3-eth-keys: upgrade 0.4.0 -> 0.5.0 python3-xmlschema: upgrade 2.5.1 -> 3.0.1 plocate: upgrade 1.1.20 -> 1.1.22 python3-absl: upgrade 2.0.0 -> 2.1.0 python3-asyncinotify: upgrade 4.0.5 -> 4.0.6 python3-beautifulsoup4: upgrade 4.12.2 -> 4.12.3 python3-cantools: upgrade 39.4.2 -> 39.4.3 python3-cbor2: upgrade 5.5.1 -> 5.6.0 python3-dbus-fast: upgrade 2.21.0 -> 2.21.1 python3-django: upgrade 5.0 -> 5.0.1 python3-eth-abi: upgrade 4.2.1 -> 5.0.0 python3-eth-typing: upgrade 3.5.2 -> 4.0.0 python3-eth-utils: upgrade 2.3.1 -> 3.0.0 python3-eventlet: upgrade 0.34.2 -> 0.34.3 python3-flask: upgrade 3.0.0 -> 3.0.1 python3-git-pw: upgrade 2.5.0 -> 2.6.0 python3-google-api-python-client: upgrade 2.113.0 -> 2.114.0 python3-haversine: upgrade 2.8.0 -> 2.8.1 python3-ipython: upgrade 8.19.0 -> 8.20.0 python3-pdm: upgrade 2.11.2 -> 2.12.1 python3-pyatspi: upgrade 2.46.0 -> 2.46.1 python3-sentry-sdk: upgrade 1.39.1 -> 1.39.2 python3-robotframework: upgrade 6.1.1 -> 7.0 python3-pychromecast: upgrade 13.0.8 -> 13.1.0 python3-tox: upgrade 4.11.4 -> 4.12.1 python3-types-psutil: upgrade 5.9.5.17 -> 5.9.5.20240106 qpdf: upgrade 11.7.0 -> 11.8.0 smemstat: upgrade 0.02.12 -> 0.02.13 tesseract: upgrade 5.3.3 -> 5.3.4 libsmi: Fix buildpaths warning. minicoredumper: upgrade 2.0.6 -> 2.0.7 cmocka: Fix install conflict when enable multilib. czmq: Fix install conflict when enable multilib. czmq: Fix buildpaths warning. bdwgc: upgrade 8.2.4 -> 8.2.6 cmark: upgrade 0.30.3 -> 0.31.0 gensio: upgrade 2.8.2 -> 2.8.3 geos: upgrade 3.12.0 -> 3.12.1 imlib2: upgrade 1.12.1 -> 1.12.2 libcbor: upgrade 0.10.2 -> 0.11.0 libinih: upgrade 57 -> 58 libio-socket-ssl-perl: upgrade 2.084 -> 2.085 libjcat: upgrade 0.2.0 -> 0.2.1 libqmi: upgrade 1.35.1 -> 1.35.2 md4c: upgrade 0.5.0 -> 0.5.2 nanomsg: upgrade 1.2 -> 1.2.1 neatvnc: upgrade 0.7.1 -> 0.7.2 network-manager-applet: upgrade 1.34.0 -> 1.36.0 libgsf: upgrade 1.14.51 -> 1.14.52 ndisc6: upgrade 1.0.7 -> 1.0.8 squid: upgrade 6.6 -> 6.7 iotop: upgrade 1.25 -> 1.26 libblockdev: upgrade 3.0.4 -> 3.1.0 neon: upgrade 0.32.5 -> 0.33.0 pkcs11-provider: upgrade 0.2 -> 0.3 sanlock: upgrade 3.9.0 -> 3.9.1 satyr: upgrade 0.42 -> 0.43 python3-astroid: upgrade 3.0.2 -> 3.0.3 python3-elementpath: upgrade 4.1.5 -> 4.2.0 python3-flask: upgrade 3.0.1 -> 3.0.2 python3-google-api-core: upgrade 2.16.1 -> 2.16.2 python3-gspread: upgrade 5.12.4 -> 6.0.0 python3-path: upgrade 16.9.0 -> 16.10.0 python3-gcovr: upgrade 6.0 -> 7.0 python3-types-psutil: upgrade 5.9.5.20240106 -> 5.9.5.20240205 python3-waitress: upgrade 2.1.2 -> 3.0.0 rdma-core: upgrade 48.0 -> 50.0 ser2net: upgrade 4.6.0 -> 4.6.1 sip: upgrade 6.8.1 -> 6.8.2 span-lite: upgrade 0.10.3 -> 0.11.0 tcpslice: upgrade 1.6 -> 1.7
  William A. Kennington III (3): nanopb: Update 0.4.7 -> 0.4.8 nanopb: Split into 2 packages nanopb-runtime: Enable shared library
  Yoann Congal (6): ibus: backport a reproducibility fix radvd: Fix build in reproducible test mariadb: Move useradd handling in target side of the recipe kexec-tools-klibc: Fix building on x86_64 with binutils 2.41 freeradius: Add missing 'radiusd' static group id ntp: Add missing 'ntp' static group id
  alperak (18): python3-flask-marshmallow: upgrade 0.15.0 -> 1.1.0 python3-netaddr: upgrade 0.10.0 -> 0.10.1 python3-toolz: upgrade 0.12.0 -> 0.12.1 python3-aiohappyeyeballs: add recipe python3-aiohttp: upgrade 3.9.1 -> 3.9.2 python3-eth-rlp: upgrade 1.0.0 -> 1.0.1 python3-aiohttp: upgrade 3.9.2 -> 3.9.3 python3-google-auth-oauthlib: add recipe python3-scikit-build: upgrade 0.16.7 -> 0.17.6 python3-eth-account: upgrade 0.10.0 -> 0.11.0 python3-pyunormalize: add recipe python3-web3: upgrade 6.15.0 -> 6.15.1 python3-gspread: upgrade 6.0.0 -> 6.0.1 python3-strenum: add recipe python3-flask-marshmallow: upgrade 1.1.0 -> 1.2.0 python3-werkzeug: upgrade 2.3.6 -> 3.0.1 python3-imageio: upgrade 2.33.1 -> 2.34.0 python3-werkzeug: add missing runtime dependencies
  virendra thakur (1): nodejs: Set CVE_PRODUCT to "node.js"

Change-Id: If9fadba6ede9e8de3b778d470bbd61f208f48e54
Signed-off-by: Patrick Williams <patrick@stwcx.xyz>
Diffstat (limited to 'poky/bitbake/lib')
-rw-r--r--  poky/bitbake/lib/bb/__init__.py             |    2
-rw-r--r--  poky/bitbake/lib/bb/asyncrpc/__init__.py    |    2
-rw-r--r--  poky/bitbake/lib/bb/asyncrpc/client.py      |   77
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py      |    1
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py           |   49
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py             |   49
-rw-r--r--  poky/bitbake/lib/bb/server/process.py       |   16
-rw-r--r--  poky/bitbake/lib/bb/siggen.py               |  177
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py          |   81
-rwxr-xr-x  poky/bitbake/lib/bb/ui/taskexp_ncurses.py   | 1511
-rw-r--r--  poky/bitbake/lib/bs4/tests/test_tree.py     |    2
-rw-r--r--  poky/bitbake/lib/hashserv/client.py         |  155
-rw-r--r--  poky/bitbake/lib/hashserv/server.py         |  164
-rw-r--r--  poky/bitbake/lib/hashserv/sqlalchemy.py     |  515
-rw-r--r--  poky/bitbake/lib/hashserv/sqlite.py         |  221
-rw-r--r--  poky/bitbake/lib/hashserv/tests.py          |  320
16 files changed, 2967 insertions, 375 deletions
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index 5e22d34747..768cce84e9 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "2.7.2"
+__version__ = "2.7.3"
import sys
if sys.version_info < (3, 8, 0):
diff --git a/poky/bitbake/lib/bb/asyncrpc/__init__.py b/poky/bitbake/lib/bb/asyncrpc/__init__.py
index a4371643d7..639e1607f8 100644
--- a/poky/bitbake/lib/bb/asyncrpc/__init__.py
+++ b/poky/bitbake/lib/bb/asyncrpc/__init__.py
@@ -5,7 +5,7 @@
#
-from .client import AsyncClient, Client
+from .client import AsyncClient, Client, ClientPool
from .serv import AsyncServer, AsyncServerConnection
from .connection import DEFAULT_MAX_CHUNK
from .exceptions import (
diff --git a/poky/bitbake/lib/bb/asyncrpc/client.py b/poky/bitbake/lib/bb/asyncrpc/client.py
index 0d7cd85780..a6228bb0ba 100644
--- a/poky/bitbake/lib/bb/asyncrpc/client.py
+++ b/poky/bitbake/lib/bb/asyncrpc/client.py
@@ -10,6 +10,8 @@ import json
import os
import socket
import sys
+import contextlib
+from threading import Thread
from .connection import StreamConnection, WebsocketConnection, DEFAULT_MAX_CHUNK
from .exceptions import ConnectionClosedError, InvokeError
@@ -180,3 +182,78 @@ class Client(object):
def __exit__(self, exc_type, exc_value, traceback):
self.close()
return False
+
+
+class ClientPool(object):
+ def __init__(self, max_clients):
+ self.avail_clients = []
+ self.num_clients = 0
+ self.max_clients = max_clients
+ self.loop = None
+ self.client_condition = None
+
+ @abc.abstractmethod
+ async def _new_client(self):
+ raise NotImplementedError("Must be implemented in derived class")
+
+ def close(self):
+ if self.client_condition:
+ self.client_condition = None
+
+ if self.loop:
+ self.loop.run_until_complete(self.__close_clients())
+ self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+ self.loop.close()
+ self.loop = None
+
+ def run_tasks(self, tasks):
+ if not self.loop:
+ self.loop = asyncio.new_event_loop()
+
+ thread = Thread(target=self.__thread_main, args=(tasks,))
+ thread.start()
+ thread.join()
+
+ @contextlib.asynccontextmanager
+ async def get_client(self):
+ async with self.client_condition:
+ if self.avail_clients:
+ client = self.avail_clients.pop()
+ elif self.num_clients < self.max_clients:
+ self.num_clients += 1
+ client = await self._new_client()
+ else:
+ while not self.avail_clients:
+ await self.client_condition.wait()
+ client = self.avail_clients.pop()
+
+ try:
+ yield client
+ finally:
+ async with self.client_condition:
+ self.avail_clients.append(client)
+ self.client_condition.notify()
+
+ def __thread_main(self, tasks):
+ async def process_task(task):
+ async with self.get_client() as client:
+ await task(client)
+
+ asyncio.set_event_loop(self.loop)
+ if not self.client_condition:
+ self.client_condition = asyncio.Condition()
+ tasks = [process_task(t) for t in tasks]
+ self.loop.run_until_complete(asyncio.gather(*tasks))
+
+ async def __close_clients(self):
+ for c in self.avail_clients:
+ await c.close()
+ self.avail_clients = []
+ self.num_clients = 0
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+ return False
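
The ClientPool added above runs a batch of async tasks against a bounded set of clients: run_tasks() spins up a private event loop in a worker thread, and get_client() hands out clients under an asyncio.Condition, creating new ones only up to max_clients. A minimal sketch of how a subclass might drive it; EchoClient and EchoClientPool are hypothetical stand-ins, not part of bitbake (a real pool such as the hashserv one would open a server connection in _new_client):

```python
import asyncio
from bb.asyncrpc import ClientPool  # assumes bitbake's lib/ is on sys.path

class EchoClient:
    """Hypothetical client; a real one would wrap a server connection."""
    async def query(self, value):
        await asyncio.sleep(0)  # stand-in for real network I/O
        return value

    async def close(self):
        pass  # nothing to tear down in this sketch

class EchoClientPool(ClientPool):
    async def _new_client(self):
        # Called by get_client() while the pool is below max_clients
        return EchoClient()

results = {}

def make_task(key):
    # Each task is a coroutine function that receives a pooled client
    async def task(client):
        results[key] = await client.query(key)
    return task

with EchoClientPool(max_clients=4) as pool:
    pool.run_tasks([make_task(i) for i in range(10)])
print(results)  # {0: 0, 1: 1, ...}
```
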
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 677968a6a9..ac0f4dfa1d 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -875,6 +875,7 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH',
'AWS_ROLE_ARN',
'AWS_WEB_IDENTITY_TOKEN_FILE',
'AWS_DEFAULT_REGION',
+ 'AWS_SESSION_TOKEN',
'GIT_CACHE_PATH',
'REMOTE_CONTAINERS_IPC',
'SSL_CERT_DIR']
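
AWS_SESSION_TOKEN joins the allow-list of environment variables that survive into spawned fetch commands, which is what makes temporary (STS) credentials usable for fetches. A simplified sketch of the allow-list idea, not bitbake's actual export code, with only the variables visible in the patch context:

```python
import os

# Only allow-listed variables reach the fetch command's environment;
# the real list in fetch2/__init__.py is longer than this excerpt.
FETCH_EXPORT_VARS = [
    'HOME', 'PATH',
    'AWS_ROLE_ARN',
    'AWS_WEB_IDENTITY_TOKEN_FILE',
    'AWS_DEFAULT_REGION',
    'AWS_SESSION_TOKEN',   # newly added: required for temporary credentials
]

def fetch_environment():
    # Drop everything not explicitly allow-listed
    return {var: os.environ[var] for var in FETCH_EXPORT_VARS if var in os.environ}

print(sorted(fetch_environment()))
```
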
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 0deeb5cee1..df33fb6aeb 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -87,6 +87,7 @@ from contextlib import contextmanager
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
+from bb.fetch2 import trusted_network
sha1_re = re.compile(r'^[0-9a-f]{40}$')
@@ -258,7 +259,7 @@ class Git(FetchMethod):
for name in ud.names:
ud.unresolvedrev[name] = 'HEAD'
- ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat"
+ ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all"
write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -355,6 +356,16 @@ class Git(FetchMethod):
# is not possible
if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
return True
+ # If the url is not in a trusted network, i.e. BB_NO_NETWORK is set to 0
+ # and BB_ALLOWED_NETWORKS does not contain the host that ud.url uses, then
+ # we need to try premirrors first, as fetching from upstream is destined to fail.
+ if not trusted_network(d, ud.url):
+ return True
+ # The following check ensures an incremental fetch in downloads: the premirror
+ # might be old and lack the new rev required, which would force a total removal
+ # and a fresh clone. So if we can reach the network, we prefer upstream over the
+ # premirror, even though the premirror might already contain the new rev.
if os.path.exists(ud.clonedir):
return False
return True
@@ -375,7 +386,11 @@ class Git(FetchMethod):
else:
tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=tmpdir)
- fetch_cmd = "LANG=C %s fetch -f --progress %s " % (ud.basecmd, shlex.quote(tmpdir))
+ output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
+ if 'mirror' in output:
+ runfetchcmd("%s remote rm mirror" % ud.basecmd, d, workdir=ud.clonedir)
+ runfetchcmd("%s remote add --mirror=fetch mirror %s" % (ud.basecmd, tmpdir), d, workdir=ud.clonedir)
+ fetch_cmd = "LANG=C %s fetch -f --update-head-ok --progress mirror " % (ud.basecmd)
runfetchcmd(fetch_cmd, d, workdir=ud.clonedir)
repourl = self._get_repo_url(ud)
@@ -514,7 +529,7 @@ class Git(FetchMethod):
logger.info("Creating tarball of git repository")
with create_atomic(ud.fullmirror) as tfile:
- mtime = runfetchcmd("git log --all -1 --format=%cD", d,
+ mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d,
quiet=True, workdir=ud.clonedir)
runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ."
% (tfile, mtime), d, workdir=ud.clonedir)
@@ -812,38 +827,42 @@ class Git(FetchMethod):
"""
pupver = ('', '')
- tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
try:
output = self._lsremote(ud, d, "refs/tags/*")
except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
bb.note("Could not list remote: %s" % str(e))
return pupver
+ rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
+ pver_re = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
+ nonrel_re = re.compile(r"(alpha|beta|rc|final)+")
+
verstring = ""
- revision = ""
for line in output.split("\n"):
if not line:
break
- tag_head = line.split("/")[-1]
+ m = rev_tag_re.match(line)
+ if not m:
+ continue
+
+ (revision, tag) = m.groups()
+
# Ignore non-released branches
- m = re.search(r"(alpha|beta|rc|final)+", tag_head)
- if m:
+ if nonrel_re.search(tag):
continue
# search for version in the line
- tag = tagregex.search(tag_head)
- if tag is None:
+ m = pver_re.search(tag)
+ if not m:
continue
- tag = tag.group('pver')
- tag = tag.replace("_", ".")
+ pver = m.group('pver').replace("_", ".")
- if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
+ if verstring and bb.utils.vercmp(("0", pver, ""), ("0", verstring, "")) < 0:
continue
- verstring = tag
- revision = line.split()[0]
+ verstring = pver
pupver = (verstring, revision)
return pupver
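
The reworked latest_versionstring() now parses each ls-remote line with a single revision-plus-tag regex, so tags containing slashes keep their full name, then filters pre-release tags and keeps the highest version. A standalone sketch of the same flow, fed with made-up ls-remote output and a crude stand-in for bb.utils.vercmp:

```python
import re

# Made-up `git ls-remote refs/tags/*` output (hashes are fake)
output = "\n".join([
    "1" * 40 + "\trefs/tags/v2.7.2",
    "2" * 40 + "\trefs/tags/v2.7.3-rc1",  # pre-release: filtered out
    "3" * 40 + "\trefs/tags/bar/v2.7.3",  # slashes in tag names now survive
])

rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
pver_re = re.compile(r"(?P<pver>([0-9][\.|_]?)+)")  # default UPSTREAM_CHECK_GITTAGREGEX
nonrel_re = re.compile(r"(alpha|beta|rc|final)+")

def vercmp(a, b):
    # crude stand-in for bb.utils.vercmp, fine for dotted integers
    pa, pb = [list(map(int, v.split("."))) for v in (a, b)]
    return (pa > pb) - (pa < pb)

verstring, revision = "", ""
for line in output.split("\n"):
    m = rev_tag_re.match(line)
    if not m:
        continue
    rev, tag = m.groups()
    if nonrel_re.search(tag):        # ignore alpha/beta/rc/final tags
        continue
    m = pver_re.search(tag)
    if not m:
        continue
    pver = m.group("pver").replace("_", ".")
    if verstring and vercmp(pver, verstring) < 0:
        continue
    verstring, revision = pver, rev

print(verstring, revision[:8])       # -> 2.7.3 33333333
```
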
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index af11e9a8f4..e86ccd8c61 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -270,11 +270,11 @@ class RunQueueScheduler(object):
best = None
bestprio = None
for tid in buildable:
- taskname = taskname_from_tid(tid)
- if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
- continue
prio = self.rev_prio_map[tid]
if bestprio is None or bestprio > prio:
+ taskname = taskname_from_tid(tid)
+ if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
+ continue
stamp = self.stamps[tid]
if stamp in self.rq.build_stamps.values():
continue
@@ -1840,6 +1840,7 @@ class RunQueueExecute:
self.failed_tids = []
self.sq_deferred = {}
self.sq_needed_harddeps = set()
+ self.sq_harddep_deferred = set()
self.stampcache = {}
@@ -1914,6 +1915,8 @@ class RunQueueExecute:
event = bb.event.StaleSetSceneTasks(found[mc])
bb.event.fire(event, self.cooker.databuilder.mcdata[mc])
+ self.build_taskdepdata_cache()
+
def runqueue_process_waitpid(self, task, status, fakerootlog=None):
# self.build_stamps[pid] may not exist when use shared work directory.
@@ -2161,7 +2164,7 @@ class RunQueueExecute:
if not self.sqdone and self.can_start_task():
# Find the next setscene to run
for nexttask in self.sorted_setscene_tids:
- if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values():
+ if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
nexttask not in self.sq_needed_harddeps and \
self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
@@ -2182,6 +2185,7 @@ class RunQueueExecute:
self.sq_buildable.add(dep)
self.sq_needed_harddeps.add(dep)
updated = True
+ self.sq_harddep_deferred.add(nexttask)
if updated:
return True
continue
@@ -2413,6 +2417,22 @@ class RunQueueExecute:
ret.add(dep)
return ret
+ # Build the individual cache entries in advance once to save time
+ def build_taskdepdata_cache(self):
+ taskdepdata_cache = {}
+ for task in self.rqdata.runtaskentries:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
+ pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
+ deps = self.rqdata.runtaskentries[task].depends
+ provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
+ taskhash = self.rqdata.runtaskentries[task].hash
+ unihash = self.rqdata.runtaskentries[task].unihash
+ deps = self.filtermcdeps(task, mc, deps)
+ hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
+ taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
+
+ self.taskdepdata_cache = taskdepdata_cache
+
# We filter out multiconfig dependencies from taskdepdata we pass to the tasks
# as most code can't handle them
def build_taskdepdata(self, task):
@@ -2424,16 +2444,9 @@ class RunQueueExecute:
while next:
additional = []
for revdep in next:
- (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
- pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
- deps = self.rqdata.runtaskentries[revdep].depends
- provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
- taskhash = self.rqdata.runtaskentries[revdep].hash
- unihash = self.rqdata.runtaskentries[revdep].unihash
- deps = self.filtermcdeps(task, mc, deps)
- hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
- for revdep2 in deps:
+ self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash
+ taskdepdata[revdep] = self.taskdepdata_cache[revdep]
+ for revdep2 in self.taskdepdata_cache[revdep][3]:
if revdep2 not in taskdepdata:
additional.append(revdep2)
next = additional
@@ -2667,6 +2680,7 @@ class RunQueueExecute:
if changed:
self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
self.sq_needed_harddeps = set()
+ self.sq_harddep_deferred = set()
self.holdoff_need_update = True
def scenequeue_updatecounters(self, task, fail=False):
@@ -2701,6 +2715,13 @@ class RunQueueExecute:
new.add(dep)
next = new
+ # If this task was one which other setscene tasks have a hard dependency upon, we need
+ # to walk through the hard dependencies and allow execution of those which have completed dependencies.
+ if task in self.sqdata.sq_harddeps:
+ for dep in self.sq_harddep_deferred.copy():
+ if self.sqdata.sq_harddeps_rev[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+ self.sq_harddep_deferred.remove(dep)
+
self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
self.holdoff_need_update = True
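
The runqueue change trades per-call recomputation for a one-time cache: the static fields of each task's record are computed once in build_taskdepdata_cache(), and build_taskdepdata() only refreshes the volatile unihash slot (index 6) while walking dependencies breadth-first. The shape of that pattern in isolation, with toy node names and made-up fields:

```python
# One-time precomputation of static per-node records
deps = {"a": ["b", "c"], "b": ["c"], "c": []}
volatile = {"a": 10, "b": 20, "c": 30}

cache = {n: ["static-" + n, deps[n], None] for n in deps}

def depdata(root):
    out = {}
    frontier = [root]
    while frontier:
        additional = []
        for node in frontier:
            cache[node][2] = volatile[node]  # refresh only the volatile slot
            out[node] = cache[node]
            for dep in cache[node][1]:       # deps come from the cache too
                if dep not in out:
                    additional.append(dep)
        frontier = additional
    return out

print(depdata("a"))  # records for 'a', 'b' and 'c', each with fresh volatile data
```
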
diff --git a/poky/bitbake/lib/bb/server/process.py b/poky/bitbake/lib/bb/server/process.py
index 6d77ce4786..76b189291d 100644
--- a/poky/bitbake/lib/bb/server/process.py
+++ b/poky/bitbake/lib/bb/server/process.py
@@ -402,6 +402,22 @@ class ProcessServer():
serverlog("".join(msg))
def idle_thread(self):
+ if self.cooker.configuration.profile:
+ try:
+ import cProfile as profile
+ except:
+ import profile
+ prof = profile.Profile()
+
+ ret = profile.Profile.runcall(prof, self.idle_thread_internal)
+
+ prof.dump_stats("profile-mainloop.log")
+ bb.utils.process_profilelog("profile-mainloop.log")
+ serverlog("Raw profiling information saved to profile-mainloop.log and processed statistics to profile-mainloop.log.processed")
+ else:
+ self.idle_thread_internal()
+
+ def idle_thread_internal(self):
def remove_idle_func(function):
with bb.utils.lock_timeout(self._idlefuncsLock):
del self._idlefuns[function]
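
The profiling hook above wraps the whole idle loop in a cProfile session when bitbake is started with profiling enabled, then dumps raw stats and a processed report. The same wrap-and-dump pattern in isolation, using pstats in place of bb.utils.process_profilelog; busy_work is a placeholder:

```python
import cProfile
import pstats

def busy_work():
    return sum(i * i for i in range(100_000))

prof = cProfile.Profile()
result = prof.runcall(busy_work)          # profile a single callable
prof.dump_stats("profile-mainloop.log")   # raw stats, as in the patch

# bb.utils.process_profilelog would write the *.processed report;
# pstats gives the equivalent summary here
pstats.Stats("profile-mainloop.log").sort_stats("cumulative").print_stats(5)
```
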
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 5a584cadf9..3ab8431a08 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -102,9 +102,18 @@ class SignatureGenerator(object):
if flag:
self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag
+ def get_cached_unihash(self, tid):
+ return None
+
def get_unihash(self, tid):
+ unihash = self.get_cached_unihash(tid)
+ if unihash:
+ return unihash
return self.taskhash[tid]
+ def get_unihashes(self, tids):
+ return {tid: self.get_unihash(tid) for tid in tids}
+
def prep_taskhash(self, tid, deps, dataCaches):
return
@@ -521,31 +530,45 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
class SignatureGeneratorUniHashMixIn(object):
def __init__(self, data):
self.extramethod = {}
+ # NOTE: The cache only tracks hashes that exist. Hashes that don't
+ # exist are always queries from the server since it is possible for
+ # hashes to appear over time, but much less likely for them to
+ # disappear
+ self.unihash_exists_cache = set()
super().__init__(data)
def get_taskdata(self):
- return (self.server, self.method, self.extramethod) + super().get_taskdata()
+ return (self.server, self.method, self.extramethod, self.max_parallel) + super().get_taskdata()
def set_taskdata(self, data):
- self.server, self.method, self.extramethod = data[:3]
- super().set_taskdata(data[3:])
+ self.server, self.method, self.extramethod, self.max_parallel = data[:4]
+ super().set_taskdata(data[4:])
def client(self):
if getattr(self, '_client', None) is None:
self._client = hashserv.create_client(self.server)
return self._client
+ def client_pool(self):
+ if getattr(self, '_client_pool', None) is None:
+ self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel)
+ return self._client_pool
+
def reset(self, data):
- if getattr(self, '_client', None) is not None:
- self._client.close()
- self._client = None
+ self.__close_clients()
return super().reset(data)
def exit(self):
+ self.__close_clients()
+ return super().exit()
+
+ def __close_clients(self):
if getattr(self, '_client', None) is not None:
self._client.close()
self._client = None
- return super().exit()
+ if getattr(self, '_client_pool', None) is not None:
+ self._client_pool.close()
+ self._client_pool = None
def get_stampfile_hash(self, tid):
if tid in self.taskhash:
@@ -578,7 +601,7 @@ class SignatureGeneratorUniHashMixIn(object):
return None
return unihash
- def get_unihash(self, tid):
+ def get_cached_unihash(self, tid):
taskhash = self.taskhash[tid]
# If its not a setscene task we can return
@@ -593,40 +616,101 @@ class SignatureGeneratorUniHashMixIn(object):
self.unihash[tid] = unihash
return unihash
- # In the absence of being able to discover a unique hash from the
- # server, make it be equivalent to the taskhash. The unique "hash" only
- # really needs to be a unique string (not even necessarily a hash), but
- # making it match the taskhash has a few advantages:
- #
- # 1) All of the sstate code that assumes hashes can be the same
- # 2) It provides maximal compatibility with builders that don't use
- # an equivalency server
- # 3) The value is easy for multiple independent builders to derive the
- # same unique hash from the same input. This means that if the
- # independent builders find the same taskhash, but it isn't reported
- # to the server, there is a better chance that they will agree on
- # the unique hash.
- unihash = taskhash
+ return None
- try:
- method = self.method
- if tid in self.extramethod:
- method = method + self.extramethod[tid]
- data = self.client().get_unihash(method, self.taskhash[tid])
- if data:
- unihash = data
+ def _get_method(self, tid):
+ method = self.method
+ if tid in self.extramethod:
+ method = method + self.extramethod[tid]
+
+ return method
+
+ def unihashes_exist(self, query):
+ if len(query) == 0:
+ return {}
+
+ uncached_query = {}
+ result = {}
+ for key, unihash in query.items():
+ if unihash in self.unihash_exists_cache:
+ result[key] = True
+ else:
+ uncached_query[key] = unihash
+
+ if self.max_parallel <= 1 or len(uncached_query) <= 1:
+ # No parallelism required. Make the query serially with the single client
+ uncached_result = {
+ key: self.client().unihash_exists(value) for key, value in uncached_query.items()
+ }
+ else:
+ uncached_result = self.client_pool().unihashes_exist(uncached_query)
+
+ for key, exists in uncached_result.items():
+ if exists:
+ self.unihash_exists_cache.add(query[key])
+ result[key] = exists
+
+ return result
+
+ def get_unihash(self, tid):
+ return self.get_unihashes([tid])[tid]
+
+ def get_unihashes(self, tids):
+ """
+ For an iterable of tids, returns a dictionary that maps each tid to a
+ unihash
+ """
+ result = {}
+ queries = {}
+ query_result = {}
+
+ for tid in tids:
+ unihash = self.get_cached_unihash(tid)
+ if unihash:
+ result[tid] = unihash
+ else:
+ queries[tid] = (self._get_method(tid), self.taskhash[tid])
+
+ if len(queries) == 0:
+ return result
+
+ if self.max_parallel <= 1 or len(queries) <= 1:
+ # No parallelism required. Make the query serially with the single client
+ for tid, args in queries.items():
+ query_result[tid] = self.client().get_unihash(*args)
+ else:
+ query_result = self.client_pool().get_unihashes(queries)
+
+ for tid, unihash in query_result.items():
+ # In the absence of being able to discover a unique hash from the
+ # server, make it be equivalent to the taskhash. The unique "hash" only
+ # really needs to be a unique string (not even necessarily a hash), but
+ # making it match the taskhash has a few advantages:
+ #
+ # 1) All of the sstate code that assumes hashes can be the same
+ # 2) It provides maximal compatibility with builders that don't use
+ # an equivalency server
+ # 3) The value is easy for multiple independent builders to derive the
+ # same unique hash from the same input. This means that if the
+ # independent builders find the same taskhash, but it isn't reported
+ # to the server, there is a better chance that they will agree on
+ # the unique hash.
+ taskhash = self.taskhash[tid]
+ if unihash:
# A unique hash equal to the taskhash is not very interesting,
# so it is reported at debug level 2. If they differ, that
# is much more interesting, so it is reported at debug level 1
hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
else:
hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
- except ConnectionError as e:
- bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
+ unihash = taskhash
- self.set_unihash(tid, unihash)
- self.unihash[tid] = unihash
- return unihash
+
+ self.set_unihash(tid, unihash)
+ self.unihash[tid] = unihash
+ result[tid] = unihash
+
+ return result
def report_unihash(self, path, task, d):
import importlib
@@ -754,6 +838,7 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
super().init_rundepcheck(data)
self.server = data.getVar('BB_HASHSERVE')
self.method = "sstate_output_hash"
+ self.max_parallel = 1
def clean_checksum_file_path(file_checksum_tuple):
f, cs = file_checksum_tuple
@@ -849,10 +934,18 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
formatparams.update(values)
return formatstr.format(**formatparams)
- with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
- a_data = json.load(f, object_hook=SetDecoder)
- with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
- b_data = json.load(f, object_hook=SetDecoder)
+ try:
+ with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+ a_data = json.load(f, object_hook=SetDecoder)
+ except (TypeError, OSError) as err:
+ bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
+ raise err
+ try:
+ with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
+ b_data = json.load(f, object_hook=SetDecoder)
+ except (TypeError, OSError) as err:
+ bb.error("Failed to open sigdata file '%s': %s" % (b, str(err)))
+ raise err
for data in [a_data, b_data]:
handle_renames(data)
@@ -1090,8 +1183,12 @@ def calc_taskhash(sigdata):
def dump_sigfile(a):
output = []
- with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
- a_data = json.load(f, object_hook=SetDecoder)
+ try:
+ with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+ a_data = json.load(f, object_hook=SetDecoder)
+ except (TypeError, OSError) as err:
+ bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
+ raise err
handle_renames(a_data)
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index c7a23407c1..5ed5b5607f 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -416,9 +416,9 @@ class FetcherTest(unittest.TestCase):
def git(self, cmd, cwd=None):
if isinstance(cmd, str):
- cmd = 'git ' + cmd
+ cmd = 'git -c safe.bareRepository=all ' + cmd
else:
- cmd = ['git'] + cmd
+ cmd = ['git', '-c', 'safe.bareRepository=all'] + cmd
if cwd is None:
cwd = self.gitdir
return bb.process.run(cmd, cwd=cwd)[0]
@@ -1108,6 +1108,25 @@ class FetcherNetworkTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing')
@skipIfNoNetwork()
+ def test_git_submodule_restricted_network_premirrors(self):
+ # This test ensures that premirrors are tried in a restricted network,
+ # i.e. when BB_ALLOWED_NETWORKS does not contain the domain the url uses
+ url = "gitsm://github.com/grpc/grpc.git;protocol=https;name=grpc;branch=v1.60.x;rev=0ef13a7555dbaadd4633399242524129eef5e231"
+ # create a download directory to be used as premirror later
+ tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
+ dl_premirror = os.path.join(tempdir, "download-premirror")
+ os.mkdir(dl_premirror)
+ self.d.setVar("DL_DIR", dl_premirror)
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ # now use the premirror in restricted network
+ self.d.setVar("DL_DIR", self.dldir)
+ self.d.setVar("PREMIRRORS", "gitsm://.*/.* gitsm://%s/git2/MIRRORNAME;protocol=file" % dl_premirror)
+ self.d.setVar("BB_ALLOWED_NETWORKS", "*.some.domain")
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+
+ @skipIfNoNetwork()
def test_git_submodule_dbus_broker(self):
# The following external repositories have shown failures in fetch and unpack operations
# We want to avoid regressions!
@@ -1247,8 +1266,9 @@ class SVNTest(FetcherTest):
cwd=repo_dir)
bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
- # Github will emulate SVN. Use this to check if we're downloding...
- bb.process.run("svn propset svn:externals 'bitbake https://github.com/PhilipHazel/pcre2.git' .",
+ # Github won't emulate SVN anymore (see https://github.blog/2023-01-20-sunsetting-subversion-support/)
+ # Use a still-accessible svn repo (only trunk, to avoid longer downloads)
+ bb.process.run("svn propset svn:externals 'bitbake https://svn.apache.org/repos/asf/serf/trunk' .",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
bb.process.run("svn commit --non-interactive -m 'Add external'",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1276,8 +1296,8 @@ class SVNTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
- self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should NOT exist")
- self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should NOT exit")
+ self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should NOT exist")
+ self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should NOT exist")
@skipIfNoSvn()
def test_external_svn(self):
@@ -1290,8 +1310,8 @@ class SVNTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
- self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should exist")
- self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should exit")
+ self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should exist")
+ self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should exist")
class TrustedNetworksTest(FetcherTest):
def test_trusted_network(self):
@@ -1369,37 +1389,39 @@ class FetchLatestVersionTest(FetcherTest):
test_git_uris = {
# version pattern "X.Y.Z"
- ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
+ ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "", "")
: "1.99.4",
# version pattern "vX.Y"
# mirror of git.infradead.org since network issues interfered with testing
- ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git;branch=master;protocol=https", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
+ ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git;branch=master;protocol=https", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "", "")
: "1.5.0",
# version pattern "pkg_name-X.Y"
# mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing
- ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto;branch=master;protocol=https", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
+ ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto;branch=master;protocol=https", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "", "")
: "1.0",
# version pattern "pkg_name-vX.Y.Z"
- ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
+ ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
: "1.4.0",
# combination version pattern
- ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
+ ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
: "1.2.0",
- ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
+ ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
: "2014.01",
# version pattern "yyyymmdd"
- ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
+ ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
# mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
- ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap;branch=master;protocol=https", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))")
+ ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap;branch=master;protocol=https", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))", "")
: "0.4.3",
- ("build-appliance-image", "git://git.yoctoproject.org/poky;branch=master;protocol=https", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))")
+ ("build-appliance-image", "git://git.yoctoproject.org/poky;branch=master;protocol=https", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))", "")
: "11.0.0",
- ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+ ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))", "")
: "1.3.59",
- ("remake", "git://github.com/rocky/remake.git;protocol=https;branch=master", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+ ("remake", "git://github.com/rocky/remake.git;protocol=https;branch=master", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))", "")
: "3.82+dbg0.9",
+ ("sysdig", "git://github.com/draios/sysdig.git;branch=dev;protocol=https", "4fb6288275f567f63515df0ff0a6518043ecfa9b", r"^(?P<pver>\d+(\.\d+)+)", "10.0.0")
+ : "0.28.0",
}
test_wget_uris = {
@@ -1467,6 +1489,9 @@ class FetchLatestVersionTest(FetcherTest):
self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
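+ # k[4] is the new optional upper version bound: when non-empty, the
+ # version detected upstream must also satisfy verstring <= k[4]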
+ if k[4]:
+ r = bb.utils.vercmp_string(verstring, k[4])
+ self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4]))
def test_wget_latest_versionstring(self):
testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
@@ -3082,6 +3107,24 @@ class FetchPremirroronlyLocalTest(FetcherTest):
self.git("checkout {}".format(head), self.gitdir)
return newrev
+ def test_mirror_multiple_fetches(self):
+ self.make_git_repo()
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ ## New commit in premirror. It's not in the download_dir
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher2 = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher2.download()
+ fetcher2.unpack(self.unpackdir)
+ ## New commit in premirror. It's not in the download_dir
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher3 = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher3.download()
+ fetcher3.unpack(self.unpackdir)
+
+
def test_mirror_commit_nonexistent(self):
self.make_git_repo()
self.d.setVar("SRCREV", "0"*40)
diff --git a/poky/bitbake/lib/bb/ui/taskexp_ncurses.py b/poky/bitbake/lib/bb/ui/taskexp_ncurses.py
new file mode 100755
index 0000000000..dd91d26bc3
--- /dev/null
+++ b/poky/bitbake/lib/bb/ui/taskexp_ncurses.py
@@ -0,0 +1,1511 @@
+#
+# BitBake Graphical ncurses-based Dependency Explorer
+# * Based on the GTK implementation
+# * Intended to run on any Linux host
+#
+# Copyright (C) 2007 Ross Burton
+# Copyright (C) 2007 - 2008 Richard Purdie
+# Copyright (C) 2022 - 2024 David Reyna
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# Execution example:
+# $ bitbake -g -u taskexp_ncurses.py acl zlib
+#
+# Self-test example (executes a script of GUI actions):
+# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl
+# ...
+# $ echo $?
+# 0
+# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl foo
+# ERROR: Nothing PROVIDES 'foo'. Close matches:
+# ofono
+# $ echo $?
+# 1
+#
+# Self-test with no terminal example (only tests dependency fetch from bitbake):
+# $ TASK_EXP_UNIT_TEST_NOTERM=1 bitbake -g -u taskexp_ncurses quilt
+# $ echo $?
+# 0
+#
+# Features:
+# * Ncurses is used for the presentation layer. Only the 'curses'
+# library is used (none of the extension libraries), plus only
+# one main screen is used (no sub-windows)
+# * Uses the 'generateDepTreeEvent' bitbake event to fetch the
+# dynamic dependency data based on passed recipes
+# * Computes and provides reverse dependencies
+# * Supports task sorting on:
+# (a) Task dependency order within each recipe
+# (b) Pure alphabetical order
+# (c) Provisions for third sort order (bitbake order?)
+# * The 'Filter' does a "*string*" wildcard filter on tasks in the
+# main window, dynamically re-ordering and re-centering the content
+# * A 'Print' function exports the selected task or its whole recipe
+# task set to the default file "taskdep_print.log"
+# * Supports a progress bar for bitbake loads and file printing
+# * Line art for box drawing supported, ASCII art as an alternative
+# * No horizontal scrolling support. The selected task's full name is
+# shown in the bottom bar
+# * Dynamically catches terminals that are (or become) too small
+# * Exception handling to ensure a return to a normal terminal on errors
+# * Debugging support, self-test option
+#
+
+import sys
+import os
+import traceback
+import curses
+import re
+import time
+
+# Bitbake server support
+import threading
+from xmlrpc import client
+import bb
+import bb.event
+
+# Dependency indexes (depends_model)
+(TYPE_DEP, TYPE_RDEP) = (0, 1)
+DEPENDS_TYPE = 0
+DEPENDS_TASK = 1
+DEPENDS_DEPS = 2
+# Task indexes (task_list)
+TASK_NAME = 0
+TASK_PRIMARY = 1
+TASK_SORT_ALPHA = 2
+TASK_SORT_DEPS = 3
+TASK_SORT_BITBAKE = 4
+# Sort options (default is SORT_DEPS)
+SORT_ALPHA = 0
+SORT_DEPS = 1
+SORT_BITBAKE_ENABLE = False # NOTE: future sort
+SORT_BITBAKE = 2
+sort_model = SORT_DEPS
+# Print options
+PRINT_MODEL_1 = 0
+PRINT_MODEL_2 = 1
+print_model = PRINT_MODEL_2
+print_file_name = "taskdep_print.log"
+print_file_backup_name = "taskdep_print_backup.log"
+is_printed = False
+is_filter = False
+
+# Standard (and backup) key mappings
+CHAR_NUL = 0 # Used as self-test nop char
+CHAR_BS_H = 8 # Alternate backspace key
+CHAR_TAB = 9
+CHAR_RETURN = 10
+CHAR_ESCAPE = 27
+CHAR_UP = ord('{') # Used as self-test ASCII char
+CHAR_DOWN = ord('}') # Used as self-test ASCII char
+
+# Color_pair IDs
+CURSES_NORMAL = 0
+CURSES_HIGHLIGHT = 1
+CURSES_WARNING = 2
+
+
+#################################################
+### Debugging support
+###
+
+verbose = False
+
+# Debug: message display slow-step through display update issues
+def alert(msg,screen):
+ if msg:
+ screen.addstr(0, 10, '[%-4s]' % msg)
+ screen.refresh()
+ curses.napms(2000)
+ else:
+ if do_line_art:
+ for i in range(10, 24):
+ screen.addch(0, i, curses.ACS_HLINE)
+ else:
+ screen.addstr(0, 10, '-' * 14)
+ screen.refresh()
+
+# Debug: display edge conditions on frame movements
+def debug_frame(nbox_obj):
+ if verbose:
+ nbox_obj.screen.addstr(0, 50, '[I=%2d,O=%2d,S=%3s,H=%2d,M=%4d]' % (
+ nbox_obj.cursor_index,
+ nbox_obj.cursor_offset,
+ nbox_obj.scroll_offset,
+ nbox_obj.inside_height,
+ len(nbox_obj.task_list),
+ ))
+ nbox_obj.screen.refresh()
+
+#
+# Unit test (assumes that 'quilt-native' is always present)
+#
+
+unit_test = os.environ.get('TASK_EXP_UNIT_TEST')
+unit_test_cmnds=[
+ '# Default selected task in primary box',
+ 'tst_selected=<TASK>.do_recipe_qa',
+ '# Default selected task in deps',
+ 'tst_entry=<TAB>',
+ 'tst_selected=',
+ '# Default selected task in rdeps',
+ 'tst_entry=<TAB>',
+ 'tst_selected=<TASK>.do_fetch',
+ "# Test 'select' back to primary box",
+ 'tst_entry=<CR>',
+ '#tst_entry=<DOWN>', # optional injected error
+ 'tst_selected=<TASK>.do_fetch',
+ '# Check filter',
+ 'tst_entry=/uilt-nativ/',
+ 'tst_selected=quilt-native.do_recipe_qa',
+ '# Check print',
+ 'tst_entry=p',
+ 'tst_printed=quilt-native.do_fetch',
+ '#tst_printed=quilt-foo.do_nothing', # optional injected error
+ '# Done!',
+ 'tst_entry=q',
+]
+unit_test_idx=0
+unit_test_command_chars=''
+unit_test_results=[]
+def unit_test_action(active_package):
+ global unit_test_idx
+ global unit_test_command_chars
+ global unit_test_results
+ ret = CHAR_NUL
+ if unit_test_command_chars:
+ ch = unit_test_command_chars[0]
+ unit_test_command_chars = unit_test_command_chars[1:]
+ time.sleep(0.5)
+ ret = ord(ch)
+ else:
+ line = unit_test_cmnds[unit_test_idx]
+ unit_test_idx += 1
+ line = re.sub('#.*', '', line).strip()
+ line = line.replace('<TASK>',active_package.primary[0])
+ line = line.replace('<TAB>','\t').replace('<CR>','\n')
+ line = line.replace('<UP>','{').replace('<DOWN>','}')
+ if not line: line = 'nop=nop'
+ cmnd,value = line.split('=')
+ if cmnd == 'tst_entry':
+ unit_test_command_chars = value
+ elif cmnd == 'tst_selected':
+ active_selected = active_package.get_selected()
+ if active_selected != value:
+ unit_test_results.append("ERROR:SELFTEST:expected '%s' but got '%s' (NOTE:bitbake may have changed)" % (value,active_selected))
+ ret = ord('Q')
+ else:
+ unit_test_results.append("Pass:SELFTEST:found '%s'" % (value))
+ elif cmnd == 'tst_printed':
+ result = os.system('grep %s %s' % (value,print_file_name))
+ if result:
+ unit_test_results.append("ERROR:PRINTTEST:expected '%s' in '%s'" % (value,print_file_name))
+ ret = ord('Q')
+ else:
+ unit_test_results.append("Pass:PRINTTEST:found '%s'" % (value))
+ # Return the action (CHAR_NUL for no action until the next round)
+ return(ret)
+
+# Unit test without an interactive terminal (e.g. ptest)
+unit_test_noterm = os.environ.get('TASK_EXP_UNIT_TEST_NOTERM')
+
+
+#################################################
+### Window frame rendering
+###
+### By default, use the normal line art. Since
+### these extended characters are not ASCII, one
+### must use the ncurses API to render them.
+### The alternate ASCII line art set is optionally
+### available via the 'do_line_art' flag
+
+# By default, render frames using line art
+do_line_art = True
+
+# ASCII render set option
+CHAR_HBAR = '-'
+CHAR_VBAR = '|'
+CHAR_UL_CORNER = '/'
+CHAR_UR_CORNER = '\\'
+CHAR_LL_CORNER = '\\'
+CHAR_LR_CORNER = '/'
+
+# Box frame drawing with line-art
+def line_art_frame(box):
+ x = box.base_x
+ y = box.base_y
+ w = box.width
+ h = box.height + 1
+
+ if do_line_art:
+ for i in range(1, w - 1):
+ box.screen.addch(y, x + i, curses.ACS_HLINE, box.color)
+ box.screen.addch(y + h - 1, x + i, curses.ACS_HLINE, box.color)
+ body_line = "%s" % (' ' * (w - 2))
+ for i in range(1, h - 1):
+ box.screen.addch(y + i, x, curses.ACS_VLINE, box.color)
+ box.screen.addstr(y + i, x + 1, body_line, box.color)
+ box.screen.addch(y + i, x + w - 1, curses.ACS_VLINE, box.color)
+ box.screen.addch(y, x, curses.ACS_ULCORNER, box.color)
+ box.screen.addch(y, x + w - 1, curses.ACS_URCORNER, box.color)
+ box.screen.addch(y + h - 1, x, curses.ACS_LLCORNER, box.color)
+ box.screen.addch(y + h - 1, x + w - 1, curses.ACS_LRCORNER, box.color)
+ else:
+ top_line = "%s%s%s" % (CHAR_UL_CORNER,CHAR_HBAR * (w - 2),CHAR_UR_CORNER)
+ body_line = "%s%s%s" % (CHAR_VBAR,' ' * (w - 2),CHAR_VBAR)
+ bot_line = "%s%s%s" % (CHAR_UR_CORNER,CHAR_HBAR * (w - 2),CHAR_UL_CORNER)
+ tag_line = "%s%s%s" % ('[',CHAR_HBAR * (w - 2),']')
+ # Top bar
+ box.screen.addstr(y, x, top_line)
+ # Middle frame
+ for i in range(1, (h - 1)):
+ box.screen.addstr(y+i, x, body_line)
+ # Bottom bar
+ box.screen.addstr(y + (h - 1), x, bot_line)
+
+# Connect the separate boxes
+def line_art_fixup(box):
+ if do_line_art:
+ box.screen.addch(box.base_y+2, box.base_x, curses.ACS_LTEE, box.color)
+ box.screen.addch(box.base_y+2, box.base_x+box.width-1, curses.ACS_RTEE, box.color)
+
+
+#################################################
+### Ncurses box object : box frame object to display
+### and manage a sub-window's display elements
+### using basic ncurses
+###
+### Supports:
+### * Frame drawing, content (re)drawing
+### * Content scrolling via ArrowUp, ArrowDn, PgUp, PgDN,
+### * Highlighting for active selected item
+### * Content sorting based on selected sort model
+###
+
+class NBox():
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ # Box description
+ self.screen = screen
+ self.label = label
+ self.primary = primary
+ self.color = curses.color_pair(CURSES_NORMAL) if screen else None
+ # Box boundaries
+ self.base_x = base_x
+ self.base_y = base_y
+ self.width = width
+ self.height = height
+ # Cursor/scroll management
+ self.cursor_enable = False
+ self.cursor_index = 0 # Absolute offset
+ self.cursor_offset = 0 # Frame centric offset
+ self.scroll_offset = 0 # Frame centric offset
+ # Box specific content
+ # Format of each entry is [task_name,is_primary_recipe,alpha_sort_key,deps_sort_key]
+ # (plus a trailing bitbake_sort_key when SORT_BITBAKE_ENABLE is set)
+ self.task_list = []
+
+ @property
+ def inside_width(self):
+ return(self.width-2)
+
+ @property
+ def inside_height(self):
+ return(self.height-2)
+
+ # Populate the box's content, including the sort mappings and is_primary flag
+ def task_list_append(self,task_name,dep):
+ task_sort_alpha = task_name
+ task_sort_deps = dep.get_dep_sort(task_name)
+ is_primary = False
+ for primary in self.primary:
+ if task_name.startswith(primary+'.'):
+ is_primary = True
+ if SORT_BITBAKE_ENABLE:
+ task_sort_bitbake = dep.get_bb_sort(task_name)
+ self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps,task_sort_bitbake])
+ else:
+ self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps])
+
+ def reset(self):
+ self.task_list = []
+ self.cursor_index = 0 # Absolute offset
+ self.cursor_offset = 0 # Frame centric offset
+ self.scroll_offset = 0 # Frame centric offset
+
+ # Sort the box's content based on the current sort model
+ def sort(self):
+ if SORT_ALPHA == sort_model:
+ self.task_list.sort(key = lambda x: x[TASK_SORT_ALPHA])
+ elif SORT_DEPS == sort_model:
+ self.task_list.sort(key = lambda x: x[TASK_SORT_DEPS])
+ elif SORT_BITBAKE == sort_model:
+ self.task_list.sort(key = lambda x: x[TASK_SORT_BITBAKE])
+
+ # The target package list (to highlight), from the command line
+ def set_primary(self,primary):
+ self.primary = primary
+
+ # Draw the box's outside frame
+ def draw_frame(self):
+ line_art_frame(self)
+ # Title
+ self.screen.addstr(self.base_y,
+ (self.base_x + (self.width//2))-((len(self.label)+2)//2),
+ '['+self.label+']')
+ self.screen.refresh()
+
+ # Draw the box's inside text content
+ def redraw(self):
+ task_list_len = len(self.task_list)
+ # Middle frame
+ for i in range(0,self.inside_height+1):
+ if i < (task_list_len + self.scroll_offset):
+ str_ctl = "%%-%ss" % (self.width-3)
+ # Safety assert
+ if (i + self.scroll_offset) >= task_list_len:
+ alert("REDRAW:%2d,%4d,%4d" % (i,self.scroll_offset,task_list_len),self.screen)
+ break
+
+ task_obj = self.task_list[i + self.scroll_offset]
+ task = task_obj[TASK_NAME][:self.inside_width-1]
+ task_primary = task_obj[TASK_PRIMARY]
+
+ if task_primary:
+ line = str_ctl % task[:self.inside_width-1]
+ self.screen.addstr(self.base_y+1+i, self.base_x+2, line, curses.A_BOLD)
+ else:
+ line = str_ctl % task[:self.inside_width-1]
+ self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
+ else:
+ line = "%s" % (' ' * (self.inside_width-1) )
+ self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
+ self.screen.refresh()
+
+ # Show the current selected task over the bottom of the frame
+ def show_selected(self,selected_task):
+ if not selected_task:
+ selected_task = self.get_selected()
+ tag_line = "%s%s%s" % ('[',CHAR_HBAR * (self.width-2),']')
+ self.screen.addstr(self.base_y + self.height, self.base_x, tag_line)
+ self.screen.addstr(self.base_y + self.height,
+ (self.base_x + (self.width//2))-((len(selected_task)+2)//2),
+ '['+selected_task+']')
+ self.screen.refresh()
+
+ # Load box with new table of content
+ def update_content(self,task_list):
+ self.task_list = task_list
+ if self.cursor_enable:
+ self.cursor_update(turn_on=False)
+ self.cursor_index = 0
+ self.cursor_offset = 0
+ self.scroll_offset = 0
+ self.redraw()
+ if self.cursor_enable:
+ self.cursor_update(turn_on=True)
+
+ # Manage the box's highlighted task and blinking cursor character
+ def cursor_on(self,is_on):
+ self.cursor_enable = is_on
+ self.cursor_update(is_on)
+
+ # Highlight the currently pointed-to task; draw the rest in normal style
+ def cursor_update(self,turn_on=True):
+ str_ctl = "%%-%ss" % (self.inside_width-1)
+ try:
+ if len(self.task_list):
+ task_obj = self.task_list[self.cursor_index]
+ task = task_obj[TASK_NAME][:self.inside_width-1]
+ task_primary = task_obj[TASK_PRIMARY]
+ task_font = curses.A_BOLD if task_primary else 0
+ else:
+ task = ''
+ task_font = 0
+ except Exception as e:
+ alert("CURSOR_UPDATE:%s" % (e),self.screen)
+ return
+ if turn_on:
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1,">", curses.color_pair(CURSES_HIGHLIGHT) | curses.A_BLINK)
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, curses.color_pair(CURSES_HIGHLIGHT) | task_font)
+ else:
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1," ")
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, task_font)
+
+ # Down arrow
+ def line_down(self):
+ if len(self.task_list) <= (self.cursor_index+1):
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index += 1
+ self.cursor_offset += 1
+ if self.cursor_offset > (self.inside_height):
+ self.cursor_offset -= 1
+ self.scroll_offset += 1
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Up arrow
+ def line_up(self):
+ if 0 > (self.cursor_index-1):
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index -= 1
+ self.cursor_offset -= 1
+ if self.cursor_offset < 0:
+ self.cursor_offset += 1
+ self.scroll_offset -= 1
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Page down
+ def page_down(self):
+ max_task = len(self.task_list)-1
+ if max_task < self.inside_height:
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index += 10
+ self.cursor_index = min(self.cursor_index,max_task)
+ self.cursor_offset = min(self.inside_height,self.cursor_index)
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Page up
+ def page_up(self):
+ max_task = len(self.task_list)-1
+ if max_task < self.inside_height:
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index -= 10
+ self.cursor_index = max(self.cursor_index,0)
+ self.cursor_offset = max(0, self.inside_height - (max_task - self.cursor_index))
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Return the currently selected task name for this box
+ def get_selected(self):
+ if self.task_list:
+ return(self.task_list[self.cursor_index][TASK_NAME])
+ else:
+ return('')
+
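+# Editor's note: the three NBox counters above obey an invariant that every
+# movement method maintains:
+#
+#   cursor_index == scroll_offset + cursor_offset
+#
+# i.e. the absolute position in task_list is the first visible row plus the
+# highlighted row inside the frame. A minimal check (hypothetical helper,
+# never called):
+#
+# def check_nbox_invariant(box):
+#     assert box.cursor_index == box.scroll_offset + box.cursor_offset
+#     assert 0 <= box.cursor_offset <= box.inside_height
+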
+#################################################
+### The helper sub-windows
+###
+
+# Show persistent help at the top of the screen
+class HelpBarView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(HelpBarView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+ def show_help(self,show):
+ self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.inside_width))
+ if show:
+ help = "Help='?' Filter='/' NextBox=<Tab> Select=<Enter> Print='p','P' Quit='q'"
+ bar_size = self.inside_width - 5 - len(help)
+ self.screen.addstr(self.base_y,self.base_x+((self.inside_width-len(help))//2), help)
+ self.screen.refresh()
+
+# Pop up a detailed Help box
+class HelpBoxView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height, dep):
+ super(HelpBoxView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+ self.x_pos = 0
+ self.y_pos = 0
+ self.dep = dep
+
+ # Instantiate the pop-up help box
+ def show_help(self,show):
+ self.x_pos = self.base_x + 4
+ self.y_pos = self.base_y + 2
+
+ def add_line(line):
+ if line:
+ self.screen.addstr(self.y_pos,self.x_pos,line)
+ self.y_pos += 1
+
+ # Gather some statistics
+ dep_count = 0
+ rdep_count = 0
+ for task_obj in self.dep.depends_model:
+ if TYPE_DEP == task_obj[DEPENDS_TYPE]:
+ dep_count += 1
+ elif TYPE_RDEP == task_obj[DEPENDS_TYPE]:
+ rdep_count += 1
+
+ self.draw_frame()
+ line_art_fixup(self.dep)
+ add_line("Quit : 'q' ")
+ add_line("Filter task names : '/'")
+ add_line("Tab to next box : <Tab>")
+ add_line("Select a task : <Enter>")
+ add_line("Print task's deps : 'p'")
+ add_line("Print recipe's deps : 'P'")
+ add_line(" -> '%s'" % print_file_name)
+ add_line("Sort toggle : 's'")
+ add_line(" %s Recipe inner-depends order" % ('->' if (SORT_DEPS == sort_model) else '- '))
+ add_line(" %s Alpha-numeric order" % ('->' if (SORT_ALPHA == sort_model) else '- '))
+ if SORT_BITBAKE_ENABLE:
+ add_line(" %s Bitbake order" % ('->' if (TASK_SORT_BITBAKE == sort_model) else '- '))
+ add_line("Alternate backspace : <CTRL-H>")
+ add_line("")
+ add_line("Primary recipes = %s" % ','.join(self.primary))
+ add_line("Task count = %4d" % len(self.dep.pkg_model))
+ add_line("Deps count = %4d" % dep_count)
+ add_line("RDeps count = %4d" % rdep_count)
+ add_line("")
+ self.screen.addstr(self.y_pos,self.x_pos+7,"<Press any key>", curses.color_pair(CURSES_HIGHLIGHT))
+ self.screen.refresh()
+ c = self.screen.getch()
+
+# Show a progress bar
+class ProgressView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(ProgressView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+ def progress(self,title,current,max):
+ if title:
+ self.label = title
+ else:
+ title = self.label
+ if max <= 0: max = 10
+ bar_size = self.width - 7 - len(title)
+ bar_done = int( (float(current)/float(max)) * float(bar_size) )
+ self.screen.addstr(self.base_y,self.base_x, " %s:[%s%s]" % (title,'*' * bar_done,' ' * (bar_size-bar_done)))
+ self.screen.refresh()
+ return(current+1)
+
+ def clear(self):
+ self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
+ self.screen.refresh()
+
+# Implement a task filter bar
+class FilterView(NBox):
+ SEARCH_NOP = 0
+ SEARCH_GO = 1
+ SEARCH_CANCEL = 2
+
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(FilterView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+ self.do_show = False
+ self.filter_str = ""
+
+ def clear(self,enable_show=True):
+ self.filter_str = ""
+
+ def show(self,enable_show=True):
+ self.do_show = enable_show
+ if self.do_show:
+ self.screen.addstr(self.base_y,self.base_x, "[ Filter: %-25s ] '/'=cancel, format='abc' " % self.filter_str[0:25])
+ else:
+ self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
+ self.screen.refresh()
+
+ def show_prompt(self):
+ self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), " ")
+ self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), "")
+
+ # Keys specific to the filter box (start/stop filter keys are in the main loop)
+ def input(self,c,ch):
+ ret = self.SEARCH_GO
+ if c in (curses.KEY_BACKSPACE,CHAR_BS_H):
+ # Backspace
+ if self.filter_str:
+ self.filter_str = self.filter_str[0:-1]
+ self.show()
+ elif ((ch >= 'a') and (ch <= 'z')) or ((ch >= 'A') and (ch <= 'Z')) or ((ch >= '0') and (ch <= '9')) or (ch in (' ','_','.','-')):
+ # isalnum() acts strangely with keypad(True), so use explicit bounds
+ self.filter_str += ch
+ self.show()
+ else:
+ ret = self.SEARCH_NOP
+ return(ret)
+
+
+#################################################
+### The primary dependency windows
+###
+
+# The main list of package tasks
+class PackageView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(PackageView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+ # Find and vertically center a selected task (from filter or from a dependent box)
+ # The 'task_filter_str' can be a full or a partial (filter) task name
+ def find(self,task_filter_str):
+ found = False
+ if not task_filter_str:
+ return(found)
+ for i,task_obj in enumerate(self.task_list):
+ task = task_obj[TASK_NAME]
+ if task.startswith(task_filter_str):
+ self.cursor_on(False)
+ self.cursor_index = i
+
+ # Position selected at vertical center
+ vcenter = self.inside_height // 2
+ if self.cursor_index <= vcenter:
+ self.scroll_offset = 0
+ self.cursor_offset = self.cursor_index
+ elif self.cursor_index >= (len(self.task_list) - vcenter - 1):
+ self.cursor_offset = self.inside_height-1
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+ else:
+ self.cursor_offset = vcenter
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+
+ self.redraw()
+ self.cursor_on(True)
+ found = True
+ break
+ return(found)
+
+# The view of dependent packages
+class PackageDepView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(PackageDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+# The view of reverse-dependent packages
+class PackageReverseDepView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(PackageReverseDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+
+#################################################
+### DepExplorer : The parent frame and object
+###
+
+class DepExplorer(NBox):
+ def __init__(self,screen):
+ title = "Task Dependency Explorer"
+ super(DepExplorer, self).__init__(screen, title,'',0,0,80,23)
+
+ self.screen = screen
+ self.pkg_model = []
+ self.depends_model = []
+ self.dep_sort_map = {}
+ self.bb_sort_map = {}
+ self.filter_str = ''
+ self.filter_prev = 'deadbeef'
+
+ if self.screen:
+ self.help_bar_view = HelpBarView(screen, "Help",'',1,1,79,1)
+ self.help_box_view = HelpBoxView(screen, "Help",'',0,2,40,20,self)
+ self.progress_view = ProgressView(screen, "Progress",'',2,1,76,1)
+ self.filter_view = FilterView(screen, "Filter",'',2,1,76,1)
+ self.package_view = PackageView(screen, "Package",'alpha', 0,2,40,20)
+ self.dep_view = PackageDepView(screen, "Dependencies",'beta',40,2,40,10)
+ self.reverse_view = PackageReverseDepView(screen, "Dependent Tasks",'gamma',40,13,40,9)
+ self.draw_frames()
+
+ # Draw this main window's frame and all sub-windows
+ def draw_frames(self):
+ self.draw_frame()
+ self.package_view.draw_frame()
+ self.dep_view.draw_frame()
+ self.reverse_view.draw_frame()
+ if is_filter:
+ self.filter_view.show(True)
+ self.filter_view.show_prompt()
+ else:
+ self.help_bar_view.show_help(True)
+ self.package_view.redraw()
+ self.dep_view.redraw()
+ self.reverse_view.redraw()
+ self.show_selected(self.package_view.get_selected())
+ line_art_fixup(self)
+
+ # Parse the bitbake dependency event object
+ def parse(self, depgraph):
+ for task in depgraph["tdepends"]:
+ self.pkg_model.insert(0, task)
+ for depend in depgraph["tdepends"][task]:
+ self.depends_model.insert(0, (TYPE_DEP, task, depend))
+ self.depends_model.insert(0, (TYPE_RDEP, depend, task))
+ if self.screen:
+ self.dep_sort_prep()
+
+ # Prepare the dependency sort order keys
+ # This method creates sort keys for each recipe's tasks in
+ # the order of that recipe's internal dependencies
+ # Method (a minimal sketch follows the method body):
+ # Sieve the tasks into dep order in dep_sort_map = {}
+ # (a) Find a task that has no dependencies
+ # Ignore non-recipe-specific tasks
+ # (b) Add it to the sort mapping dict with
+ # key of "<task_group>_<order>"
+ # (c) Remove it as a dependency from the other tasks
+ # (d) Repeat till all tasks are mapped
+ # Use placeholders to ensure each sub-dict is instantiated
+ def dep_sort_prep(self):
+ self.progress_view.progress('DepSort',0,4)
+ # Init the task base entries
+ self.progress_view.progress('DepSort',1,4)
+ dep_table = {}
+ bb_index = 0
+ for task in self.pkg_model:
+ # First define the incoming bitbake sort order
+ self.bb_sort_map[task] = "%04d" % (bb_index)
+ bb_index += 1
+ task_group = task[0:task.find('.')]
+ if task_group not in dep_table:
+ dep_table[task_group] = {}
+ dep_table[task_group]['-'] = {} # Placeholder
+ if task not in dep_table[task_group]:
+ dep_table[task_group][task] = {}
+ dep_table[task_group][task]['-'] = {} # Placeholder
+ # Add the task dependency entries
+ self.progress_view.progress('DepSort',2,4)
+ for task_obj in self.depends_model:
+ if task_obj[DEPENDS_TYPE] != TYPE_DEP:
+ continue
+ task = task_obj[DEPENDS_TASK]
+ task_dep = task_obj[DEPENDS_DEPS]
+ task_group = task[0:task.find('.')]
+ # Only track depends within same group
+ if task_dep.startswith(task_group+'.'):
+ dep_table[task_group][task][task_dep] = 1
+ self.progress_view.progress('DepSort',3,4)
+ for task_group in dep_table:
+ dep_index = 0
+ # Whittle down the tasks of each group
+ this_pass = 1
+ do_loop = True
+ while (len(dep_table[task_group]) > 1) and do_loop:
+ this_pass += 1
+ is_change = False
+ delete_list = []
+ for task in dep_table[task_group]:
+ if '-' == task:
+ continue
+ if 1 == len(dep_table[task_group][task]):
+ is_change = True
+ # No more deps, so collect this task...
+ self.dep_sort_map[task] = "%s_%04d" % (task_group,dep_index)
+ dep_index += 1
+ # ... remove it from other lists as resolved ...
+ for dep_task in dep_table[task_group]:
+ if task in dep_table[task_group][dep_task]:
+ del dep_table[task_group][dep_task][task]
+ # ... and remove it from the task group
+ delete_list.append(task)
+ for task in delete_list:
+ del dep_table[task_group][task]
+ if not is_change:
+ alert("ERROR:DEP_SIEVE_NO_CHANGE:%s" % task_group,self.screen)
+ do_loop = False
+ continue
+ self.progress_view.progress('',4,4)
+ self.progress_view.clear()
+ self.help_bar_view.show_help(True)
+ if len(self.dep_sort_map) != len(self.pkg_model):
+ alert("ErrorDepSort:%d/%d" % (len(self.dep_sort_map),len(self.pkg_model)),self.screen)
+
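+ # Editor's sketch (illustrative only, never executed): the sieve above in
+ # miniature, using hypothetical task names. Tasks whose same-group
+ # dependency set is empty get ascending keys and are discarded from the
+ # other tasks' dependency sets; the real method additionally bails out
+ # when no task becomes ready (i.e. a dependency cycle).
+ #
+ # deps = {"a.do_fetch": set(), "a.do_compile": {"a.do_fetch"}}
+ # order, index = {}, 0
+ # while deps:
+ #     ready = [t for t, d in deps.items() if not d]
+ #     for t in sorted(ready):
+ #         order[t] = "a_%04d" % index
+ #         index += 1
+ #         del deps[t]
+ #         for d in deps.values():
+ #             d.discard(t)
+ # # order == {"a.do_fetch": "a_0000", "a.do_compile": "a_0001"}
+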
+ # Look up a dep sort order key
+ def get_dep_sort(self,key):
+ if key in self.dep_sort_map:
+ return(self.dep_sort_map[key])
+ else:
+ return(key)
+
+ # Look up a bitbake sort order key
+ def get_bb_sort(self,key):
+ if key in self.bb_sort_map:
+ return(self.bb_sort_map[key])
+ else:
+ return(key)
+
+ # Find the selected package in the main frame, update the dependency frames content accordingly
+ def select(self, package_name, only_update_dependents=False):
+ if not package_name:
+ package_name = self.package_view.get_selected()
+ # alert("SELECT:%s:" % package_name,self.screen)
+
+ if self.filter_str != self.filter_prev:
+ self.package_view.cursor_on(False)
+ # Fill the main package task list using the new filter
+ self.package_view.task_list = []
+ for package in self.pkg_model:
+ if self.filter_str:
+ if self.filter_str in package:
+ self.package_view.task_list_append(package,self)
+ else:
+ self.package_view.task_list_append(package,self)
+ self.package_view.sort()
+ self.filter_prev = self.filter_str
+
+ # Old position is lost, assert new position of previous task (if still filtered in)
+ self.package_view.cursor_index = 0
+ self.package_view.cursor_offset = 0
+ self.package_view.scroll_offset = 0
+ self.package_view.redraw()
+ self.package_view.cursor_on(True)
+
+ # Make sure the selected package is in view, with implicit redraw()
+ if (not only_update_dependents):
+ self.package_view.find(package_name)
+ # In case selected name change (i.e. filter removed previous)
+ package_name = self.package_view.get_selected()
+
+ # Filter the package's dependencies into the dependency view
+ self.dep_view.reset()
+ for package_def in self.depends_model:
+ if (package_def[DEPENDS_TYPE] == TYPE_DEP) and (package_def[DEPENDS_TASK] == package_name):
+ self.dep_view.task_list_append(package_def[DEPENDS_DEPS],self)
+ self.dep_view.sort()
+ self.dep_view.redraw()
+ # Filter the package's reverse dependencies into the dependent-tasks view
+ self.reverse_view.reset()
+ for package_def in self.depends_model:
+ if (package_def[DEPENDS_TYPE] == TYPE_RDEP) and (package_def[DEPENDS_TASK] == package_name):
+ self.reverse_view.task_list_append(package_def[DEPENDS_DEPS],self)
+ self.reverse_view.sort()
+ self.reverse_view.redraw()
+ self.show_selected(package_name)
+ self.screen.refresh()
+
+ # The print-to-file method
+ def print_deps(self,whole_group=False):
+ global is_printed
+ # Print the selected deptree(s) to a file
+ if not is_printed:
+ try:
+ # Move any existing file to a backup before the first write
+ if os.path.isfile(print_file_name):
+ os.replace(print_file_name,print_file_backup_name)
+ except Exception as e:
+ alert(e,self.screen)
+ alert('',self.screen)
+ print_list = []
+ selected_task = self.package_view.get_selected()
+ if not selected_task:
+ return
+ if not whole_group:
+ print_list.append(selected_task)
+ else:
+ # Use the presorted task_group order from 'package_view'
+ task_group = selected_task[0:selected_task.find('.')+1]
+ for task_obj in self.package_view.task_list:
+ task = task_obj[TASK_NAME]
+ if task.startswith(task_group):
+ print_list.append(task)
+ with open(print_file_name, "a") as fd:
+ print_max = len(print_list)
+ print_count = 1
+ self.progress_view.progress('Write "%s"' % print_file_name,0,print_max)
+ for task in print_list:
+ print_count = self.progress_view.progress('',print_count,print_max)
+ self.select(task)
+ self.screen.refresh()
+ # Utilize the current print output model
+ if print_model == PRINT_MODEL_1:
+ print("=== Dependendency Snapshot ===",file=fd)
+ print(" = Package =",file=fd)
+ print(' '+task,file=fd)
+ # Fill in the matching dependencies
+ print(" = Dependencies =",file=fd)
+ for task_obj in self.dep_view.task_list:
+ print(' '+ task_obj[TASK_NAME],file=fd)
+ print(" = Dependent Tasks =",file=fd)
+ for task_obj in self.reverse_view.task_list:
+ print(' '+ task_obj[TASK_NAME],file=fd)
+ if print_model == PRINT_MODEL_2:
+ print("=== Dependendency Snapshot ===",file=fd)
+ dep_count = len(self.dep_view.task_list) - 1
+ for i,task_obj in enumerate(self.dep_view.task_list):
+ print('%s%s' % ("Dep =" if (i==dep_count) else " ",task_obj[TASK_NAME]),file=fd)
+ if not self.dep_view.task_list:
+ print('Dep =',file=fd)
+ print("Package=%s" % task,file=fd)
+ for i,task_obj in enumerate(self.reverse_view.task_list):
+ print('%s%s' % ("RDep =" if (i==0) else " ",task_obj[TASK_NAME]),file=fd)
+ if not self.reverse_view.task_list:
+ print('RDep =',file=fd)
+ curses.napms(2000)
+ self.progress_view.clear()
+ self.help_bar_view.show_help(True)
+ print('',file=fd)
+ # Restore display to original selected task
+ self.select(selected_task)
+ is_printed = True
+
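+ # Editor's illustration (hypothetical tasks): PRINT_MODEL_2 emits one
+ # compact block per selected task, schematically:
+ #
+ #        acl.do_unpack
+ # Dep =acl.do_patch
+ # Package=acl.do_configure
+ # RDep =acl.do_compile
+ #        acl.do_install
+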
+#################################################
+### Load bitbake data
+###
+
+def bitbake_load(server, eventHandler, params, dep, curses_off, screen):
+ global bar_len_old
+ bar_len_old = 0
+
+ # Support no screen
+ def progress(msg,count,max):
+ global bar_len_old
+ if screen:
+ dep.progress_view.progress(msg,count,max)
+ else:
+ if msg:
+ if bar_len_old:
+ bar_len_old = 0
+ print("\n")
+ print(f"{msg}: ({count} of {max})")
+ else:
+ bar_len = int((count*40)/max)
+ if bar_len_old != bar_len:
+ print(f"{'*' * (bar_len-bar_len_old)}",end='',flush=True)
+ bar_len_old = bar_len
+ def clear():
+ if screen:
+ dep.progress_view.clear()
+ def clear_curses(screen):
+ if screen:
+ curses_off(screen)
+
+ #
+ # Trigger bitbake "generateDepTreeEvent"
+ #
+
+ cmdline = ''
+ try:
+ params.updateToServer(server, os.environ.copy())
+ params.updateFromServer(server)
+ cmdline = params.parseActions()
+ if not cmdline:
+ clear_curses(screen)
+ print("ERROR: nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
+ return 1,cmdline
+ if 'msg' in cmdline and cmdline['msg']:
+ clear_curses(screen)
+ print('ERROR: ' + cmdline['msg'])
+ return 1,cmdline
+ cmdline = cmdline['action']
+ if not cmdline or cmdline[0] != "generateDotGraph":
+ clear_curses(screen)
+ print("ERROR: This UI requires the -g option")
+ return 1,cmdline
+ ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
+ if error:
+ clear_curses(screen)
+ print("ERROR: running command '%s': %s" % (cmdline, error))
+ return 1,cmdline
+ elif not ret:
+ clear_curses(screen)
+ print("ERROR: running command '%s': returned %s" % (cmdline, ret))
+ return 1,cmdline
+ except client.Fault as x:
+ clear_curses(screen)
+ print("ERROR: XMLRPC Fault getting commandline:\n %s" % x)
+ return 1,cmdline
+ except Exception as e:
+ clear_curses(screen)
+ print("ERROR: in startup:\n %s" % traceback.format_exc())
+ return 1,cmdline
+
+ #
+ # Receive data from bitbake
+ #
+
+ progress_total = 0
+ load_bitbake = True
+ shutdown = 0 # count of KeyboardInterrupts seen so far
+ quit = False
+ try:
+ while load_bitbake:
+ try:
+ event = eventHandler.waitEvent(0.25)
+ if quit:
+ _, error = server.runCommand(["stateForceShutdown"])
+ clear_curses(screen)
+ if error:
+ print('Unable to cleanly stop: %s' % error)
+ break
+
+ if event is None:
+ continue
+
+ if isinstance(event, bb.event.CacheLoadStarted):
+ progress_total = event.total
+ progress('Loading Cache',0,progress_total)
+ continue
+
+ if isinstance(event, bb.event.CacheLoadProgress):
+ x = event.current
+ progress('',x,progress_total)
+ continue
+
+ if isinstance(event, bb.event.CacheLoadCompleted):
+ clear()
+ progress('Bitbake... ',1,2)
+ continue
+
+ if isinstance(event, bb.event.ParseStarted):
+ progress_total = event.total
+ progress('Processing recipes',0,progress_total)
+ if progress_total == 0:
+ continue
+
+ if isinstance(event, bb.event.ParseProgress):
+ x = event.current
+ progress('',x,progress_total)
+ continue
+
+ if isinstance(event, bb.event.ParseCompleted):
+ progress('Generating dependency tree',0,3)
+ continue
+
+ if isinstance(event, bb.event.DepTreeGenerated):
+ progress('Generating dependency tree',1,3)
+ dep.parse(event._depgraph)
+ progress('Generating dependency tree',2,3)
+
+ if isinstance(event, bb.command.CommandCompleted):
+ load_bitbake = False
+ progress('Generating dependency tree',3,3)
+ clear()
+ if screen:
+ dep.help_bar_view.show_help(True)
+ continue
+
+ if isinstance(event, bb.event.NoProvider):
+ clear_curses(screen)
+ print('ERROR: %s' % event)
+
+ _, error = server.runCommand(["stateShutdown"])
+ if error:
+ print('ERROR: Unable to cleanly shutdown: %s' % error)
+ return 1,cmdline
+
+ if isinstance(event, bb.command.CommandFailed):
+ clear_curses(screen)
+ print('ERROR: ' + str(event))
+ return event.exitcode,cmdline
+
+ if isinstance(event, bb.command.CommandExit):
+ clear_curses(screen)
+ return event.exitcode,cmdline
+
+ if isinstance(event, bb.cooker.CookerExit):
+ break
+
+ continue
+ except EnvironmentError as ioerror:
+ # ignore interrupted io
+ if ioerror.args[0] == 4:
+ pass
+ except KeyboardInterrupt:
+ if shutdown == 2:
+ clear_curses(screen)
+ print("\nThird Keyboard Interrupt, exit.\n")
+ break
+ if shutdown == 1:
+ clear_curses(screen)
+ print("\nSecond Keyboard Interrupt, stopping...\n")
+ _, error = server.runCommand(["stateForceShutdown"])
+ if error:
+ print('Unable to cleanly stop: %s' % error)
+ if shutdown == 0:
+ clear_curses(screen)
+ print("\nKeyboard Interrupt, closing down...\n")
+ _, error = server.runCommand(["stateShutdown"])
+ if error:
+ print('Unable to cleanly shutdown: %s' % error)
+ shutdown = shutdown + 1
+ pass
+ except Exception as e:
+ # Safe exit on error
+ clear_curses(screen)
+ print("Exception : %s" % e)
+ print("Exception in startup:\n %s" % traceback.format_exc())
+
+ return 0,cmdline
+
+#################################################
+### main
+###
+
+SCREEN_COL_MIN = 83
+SCREEN_ROW_MIN = 26
+
+def main(server, eventHandler, params):
+ global verbose
+ global sort_model
+ global print_model
+ global is_printed
+ global is_filter
+ global screen_too_small
+
+ shutdown = 0
+ screen_too_small = False
+ quit = False
+
+ # Unit test with no terminal?
+ if unit_test_noterm:
+ # Load bitbake, test that there is valid dependency data, then exit
+ screen = None
+ print("* UNIT TEST:START")
+ dep = DepExplorer(screen)
+ print("* UNIT TEST:BITBAKE FETCH")
+ ret,cmdline = bitbake_load(server, eventHandler, params, dep, None, screen)
+ if ret:
+ print("* UNIT TEST: BITBAKE FAILED")
+ return ret
+ # Test the acquired dependency data
+ quilt_native_deps = 0
+ quilt_native_rdeps = 0
+ quilt_deps = 0
+ quilt_rdeps = 0
+ for i,task_obj in enumerate(dep.depends_model):
+ if TYPE_DEP == task_obj[0]:
+ task = task_obj[1]
+ if task.startswith('quilt-native'):
+ quilt_native_deps += 1
+ elif task.startswith('quilt'):
+ quilt_deps += 1
+ elif TYPE_RDEP == task_obj[0]:
+ task = task_obj[1]
+ if task.startswith('quilt-native'):
+ quilt_native_rdeps += 1
+ elif task.startswith('quilt'):
+ quilt_rdeps += 1
+ # Print results
+ failed = False
+ if 0 < len(dep.depends_model):
+ print(f"Pass:Bitbake dependency count = {len(dep.depends_model)}")
+ else:
+ failed = True
+ print(f"FAIL:Bitbake dependency count = 0")
+ if quilt_native_deps:
+ print(f"Pass:Quilt-native depends count = {quilt_native_deps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt-native depends count = 0")
+ if quilt_native_rdeps:
+ print(f"Pass:Quilt-native rdepends count = {quilt_native_rdeps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt-native rdepends count = 0")
+ if quilt_deps:
+ print(f"Pass:Quilt depends count = {quilt_deps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt depends count = 0")
+ if quilt_rdeps:
+ print(f"Pass:Quilt rdepends count = {quilt_rdeps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt rdepends count = 0")
+ print("* UNIT TEST:STOP")
+ return failed
+
+ # Helper method to dynamically detect when the parent window is too small
+ def check_screen_size(dep, active_package):
+ global screen_too_small
+ rows, cols = screen.getmaxyx()
+ if (rows >= SCREEN_ROW_MIN) and (cols >= SCREEN_COL_MIN):
+ if screen_too_small:
+ # Now big enough, remove error message and redraw screen
+ dep.draw_frames()
+ active_package.cursor_on(True)
+ screen_too_small = False
+ return True
+ # Test on App init
+ if not dep:
+ # Do not start this app if screen not big enough
+ curses.endwin()
+ print("")
+ print("ERROR(Taskexp_cli): Mininal screen size is %dx%d" % (SCREEN_COL_MIN,SCREEN_ROW_MIN))
+ print("Current screen is Cols=%s,Rows=%d" % (cols,rows))
+ return False
+ # First time window too small
+ if not screen_too_small:
+ active_package.cursor_on(False)
+ dep.screen.addstr(0,2,'[BIGGER WINDOW PLEASE]', curses.color_pair(CURSES_WARNING) | curses.A_BLINK)
+ screen_too_small = True
+ return False
+
+ # Helper method to turn off curses mode
+ def curses_off(screen):
+ if not screen: return
+ # Safe error exit
+ screen.keypad(False)
+ curses.echo()
+ curses.curs_set(1)
+ curses.endwin()
+
+ if unit_test_results:
+ print('\nUnit Test Results:')
+ for line in unit_test_results:
+ print(" %s" % line)
+
+ #
+ # Initialize the ncurse environment
+ #
+
+ screen = curses.initscr()
+ try:
+ if not check_screen_size(None, None):
+ exit(1)
+ try:
+ curses.start_color()
+ curses.use_default_colors()
+ curses.init_pair(0xFF, curses.COLOR_BLACK, curses.COLOR_WHITE)
+ curses.init_pair(CURSES_NORMAL, curses.COLOR_WHITE, curses.COLOR_BLACK)
+ curses.init_pair(CURSES_HIGHLIGHT, curses.COLOR_WHITE, curses.COLOR_BLUE)
+ curses.init_pair(CURSES_WARNING, curses.COLOR_WHITE, curses.COLOR_RED)
+ except:
+ curses.endwin()
+ print("")
+ print("ERROR(Taskexp_cli): Requires 256 colors. Please use this or the equivalent:")
+ print(" $ export TERM='xterm-256color'")
+ exit(1)
+
+ screen.keypad(True)
+ curses.noecho()
+ curses.curs_set(0)
+ screen.refresh()
+ except Exception as e:
+ # Safe error exit
+ curses_off(screen)
+ print("Exception : %s" % e)
+ print("Exception in startup:\n %s" % traceback.format_exc())
+ exit(1)
+
+ try:
+ #
+ # Instantiate the presentation layers
+ #
+
+ dep = DepExplorer(screen)
+
+ #
+ # Prepare bitbake
+ #
+
+ # Fetch bitbake dependency data
+ ret,cmdline = bitbake_load(server, eventHandler, params, dep, curses_off, screen)
+ if ret: return ret
+
+ #
+ # Preset the views
+ #
+
+ # Cmdline example = ['generateDotGraph', ['acl', 'zlib'], 'build']
+ primary_packages = cmdline[1]
+ dep.package_view.set_primary(primary_packages)
+ dep.dep_view.set_primary(primary_packages)
+ dep.reverse_view.set_primary(primary_packages)
+ dep.help_box_view.set_primary(primary_packages)
+ dep.help_bar_view.show_help(True)
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ dep.select(primary_packages[0]+'.')
+ if unit_test:
+ alert('UNIT_TEST',screen)
+
+ # Helper method to start/stop the filter feature
+ def filter_mode(new_filter_status):
+ global is_filter
+ if is_filter == new_filter_status:
+ # Ignore no changes
+ return
+ if not new_filter_status:
+ # Turn off
+ curses.curs_set(0)
+ #active_package.cursor_on(False)
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ is_filter = False
+ dep.help_bar_view.show_help(True)
+ dep.filter_str = ''
+ dep.select('')
+ else:
+ # Turn on
+ curses.curs_set(1)
+ dep.help_bar_view.show_help(False)
+ dep.filter_view.clear()
+ dep.filter_view.show(True)
+ dep.filter_view.show_prompt()
+ is_filter = True
+
+ #
+ # Main user loop
+ #
+
+ while not quit:
+ if is_filter:
+ dep.filter_view.show_prompt()
+ if unit_test:
+ c = unit_test_action(active_package)
+ else:
+ c = screen.getch()
+ ch = chr(c)
+
+ # Do not draw if window now too small
+ if not check_screen_size(dep,active_package):
+ continue
+
+ if verbose:
+ if c == CHAR_RETURN:
+ screen.addstr(0, 4, "|%3d,CR |" % (c))
+ else:
+ screen.addstr(0, 4, "|%3d,%3s|" % (c,chr(c)))
+
+ # Pre-map alternate filter close keys
+ if is_filter and (c == CHAR_ESCAPE):
+ # Alternate exit from filter
+ ch = '/'
+ c = ord(ch)
+
+ # Filter and non-filter mode command keys
+ # https://docs.python.org/3/library/curses.html
+ if c in (curses.KEY_UP,CHAR_UP):
+ active_package.line_up()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif c in (curses.KEY_DOWN,CHAR_DOWN):
+ active_package.line_down()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif curses.KEY_PPAGE == c:
+ active_package.page_up()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif curses.KEY_NPAGE == c:
+ active_package.page_down()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif CHAR_TAB == c:
+ # Tab between boxes
+ active_package.cursor_on(False)
+ if active_package == dep.package_view:
+ active_package = dep.dep_view
+ elif active_package == dep.dep_view:
+ active_package = dep.reverse_view
+ else:
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ elif curses.KEY_BTAB == c:
+ # Shift-Tab reverse between boxes
+ active_package.cursor_on(False)
+ if active_package == dep.package_view:
+ active_package = dep.reverse_view
+ elif active_package == dep.reverse_view:
+ active_package = dep.dep_view
+ else:
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ elif (CHAR_RETURN == c):
+ # CR to select
+ selected = active_package.get_selected()
+ if selected:
+ active_package.cursor_on(False)
+ active_package = dep.package_view
+ filter_mode(False)
+ dep.select(selected)
+ else:
+ filter_mode(False)
+ dep.select(primary_packages[0]+'.')
+
+ elif '/' == ch: # Enter/exit dep.filter_view
+ if is_filter:
+ filter_mode(False)
+ else:
+ filter_mode(True)
+ elif is_filter:
+ # If in filter mode, re-direct all these other keys to the filter box
+ dep.filter_view.input(c,ch)
+ dep.filter_str = dep.filter_view.filter_str
+ dep.select('')
+
+ # Non-filter mode command keys
+ elif 'p' == ch:
+ dep.print_deps(whole_group=False)
+ elif 'P' == ch:
+ dep.print_deps(whole_group=True)
+ elif 'w' == ch:
+ # Toggle the print model
+ if print_model == PRINT_MODEL_1:
+ print_model = PRINT_MODEL_2
+ else:
+ print_model = PRINT_MODEL_1
+ elif 's' == ch:
+ # Toggle the sort model
+ if sort_model == SORT_DEPS:
+ sort_model = SORT_ALPHA
+ elif sort_model == SORT_ALPHA:
+ if SORT_BITBAKE_ENABLE:
+ sort_model = SORT_BITBAKE
+ else:
+ sort_model = SORT_DEPS
+ else:
+ sort_model = SORT_DEPS
+ active_package.cursor_on(False)
+ current_task = active_package.get_selected()
+ dep.package_view.sort()
+ dep.dep_view.sort()
+ dep.reverse_view.sort()
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ dep.select(current_task)
+ # Announce the new sort model
+ alert("SORT=%s" % ("ALPHA" if (sort_model == SORT_ALPHA) else "DEPS"),screen)
+ alert('',screen)
+
+ elif 'q' == ch:
+ quit = True
+ elif ch in ('h','?'):
+ dep.help_box_view.show_help(True)
+ dep.select(active_package.get_selected())
+
+ #
+ # Debugging commands
+ #
+
+ elif 'V' == ch:
+ verbose = not verbose
+ alert('Verbose=%s' % str(verbose),screen)
+ alert('',screen)
+ elif 'R' == ch:
+ screen.refresh()
+ elif 'B' == ch:
+ # Progress bar unit test
+                dep.progress_view.progress('Test', 0, 40)
+                for i in range(10, 50, 10):
+                    curses.napms(1000)
+                    dep.progress_view.progress('', i, 40)
+                curses.napms(1000)
+                dep.progress_view.clear()
+ dep.help_bar_view.show_help(True)
+ elif 'Q' == ch:
+ # Simulated error
+ curses_off(screen)
+ print('ERROR: simulated error exit')
+ return 1
+
+ # Safe exit
+ curses_off(screen)
+ except Exception as e:
+ # Safe exit on error
+ curses_off(screen)
+ print("Exception : %s" % e)
+ print("Exception in startup:\n %s" % traceback.format_exc())
+
+ # Reminder to pick up your printed results
+ if is_printed:
+ print("")
+ print("You have output ready!")
+ print(" * Your printed dependency file is: %s" % print_file_name)
+ print(" * Your previous results saved in: %s" % print_file_backup_name)
+ print("")
diff --git a/poky/bitbake/lib/bs4/tests/test_tree.py b/poky/bitbake/lib/bs4/tests/test_tree.py
index 8e5c66426e..cf0f1abe0c 100644
--- a/poky/bitbake/lib/bs4/tests/test_tree.py
+++ b/poky/bitbake/lib/bs4/tests/test_tree.py
@@ -585,7 +585,7 @@ class SiblingTest(TreeTest):
</html>'''
# All that whitespace looks good but makes the tests more
# difficult. Get rid of it.
- markup = re.compile("\n\s*").sub("", markup)
+ markup = re.compile(r"\n\s*").sub("", markup)
self.tree = self.soup(markup)
diff --git a/poky/bitbake/lib/hashserv/client.py b/poky/bitbake/lib/hashserv/client.py
index 35a97687fb..b269879ecf 100644
--- a/poky/bitbake/lib/hashserv/client.py
+++ b/poky/bitbake/lib/hashserv/client.py
@@ -16,6 +16,7 @@ logger = logging.getLogger("hashserv.client")
class AsyncClient(bb.asyncrpc.AsyncClient):
MODE_NORMAL = 0
MODE_GET_STREAM = 1
+ MODE_EXIST_STREAM = 2
def __init__(self, username=None, password=None):
super().__init__("OEHASHEQUIV", "1.1", logger)
@@ -49,19 +50,36 @@ class AsyncClient(bb.asyncrpc.AsyncClient):
await self.socket.send("END")
return await self.socket.recv()
- if new_mode == self.MODE_NORMAL and self.mode == self.MODE_GET_STREAM:
+ async def normal_to_stream(command):
+ r = await self.invoke({command: None})
+ if r != "ok":
+ raise ConnectionError(
+ f"Unable to transition to stream mode: Bad response from server {r!r}"
+ )
+
+ self.logger.debug("Mode is now %s", command)
+
+ if new_mode == self.mode:
+ return
+
+ self.logger.debug("Transitioning mode %s -> %s", self.mode, new_mode)
+
+ # Always transition to normal mode before switching to any other mode
+ if self.mode != self.MODE_NORMAL:
r = await self._send_wrapper(stream_to_normal)
if r != "ok":
self.check_invoke_error(r)
- raise ConnectionError("Unable to transition to normal mode: Bad response from server %r" % r)
- elif new_mode == self.MODE_GET_STREAM and self.mode == self.MODE_NORMAL:
- r = await self.invoke({"get-stream": None})
- if r != "ok":
- raise ConnectionError("Unable to transition to stream mode: Bad response from server %r" % r)
- elif new_mode != self.mode:
- raise Exception(
- "Undefined mode transition %r -> %r" % (self.mode, new_mode)
- )
+ raise ConnectionError(
+ f"Unable to transition to normal mode: Bad response from server {r!r}"
+ )
+ self.logger.debug("Mode is now normal")
+
+ if new_mode == self.MODE_GET_STREAM:
+ await normal_to_stream("get-stream")
+ elif new_mode == self.MODE_EXIST_STREAM:
+ await normal_to_stream("exists-stream")
+ elif new_mode != self.MODE_NORMAL:
+ raise Exception("Undefined mode transition {self.mode!r} -> {new_mode!r}")
self.mode = new_mode
@@ -95,6 +113,11 @@ class AsyncClient(bb.asyncrpc.AsyncClient):
{"get": {"taskhash": taskhash, "method": method, "all": all_properties}}
)
+ async def unihash_exists(self, unihash):
+ await self._set_mode(self.MODE_EXIST_STREAM)
+ r = await self.send_stream(unihash)
+ return r == "true"
+
async def get_outhash(self, method, outhash, taskhash, with_unihash=True):
await self._set_mode(self.MODE_NORMAL)
return await self.invoke(
@@ -194,6 +217,34 @@ class AsyncClient(bb.asyncrpc.AsyncClient):
await self._set_mode(self.MODE_NORMAL)
return (await self.invoke({"get-db-query-columns": {}}))["columns"]
+ async def gc_status(self):
+ await self._set_mode(self.MODE_NORMAL)
+ return await self.invoke({"gc-status": {}})
+
+ async def gc_mark(self, mark, where):
+ """
+ Starts a new garbage collection operation identified by "mark". If
+ garbage collection is already in progress with "mark", the collection
+ is continued.
+
+ All unihash entries that match the "where" clause are marked to be
+ kept. In addition, any new entries added to the database after this
+        command will be automatically marked with "mark".
+ """
+ await self._set_mode(self.MODE_NORMAL)
+ return await self.invoke({"gc-mark": {"mark": mark, "where": where}})
+
+ async def gc_sweep(self, mark):
+ """
+ Finishes garbage collection for "mark". All unihash entries that have
+ not been marked will be deleted.
+
+        It is recommended to clean unused outhash entries after running this
+        command, to clean up any dangling outhashes.
+ """
+ await self._set_mode(self.MODE_NORMAL)
+ return await self.invoke({"gc-sweep": {"mark": mark}})
+
class Client(bb.asyncrpc.Client):
def __init__(self, username=None, password=None):
@@ -208,6 +259,7 @@ class Client(bb.asyncrpc.Client):
"report_unihash",
"report_unihash_equiv",
"get_taskhash",
+ "unihash_exists",
"get_outhash",
"get_stats",
"reset_stats",
@@ -224,7 +276,90 @@ class Client(bb.asyncrpc.Client):
"become_user",
"get_db_usage",
"get_db_query_columns",
+ "gc_status",
+ "gc_mark",
+ "gc_sweep",
)
def _get_async_client(self):
return AsyncClient(self.username, self.password)
+
+
+class ClientPool(bb.asyncrpc.ClientPool):
+ def __init__(
+ self,
+ address,
+ max_clients,
+ *,
+ username=None,
+ password=None,
+ become=None,
+ ):
+ super().__init__(max_clients)
+ self.address = address
+ self.username = username
+ self.password = password
+ self.become = become
+
+ async def _new_client(self):
+ client = await create_async_client(
+ self.address,
+ username=self.username,
+ password=self.password,
+ )
+ if self.become:
+ await client.become_user(self.become)
+ return client
+
+ def _run_key_tasks(self, queries, call):
+ results = {key: None for key in queries.keys()}
+
+ def make_task(key, args):
+ async def task(client):
+ nonlocal results
+ unihash = await call(client, args)
+ results[key] = unihash
+
+ return task
+
+ def gen_tasks():
+ for key, args in queries.items():
+ yield make_task(key, args)
+
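+        # run_tasks() (inherited from bb.asyncrpc.ClientPool) drives the
+        # generated coroutines across the pooled clients; a failed query
+        # leaves its key at the initial None value, as the docstrings of
+        # the callers below note.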
+ self.run_tasks(gen_tasks())
+ return results
+
+ def get_unihashes(self, queries):
+ """
+ Query multiple unihashes in parallel.
+
+        The queries argument is a dictionary with arbitrary keys. Each value
+        must be a tuple of (method, taskhash).
+
+        Returns a dictionary with a corresponding key for each input key, where
+        the value is the queried unihash (which may be None if the query
+        failed).
+ """
+
+ async def call(client, args):
+ method, taskhash = args
+ return await client.get_unihash(method, taskhash)
+
+ return self._run_key_tasks(queries, call)
+
+ def unihashes_exist(self, queries):
+ """
+        Check whether multiple unihashes exist, in parallel.
+
+        The queries argument is a dictionary with arbitrary keys. Each value
+        must be a unihash.
+
+        Returns a dictionary with a corresponding key for each input key, where
+        the value is True or False depending on whether the unihash is known to
+        the server (or None if there was a failure).
+ """
+
+ async def call(client, unihash):
+ return await client.unihash_exists(unihash)
+
+ return self._run_key_tasks(queries, call)
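+
+
+# Illustrative ClientPool usage (an informal sketch; the address, method
+# name and hashes below are made up for the example):
+#
+#   with ClientPool("unix:///tmp/hashserv.sock", max_clients=4) as pool:
+#       unihashes = pool.get_unihashes({0: ("fake_method", "aabbccdd")})
+#       present = pool.unihashes_exist({0: "aabbccdd"})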
diff --git a/poky/bitbake/lib/hashserv/server.py b/poky/bitbake/lib/hashserv/server.py
index a86507830e..68f64f983b 100644
--- a/poky/bitbake/lib/hashserv/server.py
+++ b/poky/bitbake/lib/hashserv/server.py
@@ -199,7 +199,7 @@ def permissions(*permissions, allow_anon=True, allow_self_service=False):
if not self.user_has_permissions(*permissions, allow_anon=allow_anon):
if not self.user:
username = "Anonymous user"
- user_perms = self.anon_perms
+ user_perms = self.server.anon_perms
else:
username = self.user.username
user_perms = self.user.permissions
@@ -223,31 +223,18 @@ def permissions(*permissions, allow_anon=True, allow_self_service=False):
class ServerClient(bb.asyncrpc.AsyncServerConnection):
- def __init__(
- self,
- socket,
- db_engine,
- request_stats,
- backfill_queue,
- upstream,
- read_only,
- anon_perms,
- ):
- super().__init__(socket, "OEHASHEQUIV", logger)
- self.db_engine = db_engine
- self.request_stats = request_stats
+ def __init__(self, socket, server):
+ super().__init__(socket, "OEHASHEQUIV", server.logger)
+ self.server = server
self.max_chunk = bb.asyncrpc.DEFAULT_MAX_CHUNK
- self.backfill_queue = backfill_queue
- self.upstream = upstream
- self.read_only = read_only
self.user = None
- self.anon_perms = anon_perms
self.handlers.update(
{
"get": self.handle_get,
"get-outhash": self.handle_get_outhash,
"get-stream": self.handle_get_stream,
+ "exists-stream": self.handle_exists_stream,
"get-stats": self.handle_get_stats,
"get-db-usage": self.handle_get_db_usage,
"get-db-query-columns": self.handle_get_db_query_columns,
@@ -261,13 +248,16 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
}
)
- if not read_only:
+ if not self.server.read_only:
self.handlers.update(
{
"report-equiv": self.handle_equivreport,
"reset-stats": self.handle_reset_stats,
"backfill-wait": self.handle_backfill_wait,
"remove": self.handle_remove,
+ "gc-mark": self.handle_gc_mark,
+ "gc-sweep": self.handle_gc_sweep,
+ "gc-status": self.handle_gc_status,
"clean-unused": self.handle_clean_unused,
"refresh-token": self.handle_refresh_token,
"set-user-perms": self.handle_set_perms,
@@ -282,10 +272,10 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
def user_has_permissions(self, *permissions, allow_anon=True):
permissions = set(permissions)
if allow_anon:
- if ALL_PERM in self.anon_perms:
+ if ALL_PERM in self.server.anon_perms:
return True
- if not permissions - self.anon_perms:
+ if not permissions - self.server.anon_perms:
return True
if self.user is None:
@@ -303,10 +293,10 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
return self.proto_version > (1, 0) and self.proto_version <= (1, 1)
async def process_requests(self):
- async with self.db_engine.connect(self.logger) as db:
+ async with self.server.db_engine.connect(self.logger) as db:
self.db = db
- if self.upstream is not None:
- self.upstream_client = await create_async_client(self.upstream)
+ if self.server.upstream is not None:
+ self.upstream_client = await create_async_client(self.server.upstream)
else:
self.upstream_client = None
@@ -323,7 +313,7 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
if "stream" in k:
return await self.handlers[k](msg[k])
else:
- with self.request_stats.start_sample() as self.request_sample, self.request_sample.measure():
+ with self.server.request_stats.start_sample() as self.request_sample, self.request_sample.measure():
return await self.handlers[k](msg[k])
raise bb.asyncrpc.ClientError("Unrecognized command %r" % msg)
@@ -388,8 +378,7 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"])
await self.db.insert_outhash(data)
- @permissions(READ_PERM)
- async def handle_get_stream(self, request):
+ async def _stream_handler(self, handler):
await self.socket.send_message("ok")
while True:
@@ -404,42 +393,57 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
# possible (which is why the request sample is handled manually
# instead of using 'with', and also why logging statements are
# commented out.
- self.request_sample = self.request_stats.start_sample()
+ self.request_sample = self.server.request_stats.start_sample()
request_measure = self.request_sample.measure()
request_measure.start()
if l == "END":
break
- (method, taskhash) = l.split()
- # self.logger.debug('Looking up %s %s' % (method, taskhash))
- row = await self.db.get_equivalent(method, taskhash)
-
- if row is not None:
- msg = row["unihash"]
- # self.logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
- elif self.upstream_client is not None:
- upstream = await self.upstream_client.get_unihash(method, taskhash)
- if upstream:
- msg = upstream
- else:
- msg = ""
- else:
- msg = ""
-
+ msg = await handler(l)
await self.socket.send(msg)
finally:
request_measure.end()
self.request_sample.end()
- # Post to the backfill queue after writing the result to minimize
- # the turn around time on a request
- if upstream is not None:
- await self.backfill_queue.put((method, taskhash))
-
await self.socket.send("ok")
return self.NO_RESPONSE
+ @permissions(READ_PERM)
+ async def handle_get_stream(self, request):
+ async def handler(l):
+ (method, taskhash) = l.split()
+ # self.logger.debug('Looking up %s %s' % (method, taskhash))
+ row = await self.db.get_equivalent(method, taskhash)
+
+ if row is not None:
+ # self.logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
+ return row["unihash"]
+
+ if self.upstream_client is not None:
+ upstream = await self.upstream_client.get_unihash(method, taskhash)
+ if upstream:
+ await self.server.backfill_queue.put((method, taskhash))
+ return upstream
+
+ return ""
+
+ return await self._stream_handler(handler)
+
+ @permissions(READ_PERM)
+ async def handle_exists_stream(self, request):
+ async def handler(l):
+ if await self.db.unihash_exists(l):
+ return "true"
+
+ if self.upstream_client is not None:
+ if await self.upstream_client.unihash_exists(l):
+ return "true"
+
+ return "false"
+
+ return await self._stream_handler(handler)
+
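+    # Wire sketch (informal, inferred from _stream_handler above): after the
+    # initial "ok" the client sends one request per line ("<method> <taskhash>"
+    # for get-stream, a bare "<unihash>" for exists-stream), the server answers
+    # each with one line, and a literal "END" terminates the stream.
+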
async def report_readonly(self, data):
method = data["method"]
outhash = data["outhash"]
@@ -461,7 +465,7 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
# report is made inside the function
@permissions(READ_PERM)
async def handle_report(self, data):
- if self.read_only or not self.user_has_permissions(REPORT_PERM):
+ if self.server.read_only or not self.user_has_permissions(REPORT_PERM):
return await self.report_readonly(data)
outhash_data = {
@@ -538,24 +542,24 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
@permissions(READ_PERM)
async def handle_get_stats(self, request):
return {
- "requests": self.request_stats.todict(),
+ "requests": self.server.request_stats.todict(),
}
@permissions(DB_ADMIN_PERM)
async def handle_reset_stats(self, request):
d = {
- "requests": self.request_stats.todict(),
+ "requests": self.server.request_stats.todict(),
}
- self.request_stats.reset()
+ self.server.request_stats.reset()
return d
@permissions(READ_PERM)
async def handle_backfill_wait(self, request):
d = {
- "tasks": self.backfill_queue.qsize(),
+ "tasks": self.server.backfill_queue.qsize(),
}
- await self.backfill_queue.join()
+ await self.server.backfill_queue.join()
return d
@permissions(DB_ADMIN_PERM)
@@ -567,6 +571,46 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
return {"count": await self.db.remove(condition)}
@permissions(DB_ADMIN_PERM)
+ async def handle_gc_mark(self, request):
+ condition = request["where"]
+ mark = request["mark"]
+
+ if not isinstance(condition, dict):
+ raise TypeError("Bad condition type %s" % type(condition))
+
+ if not isinstance(mark, str):
+ raise TypeError("Bad mark type %s" % type(mark))
+
+ return {"count": await self.db.gc_mark(mark, condition)}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_gc_sweep(self, request):
+ mark = request["mark"]
+
+ if not isinstance(mark, str):
+ raise TypeError("Bad mark type %s" % type(mark))
+
+ current_mark = await self.db.get_current_gc_mark()
+
+ if not current_mark or mark != current_mark:
+ raise bb.asyncrpc.InvokeError(
+ f"'{mark}' is not the current mark. Refusing to sweep"
+ )
+
+ count = await self.db.gc_sweep()
+
+ return {"count": count}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_gc_status(self, request):
+ (keep_rows, remove_rows, current_mark) = await self.db.gc_status()
+ return {
+ "keep": keep_rows,
+ "remove": remove_rows,
+ "mark": current_mark,
+ }
+
+ @permissions(DB_ADMIN_PERM)
async def handle_clean_unused(self, request):
max_age = request["max_age_seconds"]
oldest = datetime.now() - timedelta(seconds=-max_age)
@@ -779,15 +823,7 @@ class Server(bb.asyncrpc.AsyncServer):
)
def accept_client(self, socket):
- return ServerClient(
- socket,
- self.db_engine,
- self.request_stats,
- self.backfill_queue,
- self.upstream,
- self.read_only,
- self.anon_perms,
- )
+ return ServerClient(socket, self)
async def create_admin_user(self):
admin_permissions = (ALL_PERM,)
diff --git a/poky/bitbake/lib/hashserv/sqlalchemy.py b/poky/bitbake/lib/hashserv/sqlalchemy.py
index cee04bffb0..fc3ae3d339 100644
--- a/poky/bitbake/lib/hashserv/sqlalchemy.py
+++ b/poky/bitbake/lib/hashserv/sqlalchemy.py
@@ -28,24 +28,28 @@ from sqlalchemy import (
delete,
update,
func,
+ inspect,
)
import sqlalchemy.engine
from sqlalchemy.orm import declarative_base
from sqlalchemy.exc import IntegrityError
+from sqlalchemy.dialects.postgresql import insert as postgres_insert
Base = declarative_base()
-class UnihashesV2(Base):
- __tablename__ = "unihashes_v2"
+class UnihashesV3(Base):
+ __tablename__ = "unihashes_v3"
id = Column(Integer, primary_key=True, autoincrement=True)
method = Column(Text, nullable=False)
taskhash = Column(Text, nullable=False)
unihash = Column(Text, nullable=False)
+ gc_mark = Column(Text, nullable=False)
__table_args__ = (
UniqueConstraint("method", "taskhash"),
- Index("taskhash_lookup_v3", "method", "taskhash"),
+ Index("taskhash_lookup_v4", "method", "taskhash"),
+ Index("unihash_lookup_v1", "unihash"),
)
@@ -79,6 +83,36 @@ class Users(Base):
__table_args__ = (UniqueConstraint("username"),)
+class Config(Base):
+ __tablename__ = "config"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ name = Column(Text, nullable=False)
+ value = Column(Text)
+ __table_args__ = (
+ UniqueConstraint("name"),
+ Index("config_lookup", "name"),
+ )
+
+
+#
+# Old table versions
+#
+DeprecatedBase = declarative_base()
+
+
+class UnihashesV2(DeprecatedBase):
+ __tablename__ = "unihashes_v2"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ method = Column(Text, nullable=False)
+ taskhash = Column(Text, nullable=False)
+ unihash = Column(Text, nullable=False)
+
+ __table_args__ = (
+ UniqueConstraint("method", "taskhash"),
+ Index("taskhash_lookup_v3", "method", "taskhash"),
+ )
+
+
class DatabaseEngine(object):
def __init__(self, url, username=None, password=None):
self.logger = logging.getLogger("hashserv.sqlalchemy")
@@ -91,6 +125,9 @@ class DatabaseEngine(object):
self.url = self.url.set(password=password)
async def create(self):
+ def check_table_exists(conn, name):
+ return inspect(conn).has_table(name)
+
self.logger.info("Using database %s", self.url)
self.engine = create_async_engine(self.url, poolclass=NullPool)
@@ -99,6 +136,24 @@ class DatabaseEngine(object):
self.logger.info("Creating tables...")
await conn.run_sync(Base.metadata.create_all)
+ if await conn.run_sync(check_table_exists, UnihashesV2.__tablename__):
+ self.logger.info("Upgrading Unihashes V2 -> V3...")
+ statement = insert(UnihashesV3).from_select(
+ ["id", "method", "unihash", "taskhash", "gc_mark"],
+ select(
+ UnihashesV2.id,
+ UnihashesV2.method,
+ UnihashesV2.unihash,
+ UnihashesV2.taskhash,
+ literal("").label("gc_mark"),
+ ),
+ )
+ self.logger.debug("%s", statement)
+ await conn.execute(statement)
+
+ await conn.run_sync(Base.metadata.drop_all, [UnihashesV2.__table__])
+ self.logger.info("Upgrade complete")
+
def connect(self, logger):
return Database(self.engine, logger)
@@ -118,6 +173,15 @@ def map_user(row):
)
+def _make_condition_statement(table, condition):
+ where = {}
+ for c in table.__table__.columns:
+ if c.key in condition and condition[c.key] is not None:
+ where[c] = condition[c.key]
+
+ return [(k == v) for k, v in where.items()]
+
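+# For example (illustrative), a condition of {"unihash": "aabbccdd", "owner": None}
+# against UnihashesV3 yields [UnihashesV3.unihash == "aabbccdd"]: None values and
+# keys that are not columns of the table are skipped, so an empty condition
+# produces no clauses (and callers treat that as "match nothing").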
+
class Database(object):
def __init__(self, engine, logger):
self.engine = engine
@@ -135,168 +199,265 @@ class Database(object):
await self.db.close()
self.db = None
- async def get_unihash_by_taskhash_full(self, method, taskhash):
- statement = (
- select(
- OuthashesV2,
- UnihashesV2.unihash.label("unihash"),
- )
- .join(
- UnihashesV2,
- and_(
- UnihashesV2.method == OuthashesV2.method,
- UnihashesV2.taskhash == OuthashesV2.taskhash,
- ),
- )
- .where(
- OuthashesV2.method == method,
- OuthashesV2.taskhash == taskhash,
+ async def _execute(self, statement):
+ self.logger.debug("%s", statement)
+ return await self.db.execute(statement)
+
+ async def _set_config(self, name, value):
+ while True:
+ result = await self._execute(
+ update(Config).where(Config.name == name).values(value=value)
)
- .order_by(
- OuthashesV2.created.asc(),
+
+ if result.rowcount == 0:
+ self.logger.debug("Config '%s' not found. Adding it", name)
+ try:
+ await self._execute(insert(Config).values(name=name, value=value))
+ except IntegrityError:
+ # Race. Try again
+ continue
+
+ break
+
+ def _get_config_subquery(self, name, default=None):
+ if default is not None:
+ return func.coalesce(
+ select(Config.value).where(Config.name == name).scalar_subquery(),
+ default,
)
- .limit(1)
- )
- self.logger.debug("%s", statement)
+ return select(Config.value).where(Config.name == name).scalar_subquery()
+
+ async def _get_config(self, name):
+ result = await self._execute(select(Config.value).where(Config.name == name))
+ row = result.first()
+ if row is None:
+ return None
+ return row.value
+
+ async def get_unihash_by_taskhash_full(self, method, taskhash):
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ select(
+ OuthashesV2,
+ UnihashesV3.unihash.label("unihash"),
+ )
+ .join(
+ UnihashesV3,
+ and_(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ ),
+ )
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.taskhash == taskhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
return map_row(result.first())
async def get_unihash_by_outhash(self, method, outhash):
- statement = (
- select(OuthashesV2, UnihashesV2.unihash.label("unihash"))
- .join(
- UnihashesV2,
- and_(
- UnihashesV2.method == OuthashesV2.method,
- UnihashesV2.taskhash == OuthashesV2.taskhash,
- ),
- )
- .where(
- OuthashesV2.method == method,
- OuthashesV2.outhash == outhash,
- )
- .order_by(
- OuthashesV2.created.asc(),
- )
- .limit(1)
- )
- self.logger.debug("%s", statement)
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ select(OuthashesV2, UnihashesV3.unihash.label("unihash"))
+ .join(
+ UnihashesV3,
+ and_(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ ),
+ )
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.outhash == outhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
return map_row(result.first())
- async def get_outhash(self, method, outhash):
- statement = (
- select(OuthashesV2)
- .where(
- OuthashesV2.method == method,
- OuthashesV2.outhash == outhash,
- )
- .order_by(
- OuthashesV2.created.asc(),
+ async def unihash_exists(self, unihash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(UnihashesV3).where(UnihashesV3.unihash == unihash).limit(1)
)
- .limit(1)
- )
- self.logger.debug("%s", statement)
+ return result.first() is not None
+
+ async def get_outhash(self, method, outhash):
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ select(OuthashesV2)
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.outhash == outhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
return map_row(result.first())
async def get_equivalent_for_outhash(self, method, outhash, taskhash):
- statement = (
- select(
- OuthashesV2.taskhash.label("taskhash"),
- UnihashesV2.unihash.label("unihash"),
- )
- .join(
- UnihashesV2,
- and_(
- UnihashesV2.method == OuthashesV2.method,
- UnihashesV2.taskhash == OuthashesV2.taskhash,
- ),
- )
- .where(
- OuthashesV2.method == method,
- OuthashesV2.outhash == outhash,
- OuthashesV2.taskhash != taskhash,
- )
- .order_by(
- OuthashesV2.created.asc(),
- )
- .limit(1)
- )
- self.logger.debug("%s", statement)
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ select(
+ OuthashesV2.taskhash.label("taskhash"),
+ UnihashesV3.unihash.label("unihash"),
+ )
+ .join(
+ UnihashesV3,
+ and_(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ ),
+ )
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.outhash == outhash,
+ OuthashesV2.taskhash != taskhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
return map_row(result.first())
async def get_equivalent(self, method, taskhash):
- statement = select(
- UnihashesV2.unihash,
- UnihashesV2.method,
- UnihashesV2.taskhash,
- ).where(
- UnihashesV2.method == method,
- UnihashesV2.taskhash == taskhash,
- )
- self.logger.debug("%s", statement)
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ select(
+ UnihashesV3.unihash,
+ UnihashesV3.method,
+ UnihashesV3.taskhash,
+ ).where(
+ UnihashesV3.method == method,
+ UnihashesV3.taskhash == taskhash,
+ )
+ )
return map_row(result.first())
async def remove(self, condition):
async def do_remove(table):
- where = {}
- for c in table.__table__.columns:
- if c.key in condition and condition[c.key] is not None:
- where[c] = condition[c.key]
-
+ where = _make_condition_statement(table, condition)
if where:
- statement = delete(table).where(*[(k == v) for k, v in where.items()])
- self.logger.debug("%s", statement)
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(delete(table).where(*where))
return result.rowcount
return 0
count = 0
- count += await do_remove(UnihashesV2)
+ count += await do_remove(UnihashesV3)
count += await do_remove(OuthashesV2)
return count
- async def clean_unused(self, oldest):
- statement = delete(OuthashesV2).where(
- OuthashesV2.created < oldest,
- ~(
- select(UnihashesV2.id)
- .where(
- UnihashesV2.method == OuthashesV2.method,
- UnihashesV2.taskhash == OuthashesV2.taskhash,
+ async def get_current_gc_mark(self):
+ async with self.db.begin():
+ return await self._get_config("gc-mark")
+
+ async def gc_status(self):
+ async with self.db.begin():
+ gc_mark_subquery = self._get_config_subquery("gc-mark", "")
+
+ result = await self._execute(
+ select(func.count())
+ .select_from(UnihashesV3)
+ .where(UnihashesV3.gc_mark == gc_mark_subquery)
+ )
+ keep_rows = result.scalar()
+
+ result = await self._execute(
+ select(func.count())
+ .select_from(UnihashesV3)
+ .where(UnihashesV3.gc_mark != gc_mark_subquery)
+ )
+ remove_rows = result.scalar()
+
+ return (keep_rows, remove_rows, await self._get_config("gc-mark"))
+
+ async def gc_mark(self, mark, condition):
+ async with self.db.begin():
+ await self._set_config("gc-mark", mark)
+
+ where = _make_condition_statement(UnihashesV3, condition)
+ if not where:
+ return 0
+
+ result = await self._execute(
+ update(UnihashesV3)
+ .values(gc_mark=self._get_config_subquery("gc-mark", ""))
+ .where(*where)
+ )
+ return result.rowcount
+
+ async def gc_sweep(self):
+ async with self.db.begin():
+ result = await self._execute(
+ delete(UnihashesV3).where(
+                    # A sneaky conditional that protects against errant use:
+                    # if the config mark is NULL, this will not match any rows,
+                    # because no default is specified in the select statement
+ UnihashesV3.gc_mark
+ != self._get_config_subquery("gc-mark")
)
- .limit(1)
- .exists()
- ),
- )
- self.logger.debug("%s", statement)
+ )
+ await self._set_config("gc-mark", None)
+
+ return result.rowcount
+
+ async def clean_unused(self, oldest):
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ delete(OuthashesV2).where(
+ OuthashesV2.created < oldest,
+ ~(
+ select(UnihashesV3.id)
+ .where(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ )
+ .limit(1)
+ .exists()
+ ),
+ )
+ )
return result.rowcount
async def insert_unihash(self, method, taskhash, unihash):
- statement = insert(UnihashesV2).values(
- method=method,
- taskhash=taskhash,
- unihash=unihash,
- )
- self.logger.debug("%s", statement)
+ # Postgres specific ignore on insert duplicate
+ if self.engine.name == "postgresql":
+ statement = (
+ postgres_insert(UnihashesV3)
+ .values(
+ method=method,
+ taskhash=taskhash,
+ unihash=unihash,
+ gc_mark=self._get_config_subquery("gc-mark", ""),
+ )
+ .on_conflict_do_nothing(index_elements=("method", "taskhash"))
+ )
+ else:
+ statement = insert(UnihashesV3).values(
+ method=method,
+ taskhash=taskhash,
+ unihash=unihash,
+ gc_mark=self._get_config_subquery("gc-mark", ""),
+ )
+
try:
async with self.db.begin():
- await self.db.execute(statement)
- return True
+ result = await self._execute(statement)
+ return result.rowcount != 0
except IntegrityError:
self.logger.debug(
"%s, %s, %s already in unihash database", method, taskhash, unihash
@@ -311,12 +472,22 @@ class Database(object):
if "created" in data and not isinstance(data["created"], datetime):
data["created"] = datetime.fromisoformat(data["created"])
- statement = insert(OuthashesV2).values(**data)
- self.logger.debug("%s", statement)
+ # Postgres specific ignore on insert duplicate
+ if self.engine.name == "postgresql":
+ statement = (
+ postgres_insert(OuthashesV2)
+ .values(**data)
+ .on_conflict_do_nothing(
+ index_elements=("method", "taskhash", "outhash")
+ )
+ )
+ else:
+ statement = insert(OuthashesV2).values(**data)
+
try:
async with self.db.begin():
- await self.db.execute(statement)
- return True
+ result = await self._execute(statement)
+ return result.rowcount != 0
except IntegrityError:
self.logger.debug(
"%s, %s already in outhash database", data["method"], data["outhash"]
@@ -324,16 +495,16 @@ class Database(object):
return False
async def _get_user(self, username):
- statement = select(
- Users.username,
- Users.permissions,
- Users.token,
- ).where(
- Users.username == username,
- )
- self.logger.debug("%s", statement)
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ select(
+ Users.username,
+ Users.permissions,
+ Users.token,
+ ).where(
+ Users.username == username,
+ )
+ )
return result.first()
async def lookup_user_token(self, username):
@@ -346,70 +517,66 @@ class Database(object):
return map_user(await self._get_user(username))
async def set_user_token(self, username, token):
- statement = (
- update(Users)
- .where(
- Users.username == username,
- )
- .values(
- token=token,
- )
- )
- self.logger.debug("%s", statement)
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ update(Users)
+ .where(
+ Users.username == username,
+ )
+ .values(
+ token=token,
+ )
+ )
return result.rowcount != 0
async def set_user_perms(self, username, permissions):
- statement = (
- update(Users)
- .where(Users.username == username)
- .values(permissions=" ".join(permissions))
- )
- self.logger.debug("%s", statement)
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ update(Users)
+ .where(Users.username == username)
+ .values(permissions=" ".join(permissions))
+ )
return result.rowcount != 0
async def get_all_users(self):
- statement = select(
- Users.username,
- Users.permissions,
- )
- self.logger.debug("%s", statement)
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ select(
+ Users.username,
+ Users.permissions,
+ )
+ )
return [map_user(row) for row in result]
async def new_user(self, username, permissions, token):
- statement = insert(Users).values(
- username=username,
- permissions=" ".join(permissions),
- token=token,
- )
- self.logger.debug("%s", statement)
try:
async with self.db.begin():
- await self.db.execute(statement)
+ await self._execute(
+ insert(Users).values(
+ username=username,
+ permissions=" ".join(permissions),
+ token=token,
+ )
+ )
return True
except IntegrityError as e:
self.logger.debug("Cannot create new user %s: %s", username, e)
return False
async def delete_user(self, username):
- statement = delete(Users).where(Users.username == username)
- self.logger.debug("%s", statement)
async with self.db.begin():
- result = await self.db.execute(statement)
+ result = await self._execute(
+ delete(Users).where(Users.username == username)
+ )
return result.rowcount != 0
async def get_usage(self):
usage = {}
async with self.db.begin() as session:
for name, table in Base.metadata.tables.items():
- statement = select(func.count()).select_from(table)
- self.logger.debug("%s", statement)
- result = await self.db.execute(statement)
+ result = await self._execute(
+ statement=select(func.count()).select_from(table)
+ )
usage[name] = {
"rows": result.scalar(),
}
@@ -418,7 +585,7 @@ class Database(object):
async def get_query_columns(self):
columns = set()
- for table in (UnihashesV2, OuthashesV2):
+ for table in (UnihashesV3, OuthashesV2):
for c in table.__table__.columns:
if not isinstance(c.type, Text):
continue
diff --git a/poky/bitbake/lib/hashserv/sqlite.py b/poky/bitbake/lib/hashserv/sqlite.py
index f93cb2c1dd..da2e844a03 100644
--- a/poky/bitbake/lib/hashserv/sqlite.py
+++ b/poky/bitbake/lib/hashserv/sqlite.py
@@ -15,6 +15,7 @@ UNIHASH_TABLE_DEFINITION = (
("method", "TEXT NOT NULL", "UNIQUE"),
("taskhash", "TEXT NOT NULL", "UNIQUE"),
("unihash", "TEXT NOT NULL", ""),
+ ("gc_mark", "TEXT NOT NULL", ""),
)
UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)
@@ -44,6 +45,14 @@ USERS_TABLE_DEFINITION = (
USERS_TABLE_COLUMNS = tuple(name for name, _, _ in USERS_TABLE_DEFINITION)
+CONFIG_TABLE_DEFINITION = (
+ ("name", "TEXT NOT NULL", "UNIQUE"),
+ ("value", "TEXT", ""),
+)
+
+CONFIG_TABLE_COLUMNS = tuple(name for name, _, _ in CONFIG_TABLE_DEFINITION)
+
+
def _make_table(cursor, name, definition):
cursor.execute(
"""
@@ -71,6 +80,35 @@ def map_user(row):
)
+def _make_condition_statement(columns, condition):
+ where = {}
+ for c in columns:
+ if c in condition and condition[c] is not None:
+ where[c] = condition[c]
+
+ return where, " AND ".join("%s=:%s" % (k, k) for k in where.keys())
+
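+# For example (illustrative), a condition of {"unihash": "aabbccdd", "method": "m"}
+# yields ({"unihash": "aabbccdd", "method": "m"}, "unihash=:unihash AND method=:method"),
+# ready to be interpolated into a WHERE clause and bound as named parameters.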
+
+def _get_sqlite_version(cursor):
+ cursor.execute("SELECT sqlite_version()")
+
+ version = []
+ for v in cursor.fetchone()[0].split("."):
+ try:
+ version.append(int(v))
+ except ValueError:
+ version.append(v)
+
+ return tuple(version)
+
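+# For example (illustrative): "3.45.1" becomes (3, 45, 1); non-numeric
+# components are kept as strings rather than dropped.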
+
+def _schema_table_name(version):
+ if version >= (3, 33):
+ return "sqlite_schema"
+
+ return "sqlite_master"
+
+
class DatabaseEngine(object):
def __init__(self, dbname, sync):
self.dbname = dbname
@@ -82,9 +120,10 @@ class DatabaseEngine(object):
db.row_factory = sqlite3.Row
with closing(db.cursor()) as cursor:
- _make_table(cursor, "unihashes_v2", UNIHASH_TABLE_DEFINITION)
+ _make_table(cursor, "unihashes_v3", UNIHASH_TABLE_DEFINITION)
_make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)
_make_table(cursor, "users", USERS_TABLE_DEFINITION)
+ _make_table(cursor, "config", CONFIG_TABLE_DEFINITION)
cursor.execute("PRAGMA journal_mode = WAL")
cursor.execute(
@@ -96,17 +135,41 @@ class DatabaseEngine(object):
cursor.execute("DROP INDEX IF EXISTS outhash_lookup")
cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v2")
cursor.execute("DROP INDEX IF EXISTS outhash_lookup_v2")
+ cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v3")
# TODO: Upgrade from tasks_v2?
cursor.execute("DROP TABLE IF EXISTS tasks_v2")
# Create new indexes
cursor.execute(
- "CREATE INDEX IF NOT EXISTS taskhash_lookup_v3 ON unihashes_v2 (method, taskhash)"
+ "CREATE INDEX IF NOT EXISTS taskhash_lookup_v4 ON unihashes_v3 (method, taskhash)"
+ )
+ cursor.execute(
+ "CREATE INDEX IF NOT EXISTS unihash_lookup_v1 ON unihashes_v3 (unihash)"
)
cursor.execute(
"CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)"
)
+ cursor.execute("CREATE INDEX IF NOT EXISTS config_lookup ON config (name)")
+
+ sqlite_version = _get_sqlite_version(cursor)
+
+ cursor.execute(
+ f"""
+ SELECT name FROM {_schema_table_name(sqlite_version)} WHERE type = 'table' AND name = 'unihashes_v2'
+ """
+ )
+ if cursor.fetchone():
+ self.logger.info("Upgrading Unihashes V2 -> V3...")
+ cursor.execute(
+ """
+ INSERT INTO unihashes_v3 (id, method, unihash, taskhash, gc_mark)
+ SELECT id, method, unihash, taskhash, '' FROM unihashes_v2
+ """
+ )
+ cursor.execute("DROP TABLE unihashes_v2")
+ db.commit()
+ self.logger.info("Upgrade complete")
def connect(self, logger):
return Database(logger, self.dbname, self.sync)
@@ -126,16 +189,7 @@ class Database(object):
"PRAGMA synchronous = %s" % ("NORMAL" if sync else "OFF")
)
- cursor.execute("SELECT sqlite_version()")
-
- version = []
- for v in cursor.fetchone()[0].split("."):
- try:
- version.append(int(v))
- except ValueError:
- version.append(v)
-
- self.sqlite_version = tuple(version)
+ self.sqlite_version = _get_sqlite_version(cursor)
async def __aenter__(self):
return self
@@ -143,6 +197,30 @@ class Database(object):
async def __aexit__(self, exc_type, exc_value, traceback):
await self.close()
+ async def _set_config(self, cursor, name, value):
+ cursor.execute(
+ """
+ INSERT OR REPLACE INTO config (id, name, value) VALUES
+ ((SELECT id FROM config WHERE name=:name), :name, :value)
+ """,
+ {
+ "name": name,
+ "value": value,
+ },
+ )
+
+ async def _get_config(self, cursor, name):
+ cursor.execute(
+ "SELECT value FROM config WHERE name=:name",
+ {
+ "name": name,
+ },
+ )
+ row = cursor.fetchone()
+ if row is None:
+ return None
+ return row["value"]
+
async def close(self):
self.db.close()
@@ -150,8 +228,8 @@ class Database(object):
with closing(self.db.cursor()) as cursor:
cursor.execute(
"""
- SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
- INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
+ SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
+ INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
ORDER BY outhashes_v2.created ASC
LIMIT 1
@@ -167,8 +245,8 @@ class Database(object):
with closing(self.db.cursor()) as cursor:
cursor.execute(
"""
- SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
- INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
+ SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
+ INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
ORDER BY outhashes_v2.created ASC
LIMIT 1
@@ -180,6 +258,19 @@ class Database(object):
)
return cursor.fetchone()
+ async def unihash_exists(self, unihash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT * FROM unihashes_v3 WHERE unihash=:unihash
+ LIMIT 1
+ """,
+ {
+ "unihash": unihash,
+ },
+ )
+ return cursor.fetchone() is not None
+
async def get_outhash(self, method, outhash):
with closing(self.db.cursor()) as cursor:
cursor.execute(
@@ -200,8 +291,8 @@ class Database(object):
with closing(self.db.cursor()) as cursor:
cursor.execute(
"""
- SELECT outhashes_v2.taskhash AS taskhash, unihashes_v2.unihash AS unihash FROM outhashes_v2
- INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
+ SELECT outhashes_v2.taskhash AS taskhash, unihashes_v3.unihash AS unihash FROM outhashes_v2
+ INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
-- Select any matching output hash except the one we just inserted
WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
-- Pick the oldest hash
@@ -219,7 +310,7 @@ class Database(object):
async def get_equivalent(self, method, taskhash):
with closing(self.db.cursor()) as cursor:
cursor.execute(
- "SELECT taskhash, method, unihash FROM unihashes_v2 WHERE method=:method AND taskhash=:taskhash",
+ "SELECT taskhash, method, unihash FROM unihashes_v3 WHERE method=:method AND taskhash=:taskhash",
{
"method": method,
"taskhash": taskhash,
@@ -229,15 +320,9 @@ class Database(object):
async def remove(self, condition):
def do_remove(columns, table_name, cursor):
- where = {}
- for c in columns:
- if c in condition and condition[c] is not None:
- where[c] = condition[c]
-
+ where, clause = _make_condition_statement(columns, condition)
if where:
- query = ("DELETE FROM %s WHERE " % table_name) + " AND ".join(
- "%s=:%s" % (k, k) for k in where.keys()
- )
+ query = f"DELETE FROM {table_name} WHERE {clause}"
cursor.execute(query, where)
return cursor.rowcount
@@ -246,17 +331,80 @@ class Database(object):
count = 0
with closing(self.db.cursor()) as cursor:
count += do_remove(OUTHASH_TABLE_COLUMNS, "outhashes_v2", cursor)
- count += do_remove(UNIHASH_TABLE_COLUMNS, "unihashes_v2", cursor)
+ count += do_remove(UNIHASH_TABLE_COLUMNS, "unihashes_v3", cursor)
self.db.commit()
return count
+ async def get_current_gc_mark(self):
+ with closing(self.db.cursor()) as cursor:
+ return await self._get_config(cursor, "gc-mark")
+
+ async def gc_status(self):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT COUNT() FROM unihashes_v3 WHERE
+ gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ """
+ )
+ keep_rows = cursor.fetchone()[0]
+
+ cursor.execute(
+ """
+ SELECT COUNT() FROM unihashes_v3 WHERE
+ gc_mark!=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ """
+ )
+ remove_rows = cursor.fetchone()[0]
+
+ current_mark = await self._get_config(cursor, "gc-mark")
+
+ return (keep_rows, remove_rows, current_mark)
+
+ async def gc_mark(self, mark, condition):
+ with closing(self.db.cursor()) as cursor:
+ await self._set_config(cursor, "gc-mark", mark)
+
+ where, clause = _make_condition_statement(UNIHASH_TABLE_COLUMNS, condition)
+
+ new_rows = 0
+ if where:
+ cursor.execute(
+ f"""
+ UPDATE unihashes_v3 SET
+ gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ WHERE {clause}
+ """,
+ where,
+ )
+ new_rows = cursor.rowcount
+
+ self.db.commit()
+ return new_rows
+
+ async def gc_sweep(self):
+ with closing(self.db.cursor()) as cursor:
+ # NOTE: COALESCE is not used in this query so that if the current
+ # mark is NULL, nothing will happen
+ cursor.execute(
+ """
+ DELETE FROM unihashes_v3 WHERE
+ gc_mark!=(SELECT value FROM config WHERE name='gc-mark')
+ """
+ )
+ count = cursor.rowcount
+ await self._set_config(cursor, "gc-mark", None)
+
+ self.db.commit()
+ return count
+
async def clean_unused(self, oldest):
with closing(self.db.cursor()) as cursor:
cursor.execute(
"""
DELETE FROM outhashes_v2 WHERE created<:oldest AND NOT EXISTS (
- SELECT unihashes_v2.id FROM unihashes_v2 WHERE unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash LIMIT 1
+ SELECT unihashes_v3.id FROM unihashes_v3 WHERE unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash LIMIT 1
)
""",
{
@@ -271,7 +419,13 @@ class Database(object):
prevrowid = cursor.lastrowid
cursor.execute(
"""
- INSERT OR IGNORE INTO unihashes_v2 (method, taskhash, unihash) VALUES(:method, :taskhash, :unihash)
+ INSERT OR IGNORE INTO unihashes_v3 (method, taskhash, unihash, gc_mark) VALUES
+ (
+ :method,
+ :taskhash,
+ :unihash,
+ COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ )
""",
{
"method": method,
@@ -383,14 +537,9 @@ class Database(object):
async def get_usage(self):
usage = {}
with closing(self.db.cursor()) as cursor:
- if self.sqlite_version >= (3, 33):
- table_name = "sqlite_schema"
- else:
- table_name = "sqlite_master"
-
cursor.execute(
f"""
- SELECT name FROM {table_name} WHERE type = 'table' AND name NOT LIKE 'sqlite_%'
+ SELECT name FROM {_schema_table_name(self.sqlite_version)} WHERE type = 'table' AND name NOT LIKE 'sqlite_%'
"""
)
for row in cursor.fetchall():
diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py
index 869f7636c5..0809453cf8 100644
--- a/poky/bitbake/lib/hashserv/tests.py
+++ b/poky/bitbake/lib/hashserv/tests.py
@@ -8,6 +8,7 @@
from . import create_server, create_client
from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS
from bb.asyncrpc import InvokeError
+from .client import ClientPool
import hashlib
import logging
import multiprocessing
@@ -442,6 +443,11 @@ class HashEquivalenceCommonTests(object):
self.assertEqual(result['taskhash'], taskhash9, 'Server failed to copy unihash from upstream')
self.assertEqual(result['method'], self.METHOD)
+    def test_unihash_exists(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.assertTrue(self.client.unihash_exists(unihash))
+ self.assertFalse(self.client.unihash_exists('6662e699d6e3d894b24408ff9a4031ef9b038ee8'))
+
def test_ro_server(self):
rw_server = self.start_server()
rw_client = self.start_client(rw_server.address)
@@ -549,6 +555,88 @@ class HashEquivalenceCommonTests(object):
# shares a taskhash with Task 2
self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ def test_client_pool_get_unihashes(self):
+ TEST_INPUT = (
+ # taskhash outhash unihash
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'),
+ # Duplicated taskhash with multiple output hashes and unihashes.
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'),
+ # Equivalent hash
+ ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"),
+ ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'),
+ ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'),
+ )
+ EXTRA_QUERIES = (
+ "6b6be7a84ab179b4240c4302518dc3f6",
+ )
+
+ with ClientPool(self.server_address, 10) as client_pool:
+ for taskhash, outhash, unihash in TEST_INPUT:
+ self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+
+ query = {idx: (self.METHOD, data[0]) for idx, data in enumerate(TEST_INPUT)}
+ for idx, taskhash in enumerate(EXTRA_QUERIES):
+ query[idx + len(TEST_INPUT)] = (self.METHOD, taskhash)
+
+ result = client_pool.get_unihashes(query)
+
+ self.assertDictEqual(result, {
+ 0: "218e57509998197d570e2c98512d0105985dffc9",
+ 1: "218e57509998197d570e2c98512d0105985dffc9",
+ 2: "218e57509998197d570e2c98512d0105985dffc9",
+ 3: "3b5d3d83f07f259e9086fcb422c855286e18a57d",
+ 4: "f46d3fbb439bd9b921095da657a4de906510d2cd",
+ 5: "f46d3fbb439bd9b921095da657a4de906510d2cd",
+ 6: "05d2a63c81e32f0a36542ca677e8ad852365c538",
+ 7: None,
+ })
+
+ def test_client_pool_unihash_exists(self):
+ TEST_INPUT = (
+ # taskhash outhash unihash
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'),
+ # Duplicated taskhash with multiple output hashes and unihashes.
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'),
+ # Equivalent hash
+ ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"),
+ ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'),
+ ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'),
+ )
+ EXTRA_QUERIES = (
+ "6b6be7a84ab179b4240c4302518dc3f6",
+ )
+
+ result_unihashes = set()
+
+ with ClientPool(self.server_address, 10) as client_pool:
+ for taskhash, outhash, unihash in TEST_INPUT:
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ result_unihashes.add(result["unihash"])
+
+ query = {}
+ expected = {}
+
+ for _, _, unihash in TEST_INPUT:
+ idx = len(query)
+ query[idx] = unihash
+ expected[idx] = unihash in result_unihashes
+
+ for unihash in EXTRA_QUERIES:
+ idx = len(query)
+ query[idx] = unihash
+ expected[idx] = False
+
+ result = client_pool.unihashes_exist(query)
+ self.assertDictEqual(result, expected)
+
def test_auth_read_perms(self):
admin_client = self.start_auth_server()
@@ -810,6 +898,27 @@ class HashEquivalenceCommonTests(object):
with self.auth_perms("@user-admin") as client:
become = client.become_user(client.username)
+ def test_auth_gc(self):
+ admin_client = self.start_auth_server()
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.gc_mark("ABC", {"unihash": "123"})
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.gc_status()
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.gc_sweep("ABC")
+
+ with self.auth_perms("@db-admin") as client:
+ client.gc_mark("ABC", {"unihash": "123"})
+
+ with self.auth_perms("@db-admin") as client:
+ client.gc_status()
+
+ with self.auth_perms("@db-admin") as client:
+ client.gc_sweep("ABC")
+
def test_get_db_usage(self):
usage = self.client.get_db_usage()
@@ -837,6 +946,147 @@ class HashEquivalenceCommonTests(object):
data = client.get_taskhash(self.METHOD, taskhash, True)
self.assertEqual(data["owner"], user["username"])
+ def test_gc(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
+
+ # Second hash is still there; mark doesn't delete hashes
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ ret = self.client.gc_sweep("ABC")
+ self.assertEqual(ret, {"count": 1})
+
+ # Hash is gone. Taskhash is returned for second hash
+ self.assertClientGetHash(self.client, taskhash2, None)
+ # First hash is still present
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ def test_gc_switch_mark(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
+
+ # Second hash is still there; mark doesn't delete hashes
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Switch to a different mark and mark the second hash. This will start
+ # a new collection cycle
+ ret = self.client.gc_mark("DEF", {"unihash": unihash2, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "DEF", "keep": 1, "remove": 1})
+
+ # Both hashes are still present
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ # Sweep with the new mark
+ ret = self.client.gc_sweep("DEF")
+ self.assertEqual(ret, {"count": 1})
+
+ # First hash is gone, second is kept
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ def test_gc_switch_sweep_mark(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
+
+ # Sweeping with a different mark raises an error
+ with self.assertRaises(InvokeError):
+ self.client.gc_sweep("DEF")
+
+ # Both hashes are present
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ def test_gc_new_hashes(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ # Start a new garbage collection
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 0})
+
+ # Add second hash. It should inherit the mark from the current garbage
+ # collection operation
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Sweep should remove nothing
+ ret = self.client.gc_sweep("ABC")
+ self.assertEqual(ret, {"count": 0})
+
+ # Both hashes are present
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
class TestHashEquivalenceClient(HashEquivalenceTestSetup, unittest.TestCase):
def get_server_addr(self, server_idx):
@@ -869,6 +1119,40 @@ class TestHashEquivalenceClient(HashEquivalenceTestSetup, unittest.TestCase):
def test_stress(self):
self.run_hashclient(["--address", self.server_address, "stress"], check=True)
+    def test_unihash_exists(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", unihash,
+ ], check=True)
+ self.assertEqual(p.stdout.strip(), "true")
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8',
+ ], check=True)
+ self.assertEqual(p.stdout.strip(), "false")
+
+    def test_unihash_exists_quiet(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", unihash,
+ "--quiet",
+ ])
+ self.assertEqual(p.returncode, 0)
+ self.assertEqual(p.stdout.strip(), "")
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8',
+ "--quiet",
+ ])
+ self.assertEqual(p.returncode, 1)
+ self.assertEqual(p.stdout.strip(), "")
+
def test_remove_taskhash(self):
taskhash, outhash, unihash = self.create_test_hash(self.client)
self.run_hashclient([
@@ -1086,6 +1370,42 @@ class TestHashEquivalenceClient(HashEquivalenceTestSetup, unittest.TestCase):
"get-db-query-columns",
], check=True)
+ def test_gc(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ self.run_hashclient([
+ "--address", self.server_address,
+ "gc-mark", "ABC",
+ "--where", "unihash", unihash,
+ "--where", "method", self.METHOD
+ ], check=True)
+
+ # Second hash is still there; mark doesn't delete hashes
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ self.run_hashclient([
+ "--address", self.server_address,
+ "gc-sweep", "ABC",
+ ], check=True)
+
+ # Hash is gone. Taskhash is returned for second hash
+ self.assertClientGetHash(self.client, taskhash2, None)
+ # First hash is still present
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
def get_server_addr(self, server_idx):