Diffstat (limited to 'poky/bitbake/lib')
30 files changed, 530 insertions, 274 deletions
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py index 5c248d3655..e01b8d5256 100644 --- a/poky/bitbake/lib/bb/__init__.py +++ b/poky/bitbake/lib/bb/__init__.py @@ -9,7 +9,7 @@ # SPDX-License-Identifier: GPL-2.0-only # -__version__ = "1.51.1" +__version__ = "1.53.0" import sys if sys.version_info < (3, 6, 0): diff --git a/poky/bitbake/lib/bb/asyncrpc/client.py b/poky/bitbake/lib/bb/asyncrpc/client.py index 50e60d5c31..34960197d1 100644 --- a/poky/bitbake/lib/bb/asyncrpc/client.py +++ b/poky/bitbake/lib/bb/asyncrpc/client.py @@ -7,6 +7,7 @@ import asyncio import json import os import socket +import sys from . import chunkify, DEFAULT_MAX_CHUNK @@ -129,7 +130,7 @@ class Client(object): # required (but harmless) with it. asyncio.set_event_loop(self.loop) - self._add_methods('connect_tcp', 'close', 'ping') + self._add_methods('connect_tcp', 'ping') @abc.abstractmethod def _get_async_client(self): @@ -163,3 +164,9 @@ class Client(object): @max_chunk.setter def max_chunk(self, value): self.client.max_chunk = value + + def close(self): + self.loop.run_until_complete(self.client.close()) + if sys.version_info >= (3, 6): + self.loop.run_until_complete(self.loop.shutdown_asyncgens()) + self.loop.close() diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py index 7e4ab9f64c..d6418e40b3 100644 --- a/poky/bitbake/lib/bb/build.py +++ b/poky/bitbake/lib/bb/build.py @@ -569,7 +569,6 @@ exit $ret def _task_data(fn, task, d): localdata = bb.data.createCopy(d) localdata.setVar('BB_FILENAME', fn) - localdata.setVar('BB_CURRENTTASK', task[3:]) localdata.setVar('OVERRIDES', 'task-%s:%s' % (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False))) localdata.finalize() diff --git a/poky/bitbake/lib/bb/compress/_pipecompress.py b/poky/bitbake/lib/bb/compress/_pipecompress.py index 4b9f662143..5de17a82e2 100644 --- a/poky/bitbake/lib/bb/compress/_pipecompress.py +++ b/poky/bitbake/lib/bb/compress/_pipecompress.py @@ -49,7 +49,7 @@ def open_wrap( raise ValueError("Argument 'newline' not supported in binary mode") file_mode = mode.replace("t", "") - if isinstance(filename, (str, bytes, os.PathLike)): + if isinstance(filename, (str, bytes, os.PathLike, int)): binary_file = cls(filename, file_mode, **kwargs) elif hasattr(filename, "read") or hasattr(filename, "write"): binary_file = cls(None, file_mode, fileobj=filename, **kwargs) diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py index ee29d89b18..666cc1306a 100644 --- a/poky/bitbake/lib/bb/fetch2/__init__.py +++ b/poky/bitbake/lib/bb/fetch2/__init__.py @@ -884,7 +884,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir) success = True except bb.process.NotFoundError as e: - error_message = "Fetch command %s" % (e.command) + error_message = "Fetch command %s not found" % (e.command) except bb.process.ExecutionError as e: if e.stdout: output = "output:\n%s\n%s" % (e.stdout, e.stderr) @@ -1721,7 +1721,9 @@ class Fetch(object): self.d.setVar("BB_NO_NETWORK", "1") firsterr = None - verified_stamp = m.verify_donestamp(ud, self.d) + verified_stamp = False + if done: + verified_stamp = m.verify_donestamp(ud, self.d) if not done and (not verified_stamp or m.need_update(ud, self.d)): try: if not trusted_network(self.d, ud.url): @@ -1780,7 +1782,11 @@ class Fetch(object): def checkstatus(self, urls=None): """ - Check all urls exist upstream + Check all URLs exist 
upstream. + + Returns None if the URLs exist, raises FetchError if the check wasn't + successful but there wasn't an error (such as file not found), and + raises other exceptions in error cases. """ if not urls: diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py index a4527bf364..a7110a988d 100644 --- a/poky/bitbake/lib/bb/fetch2/gitsm.py +++ b/poky/bitbake/lib/bb/fetch2/gitsm.py @@ -140,16 +140,6 @@ class GitSM(Git): if Git.need_update(self, ud, d): return True - try: - # Check for the nugget dropped by the download operation - known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \ - (ud.basecmd), d, workdir=ud.clonedir) - - if ud.revisions[ud.names[0]] in known_srcrevs.split(): - return False - except bb.fetch2.FetchError: - pass - need_update_list = [] def need_update_submodule(ud, url, module, modpath, workdir, d): url += ";bareclone=1;nobranch=1" @@ -172,11 +162,6 @@ class GitSM(Git): shutil.rmtree(tmpdir) else: self.process_submodules(ud, ud.clonedir, need_update_submodule, d) - if len(need_update_list) == 0: - # We already have the required commits of all submodules. Drop - # a nugget so we don't need to check again. - runfetchcmd("%s config --add bitbake.srcrev %s" % \ - (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir) if len(need_update_list) > 0: logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) @@ -209,9 +194,6 @@ class GitSM(Git): shutil.rmtree(tmpdir) else: self.process_submodules(ud, ud.clonedir, download_submodule, d) - # Drop a nugget for the srcrev we've fetched (used by need_update) - runfetchcmd("%s config --add bitbake.srcrev %s" % \ - (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir) def unpack(self, ud, destdir, d): def unpack_submodules(ud, url, module, modpath, workdir, d): diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py index 47898509ff..e497c38dc7 100644 --- a/poky/bitbake/lib/bb/fetch2/npm.py +++ b/poky/bitbake/lib/bb/fetch2/npm.py @@ -69,17 +69,35 @@ def npm_unpack(tarball, destdir, d): bb.utils.mkdirhier(destdir) cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball) cmd += " --no-same-owner" + cmd += " --delay-directory-restore" cmd += " --strip-components=1" runfetchcmd(cmd, d, workdir=destdir) + runfetchcmd("chmod -R +X %s" % (destdir), d, quiet=True, workdir=destdir) class NpmEnvironment(object): """ Using a npm config file seems more reliable than using cli arguments. This class allows to create a controlled environment for npm commands. 
""" - def __init__(self, d, configs=None): + def __init__(self, d, configs=None, npmrc=None): self.d = d - self.configs = configs + + if configs: + self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1) + self.user_config_name = self.user_config.name + for key, value in configs: + self.user_config.write("%s=%s\n" % (key, value)) + else: + self.user_config_name = "/dev/null" + + if npmrc: + self.global_config_name = npmrc + else: + self.global_config_name = "/dev/null" + + def __del__(self): + if self.user_config: + self.user_config.close() def run(self, cmd, args=None, configs=None, workdir=None): """Run npm command in a controlled environment""" @@ -87,23 +105,19 @@ class NpmEnvironment(object): d = bb.data.createCopy(self.d) d.setVar("HOME", tmpdir) - cfgfile = os.path.join(tmpdir, "npmrc") - if not workdir: workdir = tmpdir def _run(cmd): - cmd = "NPM_CONFIG_USERCONFIG=%s " % cfgfile + cmd - cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % cfgfile + cmd + cmd = "NPM_CONFIG_USERCONFIG=%s " % (self.user_config_name) + cmd + cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % (self.global_config_name) + cmd return runfetchcmd(cmd, d, workdir=workdir) - if self.configs: - for key, value in self.configs: - _run("npm config set %s %s" % (key, shlex.quote(value))) - if configs: + bb.warn("Use of configs argument of NpmEnvironment.run() function" + " is deprecated. Please use args argument instead.") for key, value in configs: - _run("npm config set %s %s" % (key, shlex.quote(value))) + cmd += " --%s=%s" % (key, shlex.quote(value)) if args: for key, value in args: @@ -165,14 +179,14 @@ class Npm(FetchMethod): def _resolve_proxy_url(self, ud, d): def _npm_view(): - configs = [] - configs.append(("json", "true")) - configs.append(("registry", ud.registry)) + args = [] + args.append(("json", "true")) + args.append(("registry", ud.registry)) pkgver = shlex.quote(ud.package + "@" + ud.version) cmd = ud.basecmd + " view %s" % pkgver env = NpmEnvironment(d) check_network_access(d, cmd, ud.registry) - view_string = env.run(cmd, configs=configs) + view_string = env.run(cmd, args=args) if not view_string: raise FetchError("Unavailable package %s" % pkgver, ud.url) diff --git a/poky/bitbake/lib/bb/fetch2/npmsw.py b/poky/bitbake/lib/bb/fetch2/npmsw.py index 0c3511d8ab..426a139653 100644 --- a/poky/bitbake/lib/bb/fetch2/npmsw.py +++ b/poky/bitbake/lib/bb/fetch2/npmsw.py @@ -24,11 +24,14 @@ import bb from bb.fetch2 import Fetch from bb.fetch2 import FetchMethod from bb.fetch2 import ParameterError +from bb.fetch2 import runfetchcmd from bb.fetch2 import URI from bb.fetch2.npm import npm_integrity from bb.fetch2.npm import npm_localfile from bb.fetch2.npm import npm_unpack from bb.utils import is_semver +from bb.utils import lockfile +from bb.utils import unlockfile def foreach_dependencies(shrinkwrap, callback=None, dev=False): """ @@ -78,6 +81,7 @@ class NpmShrinkWrap(FetchMethod): extrapaths = [] destsubdirs = [os.path.join("node_modules", dep) for dep in deptree] destsuffix = os.path.join(*destsubdirs) + unpack = True integrity = params.get("integrity", None) resolved = params.get("resolved", None) @@ -148,7 +152,12 @@ class NpmShrinkWrap(FetchMethod): url = str(uri) - # local tarball sources and local link sources are unsupported + # Handle local tarball and link sources + elif version.startswith("file"): + localpath = version[5:] + if not version.endswith(".tgz"): + unpack = False + else: raise ParameterError("Unsupported dependency: %s" % name, ud.url) @@ -157,6 +166,7 @@ class NpmShrinkWrap(FetchMethod): 
"localpath": localpath, "extrapaths": extrapaths, "destsuffix": destsuffix, + "unpack": unpack, }) try: @@ -177,7 +187,7 @@ class NpmShrinkWrap(FetchMethod): # This fetcher resolves multiple URIs from a shrinkwrap file and then # forwards it to a proxy fetcher. The management of the donestamp file, # the lockfile and the checksums are forwarded to the proxy fetcher. - ud.proxy = Fetch([dep["url"] for dep in ud.deps], data) + ud.proxy = Fetch([dep["url"] for dep in ud.deps if dep["url"]], data) ud.needdonestamp = False @staticmethod @@ -187,7 +197,9 @@ class NpmShrinkWrap(FetchMethod): proxy_ud = ud.proxy.ud[proxy_url] proxy_d = ud.proxy.d proxy_ud.setup_localpath(proxy_d) + lf = lockfile(proxy_ud.lockfile) returns.append(handle(proxy_ud.method, proxy_ud, proxy_d)) + unlockfile(lf) return returns def verify_donestamp(self, ud, d): @@ -237,7 +249,16 @@ class NpmShrinkWrap(FetchMethod): for dep in manual: depdestdir = os.path.join(destdir, dep["destsuffix"]) - npm_unpack(dep["localpath"], depdestdir, d) + if dep["url"]: + npm_unpack(dep["localpath"], depdestdir, d) + else: + depsrcdir= os.path.join(destdir, dep["localpath"]) + if dep["unpack"]: + npm_unpack(depsrcdir, depdestdir, d) + else: + bb.utils.mkdirhier(depdestdir) + cmd = 'cp -fpPRH "%s/." .' % (depsrcdir) + runfetchcmd(cmd, d, workdir=depdestdir) def clean(self, ud, d): """Clean any existing full or partial download""" diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py index e2a41a4a12..3b6fa4b1ec 100644 --- a/poky/bitbake/lib/bb/fetch2/perforce.py +++ b/poky/bitbake/lib/bb/fetch2/perforce.py @@ -134,7 +134,7 @@ class Perforce(FetchMethod): ud.setup_revisions(d) - ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleandedmodule, ud.revision)) + ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision)) def _buildp4command(self, ud, d, command, depot_filename=None): """ diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py index 625a9cf3bb..578ba5d661 100644 --- a/poky/bitbake/lib/bb/siggen.py +++ b/poky/bitbake/lib/bb/siggen.py @@ -11,6 +11,8 @@ import pickle import bb.data import difflib import simplediff +import json +import bb.compress.zstd from bb.checksum import FileChecksumCache from bb import runqueue import hashserv @@ -19,6 +21,17 @@ import hashserv.client logger = logging.getLogger('BitBake.SigGen') hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv') +class SetEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, set): + return dict(_set_object=list(sorted(obj))) + return json.JSONEncoder.default(self, obj) + +def SetDecoder(dct): + if '_set_object' in dct: + return set(dct['_set_object']) + return dct + def init(d): siggens = [obj for obj in globals().values() if type(obj) is type and issubclass(obj, SignatureGenerator)] @@ -398,9 +411,9 @@ class SignatureGeneratorBasic(SignatureGenerator): fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.") try: - with os.fdopen(fd, "wb") as stream: - p = pickle.dump(data, stream, -1) - stream.flush() + with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f: + json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder) + f.flush() os.chmod(tmpfile, 0o664) bb.utils.rename(tmpfile, sigfile) except (OSError, IOError) as err: @@ -794,12 +807,10 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): formatparams.update(values) return 
formatstr.format(**formatparams) - with open(a, 'rb') as f: - p1 = pickle.Unpickler(f) - a_data = p1.load() - with open(b, 'rb') as f: - p2 = pickle.Unpickler(f) - b_data = p2.load() + with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f: + a_data = json.load(f, object_hook=SetDecoder) + with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f: + b_data = json.load(f, object_hook=SetDecoder) def dict_diff(a, b, whitelist=set()): sa = set(a.keys()) @@ -815,11 +826,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): def file_checksums_diff(a, b): from collections import Counter - # Handle old siginfo format - if isinstance(a, dict): - a = [(os.path.basename(f), cs) for f, cs in a.items()] - if isinstance(b, dict): - b = [(os.path.basename(f), cs) for f, cs in b.items()] + + # Convert lists back to tuples + a = [(f[0], f[1]) for f in a] + b = [(f[0], f[1]) for f in b] + # Compare lists, ensuring we can handle duplicate filenames if they exist removedcount = Counter(a) removedcount.subtract(b) @@ -902,9 +913,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval)) if not 'file_checksum_values' in a_data: - a_data['file_checksum_values'] = {} + a_data['file_checksum_values'] = [] if not 'file_checksum_values' in b_data: - b_data['file_checksum_values'] = {} + b_data['file_checksum_values'] = [] changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values']) if changed: @@ -1031,9 +1042,8 @@ def calc_taskhash(sigdata): def dump_sigfile(a): output = [] - with open(a, 'rb') as f: - p1 = pickle.Unpickler(f) - a_data = p1.load() + with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f: + a_data = json.load(f, object_hook=SetDecoder) output.append("basewhitelist: %s" % (a_data['basewhitelist'])) diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py index af292a2163..8ad1c85990 100644 --- a/poky/bitbake/lib/bb/tests/fetch.py +++ b/poky/bitbake/lib/bb/tests/fetch.py @@ -376,7 +376,7 @@ class FetcherTest(unittest.TestCase): def setUp(self): self.origdir = os.getcwd() self.d = bb.data.init() - self.tempdir = tempfile.mkdtemp() + self.tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-") self.dldir = os.path.join(self.tempdir, "download") os.mkdir(self.dldir) self.d.setVar("DL_DIR", self.dldir) @@ -826,12 +826,12 @@ class FetcherNoNetworkTest(FetcherTest): class FetcherNetworkTest(FetcherTest): @skipIfNoNetwork() def test_fetch(self): - fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) + fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) fetcher.download() self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892) self.d.setVar("BB_NO_NETWORK", "1") - fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) + fetcher = 
bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) fetcher.download() fetcher.unpack(self.unpackdir) self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9) @@ -839,21 +839,21 @@ class FetcherNetworkTest(FetcherTest): @skipIfNoNetwork() def test_fetch_mirror(self): - self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake") + self.d.setVar("MIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake") fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) fetcher.download() self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) @skipIfNoNetwork() def test_fetch_mirror_of_mirror(self): - self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake") + self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* https://downloads.yoctoproject.org/releases/bitbake") fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) fetcher.download() self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) @skipIfNoNetwork() def test_fetch_file_mirror_of_mirror(self): - self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake") + self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* https://downloads.yoctoproject.org/releases/bitbake") fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) os.mkdir(self.dldir + "/some2where") fetcher.download() @@ -861,20 +861,20 @@ class FetcherNetworkTest(FetcherTest): @skipIfNoNetwork() def test_fetch_premirror(self): - self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake") + self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake") fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) fetcher.download() self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) @skipIfNoNetwork() def test_fetch_specify_downloadfilename(self): - fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d) + fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d) fetcher.download() self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749) @skipIfNoNetwork() def test_fetch_premirror_specify_downloadfilename_regex_uri(self): - self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake/") + self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake/") fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d) fetcher.download() self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749) @@ -882,7 +882,7 @@ class FetcherNetworkTest(FetcherTest): 
@skipIfNoNetwork() # BZ13039 def test_fetch_premirror_specify_downloadfilename_specific_uri(self): - self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake http://downloads.yoctoproject.org/releases/bitbake") + self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake https://downloads.yoctoproject.org/releases/bitbake") fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d) fetcher.download() self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749) @@ -1012,7 +1012,7 @@ class FetcherNetworkTest(FetcherTest): @skipIfNoNetwork() def test_git_submodule_CLI11(self): - url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf" + url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main" fetcher = bb.fetch.Fetch([url], self.d) fetcher.download() # Previous cwd has been deleted @@ -1027,12 +1027,12 @@ class FetcherNetworkTest(FetcherTest): @skipIfNoNetwork() def test_git_submodule_update_CLI11(self): """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """ - url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714" + url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main" fetcher = bb.fetch.Fetch([url], self.d) fetcher.download() # CLI11 that pulls in a newer nlohmann-json - url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca" + url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main" fetcher = bb.fetch.Fetch([url], self.d) fetcher.download() # Previous cwd has been deleted @@ -1291,10 +1291,10 @@ class FetchLatestVersionTest(FetcherTest): # # packages with versions only in current directory # - # http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 + # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "") : "2.19", - # http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 + # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "") : "20120814", # @@ -1357,13 +1357,13 @@ class FetchLatestVersionTest(FetcherTest): class FetchCheckStatusTest(FetcherTest): - test_wget_uris = ["http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", - "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz", - "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz", + test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", + "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz", + "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz", "https://yoctoproject.org/", "https://docs.yoctoproject.org", - "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz", - "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz", + "https://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz", + "https://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz", "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz", 
"http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz", "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz", diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf index efebf001a9..2645c0e985 100644 --- a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf +++ b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf @@ -12,6 +12,6 @@ STAMP = "${TMPDIR}/stamps/${PN}" T = "${TMPDIR}/workdir/${PN}/temp" BB_NUMBER_THREADS = "4" -BB_HASHBASE_WHITELIST = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE" +BB_HASHBASE_WHITELIST = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE BB_CURRENTTASK" include conf/multiconfig/${BB_CURRENT_MC}.conf diff --git a/poky/bitbake/lib/bb/tests/runqueue.py b/poky/bitbake/lib/bb/tests/runqueue.py index 3d51779d6c..5b6ada886a 100644 --- a/poky/bitbake/lib/bb/tests/runqueue.py +++ b/poky/bitbake/lib/bb/tests/runqueue.py @@ -278,7 +278,6 @@ class RunQueueTests(unittest.TestCase): ["mc_2:a1:%s" % t for t in rerun_tasks] self.assertEqual(set(tasks), set(expected)) - @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required') def test_hashserv_single(self): with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: extraenv = { @@ -304,7 +303,6 @@ class RunQueueTests(unittest.TestCase): self.shutdown(tempdir) - @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required') def test_hashserv_double(self): with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: extraenv = { @@ -329,7 +327,6 @@ class RunQueueTests(unittest.TestCase): self.shutdown(tempdir) - @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required') def test_hashserv_multiple_setscene(self): # Runs e1:do_package_setscene twice with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: @@ -361,7 +358,7 @@ class RunQueueTests(unittest.TestCase): def shutdown(self, tempdir): # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup - while os.path.exists(tempdir + "/hashserve.sock"): + while (os.path.exists(tempdir + "/hashserve.sock") or os.path.exists(tempdir + "cache/hashserv.db-wal")): time.sleep(0.5) diff --git a/poky/bitbake/lib/bb/tests/utils.py b/poky/bitbake/lib/bb/tests/utils.py index 4d5e21b99e..c363f62d7d 100644 --- a/poky/bitbake/lib/bb/tests/utils.py +++ b/poky/bitbake/lib/bb/tests/utils.py @@ -418,7 +418,7 @@ MULTILINE = " stuff \\ ['MULTILINE'], handle_var) - testvalue = re.sub('\s+', ' ', value_in_callback.strip()) + testvalue = re.sub(r'\s+', ' ', value_in_callback.strip()) self.assertEqual(expected_value, testvalue) class EditBbLayersConf(unittest.TestCase): diff --git a/poky/bitbake/lib/bb/ui/knotty.py b/poky/bitbake/lib/bb/ui/knotty.py index 8df745d130..484545a684 100644 --- a/poky/bitbake/lib/bb/ui/knotty.py +++ b/poky/bitbake/lib/bb/ui/knotty.py @@ -276,11 +276,11 @@ class TerminalFilter(object): print(content) else: if self.quiet: - content = "Running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) + content = "Running tasks (%s of %s/%s of %s)" % (self.helper.setscene_current, self.helper.setscene_total, self.helper.tasknumber_current, self.helper.tasknumber_total) elif not len(activetasks): - content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) + content = "No currently running tasks (%s of %s/%s of %s)" % (self.helper.setscene_current, 
self.helper.setscene_total, self.helper.tasknumber_current, self.helper.tasknumber_total) else: - content = "Currently %2s running tasks (%s of %s)" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total) + content = "Currently %2s running tasks (%s of %s/%s of %s)" % (len(activetasks), self.helper.setscene_current, self.helper.setscene_total, self.helper.tasknumber_current, self.helper.tasknumber_total) maxtask = self.helper.tasknumber_total if not self.main_progress or self.main_progress.maxval != maxtask: widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()] diff --git a/poky/bitbake/lib/bb/ui/uihelper.py b/poky/bitbake/lib/bb/ui/uihelper.py index 52fdae3fec..82913e0da8 100644 --- a/poky/bitbake/lib/bb/ui/uihelper.py +++ b/poky/bitbake/lib/bb/ui/uihelper.py @@ -50,8 +50,10 @@ class BBUIHelper: removetid(event.pid, tid) self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)}) elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted): - self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + event.stats.setscene_active + 1 + self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed self.tasknumber_total = event.stats.total + self.setscene_current = event.stats.setscene_active + event.stats.setscene_covered + event.stats.setscene_notcovered + self.setscene_total = event.stats.setscene_total self.needUpdate = True elif isinstance(event, bb.build.TaskProgress): if event.pid > 0 and event.pid in self.pidmap: diff --git a/poky/bitbake/lib/codegen.py b/poky/bitbake/lib/codegen.py index 62a6748c47..6955a7ada5 100644 --- a/poky/bitbake/lib/codegen.py +++ b/poky/bitbake/lib/codegen.py @@ -401,6 +401,12 @@ class SourceGenerator(NodeVisitor): def visit_Num(self, node): self.write(repr(node.n)) + def visit_Constant(self, node): + # Python 3.8 deprecated visit_Num(), visit_Str(), visit_Bytes(), + # visit_NameConstant() and visit_Ellipsis(). They can be removed once we + # require 3.8+. 
+ self.write(repr(node.value)) + def visit_Tuple(self, node): self.write('(') idx = -1 diff --git a/poky/bitbake/lib/hashserv/__init__.py b/poky/bitbake/lib/hashserv/__init__.py index 5f2e101e52..9cb3fd57a5 100644 --- a/poky/bitbake/lib/hashserv/__init__.py +++ b/poky/bitbake/lib/hashserv/__init__.py @@ -22,46 +22,68 @@ ADDR_TYPE_TCP = 1 # is necessary DEFAULT_MAX_CHUNK = 32 * 1024 -TABLE_DEFINITION = ( - ("method", "TEXT NOT NULL"), - ("outhash", "TEXT NOT NULL"), - ("taskhash", "TEXT NOT NULL"), - ("unihash", "TEXT NOT NULL"), - ("created", "DATETIME"), +UNIHASH_TABLE_DEFINITION = ( + ("method", "TEXT NOT NULL", "UNIQUE"), + ("taskhash", "TEXT NOT NULL", "UNIQUE"), + ("unihash", "TEXT NOT NULL", ""), +) + +UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION) + +OUTHASH_TABLE_DEFINITION = ( + ("method", "TEXT NOT NULL", "UNIQUE"), + ("taskhash", "TEXT NOT NULL", "UNIQUE"), + ("outhash", "TEXT NOT NULL", "UNIQUE"), + ("created", "DATETIME", ""), # Optional fields - ("owner", "TEXT"), - ("PN", "TEXT"), - ("PV", "TEXT"), - ("PR", "TEXT"), - ("task", "TEXT"), - ("outhash_siginfo", "TEXT"), + ("owner", "TEXT", ""), + ("PN", "TEXT", ""), + ("PV", "TEXT", ""), + ("PR", "TEXT", ""), + ("task", "TEXT", ""), + ("outhash_siginfo", "TEXT", ""), ) -TABLE_COLUMNS = tuple(name for name, _ in TABLE_DEFINITION) +OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION) + +def _make_table(cursor, name, definition): + cursor.execute(''' + CREATE TABLE IF NOT EXISTS {name} ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + {fields} + UNIQUE({unique}) + ) + '''.format( + name=name, + fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition), + unique=", ".join(name for name, _, flags in definition if "UNIQUE" in flags) + )) + def setup_database(database, sync=True): db = sqlite3.connect(database) db.row_factory = sqlite3.Row with closing(db.cursor()) as cursor: - cursor.execute(''' - CREATE TABLE IF NOT EXISTS tasks_v2 ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - %s - UNIQUE(method, outhash, taskhash) - ) - ''' % " ".join("%s %s," % (name, typ) for name, typ in TABLE_DEFINITION)) + _make_table(cursor, "unihashes_v2", UNIHASH_TABLE_DEFINITION) + _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION) + cursor.execute('PRAGMA journal_mode = WAL') cursor.execute('PRAGMA synchronous = %s' % ('NORMAL' if sync else 'OFF')) # Drop old indexes cursor.execute('DROP INDEX IF EXISTS taskhash_lookup') cursor.execute('DROP INDEX IF EXISTS outhash_lookup') + cursor.execute('DROP INDEX IF EXISTS taskhash_lookup_v2') + cursor.execute('DROP INDEX IF EXISTS outhash_lookup_v2') + + # TODO: Upgrade from tasks_v2? 
+ cursor.execute('DROP TABLE IF EXISTS tasks_v2') # Create new indexes - cursor.execute('CREATE INDEX IF NOT EXISTS taskhash_lookup_v2 ON tasks_v2 (method, taskhash, created)') - cursor.execute('CREATE INDEX IF NOT EXISTS outhash_lookup_v2 ON tasks_v2 (method, outhash)') + cursor.execute('CREATE INDEX IF NOT EXISTS taskhash_lookup_v3 ON unihashes_v2 (method, taskhash)') + cursor.execute('CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)') return db diff --git a/poky/bitbake/lib/hashserv/client.py b/poky/bitbake/lib/hashserv/client.py index 1a67c6982d..b2aa1026ac 100644 --- a/poky/bitbake/lib/hashserv/client.py +++ b/poky/bitbake/lib/hashserv/client.py @@ -107,11 +107,11 @@ class Client(bb.asyncrpc.Client): super().__init__() self._add_methods( "connect_tcp", - "close", "get_unihash", "report_unihash", "report_unihash_equiv", "get_taskhash", + "get_outhash", "get_stats", "reset_stats", "backfill_wait", diff --git a/poky/bitbake/lib/hashserv/server.py b/poky/bitbake/lib/hashserv/server.py index a059e52115..d40a2ab8f8 100644 --- a/poky/bitbake/lib/hashserv/server.py +++ b/poky/bitbake/lib/hashserv/server.py @@ -5,11 +5,12 @@ from contextlib import closing, contextmanager from datetime import datetime +import enum import asyncio import logging import math import time -from . import create_async_client, TABLE_COLUMNS +from . import create_async_client, UNIHASH_TABLE_COLUMNS, OUTHASH_TABLE_COLUMNS import bb.asyncrpc @@ -106,56 +107,64 @@ class Stats(object): return {k: getattr(self, k) for k in ('num', 'total_time', 'max_time', 'average', 'stdev')} -def insert_task(cursor, data, ignore=False): +@enum.unique +class Resolve(enum.Enum): + FAIL = enum.auto() + IGNORE = enum.auto() + REPLACE = enum.auto() + + +def insert_table(cursor, table, data, on_conflict): + resolve = { + Resolve.FAIL: "", + Resolve.IGNORE: " OR IGNORE", + Resolve.REPLACE: " OR REPLACE", + }[on_conflict] + keys = sorted(data.keys()) - query = '''INSERT%s INTO tasks_v2 (%s) VALUES (%s)''' % ( - " OR IGNORE" if ignore else "", - ', '.join(keys), - ', '.join(':' + k for k in keys)) + query = 'INSERT{resolve} INTO {table} ({fields}) VALUES({values})'.format( + resolve=resolve, + table=table, + fields=", ".join(keys), + values=", ".join(":" + k for k in keys), + ) + prevrowid = cursor.lastrowid cursor.execute(query, data) - -async def copy_from_upstream(client, db, method, taskhash): - d = await client.get_taskhash(method, taskhash, True) + logging.debug( + "Inserting %r into %s, %s", + data, + table, + on_conflict + ) + return (cursor.lastrowid, cursor.lastrowid != prevrowid) + +def insert_unihash(cursor, data, on_conflict): + return insert_table(cursor, "unihashes_v2", data, on_conflict) + +def insert_outhash(cursor, data, on_conflict): + return insert_table(cursor, "outhashes_v2", data, on_conflict) + +async def copy_unihash_from_upstream(client, db, method, taskhash): + d = await client.get_taskhash(method, taskhash) if d is not None: - # Filter out unknown columns - d = {k: v for k, v in d.items() if k in TABLE_COLUMNS} - with closing(db.cursor()) as cursor: - insert_task(cursor, d) + insert_unihash( + cursor, + {k: v for k, v in d.items() if k in UNIHASH_TABLE_COLUMNS}, + Resolve.IGNORE, + ) db.commit() - return d -async def copy_outhash_from_upstream(client, db, method, outhash, taskhash): - d = await client.get_outhash(method, outhash, taskhash) - if d is not None: - # Filter out unknown columns - d = {k: v for k, v in d.items() if k in TABLE_COLUMNS} - with closing(db.cursor()) as cursor: - 
insert_task(cursor, d) - db.commit() +class ServerCursor(object): + def __init__(self, db, cursor, upstream): + self.db = db + self.cursor = cursor + self.upstream = upstream - return d class ServerClient(bb.asyncrpc.AsyncServerConnection): - FAST_QUERY = 'SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1' - ALL_QUERY = 'SELECT * FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1' - OUTHASH_QUERY = ''' - -- Find tasks with a matching outhash (that is, tasks that - -- are equivalent) - SELECT * FROM tasks_v2 WHERE method=:method AND outhash=:outhash - - -- If there is an exact match on the taskhash, return it. - -- Otherwise return the oldest matching outhash of any - -- taskhash - ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END, - created ASC - - -- Only return one row - LIMIT 1 - ''' - def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only): super().__init__(reader, writer, 'OEHASHEQUIV', logger) self.db = db @@ -210,36 +219,102 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection): async def handle_get(self, request): method = request['method'] taskhash = request['taskhash'] + fetch_all = request.get('all', False) - if request.get('all', False): - row = self.query_equivalent(method, taskhash, self.ALL_QUERY) - else: - row = self.query_equivalent(method, taskhash, self.FAST_QUERY) + with closing(self.db.cursor()) as cursor: + d = await self.get_unihash(cursor, method, taskhash, fetch_all) - if row is not None: - logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash'])) - d = {k: row[k] for k in row.keys()} - elif self.upstream_client is not None: - d = await copy_from_upstream(self.upstream_client, self.db, method, taskhash) + self.write_message(d) + + async def get_unihash(self, cursor, method, taskhash, fetch_all=False): + d = None + + if fetch_all: + cursor.execute( + ''' + SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2 + INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash + WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash + ORDER BY outhashes_v2.created ASC + LIMIT 1 + ''', + { + 'method': method, + 'taskhash': taskhash, + } + + ) + row = cursor.fetchone() + + if row is not None: + d = {k: row[k] for k in row.keys()} + elif self.upstream_client is not None: + d = await self.upstream_client.get_taskhash(method, taskhash, True) + self.update_unified(cursor, d) + self.db.commit() else: - d = None + row = self.query_equivalent(cursor, method, taskhash) + + if row is not None: + d = {k: row[k] for k in row.keys()} + elif self.upstream_client is not None: + d = await self.upstream_client.get_taskhash(method, taskhash) + d = {k: v for k, v in d.items() if k in UNIHASH_TABLE_COLUMNS} + insert_unihash(cursor, d, Resolve.IGNORE) + self.db.commit() - self.write_message(d) + return d async def handle_get_outhash(self, request): + method = request['method'] + outhash = request['outhash'] + taskhash = request['taskhash'] + with closing(self.db.cursor()) as cursor: - cursor.execute(self.OUTHASH_QUERY, - {k: request[k] for k in ('method', 'outhash', 'taskhash')}) + d = await self.get_outhash(cursor, method, outhash, taskhash) - row = cursor.fetchone() + self.write_message(d) + + async def get_outhash(self, cursor, method, outhash, taskhash): + d = None + cursor.execute( + ''' + SELECT *, unihashes_v2.unihash AS unihash FROM 
outhashes_v2 + INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash + WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash + ORDER BY outhashes_v2.created ASC + LIMIT 1 + ''', + { + 'method': method, + 'outhash': outhash, + } + ) + row = cursor.fetchone() if row is not None: - logger.debug('Found equivalent outhash %s -> %s', (row['outhash'], row['unihash'])) d = {k: row[k] for k in row.keys()} - else: - d = None + elif self.upstream_client is not None: + d = await self.upstream_client.get_outhash(method, outhash, taskhash) + self.update_unified(cursor, d) + self.db.commit() - self.write_message(d) + return d + + def update_unified(self, cursor, data): + if data is None: + return + + insert_unihash( + cursor, + {k: v for k, v in data.items() if k in UNIHASH_TABLE_COLUMNS}, + Resolve.IGNORE + ) + insert_outhash( + cursor, + {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS}, + Resolve.IGNORE + ) async def handle_get_stream(self, request): self.write_message('ok') @@ -267,7 +342,12 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection): (method, taskhash) = l.split() #logger.debug('Looking up %s %s' % (method, taskhash)) - row = self.query_equivalent(method, taskhash, self.FAST_QUERY) + cursor = self.db.cursor() + try: + row = self.query_equivalent(cursor, method, taskhash) + finally: + cursor.close() + if row is not None: msg = ('%s\n' % row['unihash']).encode('utf-8') #logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash'])) @@ -294,55 +374,82 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection): async def handle_report(self, data): with closing(self.db.cursor()) as cursor: - cursor.execute(self.OUTHASH_QUERY, - {k: data[k] for k in ('method', 'outhash', 'taskhash')}) + outhash_data = { + 'method': data['method'], + 'outhash': data['outhash'], + 'taskhash': data['taskhash'], + 'created': datetime.now() + } - row = cursor.fetchone() + for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'): + if k in data: + outhash_data[k] = data[k] + + # Insert the new entry, unless it already exists + (rowid, inserted) = insert_outhash(cursor, outhash_data, Resolve.IGNORE) + + if inserted: + # If this row is new, check if it is equivalent to another + # output hash + cursor.execute( + ''' + SELECT outhashes_v2.taskhash AS taskhash, unihashes_v2.unihash AS unihash FROM outhashes_v2 + INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash + -- Select any matching output hash except the one we just inserted + WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash + -- Pick the oldest hash + ORDER BY outhashes_v2.created ASC + LIMIT 1 + ''', + { + 'method': data['method'], + 'outhash': data['outhash'], + 'taskhash': data['taskhash'], + } + ) + row = cursor.fetchone() - if row is None and self.upstream_client: - # Try upstream - row = await copy_outhash_from_upstream(self.upstream_client, - self.db, - data['method'], - data['outhash'], - data['taskhash']) - - # If no matching outhash was found, or one *was* found but it - # wasn't an exact match on the taskhash, a new entry for this - # taskhash should be added - if row is None or row['taskhash'] != data['taskhash']: - # If a row matching the outhash was found, the unihash for - # the new taskhash should be the same as that one. - # Otherwise the caller provided unihash is used. 
- unihash = data['unihash'] if row is not None: + # A matching output hash was found. Set our taskhash to the + # same unihash since they are equivalent unihash = row['unihash'] + resolve = Resolve.IGNORE + else: + # No matching output hash was found. This is probably the + # first outhash to be added. + unihash = data['unihash'] + resolve = Resolve.IGNORE + + # Query upstream to see if it has a unihash we can use + if self.upstream_client is not None: + upstream_data = await self.upstream_client.get_outhash(data['method'], data['outhash'], data['taskhash']) + if upstream_data is not None: + unihash = upstream_data['unihash'] + + + insert_unihash( + cursor, + { + 'method': data['method'], + 'taskhash': data['taskhash'], + 'unihash': unihash, + }, + resolve + ) + + unihash_data = await self.get_unihash(cursor, data['method'], data['taskhash']) + if unihash_data is not None: + unihash = unihash_data['unihash'] + else: + unihash = data['unihash'] - insert_data = { - 'method': data['method'], - 'outhash': data['outhash'], - 'taskhash': data['taskhash'], - 'unihash': unihash, - 'created': datetime.now() - } - - for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'): - if k in data: - insert_data[k] = data[k] - - insert_task(cursor, insert_data) - self.db.commit() - - logger.info('Adding taskhash %s with unihash %s', - data['taskhash'], unihash) + self.db.commit() - d = { - 'taskhash': data['taskhash'], - 'method': data['method'], - 'unihash': unihash - } - else: - d = {k: row[k] for k in ('taskhash', 'method', 'unihash')} + d = { + 'taskhash': data['taskhash'], + 'method': data['method'], + 'unihash': unihash, + } self.write_message(d) @@ -350,23 +457,16 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection): with closing(self.db.cursor()) as cursor: insert_data = { 'method': data['method'], - 'outhash': "", 'taskhash': data['taskhash'], 'unihash': data['unihash'], - 'created': datetime.now() } - - for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'): - if k in data: - insert_data[k] = data[k] - - insert_task(cursor, insert_data, ignore=True) + insert_unihash(cursor, insert_data, Resolve.IGNORE) self.db.commit() # Fetch the unihash that will be reported for the taskhash. 
If the # unihash matches, it means this row was inserted (or the mapping # was already valid) - row = self.query_equivalent(data['method'], data['taskhash'], self.FAST_QUERY) + row = self.query_equivalent(cursor, data['method'], data['taskhash']) if row['unihash'] == data['unihash']: logger.info('Adding taskhash equivalence for %s with unihash %s', @@ -399,14 +499,16 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection): await self.backfill_queue.join() self.write_message(d) - def query_equivalent(self, method, taskhash, query): + def query_equivalent(self, cursor, method, taskhash): # This is part of the inner loop and must be as fast as possible - try: - cursor = self.db.cursor() - cursor.execute(query, {'method': method, 'taskhash': taskhash}) - return cursor.fetchone() - except: - cursor.close() + cursor.execute( + 'SELECT taskhash, method, unihash FROM unihashes_v2 WHERE method=:method AND taskhash=:taskhash', + { + 'method': method, + 'taskhash': taskhash, + } + ) + return cursor.fetchone() class Server(bb.asyncrpc.AsyncServer): @@ -435,7 +537,7 @@ class Server(bb.asyncrpc.AsyncServer): self.backfill_queue.task_done() break method, taskhash = item - await copy_from_upstream(client, self.db, method, taskhash) + await copy_unihash_from_upstream(client, self.db, method, taskhash) self.backfill_queue.task_done() finally: await client.close() diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py index e851535c59..f6b85aed85 100644 --- a/poky/bitbake/lib/hashserv/tests.py +++ b/poky/bitbake/lib/hashserv/tests.py @@ -19,10 +19,10 @@ import time import signal def server_prefunc(server, idx): - logging.basicConfig(level=logging.DEBUG, filename='bbhashserv.log', filemode='w', + logging.basicConfig(level=logging.DEBUG, filename='bbhashserv-%d.log' % idx, filemode='w', format='%(levelname)s %(filename)s:%(lineno)d %(message)s') server.logger.debug("Running server %d" % idx) - sys.stdout = open('bbhashserv-%d.log' % idx, 'w') + sys.stdout = open('bbhashserv-stdout-%d.log' % idx, 'w') sys.stderr = sys.stdout class HashEquivalenceTestSetup(object): @@ -140,12 +140,17 @@ class HashEquivalenceCommonTests(object): }) self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash') - result = self.client.get_taskhash(self.METHOD, taskhash, True) - self.assertEqual(result['taskhash'], taskhash) - self.assertEqual(result['unihash'], unihash) - self.assertEqual(result['method'], self.METHOD) - self.assertEqual(result['outhash'], outhash) - self.assertEqual(result['outhash_siginfo'], siginfo) + result_unihash = self.client.get_taskhash(self.METHOD, taskhash, True) + self.assertEqual(result_unihash['taskhash'], taskhash) + self.assertEqual(result_unihash['unihash'], unihash) + self.assertEqual(result_unihash['method'], self.METHOD) + + result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash) + self.assertEqual(result_outhash['taskhash'], taskhash) + self.assertEqual(result_outhash['method'], self.METHOD) + self.assertEqual(result_outhash['unihash'], unihash) + self.assertEqual(result_outhash['outhash'], outhash) + self.assertEqual(result_outhash['outhash_siginfo'], siginfo) def test_stress(self): def query_server(failures): @@ -260,6 +265,39 @@ class HashEquivalenceCommonTests(object): result = down_client.report_unihash(taskhash6, self.METHOD, outhash5, unihash6) self.assertEqual(result['unihash'], unihash5, 'Server failed to copy unihash from upstream') + # Tests read through from server with + taskhash7 = 
'9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74' + outhash7 = '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69' + unihash7 = '05d2a63c81e32f0a36542ca677e8ad852365c538' + self.client.report_unihash(taskhash7, self.METHOD, outhash7, unihash7) + + result = down_client.get_taskhash(self.METHOD, taskhash7, True) + self.assertEqual(result['unihash'], unihash7, 'Server failed to copy unihash from upstream') + self.assertEqual(result['outhash'], outhash7, 'Server failed to copy unihash from upstream') + self.assertEqual(result['taskhash'], taskhash7, 'Server failed to copy unihash from upstream') + self.assertEqual(result['method'], self.METHOD) + + taskhash8 = '86978a4c8c71b9b487330b0152aade10c1ee58aa' + outhash8 = 'ca8c128e9d9e4a28ef24d0508aa20b5cf880604eacd8f65c0e366f7e0cc5fbcf' + unihash8 = 'd8bcf25369d40590ad7d08c84d538982f2023e01' + self.client.report_unihash(taskhash8, self.METHOD, outhash8, unihash8) + + result = down_client.get_outhash(self.METHOD, outhash8, taskhash8) + self.assertEqual(result['unihash'], unihash8, 'Server failed to copy unihash from upstream') + self.assertEqual(result['outhash'], outhash8, 'Server failed to copy unihash from upstream') + self.assertEqual(result['taskhash'], taskhash8, 'Server failed to copy unihash from upstream') + self.assertEqual(result['method'], self.METHOD) + + taskhash9 = 'ae6339531895ddf5b67e663e6a374ad8ec71d81c' + outhash9 = 'afc78172c81880ae10a1fec994b5b4ee33d196a001a1b66212a15ebe573e00b5' + unihash9 = '6662e699d6e3d894b24408ff9a4031ef9b038ee8' + self.client.report_unihash(taskhash9, self.METHOD, outhash9, unihash9) + + result = down_client.get_taskhash(self.METHOD, taskhash9, False) + self.assertEqual(result['unihash'], unihash9, 'Server failed to copy unihash from upstream') + self.assertEqual(result['taskhash'], taskhash9, 'Server failed to copy unihash from upstream') + self.assertEqual(result['method'], self.METHOD) + def test_ro_server(self): (ro_client, ro_server) = self.start_server(dbpath=self.server.dbpath, read_only=True) @@ -287,10 +325,8 @@ class HashEquivalenceCommonTests(object): def test_slow_server_start(self): - """ - Ensures that the server will exit correctly even if it gets a SIGTERM - before entering the main loop - """ + # Ensures that the server will exit correctly even if it gets a SIGTERM + # before entering the main loop event = multiprocessing.Event() @@ -312,6 +348,58 @@ class HashEquivalenceCommonTests(object): server.process.join(300) self.assertIsNotNone(server.process.exitcode, "Server did not exit in a timely manner!") + def test_diverging_report_race(self): + # Tests that a reported task will correctly pick up an updated unihash + + # This is a baseline report added to the database to ensure that there + # is something to match against as equivalent + outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa' + taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab' + unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab' + result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1) + + # Add a report that is equivalent to Task 1. It should ignore the + # provided unihash and report the unihash from task 1 + taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273' + unihash2 = taskhash2 + result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2) + self.assertEqual(result['unihash'], unihash1) + + # Add another report for Task 2, but with a different outhash (e.g. 
the + # task is non-deterministic). It should still be marked with the Task 1 + # unihash because it has the Task 2 taskhash, which is equivalent to + # Task 1 + outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c' + result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2) + self.assertEqual(result['unihash'], unihash1) + + + def test_diverging_report_reverse_race(self): + # Same idea as the previous test, but Tasks 2 and 3 are reported in + # reverse order the opposite order + + outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa' + taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab' + unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab' + result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1) + + taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273' + unihash2 = taskhash2 + + # Report Task 3 first. Since there is nothing else in the database it + # will use the client provided unihash + outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c' + result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2) + self.assertEqual(result['unihash'], unihash2) + + # Report Task 2. This is equivalent to Task 1 but there is already a mapping for + # taskhash2 so it will report unihash2 + result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2) + self.assertEqual(result['unihash'], unihash2) + + # The originally reported unihash for Task 3 should be unchanged even if it + # shares a taskhash with Task 2 + self.assertClientGetHash(self.client, taskhash2, unihash2) class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase): def get_server_addr(self, server_idx): diff --git a/poky/bitbake/lib/layerindexlib/__init__.py b/poky/bitbake/lib/layerindexlib/__init__.py index 3159bf2f66..08063c571e 100644 --- a/poky/bitbake/lib/layerindexlib/__init__.py +++ b/poky/bitbake/lib/layerindexlib/__init__.py @@ -198,7 +198,7 @@ The format of the indexURI: For example: - http://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index + https://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index cooker:// ''' if reload: @@ -576,7 +576,7 @@ This function is used to implement debugging and provide the user info. # index['config'] - configuration data for this index # index['branches'] - dictionary of Branch objects, by id number # index['layerItems'] - dictionary of layerItem objects, by id number -# ...etc... (See: http://layers.openembedded.org/layerindex/api/) +# ...etc... (See: https://layers.openembedded.org/layerindex/api/) # # The class needs to manage the 'index' entries and allow easily adding # of new items, as well as simply loading of the items. diff --git a/poky/bitbake/lib/layerindexlib/restapi.py b/poky/bitbake/lib/layerindexlib/restapi.py index 26a1c9674e..81d99b02ea 100644 --- a/poky/bitbake/lib/layerindexlib/restapi.py +++ b/poky/bitbake/lib/layerindexlib/restapi.py @@ -31,7 +31,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): The return value is a LayerIndexObj. url is the url to the rest api of the layer index, such as: - http://layers.openembedded.org/layerindex/api/ + https://layers.openembedded.org/layerindex/api/ Or a local file... 
""" @@ -138,7 +138,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): The return value is a LayerIndexObj. ud is the parsed url to the rest api of the layer index, such as: - http://layers.openembedded.org/layerindex/api/ + https://layers.openembedded.org/layerindex/api/ """ def _get_json_response(apiurl=None, username=None, password=None, retry=True): diff --git a/poky/bitbake/lib/layerindexlib/tests/restapi.py b/poky/bitbake/lib/layerindexlib/tests/restapi.py index 33b5c1c4c8..71f0ae8a9d 100644 --- a/poky/bitbake/lib/layerindexlib/tests/restapi.py +++ b/poky/bitbake/lib/layerindexlib/tests/restapi.py @@ -22,7 +22,7 @@ class LayerIndexWebRestApiTest(LayersTest): self.assertFalse(os.environ.get("BB_SKIP_NETTESTS") == "yes", msg="BB_SKIP_NETTESTS set, but we tried to test anyway") LayersTest.setUp(self) self.layerindex = layerindexlib.LayerIndex(self.d) - self.layerindex.load_layerindex('http://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies']) + self.layerindex.load_layerindex('https://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies']) @skipIfNoNetwork() def test_layerindex_is_empty(self): diff --git a/poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml b/poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml index 026d94869a..b01a337012 100644 --- a/poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml +++ b/poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml @@ -34,7 +34,7 @@ <field type="CharField" name="description">Openembedded Dunfell</field> <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> <field type="CharField" name="branch_name">dunfell</field> - <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=dunfell\">OpenEmbedded Dunfell</a> branch.</field> + <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=dunfell\">OpenEmbedded Dunfell</a> branch.</field> </object> <object model="orm.release" pk="2"> <field type="CharField" name="name">local</field> @@ -48,14 +48,14 @@ <field type="CharField" name="description">OpenEmbedded core master</field> <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field> <field type="CharField" name="branch_name">master</field> - <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/\">OpenEmbedded master</a> branch.</field> + <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/\">OpenEmbedded master</a> branch.</field> </object> <object model="orm.release" pk="4"> <field type="CharField" name="name">gatesgarth</field> <field type="CharField" name="description">Openembedded Gatesgarth</field> <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> <field type="CharField" name="branch_name">gatesgarth</field> - <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=gatesgarth\">OpenEmbedded Gatesgarth</a> branch.</field> + <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=gatesgarth\">OpenEmbedded Gatesgarth</a> 
branch.</field> </object> <!-- Default layers for each release --> @@ -81,9 +81,9 @@ <object model="orm.layer" pk="1"> <field type="CharField" name="name">openembedded-core</field> <field type="CharField" name="vcs_url">git://git.openembedded.org/openembedded-core</field> - <field type="CharField" name="vcs_web_url">http://cgit.openembedded.org/openembedded-core</field> - <field type="CharField" name="vcs_web_tree_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> - <field type="CharField" name="vcs_web_file_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> + <field type="CharField" name="vcs_web_url">https://cgit.openembedded.org/openembedded-core</field> + <field type="CharField" name="vcs_web_tree_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> + <field type="CharField" name="vcs_web_file_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field> </object> <object model="orm.layer_version" pk="1"> <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> diff --git a/poky/bitbake/lib/toaster/orm/fixtures/poky.xml b/poky/bitbake/lib/toaster/orm/fixtures/poky.xml index a468a54c49..363789d624 100644 --- a/poky/bitbake/lib/toaster/orm/fixtures/poky.xml +++ b/poky/bitbake/lib/toaster/orm/fixtures/poky.xml @@ -39,7 +39,7 @@ <field type="CharField" name="description">Yocto Project 3.1 "Dunfell"</field> <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field> <field type="CharField" name="branch_name">dunfell</field> - <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell">Yocto Project Dunfell branch</a>.</field> + <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell">Yocto Project Dunfell branch</a>.</field> </object> <object model="orm.release" pk="2"> <field type="CharField" name="name">local</field> @@ -53,14 +53,14 @@ <field type="CharField" name="description">Yocto Project master</field> <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field> <field type="CharField" name="branch_name">master</field> - <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/">Yocto Project Master branch</a>.</field> + <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/">Yocto Project Master branch</a>.</field> </object> <object model="orm.release" pk="4"> <field type="CharField" name="name">gatesgarth</field> <field type="CharField" name="description">Yocto Project 3.2 "Gatesgarth"</field> <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field> <field type="CharField" name="branch_name">gatesgarth</field> - <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=gatesgarth">Yocto Project Gatesgarth branch</a>.</field> + <field type="TextField" name="helptext">Toaster will run your builds using the tip of the <a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=gatesgarth">Yocto Project Gatesgarth branch</a>.</field> </object> <!-- Default project layers for each release 
--> @@ -122,9 +122,9 @@ <field type="CharField" name="name">openembedded-core</field> <field type="CharField" name="layer_index_url"></field> <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> - <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field> - <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> - <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> + <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field> + <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> + <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> </object> <object model="orm.layer_version" pk="1"> <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field> @@ -160,9 +160,9 @@ <field type="CharField" name="name">meta-poky</field> <field type="CharField" name="layer_index_url"></field> <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> - <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field> - <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> - <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> + <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field> + <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> + <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> </object> <object model="orm.layer_version" pk="5"> <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field> @@ -198,9 +198,9 @@ <field type="CharField" name="name">meta-yocto-bsp</field> <field type="CharField" name="layer_index_url"></field> <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field> - <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field> - <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> - <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> + <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field> + <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> + <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field> </object> <object model="orm.layer_version" pk="9"> <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field> diff --git a/poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py b/poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py index 2fbd7be3d2..3b950e6e15 100644 --- a/poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py +++ b/poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py @@ -21,7 +21,7 @@ import threading import time 
 logger = logging.getLogger("toaster")
-DEFAULT_LAYERINDEX_SERVER = "http://layers.openembedded.org/layerindex/api/"
+DEFAULT_LAYERINDEX_SERVER = "https://layers.openembedded.org/layerindex/api/"
 # Add path to bitbake modules for layerindexlib
 # lib/toaster/orm/management/commands/lsupdates.py (abspath)
diff --git a/poky/bitbake/lib/toaster/tests/builds/buildtest.py b/poky/bitbake/lib/toaster/tests/builds/buildtest.py
index 872bbd3775..13b51fb0d8 100644
--- a/poky/bitbake/lib/toaster/tests/builds/buildtest.py
+++ b/poky/bitbake/lib/toaster/tests/builds/buildtest.py
@@ -119,7 +119,7 @@ class BuildTest(unittest.TestCase):
 if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
 ProjectVariable.objects.get_or_create(
 name="SSTATE_MIRRORS",
- value="file://.* http://autobuilder.yoctoproject.org/pub/sstate/PATH;downloadfilename=PATH",
+ value="file://.* http://sstate.yoctoproject.org/PATH;downloadfilename=PATH",
 project=project)
 ProjectTarget.objects.create(project=project,
diff --git a/poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html b/poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html
index 1e26e31c8b..923ca3bfe4 100644
--- a/poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html
+++ b/poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html
@@ -355,7 +355,7 @@
 {% if layerversion.layer_source == layer_source.TYPE_LAYERINDEX %}
 <dt>Layer index</dt>
 <dd>
- <a href="http://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a>
+ <a href="https://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a>
 </dd>
 {% endif %}
 </dl>
diff --git a/poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html b/poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
index 66f8e7f069..a4fcd2aa42 100644
--- a/poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
+++ b/poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
@@ -127,7 +127,7 @@
 {% comment %}
 # Removed per team meeting of 1/29/2014 until
 # decision on index search algorithm
- <a href="http://layers.openembedded.org" target="_blank">
+ <a href="https://layers.openembedded.org" target="_blank">
 <i class="glyphicon glyphicon-share get-info"></i>
 </a>
 {% endcomment %}
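
Editor's note: the diverging-report tests in lib/hashserv/tests.py above exercise the hash equivalence client API (report_unihash / get_unihash). The following is a minimal sketch, not part of the patch, of that report-then-query flow. The server address, method name, and hash strings are made-up placeholders; the calls themselves mirror the ones used in the tests, assuming a hash equivalence server is already running and bitbake's lib directory is on sys.path.

import hashserv

METHOD = 'TestMethod'                                   # placeholder, mirrors self.METHOD in the tests
client = hashserv.create_client('unix:///tmp/hashserv.sock')  # assumed server address

# Two tasks with different taskhashes but identical output hashes.
taskhash_a = 'aaaa'                                     # dummy values for illustration only
taskhash_b = 'bbbb'
outhash = 'cccc'

# The first report establishes the unihash for this output.
result = client.report_unihash(taskhash_a, METHOD, outhash, taskhash_a)
print(result['unihash'])                                # -> taskhash_a

# A later, equivalent report is unified to the earlier unihash,
# regardless of the unihash the client proposes.
result = client.report_unihash(taskhash_b, METHOD, outhash, taskhash_b)
print(result['unihash'])                                # expected to match taskhash_a

# Subsequent lookups by taskhash should return the unified hash.
print(client.get_unihash(METHOD, taskhash_b))

The race-condition tests above check the ordering subtleties of exactly this flow: whichever report arrives first fixes the unihash, and an already-recorded taskhash keeps its originally reported unihash even when an equivalent output is reported later.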
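Editor's note: several hunks above switch the layer index URLs to https (layerindexlib, Toaster's lsupdates and fixtures). A small sketch of loading the index over the new URL, following the call pattern from lib/layerindexlib/tests/restapi.py; 'd' must be a BitBake datastore (the tests get a fully prepared one from LayersTest, an empty one from bb.data.init() is only assumed to be sufficient here), and attribute-style access to the loaded index contents is an assumption based on the index['branches']/index['layerItems'] comment in layerindexlib/__init__.py shown above.

import bb.data
import layerindexlib

d = bb.data.init()                                      # assumption: empty datastore is enough for a read-only fetch
layerindex = layerindexlib.LayerIndex(d)
layerindex.load_layerindex(
    'https://layers.openembedded.org/layerindex/api/;branch=master',
    load=['layerDependencies'])

# Each loaded index carries dictionaries of branches and layerItems keyed by id.
for index in layerindex.indexes:
    print(len(index.layerItems), 'layers loaded from', index.config['DESCRIPTION'] if 'DESCRIPTION' in index.config else 'index')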