Diffstat (limited to 'poky/bitbake')
-rwxr-xr-x  poky/bitbake/bin/bitbake                                       |   2
-rw-r--r--  poky/bitbake/lib/bb/__init__.py                                |   2
-rw-r--r--  poky/bitbake/lib/bb/cooker.py                                  |   1
-rw-r--r--  poky/bitbake/lib/bb/cookerdata.py                              |   6
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py                              |  17
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py                                | 109
-rw-r--r--  poky/bitbake/lib/bb/siggen.py                                  |  20
-rw-r--r--  poky/bitbake/lib/bb/tests/data.py                              |  13
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py                             |  80
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue.py                          | 100
-rw-r--r--  poky/bitbake/lib/bb/ui/buildinfohelper.py                      |  10
-rw-r--r--  poky/bitbake/lib/hashserv/client.py                            |   1
-rw-r--r--  poky/bitbake/lib/hashserv/tests.py                             |   1
-rw-r--r--  poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js   |  12
14 files changed, 217 insertions(+), 157 deletions(-)
diff --git a/poky/bitbake/bin/bitbake b/poky/bitbake/bin/bitbake
index 4834a7158..778b1d69e 100755
--- a/poky/bitbake/bin/bitbake
+++ b/poky/bitbake/bin/bitbake
@@ -26,7 +26,7 @@ from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
if sys.getfilesystemencoding() != "utf-8":
sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
-__version__ = "1.43.1"
+__version__ = "1.43.2"
if __name__ == "__main__":
if __version__ != bb.__version__:
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index 322a1e093..f89969174 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "1.43.1"
+__version__ = "1.43.2"
import sys
if sys.version_info < (3, 4, 0):
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index 0c540028a..20ef04d3f 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -399,7 +399,6 @@ class BBCooker:
self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
- self.hashserv.process.daemon = True
self.hashserv.process.start()
self.data.setVar("BB_HASHSERVE", self.hashservaddr)
self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index 96a8e6bee..472423fdc 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -13,6 +13,7 @@ import logging
import os
import re
import sys
+import hashlib
from functools import wraps
import bb
from bb import data
@@ -267,6 +268,7 @@ class CookerDataBuilder(object):
self.mcdata = {}
def parseBaseConfiguration(self):
+ data_hash = hashlib.sha256()
try:
self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
@@ -290,7 +292,7 @@ class CookerDataBuilder(object):
bb.event.fire(bb.event.ConfigParsed(), self.data)
bb.parse.init_parser(self.data)
- self.data_hash = self.data.get_hash()
+ data_hash.update(self.data.get_hash().encode('utf-8'))
self.mcdata[''] = self.data
multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
@@ -298,9 +300,11 @@ class CookerDataBuilder(object):
mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
bb.event.fire(bb.event.ConfigParsed(), mcdata)
self.mcdata[config] = mcdata
+ data_hash.update(mcdata.get_hash().encode('utf-8'))
if multiconfig:
bb.event.fire(bb.event.MultiConfigParsed(self.mcdata), self.data)
+ self.data_hash = data_hash.hexdigest()
except (SyntaxError, bb.BBHandledException):
raise bb.BBHandledException
except bb.data_smart.ExpansionError as e:
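The cookerdata.py change above stops storing a single base-configuration hash and instead folds the base datastore hash plus every multiconfig datastore hash into one rolling SHA-256 digest, so editing any multiconfig configuration invalidates the parse cache. A minimal standalone sketch of that aggregation; the helper name and the sample hash strings are illustrative, not bitbake API:

    import hashlib

    def aggregate_config_hash(base_hash, mc_hashes):
        # Fold the base configuration hash and every multiconfig hash
        # into one hex digest, mirroring the rolling sha256 in the diff.
        digest = hashlib.sha256()
        digest.update(base_hash.encode('utf-8'))
        for mc_hash in mc_hashes:  # one entry per BBMULTICONFIG config
            digest.update(mc_hash.encode('utf-8'))
        return digest.hexdigest()

    # Any change to either input changes the combined value:
    print(aggregate_config_hash("abc123", ["def456", "789abc"]))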
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 5fd63b4e3..2d1d2cabd 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -495,14 +495,8 @@ class Git(FetchMethod):
runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
if self._contains_lfs(ud, d, destdir):
- if need_lfs:
- path = d.getVar('PATH')
- if path:
- gitlfstool = bb.utils.which(path, "git-lfs", executable=True)
- if not gitlfstool:
- raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
- else:
- bb.note("Could not find 'PATH'")
+ if need_lfs and not self._find_git_lfs(d):
+ raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
else:
bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
@@ -570,6 +564,13 @@ class Git(FetchMethod):
pass
return False
+ def _find_git_lfs(self, d):
+ """
+ Return True if git-lfs can be found, False otherwise.
+ """
+ import shutil
+ return shutil.which("git-lfs", path=d.getVar('PATH')) is not None
+
def _get_repo_url(self, ud):
"""
Return the repository URL
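The fetch2/git.py hunk above replaces a hand-rolled PATH walk (bb.utils.which plus error handling) with shutil.which() constrained to the datastore's PATH, wrapped in the new _find_git_lfs() helper. A small sketch of the same check outside bitbake; the function name and the example search path are assumptions for illustration:

    import shutil

    def have_git_lfs(path_value):
        # True if a git-lfs executable is found on the given
        # colon-separated search path, as _find_git_lfs() does above.
        return shutil.which("git-lfs", path=path_value) is not None

    # Example usage with an explicit (hypothetical) search path:
    if not have_git_lfs("/usr/bin:/usr/local/bin"):
        raise RuntimeError("Repository has LFS content; install git-lfs "
                           "on the host or set lfs=0 to ignore it")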
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index d9a67a316..18049436f 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -73,7 +73,7 @@ def build_tid(mc, fn, taskname):
def pending_hash_index(tid, rqdata):
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
pn = rqdata.dataCaches[mc].pkg_fn[taskfn]
- h = rqdata.runtaskentries[tid].hash
+ h = rqdata.runtaskentries[tid].unihash
return pn + ":" + "taskname" + h
class RunQueueStats:
@@ -207,6 +207,8 @@ class RunQueueScheduler(object):
def newbuildable(self, task):
self.buildable.add(task)
+ # Once tasks are running we don't need to worry about them again
+ self.buildable.difference_update(self.rq.runq_running)
def removebuildable(self, task):
self.buildable.remove(task)
@@ -1162,6 +1164,8 @@ class RunQueueData:
self.init_progress_reporter.next_stage()
+ bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
+
# Iterate over the task list and call into the siggen code
dealtwith = set()
todeal = set(self.runtaskentries)
@@ -1173,7 +1177,6 @@ class RunQueueData:
self.prepare_task_hash(tid)
bb.parse.siggen.writeout_file_checksum_cache()
- bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
#self.dump_data()
return len(self.runtaskentries)
@@ -1442,6 +1445,7 @@ class RunQueue:
self.state = runQueueComplete
else:
self.state = runQueueSceneInit
+ bb.parse.siggen.save_unitaskhashes()
if self.state is runQueueSceneInit:
self.rqdata.init_progress_reporter.next_stage()
@@ -2299,11 +2303,12 @@ class RunQueueExecute:
for tid in changed:
if tid not in self.rqdata.runq_setscene_tids:
continue
- valid = self.rq.validate_hashes(set([tid]), self.cooker.data, None, False)
- if not valid:
- continue
if tid in self.runq_running:
continue
+ if tid in self.scenequeue_covered:
+ # Potentially risky, should we report this hash as a match?
+ logger.info("Already covered setscene for %s so ignoring rehash" % (tid))
+ continue
if tid not in self.pending_migrations:
self.pending_migrations.add(tid)
@@ -2358,6 +2363,7 @@ class RunQueueExecute:
logger.info("Setscene task %s now valid and being rerun" % tid)
self.sqdone = False
+ update_scenequeue_data([tid], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self)
if changed:
self.holdoff_need_update = True
@@ -2674,64 +2680,77 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
rqdata.init_progress_reporter.next_stage()
- multiconfigs = set()
+ sqdata.multiconfigs = set()
for tid in sqdata.sq_revdeps:
- multiconfigs.add(mc_from_tid(tid))
+ sqdata.multiconfigs.add(mc_from_tid(tid))
if len(sqdata.sq_revdeps[tid]) == 0:
sqrq.sq_buildable.add(tid)
rqdata.init_progress_reporter.finish()
- if rq.hashvalidate:
- noexec = []
- stamppresent = []
- tocheck = set()
+ sqdata.noexec = set()
+ sqdata.stamppresent = set()
+ sqdata.valid = set()
- for tid in sorted(sqdata.sq_revdeps):
- (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq)
- taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
+def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq):
- if 'noexec' in taskdep and taskname in taskdep['noexec']:
- noexec.append(tid)
- sqrq.sq_task_skip(tid)
- bb.build.make_stamp(taskname + "_setscene", rqdata.dataCaches[mc], taskfn)
- continue
+ tocheck = set()
- if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
- logger.debug(2, 'Setscene stamp current for task %s', tid)
- stamppresent.append(tid)
- sqrq.sq_task_skip(tid)
- continue
+ for tid in sorted(tids):
+ if tid in sqdata.stamppresent:
+ sqdata.stamppresent.remove(tid)
+ if tid in sqdata.valid:
+ sqdata.valid.remove(tid)
- if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache):
- logger.debug(2, 'Normal stamp current for task %s', tid)
- stamppresent.append(tid)
- sqrq.sq_task_skip(tid)
- continue
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
- tocheck.add(tid)
+ taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
+
+ if 'noexec' in taskdep and taskname in taskdep['noexec']:
+ sqdata.noexec.add(tid)
+ sqrq.sq_task_skip(tid)
+ bb.build.make_stamp(taskname + "_setscene", rqdata.dataCaches[mc], taskfn)
+ continue
+
+ if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
+ logger.debug(2, 'Setscene stamp current for task %s', tid)
+ sqdata.stamppresent.add(tid)
+ sqrq.sq_task_skip(tid)
+ continue
+
+ if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache):
+ logger.debug(2, 'Normal stamp current for task %s', tid)
+ sqdata.stamppresent.add(tid)
+ sqrq.sq_task_skip(tid)
+ continue
- valid = rq.validate_hashes(tocheck, cooker.data, len(stamppresent), False)
+ tocheck.add(tid)
- valid_new = stamppresent
- for v in valid:
- valid_new.append(v)
+ sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False)
- hashes = {}
- for mc in sorted(multiconfigs):
- for tid in sorted(sqdata.sq_revdeps):
+ sqdata.hashes = {}
+ for mc in sorted(sqdata.multiconfigs):
+ for tid in sorted(sqdata.sq_revdeps):
if mc_from_tid(tid) != mc:
continue
- if tid not in valid_new and tid not in noexec and tid not in sqrq.scenequeue_notcovered:
- sqdata.outrightfail.add(tid)
+ if tid in sqdata.stamppresent:
+ continue
+ if tid in sqdata.valid:
+ continue
+ if tid in sqdata.noexec:
+ continue
+ if tid in sqrq.scenequeue_notcovered:
+ continue
+ sqdata.outrightfail.add(tid)
- h = pending_hash_index(tid, rqdata)
- if h not in hashes:
- hashes[h] = tid
- else:
- sqrq.sq_deferred[tid] = hashes[h]
- bb.warn("Deferring %s after %s" % (tid, hashes[h]))
+ h = pending_hash_index(tid, rqdata)
+ if h not in sqdata.hashes:
+ sqdata.hashes[h] = tid
+ else:
+ sqrq.sq_deferred[tid] = sqdata.hashes[h]
+ bb.warn("Deferring %s after %s" % (tid, sqdata.hashes[h]))
class TaskFailure(Exception):
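Two related ideas run through the runqueue.py changes above: pending_hash_index() now keys on the unihash rather than the taskhash, and update_scenequeue_data() defers any setscene task whose hash index matches one already scheduled, so equivalent tasks only run once. A toy sketch of that deferral grouping; the function and variable names are illustrative, not the real RunQueue API:

    def defer_duplicate_setscene(tids, hash_index):
        # Group setscene task ids by their hash index; the first tid with
        # a given index runs, later ones are deferred behind it (mirrors
        # the sqdata.hashes / sq_deferred handling in the diff).
        hashes = {}    # hash index -> first tid seen
        deferred = {}  # tid -> tid it is deferred after
        for tid in sorted(tids):
            h = hash_index(tid)
            if h not in hashes:
                hashes[h] = tid
            else:
                deferred[tid] = hashes[h]
        return deferred

    # Toy example: both tids share one index, so the second is deferred.
    print(defer_duplicate_setscene(
        ["a1:do_package", "b1:do_package"],
        lambda tid: "pn:do_package:1234"))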
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index e047c217e..a4bb1ff7f 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -44,6 +44,7 @@ class SignatureGenerator(object):
self.file_checksum_values = {}
self.taints = {}
self.unitaskhashes = {}
+ self.setscenetasks = {}
def finalise(self, fn, d, varient):
return
@@ -75,10 +76,10 @@ class SignatureGenerator(object):
return
def get_taskdata(self):
- return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes)
+ return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.setscenetasks)
def set_taskdata(self, data):
- self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes = data
+ self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.setscenetasks = data
def reset(self, data):
self.__init__(data)
@@ -267,7 +268,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
sigfile = stampbase
referencestamp = runtime[11:]
elif runtime and tid in self.taskhash:
- sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[tid]
+ sigfile = stampbase + "." + task + ".sigdata" + "." + self.get_unihash(tid)
else:
sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]
@@ -295,6 +296,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
for dep in data['runtaskdeps']:
data['runtaskhashes'][dep] = self.get_unihash(dep)
data['taskhash'] = self.taskhash[tid]
+ data['unihash'] = self.get_unihash(tid)
taint = self.read_taint(fn, task, referencestamp)
if taint:
@@ -384,7 +386,7 @@ class SignatureGeneratorUniHashMixIn(object):
def __get_task_unihash_key(self, tid):
# TODO: The key only *needs* to be the taskhash, the tid is just
# convenient
- return '%s:%s' % (tid, self.taskhash[tid])
+ return '%s:%s' % (tid.rsplit("/", 1)[1], self.taskhash[tid])
def get_stampfile_hash(self, tid):
if tid in self.taskhash:
@@ -440,7 +442,7 @@ class SignatureGeneratorUniHashMixIn(object):
bb.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
else:
bb.debug(2, 'No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
- except hashserv.HashConnectionError as e:
+ except hashserv.client.HashConnectionError as e:
bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
self.unitaskhashes[key] = unihash
@@ -454,7 +456,11 @@ class SignatureGeneratorUniHashMixIn(object):
report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
tempdir = d.getVar('T')
fn = d.getVar('BB_FILENAME')
- key = fn + ':do_' + task + ':' + taskhash
+ tid = fn + ':do_' + task
+ key = tid.rsplit("/", 1)[1] + ':' + taskhash
+
+ if self.setscenetasks and tid not in self.setscenetasks:
+ return
# Sanity checks
cache_unihash = self.unitaskhashes.get(key, None)
@@ -504,7 +510,7 @@ class SignatureGeneratorUniHashMixIn(object):
bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
else:
bb.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
- except hashserv.HashConnectionError as e:
+ except hashserv.client.HashConnectionError as e:
bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
finally:
if sigfile:
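The siggen.py changes above make the unihash cache key independent of the absolute recipe path by keeping only the portion after the last '/' (tid.rsplit("/", 1)[1]), and they skip unihash reporting for tasks that are not setscene tasks. A short sketch of the key construction; the paths and taskhash value are invented examples:

    def unihash_cache_key(tid, taskhash):
        # Build the unitaskhashes key as __get_task_unihash_key() does:
        # recipe file name plus task name, then the taskhash.
        return '%s:%s' % (tid.rsplit("/", 1)[1], taskhash)

    # The same recipe built from two different checkouts yields one key:
    tid_a = "/home/alice/poky/meta/recipes-core/busybox/busybox_1.31.bb:do_compile"
    tid_b = "/srv/ci/poky/meta/recipes-core/busybox/busybox_1.31.bb:do_compile"
    assert unihash_cache_key(tid_a, "feed01") == unihash_cache_key(tid_b, "feed01")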
diff --git a/poky/bitbake/lib/bb/tests/data.py b/poky/bitbake/lib/bb/tests/data.py
index a9b0bdb09..3e49984c9 100644
--- a/poky/bitbake/lib/bb/tests/data.py
+++ b/poky/bitbake/lib/bb/tests/data.py
@@ -381,6 +381,19 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST"), " testvalue5")
+ def test_append_and_override_1(self):
+ self.d.setVar("TEST_append", "testvalue2")
+ self.d.setVar("TEST_bar", "testvalue3")
+ self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2")
+
+ def test_append_and_override_2(self):
+ self.d.setVar("TEST_append_bar", "testvalue2")
+ self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2")
+
+ def test_append_and_override_3(self):
+ self.d.setVar("TEST_bar_append", "testvalue2")
+ self.assertEqual(self.d.getVar("TEST"), "testvalue2")
+
# Test an override with _<numeric> in it based on a real world OE issue
def test_underscore_override(self):
self.d.setVar("TARGET_ARCH", "x86_64")
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 2ee030546..a0b656b61 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -1908,3 +1908,83 @@ class GitShallowTest(FetcherTest):
dir = os.listdir(self.unpackdir + "/git/")
self.assertIn("fstests.doap", dir)
+
+class GitLfsTest(FetcherTest):
+ def setUp(self):
+ FetcherTest.setUp(self)
+
+ self.gitdir = os.path.join(self.tempdir, 'git')
+ self.srcdir = os.path.join(self.tempdir, 'gitsource')
+
+ self.d.setVar('WORKDIR', self.tempdir)
+ self.d.setVar('S', self.gitdir)
+ self.d.delVar('PREMIRRORS')
+ self.d.delVar('MIRRORS')
+
+ self.d.setVar('SRCREV', '${AUTOREV}')
+ self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
+
+ bb.utils.mkdirhier(self.srcdir)
+ self.git('init', cwd=self.srcdir)
+ with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs:
+ attrs.write('*.mp3 filter=lfs -text')
+ self.git(['add', '.gitattributes'], cwd=self.srcdir)
+ self.git(['commit', '-m', "attributes", '.gitattributes'], cwd=self.srcdir)
+
+ def git(self, cmd, cwd=None):
+ if isinstance(cmd, str):
+ cmd = 'git ' + cmd
+ else:
+ cmd = ['git'] + cmd
+ if cwd is None:
+ cwd = self.gitdir
+ return bb.process.run(cmd, cwd=cwd)[0]
+
+ def fetch(self, uri=None):
+ uris = self.d.getVar('SRC_URI').split()
+ uri = uris[0]
+ d = self.d
+
+ fetcher = bb.fetch2.Fetch(uris, d)
+ fetcher.download()
+ ud = fetcher.ud[uri]
+ return fetcher, ud
+
+ def test_lfs_enabled(self):
+ import shutil
+
+ uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir
+ self.d.setVar('SRC_URI', uri)
+
+ fetcher, ud = self.fetch()
+ self.assertIsNotNone(ud.method._find_git_lfs)
+
+ # If git-lfs can be found, the unpack should be successful
+ ud.method._find_git_lfs = lambda d: True
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+
+ # If git-lfs cannot be found, the unpack should throw an error
+ with self.assertRaises(bb.fetch2.FetchError):
+ ud.method._find_git_lfs = lambda d: False
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+
+ def test_lfs_disabled(self):
+ import shutil
+
+ uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir
+ self.d.setVar('SRC_URI', uri)
+
+ fetcher, ud = self.fetch()
+ self.assertIsNotNone(ud.method._find_git_lfs)
+
+ # If git-lfs can be found, the unpack should be successful
+ ud.method._find_git_lfs = lambda d: True
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+
+ # If git-lfs cannot be found, the unpack should be successful
+ ud.method._find_git_lfs = lambda d: False
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
diff --git a/poky/bitbake/lib/bb/tests/runqueue.py b/poky/bitbake/lib/bb/tests/runqueue.py
index cb4d526f1..5e6439156 100644
--- a/poky/bitbake/lib/bb/tests/runqueue.py
+++ b/poky/bitbake/lib/bb/tests/runqueue.py
@@ -12,6 +12,7 @@ import os
import tempfile
import subprocess
import sys
+import time
#
# TODO:
@@ -257,6 +258,8 @@ class RunQueueTests(unittest.TestCase):
'a1:package_write_ipk_setscene', 'a1:package_qa_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
@unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
def test_hashserv_double(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -280,6 +283,7 @@ class RunQueueTests(unittest.TestCase):
'a1:package_write_rpm_setscene', 'b1:package_write_ipk_setscene', 'a1:packagedata_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
@unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
def test_hashserv_multiple_setscene(self):
@@ -307,97 +311,13 @@ class RunQueueTests(unittest.TestCase):
'e1:package_setscene']
self.assertEqual(set(tasks), set(expected))
for i in expected:
- if i in ["e1:package_setscene"]:
- self.assertEqual(tasks.count(i), 4, "%s not in task list four times" % i)
- else:
- self.assertEqual(tasks.count(i), 1, "%s not in task list once" % i)
+ self.assertEqual(tasks.count(i), 1, "%s not in task list once" % i)
- @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
- def test_hashserv_partial_match(self):
- # e1:do_package matches initial built but not second hash value
- with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
- extraenv = {
- "BB_HASHSERVE" : "auto",
- "BB_SIGNATURE_HANDLER" : "TestEquivHash"
- }
- cmd = ["bitbake", "a1", "b1"]
- setscenetasks = ['package_write_ipk_setscene', 'package_write_rpm_setscene', 'packagedata_setscene',
- 'populate_sysroot_setscene', 'package_qa_setscene']
- sstatevalid = ""
- tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv, cleanup=True)
- expected = ['a1:' + x for x in self.alltasks] + ['b1:' + x for x in self.alltasks]
- self.assertEqual(set(tasks), set(expected))
- with open(tempdir + "/stamps/a1.do_install.taint", "w") as f:
- f.write("d460a29e-903f-4b76-a96b-3bcc22a65994")
- with open(tempdir + "/stamps/b1.do_install.taint", "w") as f:
- f.write("ed36d46a-2977-458a-b3de-eef885bc1817")
- cmd = ["bitbake", "e1"]
- sstatevalid = "e1:do_package:685e69a026b2f029483fdefe6a11e1e06641dd2a0f6f86e27b9b550f8f21229d"
- tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv, cleanup=True)
- expected = ['a1:package', 'a1:install', 'b1:package', 'b1:install', 'a1:populate_sysroot', 'b1:populate_sysroot',
- 'a1:package_write_ipk_setscene', 'b1:packagedata_setscene', 'b1:package_write_rpm_setscene',
- 'a1:package_write_rpm_setscene', 'b1:package_write_ipk_setscene', 'a1:packagedata_setscene',
- 'e1:package_setscene'] + ['e1:' + x for x in self.alltasks]
- expected.remove('e1:package')
- self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
- @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
- def test_hashserv_partial_match2(self):
- # e1:do_package + e1:do_populate_sysroot matches initial built but not second hash value
- with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
- extraenv = {
- "BB_HASHSERVE" : "auto",
- "BB_SIGNATURE_HANDLER" : "TestEquivHash"
- }
- cmd = ["bitbake", "a1", "b1"]
- setscenetasks = ['package_write_ipk_setscene', 'package_write_rpm_setscene', 'packagedata_setscene',
- 'populate_sysroot_setscene', 'package_qa_setscene']
- sstatevalid = ""
- tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv, cleanup=True)
- expected = ['a1:' + x for x in self.alltasks] + ['b1:' + x for x in self.alltasks]
- self.assertEqual(set(tasks), set(expected))
- with open(tempdir + "/stamps/a1.do_install.taint", "w") as f:
- f.write("d460a29e-903f-4b76-a96b-3bcc22a65994")
- with open(tempdir + "/stamps/b1.do_install.taint", "w") as f:
- f.write("ed36d46a-2977-458a-b3de-eef885bc1817")
- cmd = ["bitbake", "e1"]
- sstatevalid = "e1:do_package:685e69a026b2f029483fdefe6a11e1e06641dd2a0f6f86e27b9b550f8f21229d e1:do_populate_sysroot:ef7dc0e2dd55d0534e75cba50731ff42f949818b6f29a65d72bc05856e56711d"
- tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv, cleanup=True)
- expected = ['a1:package', 'a1:install', 'b1:package', 'b1:install', 'a1:populate_sysroot', 'b1:populate_sysroot',
- 'a1:package_write_ipk_setscene', 'b1:packagedata_setscene', 'b1:package_write_rpm_setscene',
- 'a1:package_write_rpm_setscene', 'b1:package_write_ipk_setscene', 'a1:packagedata_setscene',
- 'e1:package_setscene', 'e1:populate_sysroot_setscene', 'e1:build', 'e1:package_qa', 'e1:package_write_rpm', 'e1:package_write_ipk', 'e1:packagedata']
- self.assertEqual(set(tasks), set(expected))
-
- @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
- def test_hashserv_partial_match3(self):
- # e1:do_package is valid for a1 but not after b1
- # In former buggy code, this triggered e1:do_fetch, then e1:do_populate_sysroot to run
- # with none of the intermediate tasks which is a serious bug
- with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
- extraenv = {
- "BB_HASHSERVE" : "auto",
- "BB_SIGNATURE_HANDLER" : "TestEquivHash"
- }
- cmd = ["bitbake", "a1", "b1"]
- setscenetasks = ['package_write_ipk_setscene', 'package_write_rpm_setscene', 'packagedata_setscene',
- 'populate_sysroot_setscene', 'package_qa_setscene']
- sstatevalid = ""
- tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv, cleanup=True)
- expected = ['a1:' + x for x in self.alltasks] + ['b1:' + x for x in self.alltasks]
- self.assertEqual(set(tasks), set(expected))
- with open(tempdir + "/stamps/a1.do_install.taint", "w") as f:
- f.write("d460a29e-903f-4b76-a96b-3bcc22a65994")
- with open(tempdir + "/stamps/b1.do_install.taint", "w") as f:
- f.write("ed36d46a-2977-458a-b3de-eef885bc1817")
- cmd = ["bitbake", "e1", "-DD"]
- sstatevalid = "e1:do_package:af056eae12a733a6a8c4f4da8c6757e588e13565852c94e2aad4d953a3989c13 e1:do_package:a3677703db82b22d28d57c1820a47851dd780104580863f5bd32e66e003a779d"
- tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv, cleanup=True, slowtasks="e1:fetch b1:install")
- expected = ['a1:package', 'a1:install', 'b1:package', 'b1:install', 'a1:populate_sysroot', 'b1:populate_sysroot',
- 'a1:package_write_ipk_setscene', 'b1:packagedata_setscene', 'b1:package_write_rpm_setscene',
- 'a1:package_write_rpm_setscene', 'b1:package_write_ipk_setscene', 'a1:packagedata_setscene',
- 'e1:package_setscene'] + ['e1:' + x for x in self.alltasks]
- expected.remove('e1:package')
- self.assertEqual(set(tasks), set(expected))
+ def shutdown(self, tempdir):
+ # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup
+ while os.path.exists(tempdir + "/hashserve.sock"):
+ time.sleep(0.5)
diff --git a/poky/bitbake/lib/bb/ui/buildinfohelper.py b/poky/bitbake/lib/bb/ui/buildinfohelper.py
index f2151c2d4..5cbca97f3 100644
--- a/poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/poky/bitbake/lib/bb/ui/buildinfohelper.py
@@ -646,6 +646,9 @@ class ORMWrapper(object):
Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
packagedeps_objs = []
+ pattern_so = re.compile(r'.*\.so(\.\d*)?$')
+ pattern_lib = re.compile(r'.*\-suffix(\d*)?$')
+ pattern_ko = re.compile(r'^kernel-module-.*')
for p in packagedict:
for (px,deptype) in packagedict[p]['depends']:
if deptype == 'depends':
@@ -654,6 +657,13 @@ class ORMWrapper(object):
tdeptype = Package_Dependency.TYPE_TRECOMMENDS
try:
+ # Skip known non-package objects like libraries and kernel modules
+ if pattern_so.match(px) or pattern_lib.match(px):
+ logger.info("Toaster does not add library file dependencies to packages (%s,%s)", p, px)
+ continue
+ if pattern_ko.match(px):
+ logger.info("Toaster does not add kernel module dependencies to packages (%s,%s)", p, px)
+ continue
packagedeps_objs.append(Package_Dependency(
package = packagedict[p]['object'],
depends_on = packagedict[px]['object'],
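The buildinfohelper.py hunk above pre-compiles three regular expressions and uses them to skip dependency names that look like shared libraries, '-suffix' style providers, or kernel-module packages before creating Package_Dependency rows. A standalone sketch of the same filter; the sample package names are made up:

    import re

    pattern_so  = re.compile(r'.*\.so(\.\d*)?$')    # shared libraries, e.g. libfoo.so.1
    pattern_lib = re.compile(r'.*\-suffix(\d*)?$')  # '-suffix' style providers
    pattern_ko  = re.compile(r'^kernel-module-.*')  # kernel module packages

    def is_package_dependency(name):
        # False for names Toaster should not record as package dependencies.
        if pattern_so.match(name) or pattern_lib.match(name):
            return False
        if pattern_ko.match(name):
            return False
        return True

    print(is_package_dependency("libfoo.so.1"))          # False
    print(is_package_dependency("kernel-module-e1000"))  # False
    print(is_package_dependency("busybox"))              # True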
diff --git a/poky/bitbake/lib/hashserv/client.py b/poky/bitbake/lib/hashserv/client.py
index 2559bbb3f..f65956617 100644
--- a/poky/bitbake/lib/hashserv/client.py
+++ b/poky/bitbake/lib/hashserv/client.py
@@ -7,6 +7,7 @@ from contextlib import closing
import json
import logging
import socket
+import os
logger = logging.getLogger('hashserv.client')
diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py
index 6584ff57b..a5472a996 100644
--- a/poky/bitbake/lib/hashserv/tests.py
+++ b/poky/bitbake/lib/hashserv/tests.py
@@ -32,7 +32,6 @@ class TestHashEquivalenceServer(object):
self.server = create_server(self.get_server_addr(), self.dbfile)
self.server_thread = multiprocessing.Process(target=self._run_server)
- self.server_thread.daemon = True
self.server_thread.start()
self.client = create_client(self.server.address)
diff --git a/poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js b/poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js
index 296483985..8e2032de2 100644
--- a/poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js
+++ b/poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js
@@ -17,11 +17,15 @@ function importLayerPageInit (ctx) {
var currentLayerDepSelection;
var validLayerName = /^(\w|-)+$/;
+ /* Catch 'disable' race condition between type-ahead started and "input change" */
+ var typeAheadStarted = 0;
+
libtoaster.makeTypeahead(layerDepInput,
libtoaster.ctx.layersTypeAheadUrl,
{ include_added: "true" }, function(item){
currentLayerDepSelection = item;
layerDepBtn.removeAttr("disabled");
+ typeAheadStarted = 1;
});
layerDepInput.on("typeahead:select", function(event, data){
@@ -34,7 +38,10 @@ function importLayerPageInit (ctx) {
// disable the "Add layer" button when the layer input typeahead is empty
// or not in the typeahead choices
layerDepInput.on("input change", function(){
- layerDepBtn.attr("disabled","disabled");
+ if (0 == typeAheadStarted) {
+ layerDepBtn.attr("disabled","disabled");
+ }
+ typeAheadStarted = 0;
});
/* We automatically add "openembedded-core" layer for convenience as a
@@ -50,6 +57,7 @@ function importLayerPageInit (ctx) {
});
layerDepBtn.click(function(){
+ typeAheadStarted = 0;
if (currentLayerDepSelection == undefined)
return;
@@ -77,7 +85,7 @@ function importLayerPageInit (ctx) {
$("#layer-deps-list").append(newLayerDep);
- libtoaster.getLayerDepsForProject(currentLayerDepSelection.layerdetailurl,
+ libtoaster.getLayerDepsForProject(currentLayerDepSelection.xhrLayerUrl,
function (data){
/* These are the dependencies of the layer added as a dependency */
if (data.list.length > 0) {