Diffstat (limited to 'poky/bitbake/lib/bb')
-rw-r--r--  poky/bitbake/lib/bb/__init__.py | 19
-rw-r--r--  poky/bitbake/lib/bb/build.py | 6
-rw-r--r--  poky/bitbake/lib/bb/cache.py | 56
-rw-r--r--  poky/bitbake/lib/bb/cooker.py | 30
-rw-r--r--  poky/bitbake/lib/bb/cookerdata.py | 2
-rw-r--r--  poky/bitbake/lib/bb/event.py | 26
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py | 40
-rw-r--r--  poky/bitbake/lib/bb/fetch2/bzr.py | 8
-rw-r--r--  poky/bitbake/lib/bb/fetch2/clearcase.py | 2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/cvs.py | 4
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py | 44
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py | 4
-rw-r--r--  poky/bitbake/lib/bb/fetch2/hg.py | 16
-rw-r--r--  poky/bitbake/lib/bb/fetch2/local.py | 4
-rw-r--r--  poky/bitbake/lib/bb/fetch2/osc.py | 6
-rw-r--r--  poky/bitbake/lib/bb/fetch2/perforce.py | 13
-rw-r--r--  poky/bitbake/lib/bb/fetch2/repo.py | 2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/svn.py | 6
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py | 25
-rw-r--r--  poky/bitbake/lib/bb/parse/__init__.py | 2
-rw-r--r--  poky/bitbake/lib/bb/parse/ast.py | 6
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/BBHandler.py | 4
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py | 2
-rw-r--r--  poky/bitbake/lib/bb/persist_data.py | 2
-rw-r--r--  poky/bitbake/lib/bb/providers.py | 24
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py | 75
-rw-r--r--  poky/bitbake/lib/bb/siggen.py | 8
-rw-r--r--  poky/bitbake/lib/bb/taskdata.py | 42
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py | 47
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc-1.conf (renamed from poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc1.conf) | 0
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc_2.conf (renamed from poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc2.conf) | 0
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/recipes/f1.bb | 2
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc1.bb | 4
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc2.bb | 4
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue.py | 38
-rw-r--r--  poky/bitbake/lib/bb/ui/buildinfohelper.py | 10
-rw-r--r--  poky/bitbake/lib/bb/utils.py | 6
37 files changed, 353 insertions(+), 236 deletions(-)
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index b21773734..84a9051c1 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "1.49.0"
+__version__ = "1.49.2"
import sys
if sys.version_info < (3, 5, 0):
@@ -21,8 +21,8 @@ class BBHandledException(Exception):
The big dilemma for generic bitbake code is what information to give the user
when an exception occurs. Any exception inheriting this base exception class
has already provided information to the user via some 'fired' message type such as
- an explicitly fired event using bb.fire, or a bb.error message. If bitbake
- encounters an exception derived from this class, no backtrace or other information
+ an explicitly fired event using bb.fire, or a bb.error message. If bitbake
+ encounters an exception derived from this class, no backtrace or other information
will be given to the user, its assumed the earlier event provided the relevant information.
"""
pass
@@ -42,7 +42,16 @@ class BBLoggerMixin(object):
def setup_bblogger(self, name):
if name.split(".")[0] == "BitBake":
- self.debug = self.bbdebug
+ self.debug = self._debug_helper
+
+ def _debug_helper(self, *args, **kwargs):
+ return self.bbdebug(1, *args, **kwargs)
+
+ def debug2(self, *args, **kwargs):
+ return self.bbdebug(2, *args, **kwargs)
+
+ def debug3(self, *args, **kwargs):
+ return self.bbdebug(3, *args, **kwargs)
def bbdebug(self, level, msg, *args, **kwargs):
loglevel = logging.DEBUG - level + 1
@@ -128,7 +137,7 @@ def debug(lvl, *args):
mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
args = (lvl,) + args
lvl = 1
- mainlogger.debug(lvl, ''.join(args))
+ mainlogger.bbdebug(lvl, ''.join(args))
def note(*args):
mainlogger.info(''.join(args))
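
Most of the hunks that follow are mechanical rewrites from the old numeric-level call style to the helpers added above. A minimal sketch of the mapping (illustrative, not part of the patch):

    logger.debug("Executing task %s", task)   # was: logger.debug(1, "Executing task %s", task)
    logger.debug2("%s is not cached", fn)     # was: logger.debug(2, "%s is not cached", fn)
    logger.debug3("run_tasks:")               # was: logger.debug(3, "run_tasks:")

All three route through bbdebug(), which maps helper level N to the Python log level logging.DEBUG - N + 1.
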
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index 974d2ff06..f4f897e41 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -583,7 +583,7 @@ def _exec_task(fn, task, d, quieterr):
logger.error("No such task: %s" % task)
return 1
- logger.debug(1, "Executing task %s", task)
+ logger.debug("Executing task %s", task)
localdata = _task_data(fn, task, d)
tempdir = localdata.getVar('T')
@@ -596,7 +596,7 @@ def _exec_task(fn, task, d, quieterr):
curnice = os.nice(0)
nice = int(nice) - curnice
newnice = os.nice(nice)
- logger.debug(1, "Renice to %s " % newnice)
+ logger.debug("Renice to %s " % newnice)
ionice = localdata.getVar("BB_TASK_IONICE_LEVEL")
if ionice:
try:
@@ -720,7 +720,7 @@ def _exec_task(fn, task, d, quieterr):
logfile.close()
if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
- logger.debug(2, "Zero size logfn %s, removing", logfn)
+ logger.debug2("Zero size logfn %s, removing", logfn)
bb.utils.remove(logfn)
bb.utils.remove(loglink)
event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata)
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 36270d009..aea2b8bc1 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -215,7 +215,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
if not self.not_world:
cachedata.possible_world.append(fn)
#else:
- # logger.debug(2, "EXCLUDE FROM WORLD: %s", fn)
+ # logger.debug2("EXCLUDE FROM WORLD: %s", fn)
# create a collection of all targets for sanity checking
# tasks, such as upstream versions, license, and tools for
@@ -238,7 +238,7 @@ def virtualfn2realfn(virtualfn):
Convert a virtual file name to a real one + the associated subclass keyword
"""
mc = ""
- if virtualfn.startswith('mc:'):
+ if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
elems = virtualfn.split(':')
mc = elems[1]
virtualfn = ":".join(elems[2:])
@@ -268,7 +268,7 @@ def variant2virtual(realfn, variant):
"""
if variant == "":
return realfn
- if variant.startswith("mc:"):
+ if variant.startswith("mc:") and variant.count(':') >= 2:
elems = variant.split(":")
if elems[2]:
return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
@@ -323,7 +323,7 @@ class NoCache(object):
Return a complete set of data for fn.
To do this, we need to parse the file.
"""
- logger.debug(1, "Parsing %s (full)" % virtualfn)
+ logger.debug("Parsing %s (full)" % virtualfn)
(fn, virtual, mc) = virtualfn2realfn(virtualfn)
bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
return bb_data[virtual]
@@ -400,7 +400,7 @@ class Cache(NoCache):
self.cachefile = self.getCacheFile("bb_cache.dat")
- self.logger.debug(1, "Cache dir: %s", self.cachedir)
+ self.logger.debug("Cache dir: %s", self.cachedir)
bb.utils.mkdirhier(self.cachedir)
cache_ok = True
@@ -408,7 +408,7 @@ class Cache(NoCache):
for cache_class in self.caches_array:
cachefile = self.getCacheFile(cache_class.cachefile)
cache_exists = os.path.exists(cachefile)
- self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)
+ self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
cache_ok = cache_ok and cache_exists
cache_class.init_cacheData(self)
if cache_ok:
@@ -416,7 +416,7 @@ class Cache(NoCache):
elif os.path.isfile(self.cachefile):
self.logger.info("Out of date cache found, rebuilding...")
else:
- self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
+ self.logger.debug("Cache file %s not found, building..." % self.cachefile)
# We don't use the symlink, its just for debugging convinience
if self.mc:
@@ -453,7 +453,7 @@ class Cache(NoCache):
for cache_class in self.caches_array:
cachefile = self.getCacheFile(cache_class.cachefile)
- self.logger.debug(1, 'Loading cache file: %s' % cachefile)
+ self.logger.debug('Loading cache file: %s' % cachefile)
with open(cachefile, "rb") as cachefile:
pickled = pickle.Unpickler(cachefile)
# Check cache version information
@@ -500,7 +500,7 @@ class Cache(NoCache):
def parse(self, filename, appends):
"""Parse the specified filename, returning the recipe information"""
- self.logger.debug(1, "Parsing %s", filename)
+ self.logger.debug("Parsing %s", filename)
infos = []
datastores = self.load_bbfile(filename, appends, mc=self.mc)
depends = []
@@ -554,7 +554,7 @@ class Cache(NoCache):
cached, infos = self.load(fn, appends)
for virtualfn, info_array in infos:
if info_array[0].skipped:
- self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
+ self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
skipped += 1
else:
self.add_info(virtualfn, info_array, cacheData, not cached)
@@ -590,21 +590,21 @@ class Cache(NoCache):
# File isn't in depends_cache
if not fn in self.depends_cache:
- self.logger.debug(2, "%s is not cached", fn)
+ self.logger.debug2("%s is not cached", fn)
return False
mtime = bb.parse.cached_mtime_noerror(fn)
# Check file still exists
if mtime == 0:
- self.logger.debug(2, "%s no longer exists", fn)
+ self.logger.debug2("%s no longer exists", fn)
self.remove(fn)
return False
info_array = self.depends_cache[fn]
# Check the file's timestamp
if mtime != info_array[0].timestamp:
- self.logger.debug(2, "%s changed", fn)
+ self.logger.debug2("%s changed", fn)
self.remove(fn)
return False
@@ -615,13 +615,13 @@ class Cache(NoCache):
fmtime = bb.parse.cached_mtime_noerror(f)
# Check if file still exists
if old_mtime != 0 and fmtime == 0:
- self.logger.debug(2, "%s's dependency %s was removed",
+ self.logger.debug2("%s's dependency %s was removed",
fn, f)
self.remove(fn)
return False
if (fmtime != old_mtime):
- self.logger.debug(2, "%s's dependency %s changed",
+ self.logger.debug2("%s's dependency %s changed",
fn, f)
self.remove(fn)
return False
@@ -638,14 +638,14 @@ class Cache(NoCache):
continue
f, exist = f.split(":")
if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
- self.logger.debug(2, "%s's file checksum list file %s changed",
+ self.logger.debug2("%s's file checksum list file %s changed",
fn, f)
self.remove(fn)
return False
if tuple(appends) != tuple(info_array[0].appends):
- self.logger.debug(2, "appends for %s changed", fn)
- self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
+ self.logger.debug2("appends for %s changed", fn)
+ self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
self.remove(fn)
return False
@@ -654,10 +654,10 @@ class Cache(NoCache):
virtualfn = variant2virtual(fn, cls)
self.clean.add(virtualfn)
if virtualfn not in self.depends_cache:
- self.logger.debug(2, "%s is not cached", virtualfn)
+ self.logger.debug2("%s is not cached", virtualfn)
invalid = True
elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
- self.logger.debug(2, "Extra caches missing for %s?" % virtualfn)
+ self.logger.debug2("Extra caches missing for %s?" % virtualfn)
invalid = True
# If any one of the variants is not present, mark as invalid for all
@@ -665,10 +665,10 @@ class Cache(NoCache):
for cls in info_array[0].variants:
virtualfn = variant2virtual(fn, cls)
if virtualfn in self.clean:
- self.logger.debug(2, "Removing %s from cache", virtualfn)
+ self.logger.debug2("Removing %s from cache", virtualfn)
self.clean.remove(virtualfn)
if fn in self.clean:
- self.logger.debug(2, "Marking %s as not clean", fn)
+ self.logger.debug2("Marking %s as not clean", fn)
self.clean.remove(fn)
return False
@@ -681,10 +681,10 @@ class Cache(NoCache):
Called from the parser in error cases
"""
if fn in self.depends_cache:
- self.logger.debug(1, "Removing %s from cache", fn)
+ self.logger.debug("Removing %s from cache", fn)
del self.depends_cache[fn]
if fn in self.clean:
- self.logger.debug(1, "Marking %s as unclean", fn)
+ self.logger.debug("Marking %s as unclean", fn)
self.clean.remove(fn)
def sync(self):
@@ -697,13 +697,13 @@ class Cache(NoCache):
return
if self.cacheclean:
- self.logger.debug(2, "Cache is clean, not saving.")
+ self.logger.debug2("Cache is clean, not saving.")
return
for cache_class in self.caches_array:
cache_class_name = cache_class.__name__
cachefile = self.getCacheFile(cache_class.cachefile)
- self.logger.debug(2, "Writing %s", cachefile)
+ self.logger.debug2("Writing %s", cachefile)
with open(cachefile, "wb") as f:
p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
p.dump(__cache_version__)
@@ -879,7 +879,7 @@ class MultiProcessCache(object):
bb.utils.mkdirhier(cachedir)
self.cachefile = os.path.join(cachedir,
cache_file_name or self.__class__.cache_file_name)
- logger.debug(1, "Using cache in '%s'", self.cachefile)
+ logger.debug("Using cache in '%s'", self.cachefile)
glf = bb.utils.lockfile(self.cachefile + ".lock")
@@ -985,7 +985,7 @@ class SimpleCache(object):
bb.utils.mkdirhier(cachedir)
self.cachefile = os.path.join(cachedir,
cache_file_name or self.__class__.cache_file_name)
- logger.debug(1, "Using cache in '%s'", self.cachefile)
+ logger.debug("Using cache in '%s'", self.cachefile)
glf = bb.utils.lockfile(self.cachefile + ".lock")
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index 83cfee7fb..f4ab797ed 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -411,6 +411,8 @@ class BBCooker:
self.data.disableTracking()
def parseConfiguration(self):
+ self.updateCacheSync()
+
# Change nice level if we're asked to
nice = self.data.getVar("BB_NICE_LEVEL")
if nice:
@@ -441,7 +443,7 @@ class BBCooker:
continue
except AttributeError:
pass
- logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
+ logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
clean = False
if hasattr(self.configuration, o):
@@ -468,17 +470,17 @@ class BBCooker:
for k in bb.utils.approved_variables():
if k in environment and k not in self.configuration.env:
- logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
+ logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
self.configuration.env[k] = environment[k]
clean = False
if k in self.configuration.env and k not in environment:
- logger.debug(1, "Updating environment variable %s (deleted)" % (k))
+ logger.debug("Updating environment variable %s (deleted)" % (k))
del self.configuration.env[k]
clean = False
if k not in self.configuration.env and k not in environment:
continue
if environment[k] != self.configuration.env[k]:
- logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
+ logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
self.configuration.env[k] = environment[k]
clean = False
@@ -486,7 +488,7 @@ class BBCooker:
self.configuration.env = environment
if not clean:
- logger.debug(1, "Base environment change, triggering reparse")
+ logger.debug("Base environment change, triggering reparse")
self.reset()
def runCommands(self, server, data, abort):
@@ -614,7 +616,7 @@ class BBCooker:
# Replace string such as "mc:*:bash"
# into "mc:A:bash mc:B:bash bash"
for k in targetlist:
- if k.startswith("mc:"):
+ if k.startswith("mc:") and k.count(':') >= 2:
if wildcard:
bb.fatal('multiconfig conflict')
if k.split(":")[1] == "*":
@@ -648,7 +650,7 @@ class BBCooker:
for k in fulltargetlist:
origk = k
mc = ""
- if k.startswith("mc:"):
+ if k.startswith("mc:") and k.count(':') >= 2:
mc = k.split(":")[1]
k = ":".join(k.split(":")[2:])
ktask = task
@@ -697,7 +699,7 @@ class BBCooker:
if depmc not in self.multiconfigs:
bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
else:
- logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
+ logger.debug("Adding providers for multiconfig dependency %s" % l[3])
taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
seen.add(k)
new = True
@@ -1553,7 +1555,7 @@ class BBCooker:
self.inotify_modified_files = []
if not self.baseconfig_valid:
- logger.debug(1, "Reloading base configuration data")
+ logger.debug("Reloading base configuration data")
self.initConfigurationData()
self.handlePRServ()
@@ -2209,18 +2211,18 @@ class CookerParser(object):
except bb.BBHandledException as exc:
self.error += 1
logger.error('Failed to parse recipe: %s' % exc.recipe)
- self.shutdown(clean=False)
+ self.shutdown(clean=False, force=True)
return False
except ParsingFailure as exc:
self.error += 1
logger.error('Unable to parse %s: %s' %
(exc.recipe, bb.exceptions.to_string(exc.realexception)))
- self.shutdown(clean=False)
+ self.shutdown(clean=False, force=True)
return False
except bb.parse.ParseError as exc:
self.error += 1
logger.error(str(exc))
- self.shutdown(clean=False)
+ self.shutdown(clean=False, force=True)
return False
except bb.data_smart.ExpansionError as exc:
self.error += 1
@@ -2229,7 +2231,7 @@ class CookerParser(object):
tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
logger.error('ExpansionError during parsing %s', value.recipe,
exc_info=(etype, value, tb))
- self.shutdown(clean=False)
+ self.shutdown(clean=False, force=True)
return False
except Exception as exc:
self.error += 1
@@ -2241,7 +2243,7 @@ class CookerParser(object):
# Most likely, an exception occurred during raising an exception
import traceback
logger.error('Exception during parse: %s' % traceback.format_exc())
- self.shutdown(clean=False)
+ self.shutdown(clean=False, force=True)
return False
self.current += 1
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index c39b56813..1c1e008c6 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -429,7 +429,7 @@ class CookerDataBuilder(object):
parselog.critical("Undefined event handler function '%s'" % var)
raise bb.BBHandledException()
handlerln = int(data.getVarFlag(var, "lineno", False))
- bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)
+ bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln, data)
data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))
diff --git a/poky/bitbake/lib/bb/event.py b/poky/bitbake/lib/bb/event.py
index 694b47052..23e1f3187 100644
--- a/poky/bitbake/lib/bb/event.py
+++ b/poky/bitbake/lib/bb/event.py
@@ -118,6 +118,8 @@ def fire_class_handlers(event, d):
if _eventfilter:
if not _eventfilter(name, handler, event, d):
continue
+ if d and not name in (d.getVar("__BBHANDLERS_MC") or []):
+ continue
execute_handler(name, handler, event, d)
ui_queue = []
@@ -227,9 +229,13 @@ def fire_from_worker(event, d):
fire_ui_handlers(event, d)
noop = lambda _: None
-def register(name, handler, mask=None, filename=None, lineno=None):
+def register(name, handler, mask=None, filename=None, lineno=None, data=None):
"""Register an Event handler"""
+ if data and data.getVar("BB_CURRENT_MC"):
+ mc = data.getVar("BB_CURRENT_MC")
+ name = '%s%s' % (mc.replace('-', '_'), name)
+
# already registered
if name in _handlers:
return AlreadyRegistered
@@ -268,10 +274,20 @@ def register(name, handler, mask=None, filename=None, lineno=None):
_event_handler_map[m] = {}
_event_handler_map[m][name] = True
+ if data:
+ bbhands_mc = (data.getVar("__BBHANDLERS_MC") or [])
+ bbhands_mc.append(name)
+ data.setVar("__BBHANDLERS_MC", bbhands_mc)
+
return Registered
-def remove(name, handler):
+def remove(name, handler, data=None):
"""Remove an Event handler"""
+ if data:
+ if data.getVar("BB_CURRENT_MC"):
+ mc = data.getVar("BB_CURRENT_MC")
+ name = '%s%s' % (mc.replace('-', '_'), name)
+
_handlers.pop(name)
if name in _catchall_handlers:
_catchall_handlers.pop(name)
@@ -279,6 +295,12 @@ def remove(name, handler):
if name in _event_handler_map[event]:
_event_handler_map[event].pop(name)
+ if data:
+ bbhands_mc = (data.getVar("__BBHANDLERS_MC") or [])
+ if name in bbhands_mc:
+ bbhands_mc.remove(name)
+ data.setVar("__BBHANDLERS_MC", bbhands_mc)
+
def get_handlers():
return _handlers
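
Taken together, these event.py changes namespace class handlers per multiconfig: register() prefixes the handler name with the dash-sanitized BB_CURRENT_MC value and records it in __BBHANDLERS_MC on the datastore, and fire_class_handlers() skips any handler not listed there for the datastore firing the event. A hedged illustration of the renaming (values hypothetical):

    mc = d.getVar("BB_CURRENT_MC")                 # e.g. "mc-1"
    name = '%s%s' % (mc.replace('-', '_'), name)   # "my_handler" -> "mc_1my_handler"
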
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 07b7ae41b..19169d780 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -290,7 +290,7 @@ class URI(object):
def _param_str_split(self, string, elmdelim, kvdelim="="):
ret = collections.OrderedDict()
- for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
+ for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
ret[k] = v
return ret
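
The added "if x" drops empty segments, so a stray or trailing delimiter no longer raises ValueError from the key/value unpacking. For instance (illustrative input):

    params = "a=1;;b=2;"
    pairs = [x.split("=", 1) for x in params.split(";") if x]
    assert pairs == [["a", "1"], ["b", "2"]]
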
@@ -428,7 +428,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
uri_decoded = list(decodeurl(ud.url))
uri_find_decoded = list(decodeurl(uri_find))
uri_replace_decoded = list(decodeurl(uri_replace))
- logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
+ logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
result_decoded = ['', '', '', '', '', {}]
for loc, i in enumerate(uri_find_decoded):
result_decoded[loc] = uri_decoded[loc]
@@ -474,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
result = encodeurl(result_decoded)
if result == ud.url:
return None
- logger.debug(2, "For url %s returning %s" % (ud.url, result))
+ logger.debug2("For url %s returning %s" % (ud.url, result))
return result
methods = []
@@ -499,9 +499,9 @@ def fetcher_init(d):
# When to drop SCM head revisions controlled by user policy
srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
if srcrev_policy == "cache":
- logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+ logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
elif srcrev_policy == "clear":
- logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+ logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
revs.clear()
else:
raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
@@ -857,9 +857,9 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
cmd = 'export PSEUDO_DISABLED=1; ' + cmd
if workdir:
- logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+ logger.debug("Running '%s' in %s" % (cmd, workdir))
else:
- logger.debug(1, "Running %s", cmd)
+ logger.debug("Running %s", cmd)
success = False
error_message = ""
@@ -900,7 +900,7 @@ def check_network_access(d, info, url):
elif not trusted_network(d, url):
raise UntrustedUrl(url, info)
else:
- logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+ logger.debug("Fetcher accessed the network with the command %s" % info)
def build_mirroruris(origud, mirrors, ld):
uris = []
@@ -926,7 +926,7 @@ def build_mirroruris(origud, mirrors, ld):
continue
if not trusted_network(ld, newuri):
- logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
+ logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
continue
# Create a local copy of the mirrors minus the current line
@@ -939,8 +939,8 @@ def build_mirroruris(origud, mirrors, ld):
newud = FetchData(newuri, ld)
newud.setup_localpath(ld)
except bb.fetch2.BBFetchException as e:
- logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
- logger.debug(1, str(e))
+ logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+ logger.debug(str(e))
try:
# setup_localpath of file:// urls may fail, we should still see
# if mirrors of the url exist
@@ -1043,8 +1043,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
elif isinstance(e, NoChecksumError):
raise
else:
- logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
- logger.debug(1, str(e))
+ logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
+ logger.debug(str(e))
try:
ud.method.clean(ud, ld)
except UnboundLocalError:
@@ -1688,7 +1688,7 @@ class Fetch(object):
if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
done = True
elif m.try_premirror(ud, self.d):
- logger.debug(1, "Trying PREMIRRORS")
+ logger.debug("Trying PREMIRRORS")
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
done = m.try_mirrors(self, ud, self.d, mirrors)
if done:
@@ -1698,7 +1698,7 @@ class Fetch(object):
m.update_donestamp(ud, self.d)
except ChecksumError as e:
logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
- logger.debug(1, str(e))
+ logger.debug(str(e))
done = False
if premirroronly:
@@ -1710,7 +1710,7 @@ class Fetch(object):
try:
if not trusted_network(self.d, ud.url):
raise UntrustedUrl(ud.url)
- logger.debug(1, "Trying Upstream")
+ logger.debug("Trying Upstream")
m.download(ud, self.d)
if hasattr(m, "build_mirror_data"):
m.build_mirror_data(ud, self.d)
@@ -1725,19 +1725,19 @@ class Fetch(object):
except BBFetchException as e:
if isinstance(e, ChecksumError):
logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
- logger.debug(1, str(e))
+ logger.debug(str(e))
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError):
raise
else:
logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
- logger.debug(1, str(e))
+ logger.debug(str(e))
firsterr = e
# Remove any incomplete fetch
if not verified_stamp:
m.clean(ud, self.d)
- logger.debug(1, "Trying MIRRORS")
+ logger.debug("Trying MIRRORS")
mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
done = m.try_mirrors(self, ud, self.d, mirrors)
@@ -1774,7 +1774,7 @@ class Fetch(object):
ud = self.ud[u]
ud.setup_localpath(self.d)
m = ud.method
- logger.debug(1, "Testing URL %s", u)
+ logger.debug("Testing URL %s", u)
# First try checking uri, u, from PREMIRRORS
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
ret = m.try_mirrors(self, ud, self.d, mirrors, True)
diff --git a/poky/bitbake/lib/bb/fetch2/bzr.py b/poky/bitbake/lib/bb/fetch2/bzr.py
index 566ace9f0..fc558f50b 100644
--- a/poky/bitbake/lib/bb/fetch2/bzr.py
+++ b/poky/bitbake/lib/bb/fetch2/bzr.py
@@ -74,16 +74,16 @@ class Bzr(FetchMethod):
if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
bzrcmd = self._buildbzrcommand(ud, d, "update")
- logger.debug(1, "BZR Update %s", ud.url)
+ logger.debug("BZR Update %s", ud.url)
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
else:
bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- logger.debug(1, "BZR Checkout %s", ud.url)
+ logger.debug("BZR Checkout %s", ud.url)
bb.utils.mkdirhier(ud.pkgdir)
- logger.debug(1, "Running %s", bzrcmd)
+ logger.debug("Running %s", bzrcmd)
runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
scmdata = ud.parm.get("scmdata", "")
@@ -109,7 +109,7 @@ class Bzr(FetchMethod):
"""
Return the latest upstream revision number
"""
- logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
+ logger.debug2("BZR fetcher hitting network for %s", ud.url)
bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
diff --git a/poky/bitbake/lib/bb/fetch2/clearcase.py b/poky/bitbake/lib/bb/fetch2/clearcase.py
index 49d7ae1b0..1a9c86376 100644
--- a/poky/bitbake/lib/bb/fetch2/clearcase.py
+++ b/poky/bitbake/lib/bb/fetch2/clearcase.py
@@ -70,7 +70,7 @@ class ClearCase(FetchMethod):
return ud.type in ['ccrc']
def debug(self, msg):
- logger.debug(1, "ClearCase: %s", msg)
+ logger.debug("ClearCase: %s", msg)
def urldata_init(self, ud, d):
"""
diff --git a/poky/bitbake/lib/bb/fetch2/cvs.py b/poky/bitbake/lib/bb/fetch2/cvs.py
index 22abdef79..01de5ff4c 100644
--- a/poky/bitbake/lib/bb/fetch2/cvs.py
+++ b/poky/bitbake/lib/bb/fetch2/cvs.py
@@ -109,7 +109,7 @@ class Cvs(FetchMethod):
cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
# create module directory
- logger.debug(2, "Fetch: checking for module directory")
+ logger.debug2("Fetch: checking for module directory")
moddir = os.path.join(ud.pkgdir, localdir)
workdir = None
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
@@ -123,7 +123,7 @@ class Cvs(FetchMethod):
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
workdir = ud.pkgdir
- logger.debug(1, "Running %s", cvscmd)
+ logger.debug("Running %s", cvscmd)
bb.fetch2.check_network_access(d, cvscmd, ud.url)
cmd = cvscmd
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index df9538a60..e3ba80a3f 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -384,6 +384,35 @@ class Git(FetchMethod):
if missing_rev:
raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
+ if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud):
+ # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching
+ # of all LFS blobs needed at the srcrev.
+ #
+ # It would be nice to just do this inline here by running 'git-lfs fetch'
+ # on the bare clonedir, but that operation requires a working copy on some
+ # releases of Git LFS.
+ tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
+ try:
+ # Do the checkout. This implicitly involves a Git LFS fetch.
+ self.unpack(ud, tmpdir, d)
+
+ # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
+ # the bare clonedir.
+ #
+ # As this procedure is invoked repeatedly on incremental fetches as
+ # a recipe's SRCREV is bumped throughout its lifetime, this will
+ # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs
+ # corresponding to all the blobs reachable from the different revs
+ # fetched across time.
+ #
+ # Only do this if the unpack resulted in a .git/lfs directory being
+ # created; this only happens if at least one blob needed to be
+ # downloaded.
+ if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
+ runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir)
+ finally:
+ bb.utils.remove(tmpdir, recurse=True)
+
def build_mirror_data(self, ud, d):
if ud.shallow and ud.write_shallow_tarballs:
if not os.path.exists(ud.fullshallow):
@@ -479,7 +508,7 @@ class Git(FetchMethod):
if os.path.exists(destdir):
bb.utils.prunedir(destdir)
- need_lfs = ud.parm.get("lfs", "1") == "1"
+ need_lfs = self._need_lfs(ud)
if not need_lfs:
ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd
@@ -568,6 +597,9 @@ class Git(FetchMethod):
raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
return output.split()[0] != "0"
+ def _need_lfs(self, ud):
+ return ud.parm.get("lfs", "1") == "1"
+
def _contains_lfs(self, ud, d, wd):
"""
Check if the repository has 'lfs' (large file) content
@@ -578,8 +610,14 @@ class Git(FetchMethod):
else:
branchname = "master"
- cmd = "%s grep lfs origin/%s:.gitattributes | wc -l" % (
- ud.basecmd, ud.branches[ud.names[0]])
+ # The bare clonedir doesn't use the remote names; it has the branch immediately.
+ if wd == ud.clonedir:
+ refname = ud.branches[ud.names[0]]
+ else:
+ refname = "origin/%s" % ud.branches[ud.names[0]]
+
+ cmd = "%s grep lfs %s:.gitattributes | wc -l" % (
+ ud.basecmd, refname)
try:
output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
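
The LFS handling is gated by the "lfs" URL parameter: _need_lfs() treats anything other than lfs=0 as a request to fetch LFS blobs, and _contains_lfs() now greps .gitattributes on the branch ref directly (no origin/ prefix) when inspecting the bare clonedir. A standalone sketch of the parameter check, mirroring _need_lfs():

    def need_lfs(parm):
        # lfs defaults to "1"; add ;lfs=0 to a git SRC_URI to skip LFS downloads
        return parm.get("lfs", "1") == "1"

    assert need_lfs({}) is True
    assert need_lfs({"lfs": "0"}) is False
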
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index d6e5c5c05..a4527bf36 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -78,7 +78,7 @@ class GitSM(Git):
module_hash = ""
if not module_hash:
- logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
+ logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
continue
submodules.append(m)
@@ -179,7 +179,7 @@ class GitSM(Git):
(ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
if len(need_update_list) > 0:
- logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
+ logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
return True
return False
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
index 8f503701e..063e13008 100644
--- a/poky/bitbake/lib/bb/fetch2/hg.py
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -150,7 +150,7 @@ class Hg(FetchMethod):
def download(self, ud, d):
"""Fetch url"""
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+ logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
# If the checkout doesn't exist and the mirror tarball does, extract it
if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
@@ -160,7 +160,7 @@ class Hg(FetchMethod):
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
# Found the source, check whether need pull
updatecmd = self._buildhgcommand(ud, d, "update")
- logger.debug(1, "Running %s", updatecmd)
+ logger.debug("Running %s", updatecmd)
try:
runfetchcmd(updatecmd, d, workdir=ud.moddir)
except bb.fetch2.FetchError:
@@ -168,7 +168,7 @@ class Hg(FetchMethod):
pullcmd = self._buildhgcommand(ud, d, "pull")
logger.info("Pulling " + ud.url)
# update sources there
- logger.debug(1, "Running %s", pullcmd)
+ logger.debug("Running %s", pullcmd)
bb.fetch2.check_network_access(d, pullcmd, ud.url)
runfetchcmd(pullcmd, d, workdir=ud.moddir)
try:
@@ -183,14 +183,14 @@ class Hg(FetchMethod):
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- logger.debug(1, "Running %s", fetchcmd)
+ logger.debug("Running %s", fetchcmd)
bb.fetch2.check_network_access(d, fetchcmd, ud.url)
runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)
# Even when we clone (fetch), we still need to update as hg's clone
# won't checkout the specified revision if its on a branch
updatecmd = self._buildhgcommand(ud, d, "update")
- logger.debug(1, "Running %s", updatecmd)
+ logger.debug("Running %s", updatecmd)
runfetchcmd(updatecmd, d, workdir=ud.moddir)
def clean(self, ud, d):
@@ -247,9 +247,9 @@ class Hg(FetchMethod):
if scmdata != "nokeep":
proto = ud.parm.get('protocol', 'http')
if not os.access(os.path.join(codir, '.hg'), os.R_OK):
- logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
+ logger.debug2("Unpack: creating new hg repository in '" + codir + "'")
runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
- logger.debug(2, "Unpack: updating source in '" + codir + "'")
+ logger.debug2("Unpack: updating source in '" + codir + "'")
if ud.user and ud.pswd:
runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull %s" % (ud.basecmd, ud.user, ud.pswd, proto, ud.moddir), d, workdir=codir)
else:
@@ -259,5 +259,5 @@ class Hg(FetchMethod):
else:
runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
else:
- logger.debug(2, "Unpack: extracting source to '" + codir + "'")
+ logger.debug2("Unpack: extracting source to '" + codir + "'")
runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
diff --git a/poky/bitbake/lib/bb/fetch2/local.py b/poky/bitbake/lib/bb/fetch2/local.py
index 25d4557db..e7d1c8c58 100644
--- a/poky/bitbake/lib/bb/fetch2/local.py
+++ b/poky/bitbake/lib/bb/fetch2/local.py
@@ -54,12 +54,12 @@ class Local(FetchMethod):
return [path]
filespath = d.getVar('FILESPATH')
if filespath:
- logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
+ logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
newpath, hist = bb.utils.which(filespath, path, history=True)
searched.extend(hist)
if not os.path.exists(newpath):
dldirfile = os.path.join(d.getVar("DL_DIR"), path)
- logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+ logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
bb.utils.mkdirhier(os.path.dirname(dldirfile))
searched.append(dldirfile)
return searched
diff --git a/poky/bitbake/lib/bb/fetch2/osc.py b/poky/bitbake/lib/bb/fetch2/osc.py
index 3a6cd2951..d9ce44390 100644
--- a/poky/bitbake/lib/bb/fetch2/osc.py
+++ b/poky/bitbake/lib/bb/fetch2/osc.py
@@ -84,13 +84,13 @@ class Osc(FetchMethod):
Fetch url
"""
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+ logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
oscupdatecmd = self._buildosccommand(ud, d, "update")
logger.info("Update "+ ud.url)
# update sources there
- logger.debug(1, "Running %s", oscupdatecmd)
+ logger.debug("Running %s", oscupdatecmd)
bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
else:
@@ -98,7 +98,7 @@ class Osc(FetchMethod):
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- logger.debug(1, "Running %s", oscfetchcmd)
+ logger.debug("Running %s", oscfetchcmd)
bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index 6f3c95b6c..e2a41a4a1 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -90,16 +90,16 @@ class Perforce(FetchMethod):
p4port = d.getVar('P4PORT')
if p4port:
- logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
+ logger.debug('Using recipe provided P4PORT: %s' % p4port)
ud.host = p4port
else:
- logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
+ logger.debug('Trying to use P4CONFIG to automatically set P4PORT...')
ud.usingp4config = True
p4cmd = '%s info | grep "Server address"' % ud.basecmd
bb.fetch2.check_network_access(d, p4cmd, ud.url)
ud.host = runfetchcmd(p4cmd, d, True)
ud.host = ud.host.split(': ')[1].strip()
- logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
+ logger.debug('Determined P4PORT to be: %s' % ud.host)
if not ud.host:
raise FetchError('Could not determine P4PORT from P4CONFIG')
@@ -119,6 +119,7 @@ class Perforce(FetchMethod):
cleanedpath = ud.path.replace('/...', '').replace('/', '.')
cleanedhost = ud.host.replace(':', '.')
+ cleanedmodule = ""
# Merge the path and module into the final depot location
if ud.module:
if ud.module.find('/') == 0:
@@ -133,7 +134,7 @@ class Perforce(FetchMethod):
ud.setup_revisions(d)
- ud.localfile = d.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision))
+ ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision))
def _buildp4command(self, ud, d, command, depot_filename=None):
"""
@@ -207,7 +208,7 @@ class Perforce(FetchMethod):
for filename in p4fileslist:
item = filename.split(' - ')
lastaction = item[1].split()
- logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
+ logger.debug('File: %s Last Action: %s' % (item[0], lastaction[0]))
if lastaction[0] == 'delete':
continue
filelist.append(item[0])
@@ -254,7 +255,7 @@ class Perforce(FetchMethod):
raise FetchError('Could not determine the latest perforce changelist')
tipcset = tip.split(' ')[1]
- logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
+ logger.debug('p4 tip found to be changelist %s' % tipcset)
return tipcset
def sortable_revision(self, ud, d, name):
diff --git a/poky/bitbake/lib/bb/fetch2/repo.py b/poky/bitbake/lib/bb/fetch2/repo.py
index 2bdbbd409..fa4cb8149 100644
--- a/poky/bitbake/lib/bb/fetch2/repo.py
+++ b/poky/bitbake/lib/bb/fetch2/repo.py
@@ -47,7 +47,7 @@ class Repo(FetchMethod):
"""Fetch url"""
if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
- logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+ logger.debug("%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
return
repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
diff --git a/poky/bitbake/lib/bb/fetch2/svn.py b/poky/bitbake/lib/bb/fetch2/svn.py
index 971a5add4..8856ef1c6 100644
--- a/poky/bitbake/lib/bb/fetch2/svn.py
+++ b/poky/bitbake/lib/bb/fetch2/svn.py
@@ -116,7 +116,7 @@ class Svn(FetchMethod):
def download(self, ud, d):
"""Fetch url"""
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+ logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
lf = bb.utils.lockfile(ud.svnlock)
@@ -129,7 +129,7 @@ class Svn(FetchMethod):
runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
except FetchError:
pass
- logger.debug(1, "Running %s", svncmd)
+ logger.debug("Running %s", svncmd)
bb.fetch2.check_network_access(d, svncmd, ud.url)
runfetchcmd(svncmd, d, workdir=ud.moddir)
else:
@@ -137,7 +137,7 @@ class Svn(FetchMethod):
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- logger.debug(1, "Running %s", svncmd)
+ logger.debug("Running %s", svncmd)
bb.fetch2.check_network_access(d, svncmd, ud.url)
runfetchcmd(svncmd, d, workdir=ud.pkgdir)
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index e6d9f528d..6d82f3af0 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -52,6 +52,12 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
class Wget(FetchMethod):
+
+ # CDNs like CloudFlare may do a 'browser integrity test' which can fail
+ # with the standard wget/urllib User-Agent, so pretend to be a modern
+ # browser.
+ user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
+
"""Class to fetch urls via 'wget'"""
def supports(self, ud, d):
"""
@@ -82,7 +88,7 @@ class Wget(FetchMethod):
progresshandler = WgetProgressHandler(d)
- logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+ logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
bb.fetch2.check_network_access(d, command, ud.url)
runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)
@@ -297,7 +303,7 @@ class Wget(FetchMethod):
# Some servers (FusionForge, as used on Alioth) require that the
# optional Accept header is set.
r.add_header("Accept", "*/*")
- r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12")
+ r.add_header("User-Agent", self.user_agent)
def add_basic_auth(login_str, request):
'''Adds Basic auth to http request, pass in login:password as string'''
import base64
@@ -320,11 +326,19 @@ class Wget(FetchMethod):
pass
except urllib.error.URLError as e:
if try_again:
- logger.debug(2, "checkstatus: trying again")
+ logger.debug2("checkstatus: trying again")
+ return self.checkstatus(fetch, ud, d, False)
+ else:
+ # debug for now to avoid spamming the logs in e.g. remote sstate searches
+ logger.debug2("checkstatus() urlopen failed: %s" % e)
+ return False
+ except ConnectionResetError as e:
+ if try_again:
+ logger.debug2("checkstatus: trying again")
return self.checkstatus(fetch, ud, d, False)
else:
# debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+ logger.debug2("checkstatus() urlopen failed: %s" % e)
return False
return True
@@ -401,9 +415,8 @@ class Wget(FetchMethod):
"""
f = tempfile.NamedTemporaryFile()
with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
- agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
fetchcmd = self.basecmd
- fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+ fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
try:
self._runwget(ud, d, fetchcmd, True, workdir=workdir)
fetchresult = f.read()
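
checkstatus() now also survives a connection reset: both URLError and ConnectionResetError trigger one quiet retry before reporting failure. A distilled sketch of that retry shape (simplified; assumes an urllib opener, not the full method):

    from urllib.error import URLError

    def checkstatus(opener, url, try_again=True):
        try:
            opener.open(url, timeout=30)
            return True
        except (URLError, ConnectionResetError):
            if try_again:
                return checkstatus(opener, url, try_again=False)
            return False
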
diff --git a/poky/bitbake/lib/bb/parse/__init__.py b/poky/bitbake/lib/bb/parse/__init__.py
index 76e180b41..c01807ba8 100644
--- a/poky/bitbake/lib/bb/parse/__init__.py
+++ b/poky/bitbake/lib/bb/parse/__init__.py
@@ -71,7 +71,7 @@ def update_mtime(f):
def update_cache(f):
if f in __mtime_cache:
- logger.debug(1, "Updating mtime cache for %s" % f)
+ logger.debug("Updating mtime cache for %s" % f)
update_mtime(f)
def clear_cache():
diff --git a/poky/bitbake/lib/bb/parse/ast.py b/poky/bitbake/lib/bb/parse/ast.py
index 0714296af..50a88f7da 100644
--- a/poky/bitbake/lib/bb/parse/ast.py
+++ b/poky/bitbake/lib/bb/parse/ast.py
@@ -34,7 +34,7 @@ class IncludeNode(AstNode):
Include the file and evaluate the statements
"""
s = data.expand(self.what_file)
- logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
+ logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s)
# TODO: Cache those includes... maybe not here though
if self.force:
@@ -335,7 +335,7 @@ def finalize(fn, d, variant = None):
if not handlerfn:
bb.fatal("Undefined event handler function '%s'" % var)
handlerln = int(d.getVarFlag(var, "lineno", False))
- bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)
+ bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln, data=d)
bb.event.fire(bb.event.RecipePreFinalise(fn), d)
@@ -376,7 +376,7 @@ def _create_variants(datastores, names, function, onlyfinalise):
def multi_finalize(fn, d):
appends = (d.getVar("__BBAPPEND") or "").split()
for append in appends:
- logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
+ logger.debug("Appending .bbappend file %s to %s", append, fn)
bb.parse.BBHandler.handle(append, d, True)
onlyfinalise = d.getVar("__ONLYFINALISE", False)
diff --git a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 8a520e307..f8988b863 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -22,7 +22,7 @@ from .ConfHandler import include, init
# For compatibility
bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
-__func_start_regexp__ = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
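
The tightened __func_start_regexp__ uses lookaheads so "python" and "fakeroot" only act as keywords when followed by whitespace (or "(" for python); a shell function whose name merely starts with "python" is no longer misclassified as a python function. Checked against the new pattern:

    import re
    func_start = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$")
    assert func_start.match("python do_foo() {").group('py') == "python"
    assert func_start.match("python_do_foo() {").group('py') is None
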
@@ -60,7 +60,7 @@ def inherit(files, fn, lineno, d):
file = abs_fn
if not file in __inherit_cache:
- logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
+ logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno))
__inherit_cache.append( file )
d.setVar('__inherit_cache', __inherit_cache)
include(fn, file, lineno, d, "inherit")
diff --git a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index af64d3446..f171c5c93 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -95,7 +95,7 @@ def include_single_file(parentfn, fn, lineno, data, error_out):
if exc.errno == errno.ENOENT:
if error_out:
raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno)
- logger.debug(2, "CONF file '%s' not found", fn)
+ logger.debug2("CONF file '%s' not found", fn)
else:
if error_out:
raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno)
diff --git a/poky/bitbake/lib/bb/persist_data.py b/poky/bitbake/lib/bb/persist_data.py
index 5f4fbe350..c6a209fb3 100644
--- a/poky/bitbake/lib/bb/persist_data.py
+++ b/poky/bitbake/lib/bb/persist_data.py
@@ -248,7 +248,7 @@ class PersistData(object):
stacklevel=2)
self.data = persist(d)
- logger.debug(1, "Using '%s' as the persistent data cache",
+ logger.debug("Using '%s' as the persistent data cache",
self.data.filename)
def addDomain(self, domain):
diff --git a/poky/bitbake/lib/bb/providers.py b/poky/bitbake/lib/bb/providers.py
index 3f66a3d99..b5a6cd009 100644
--- a/poky/bitbake/lib/bb/providers.py
+++ b/poky/bitbake/lib/bb/providers.py
@@ -165,7 +165,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
available_vers.sort()
logger.warn("versions of %s available: %s", pn, ' '.join(available_vers))
else:
- logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
+ logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
return (preferred_ver, preferred_file)
@@ -232,7 +232,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
pkg_pn[pn] = []
pkg_pn[pn].append(p)
- logger.debug(1, "providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
+ logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
# First add PREFERRED_VERSIONS
for pn in sorted(pkg_pn):
@@ -248,9 +248,9 @@ def _filterProviders(providers, item, cfgData, dataCache):
preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0])
eligible.append(preferred_versions[pn][1])
- if len(eligible) == 0:
+ if not eligible:
logger.error("no eligible providers for %s", item)
- return 0
+ return eligible
# If pn == item, give it a slight default preference
# This means PREFERRED_PROVIDER_foobar defaults to foobar if available
@@ -291,7 +291,7 @@ def filterProviders(providers, item, cfgData, dataCache):
foundUnique = True
break
- logger.debug(1, "sorted providers for %s are: %s", item, eligible)
+ logger.debug("sorted providers for %s are: %s", item, eligible)
return eligible, foundUnique
@@ -333,7 +333,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
provides = dataCache.pn_provides[pn]
for provide in provides:
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide)
- #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
+ #logger.debug("checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
if prefervar in pns and pns[prefervar] not in preferred:
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
@@ -349,7 +349,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
if numberPreferred > 1:
logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s. You could set PREFERRED_RPROVIDER_%s" % (item, preferred, preferred_vars, item))
- logger.debug(1, "sorted runtime providers for %s are: %s", item, eligible)
+ logger.debug("sorted runtime providers for %s are: %s", item, eligible)
return eligible, numberPreferred
@@ -384,7 +384,7 @@ def getRuntimeProviders(dataCache, rdepend):
regexp_cache[pattern] = regexp
if regexp.match(rdepend):
rproviders += dataCache.packages_dynamic[pattern]
- logger.debug(1, "Assuming %s is a dynamic package, but it may not exist" % rdepend)
+ logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend)
return rproviders
@@ -396,22 +396,22 @@ def buildWorldTargetList(dataCache, task=None):
if dataCache.world_target:
return
- logger.debug(1, "collating packages for \"world\"")
+ logger.debug("collating packages for \"world\"")
for f in dataCache.possible_world:
terminal = True
pn = dataCache.pkg_fn[f]
if task and task not in dataCache.task_deps[f]['tasks']:
- logger.debug(2, "World build skipping %s as task %s doesn't exist", f, task)
+ logger.debug2("World build skipping %s as task %s doesn't exist", f, task)
terminal = False
for p in dataCache.pn_provides[pn]:
if p.startswith('virtual/'):
- logger.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
+ logger.debug2("World build skipping %s due to %s provider starting with virtual/", f, p)
terminal = False
break
for pf in dataCache.providers[p]:
if dataCache.pkg_fn[pf] != pn:
- logger.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
+ logger.debug2("World build skipping %s due to both us and %s providing %s", f, pf, p)
terminal = False
break
if terminal:
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 28bdadb45..54ef245a6 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -38,7 +38,7 @@ def taskname_from_tid(tid):
return tid.rsplit(":", 1)[1]
def mc_from_tid(tid):
- if tid.startswith('mc:'):
+ if tid.startswith('mc:') and tid.count(':') >= 2:
return tid.split(':')[1]
return ""
@@ -47,13 +47,13 @@ def split_tid(tid):
return (mc, fn, taskname)
def split_mc(n):
- if n.startswith("mc:"):
+ if n.startswith("mc:") and n.count(':') >= 2:
_, mc, n = n.split(":", 2)
return (mc, n)
return ('', n)
def split_tid_mcfn(tid):
- if tid.startswith('mc:'):
+ if tid.startswith('mc:') and tid.count(':') >= 2:
elems = tid.split(':')
mc = elems[1]
fn = ":".join(elems[2:-1])
@@ -544,8 +544,8 @@ class RunQueueData:
for tid in self.runtaskentries:
if task_done[tid] is False or deps_left[tid] != 0:
problem_tasks.append(tid)
- logger.debug(2, "Task %s is not buildable", tid)
- logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[tid], deps_left[tid])
+ logger.debug2("Task %s is not buildable", tid)
+ logger.debug2("(Complete marker was %s and the remaining dependency count was %s)\n", task_done[tid], deps_left[tid])
self.runtaskentries[tid].weight = weight[tid]
if problem_tasks:
@@ -643,7 +643,7 @@ class RunQueueData:
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
#runtid = build_tid(mc, fn, taskname)
- #logger.debug(2, "Processing %s,%s:%s", mc, fn, taskname)
+ #logger.debug2("Processing %s,%s:%s", mc, fn, taskname)
depends = set()
task_deps = self.dataCaches[mc].task_deps[taskfn]
@@ -1199,9 +1199,9 @@ class RunQueueData:
"""
Dump some debug information on the internal data structures
"""
- logger.debug(3, "run_tasks:")
+ logger.debug3("run_tasks:")
for tid in self.runtaskentries:
- logger.debug(3, " %s: %s Deps %s RevDeps %s", tid,
+ logger.debug3(" %s: %s Deps %s RevDeps %s", tid,
self.runtaskentries[tid].weight,
self.runtaskentries[tid].depends,
self.runtaskentries[tid].revdeps)
@@ -1238,7 +1238,7 @@ class RunQueue:
self.fakeworker = {}
def _start_worker(self, mc, fakeroot = False, rqexec = None):
- logger.debug(1, "Starting bitbake-worker")
+ logger.debug("Starting bitbake-worker")
magic = "decafbad"
if self.cooker.configuration.profile:
magic = "decafbadbad"
@@ -1271,6 +1271,7 @@ class RunQueue:
"date" : self.cfgData.getVar("DATE"),
"time" : self.cfgData.getVar("TIME"),
"hashservaddr" : self.cooker.hashservaddr,
+ "umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"),
}
worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>")
@@ -1283,7 +1284,7 @@ class RunQueue:
def _teardown_worker(self, worker):
if not worker:
return
- logger.debug(1, "Teardown for bitbake-worker")
+ logger.debug("Teardown for bitbake-worker")
try:
worker.process.stdin.write(b"<quit></quit>")
worker.process.stdin.flush()
@@ -1356,12 +1357,12 @@ class RunQueue:
# If the stamp is missing, it's not current
if not os.access(stampfile, os.F_OK):
- logger.debug(2, "Stampfile %s not available", stampfile)
+ logger.debug2("Stampfile %s not available", stampfile)
return False
# If it's a 'nostamp' task, it's not current
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
- logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
+ logger.debug2("%s.%s is nostamp\n", fn, taskname)
return False
if taskname != "do_setscene" and taskname.endswith("_setscene"):
@@ -1385,18 +1386,18 @@ class RunQueue:
continue
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
if not t2:
- logger.debug(2, 'Stampfile %s does not exist', stampfile2)
+ logger.debug2('Stampfile %s does not exist', stampfile2)
iscurrent = False
break
if t1 < t2:
- logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
+ logger.debug2('Stampfile %s < %s', stampfile, stampfile2)
iscurrent = False
break
if recurse and iscurrent:
if dep in cache:
iscurrent = cache[dep]
if not iscurrent:
- logger.debug(2, 'Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
+ logger.debug2('Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
else:
iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
cache[dep] = iscurrent
@@ -1466,7 +1467,7 @@ class RunQueue:
if not self.dm_event_handler_registered:
res = bb.event.register(self.dm_event_handler_name,
lambda x: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
- ('bb.event.HeartbeatEvent',))
+ ('bb.event.HeartbeatEvent',), data=self.cfgData)
self.dm_event_handler_registered = True
dump = self.cooker.configuration.dump_signatures
@@ -1505,7 +1506,7 @@ class RunQueue:
build_done = self.state is runQueueComplete or self.state is runQueueFailed
if build_done and self.dm_event_handler_registered:
- bb.event.remove(self.dm_event_handler_name, None)
+ bb.event.remove(self.dm_event_handler_name, None, data=self.cfgData)
self.dm_event_handler_registered = False
if build_done and self.rqexe:
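Registration and removal of the heartbeat-driven disk monitor now pass the same configuration datastore via data=, keeping the two calls symmetric. A toy registry sketch of that symmetry; this is not bb.event itself, and the handler and variable names are illustrative:

    handlers = {}

    def register(name, handler, mask, data=None):
        # data supplies a datastore context at registration time.
        handlers[name] = (handler, mask, data)

    def remove(name, handler, data=None):
        # The matching context is passed again on removal.
        handlers.pop(name, None)

    cfg = {"BB_DISKMON_DIRS": "..."}
    register("rq-disk-monitor", lambda e: None,
             ('bb.event.HeartbeatEvent',), data=cfg)
    remove("rq-disk-monitor", None, data=cfg)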
@@ -1761,7 +1762,7 @@ class RunQueueExecute:
for scheduler in schedulers:
if self.scheduler == scheduler.name:
self.sched = scheduler(self, self.rqdata)
- logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
+ logger.debug("Using runqueue scheduler '%s'", scheduler.name)
break
else:
bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
@@ -1899,7 +1900,7 @@ class RunQueueExecute:
break
if alldeps:
self.setbuildable(revdep)
- logger.debug(1, "Marking task %s as buildable", revdep)
+ logger.debug("Marking task %s as buildable", revdep)
def task_complete(self, task):
self.stats.taskCompleted()
@@ -1929,7 +1930,7 @@ class RunQueueExecute:
def summarise_scenequeue_errors(self):
err = False
if not self.sqdone:
- logger.debug(1, 'We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
+ logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
completeevent = sceneQueueComplete(self.sq_stats, self.rq)
bb.event.fire(completeevent, self.cfgData)
if self.sq_deferred:
@@ -1986,7 +1987,7 @@ class RunQueueExecute:
if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values():
if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
if nexttask not in self.rqdata.target_tids:
- logger.debug(2, "Skipping setscene for task %s" % nexttask)
+ logger.debug2("Skipping setscene for task %s" % nexttask)
self.sq_task_skip(nexttask)
self.scenequeue_notneeded.add(nexttask)
if nexttask in self.sq_deferred:
@@ -1999,28 +2000,28 @@ class RunQueueExecute:
if nexttask in self.sq_deferred:
if self.sq_deferred[nexttask] not in self.runq_complete:
continue
- logger.debug(1, "Task %s no longer deferred" % nexttask)
+ logger.debug("Task %s no longer deferred" % nexttask)
del self.sq_deferred[nexttask]
valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False)
if not valid:
- logger.debug(1, "%s didn't become valid, skipping setscene" % nexttask)
+ logger.debug("%s didn't become valid, skipping setscene" % nexttask)
self.sq_task_failoutright(nexttask)
return True
else:
self.sqdata.outrightfail.remove(nexttask)
if nexttask in self.sqdata.outrightfail:
- logger.debug(2, 'No package found, so skipping setscene task %s', nexttask)
+ logger.debug2('No package found, so skipping setscene task %s', nexttask)
self.sq_task_failoutright(nexttask)
return True
if nexttask in self.sqdata.unskippable:
- logger.debug(2, "Setscene task %s is unskippable" % nexttask)
+ logger.debug2("Setscene task %s is unskippable" % nexttask)
task = nexttask
break
if task is not None:
(mc, fn, taskname, taskfn) = split_tid_mcfn(task)
taskname = taskname + "_setscene"
if self.rq.check_stamp_task(task, taskname_from_tid(task), recurse = True, cache=self.stampcache):
- logger.debug(2, 'Stamp for underlying task %s is current, so skipping setscene variant', task)
+ logger.debug2('Stamp for underlying task %s is current, so skipping setscene variant', task)
self.sq_task_failoutright(task)
return True
@@ -2030,12 +2031,12 @@ class RunQueueExecute:
return True
if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
- logger.debug(2, 'Setscene stamp current task %s, so skip it and its dependencies', task)
+ logger.debug2('Setscene stamp current task %s, so skip it and its dependencies', task)
self.sq_task_skip(task)
return True
if self.cooker.configuration.skipsetscene:
- logger.debug(2, 'No setscene tasks should be executed. Skipping %s', task)
+ logger.debug2('No setscene tasks should be executed. Skipping %s', task)
self.sq_task_failoutright(task)
return True
@@ -2097,12 +2098,12 @@ class RunQueueExecute:
return True
if task in self.tasks_covered:
- logger.debug(2, "Setscene covered task %s", task)
+ logger.debug2("Setscene covered task %s", task)
self.task_skip(task, "covered")
return True
if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
- logger.debug(2, "Stamp current task %s", task)
+ logger.debug2("Stamp current task %s", task)
self.task_skip(task, "existing")
self.runq_tasksrun.add(task)
@@ -2322,7 +2323,7 @@ class RunQueueExecute:
remapped = True
if not remapped:
- #logger.debug(1, "Task %s hash changes: %s->%s %s->%s" % (tid, orighash, newhash, origuni, newuni))
+ #logger.debug("Task %s hash changes: %s->%s %s->%s" % (tid, orighash, newhash, origuni, newuni))
self.rqdata.runtaskentries[tid].hash = newhash
self.rqdata.runtaskentries[tid].unihash = newuni
changed.add(tid)
@@ -2337,7 +2338,7 @@ class RunQueueExecute:
for mc in self.rq.fakeworker:
self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>")
- hashequiv_logger.debug(1, pprint.pformat("Tasks changed:\n%s" % (changed)))
+ hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed)))
for tid in changed:
if tid not in self.rqdata.runq_setscene_tids:
@@ -2356,7 +2357,7 @@ class RunQueueExecute:
# Check no tasks this covers are running
for dep in self.sqdata.sq_covered_tasks[tid]:
if dep in self.runq_running and dep not in self.runq_complete:
- hashequiv_logger.debug(2, "Task %s is running which blocks setscene for %s from running" % (dep, tid))
+ hashequiv_logger.debug2("Task %s is running which blocks setscene for %s from running" % (dep, tid))
valid = False
break
if not valid:
@@ -2430,7 +2431,7 @@ class RunQueueExecute:
for dep in sorted(self.sqdata.sq_deps[task]):
if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]:
- logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
+ logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
self.sq_task_failoutright(dep)
continue
if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
@@ -2460,7 +2461,7 @@ class RunQueueExecute:
completed dependencies as buildable
"""
- logger.debug(1, 'Found task %s which could be accelerated', task)
+ logger.debug('Found task %s which could be accelerated', task)
self.scenequeue_covered.add(task)
self.scenequeue_updatecounters(task)
@@ -2775,13 +2776,13 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
continue
if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
- logger.debug(2, 'Setscene stamp current for task %s', tid)
+ logger.debug2('Setscene stamp current for task %s', tid)
sqdata.stamppresent.add(tid)
sqrq.sq_task_skip(tid)
continue
if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache):
- logger.debug(2, 'Normal stamp current for task %s', tid)
+ logger.debug2('Normal stamp current for task %s', tid)
sqdata.stamppresent.add(tid)
sqrq.sq_task_skip(tid)
continue
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 0ac395246..0d88c6ec6 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -541,7 +541,7 @@ class SignatureGeneratorUniHashMixIn(object):
# is much more interesting, so it is reported at debug level 1
hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
else:
- hashequiv_logger.debug(2, 'No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
+ hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
except hashserv.client.HashConnectionError as e:
bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
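The surviving numbered call above picks its verbosity dynamically: indexing the tuple (1, 2) with a bool (False is 0, True is 1) selects level 1 when the unihash differs from the taskhash and the quieter level 2 when they match. A minimal illustration of the idiom:

    unihash, taskhash = "abc123", "def456"
    level = (1, 2)[unihash == taskhash]   # hashes differ -> level 1
    assert level == 1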
@@ -615,12 +615,12 @@ class SignatureGeneratorUniHashMixIn(object):
new_unihash = data['unihash']
if new_unihash != unihash:
- hashequiv_logger.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
+ hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
self.set_unihash(tid, new_unihash)
d.setVar('BB_UNIHASH', new_unihash)
else:
- hashequiv_logger.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
+ hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
except hashserv.client.HashConnectionError as e:
bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
finally:
@@ -748,7 +748,7 @@ def clean_basepath(basepath):
if basepath[0] == '/':
return cleaned
- if basepath.startswith("mc:"):
+ if basepath.startswith("mc:") and basepath.count(':') >= 2:
mc, mc_name, basepath = basepath.split(":", 2)
mc_suffix = ':mc:' + mc_name
else:
diff --git a/poky/bitbake/lib/bb/taskdata.py b/poky/bitbake/lib/bb/taskdata.py
index ffbaf362e..47bad6d1f 100644
--- a/poky/bitbake/lib/bb/taskdata.py
+++ b/poky/bitbake/lib/bb/taskdata.py
@@ -131,7 +131,7 @@ class TaskData:
for depend in dataCache.deps[fn]:
dependids.add(depend)
self.depids[fn] = list(dependids)
- logger.debug(2, "Added dependencies %s for %s", str(dataCache.deps[fn]), fn)
+ logger.debug2("Added dependencies %s for %s", str(dataCache.deps[fn]), fn)
# Work out runtime dependencies
if not fn in self.rdepids:
@@ -149,9 +149,9 @@ class TaskData:
rreclist.append(rdepend)
rdependids.add(rdepend)
if rdependlist:
- logger.debug(2, "Added runtime dependencies %s for %s", str(rdependlist), fn)
+ logger.debug2("Added runtime dependencies %s for %s", str(rdependlist), fn)
if rreclist:
- logger.debug(2, "Added runtime recommendations %s for %s", str(rreclist), fn)
+ logger.debug2("Added runtime recommendations %s for %s", str(rreclist), fn)
self.rdepids[fn] = list(rdependids)
for dep in self.depids[fn]:
@@ -378,7 +378,7 @@ class TaskData:
for fn in eligible:
if fn in self.failed_fns:
continue
- logger.debug(2, "adding %s to satisfy %s", fn, item)
+ logger.debug2("adding %s to satisfy %s", fn, item)
self.add_build_target(fn, item)
self.add_tasks(fn, dataCache)
@@ -431,7 +431,7 @@ class TaskData:
for fn in eligible:
if fn in self.failed_fns:
continue
- logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
+ logger.debug2("adding '%s' to satisfy runtime '%s'", fn, item)
self.add_runtime_target(fn, item)
self.add_tasks(fn, dataCache)
@@ -446,7 +446,7 @@ class TaskData:
return
if not missing_list:
missing_list = []
- logger.debug(1, "File '%s' is unbuildable, removing...", fn)
+ logger.debug("File '%s' is unbuildable, removing...", fn)
self.failed_fns.append(fn)
for target in self.build_targets:
if fn in self.build_targets[target]:
@@ -526,7 +526,7 @@ class TaskData:
added = added + 1
except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
self.remove_runtarget(target)
- logger.debug(1, "Resolved " + str(added) + " extra dependencies")
+ logger.debug("Resolved " + str(added) + " extra dependencies")
if added == 0:
break
# self.dump_data()
@@ -549,38 +549,38 @@ class TaskData:
"""
Dump some debug information on the internal data structures
"""
- logger.debug(3, "build_names:")
- logger.debug(3, ", ".join(self.build_targets))
+ logger.debug3("build_names:")
+ logger.debug3(", ".join(self.build_targets))
- logger.debug(3, "run_names:")
- logger.debug(3, ", ".join(self.run_targets))
+ logger.debug3("run_names:")
+ logger.debug3(", ".join(self.run_targets))
- logger.debug(3, "build_targets:")
+ logger.debug3("build_targets:")
for target in self.build_targets:
targets = "None"
if target in self.build_targets:
targets = self.build_targets[target]
- logger.debug(3, " %s: %s", target, targets)
+ logger.debug3(" %s: %s", target, targets)
- logger.debug(3, "run_targets:")
+ logger.debug3("run_targets:")
for target in self.run_targets:
targets = "None"
if target in self.run_targets:
targets = self.run_targets[target]
- logger.debug(3, " %s: %s", target, targets)
+ logger.debug3(" %s: %s", target, targets)
- logger.debug(3, "tasks:")
+ logger.debug3("tasks:")
for tid in self.taskentries:
- logger.debug(3, " %s: %s %s %s",
+ logger.debug3(" %s: %s %s %s",
tid,
self.taskentries[tid].idepends,
self.taskentries[tid].irdepends,
self.taskentries[tid].tdepends)
- logger.debug(3, "dependency ids (per fn):")
+ logger.debug3("dependency ids (per fn):")
for fn in self.depids:
- logger.debug(3, " %s: %s", fn, self.depids[fn])
+ logger.debug3(" %s: %s", fn, self.depids[fn])
- logger.debug(3, "runtime dependency ids (per fn):")
+ logger.debug3("runtime dependency ids (per fn):")
for fn in self.rdepids:
- logger.debug(3, " %s: %s", fn, self.rdepids[fn])
+ logger.debug3(" %s: %s", fn, self.rdepids[fn])
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 1452d7615..7b2dac7b8 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -87,6 +87,25 @@ class URITest(unittest.TestCase):
},
'relative': False
},
+ # Check that trailing semicolons are handled correctly
+ "http://www.example.org/index.html?qparam1=qvalue1;param2=value2;" : {
+ 'uri': 'http://www.example.org/index.html?qparam1=qvalue1;param2=value2',
+ 'scheme': 'http',
+ 'hostname': 'www.example.org',
+ 'port': None,
+ 'hostport': 'www.example.org',
+ 'path': '/index.html',
+ 'userinfo': '',
+ 'username': '',
+ 'password': '',
+ 'params': {
+ 'param2': 'value2'
+ },
+ 'query': {
+ 'qparam1': 'qvalue1'
+ },
+ 'relative': False
+ },
"http://www.example.com:8080/index.html" : {
'uri': 'http://www.example.com:8080/index.html',
'scheme': 'http',
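The new test entry pins down how trailing semicolons should be handled: the empty parameter segment after the final ";" is dropped rather than surfacing as a bogus empty key, and the normalised URI loses the trailing ";". A standalone sketch of that tolerance in plain Python; this is not the bb.fetch2.URI implementation:

    def parse_params(s):
        # "a=1;b=2;" -> {'a': '1', 'b': '2'}; empty segments are skipped.
        params = {}
        for seg in s.split(";"):
            if not seg:
                continue
            key, _, value = seg.partition("=")
            params[key] = value
        return params

    print(parse_params("param2=value2;"))  # {'param2': 'value2'}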
@@ -2089,13 +2108,14 @@ class GitLfsTest(FetcherTest):
cwd = self.gitdir
return bb.process.run(cmd, cwd=cwd)[0]
- def fetch(self, uri=None):
+ def fetch(self, uri=None, download=True):
uris = self.d.getVar('SRC_URI').split()
uri = uris[0]
d = self.d
fetcher = bb.fetch2.Fetch(uris, d)
- fetcher.download()
+ if download:
+ fetcher.download()
ud = fetcher.ud[uri]
return fetcher, ud
@@ -2105,16 +2125,21 @@ class GitLfsTest(FetcherTest):
uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir
self.d.setVar('SRC_URI', uri)
- fetcher, ud = self.fetch()
+ # Careful: suppress initial attempt at downloading until
+ # we know whether git-lfs is installed.
+ fetcher, ud = self.fetch(uri=None, download=False)
self.assertIsNotNone(ud.method._find_git_lfs)
- # If git-lfs can be found, the unpack should be successful
- ud.method._find_git_lfs = lambda d: True
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
+ # If git-lfs can be found, the unpack should be successful. Only
+ # attempt this when a real, live copy of git-lfs is installed.
+ if ud.method._find_git_lfs(self.d):
+ fetcher.download()
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
# If git-lfs cannot be found, the unpack should throw an error
with self.assertRaises(bb.fetch2.FetchError):
+ fetcher.download()
ud.method._find_git_lfs = lambda d: False
shutil.rmtree(self.gitdir, ignore_errors=True)
fetcher.unpack(self.d.getVar('WORKDIR'))
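The reworked test only exercises the real download path when git-lfs is genuinely installed, and forces the failure path by stubbing the detector out. A sketch of the detection guard; the real _find_git_lfs takes the datastore as an argument, so this PATH probe is a simplification:

    import shutil

    def find_git_lfs():
        # Stand-in detector: is a git-lfs executable on PATH?
        return shutil.which("git-lfs") is not None

    if find_git_lfs():
        print("git-lfs present: exercise the live download and unpack")
    else:
        print("git-lfs absent: only the forced-failure path can run")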
@@ -2125,10 +2150,16 @@ class GitLfsTest(FetcherTest):
uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir
self.d.setVar('SRC_URI', uri)
+ # In contrast to test_lfs_enabled(), allow the implicit download
+ # done by self.fetch() to occur here. The point of this test case
+ # is to verify that the fetcher can survive even if the source
+ # repository has Git LFS usage configured.
fetcher, ud = self.fetch()
self.assertIsNotNone(ud.method._find_git_lfs)
- # If git-lfs can be found, the unpack should be successful
+ # If git-lfs can be found, the unpack should be successful. A
+ # live copy of git-lfs is not required for this case, so
+ # unconditionally forge its presence.
ud.method._find_git_lfs = lambda d: True
shutil.rmtree(self.gitdir, ignore_errors=True)
fetcher.unpack(self.d.getVar('WORKDIR'))
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc1.conf b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc-1.conf
index f34b8dccc..f34b8dccc 100644
--- a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc1.conf
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc-1.conf
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc2.conf b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc_2.conf
index c3360fc5c..c3360fc5c 100644
--- a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc2.conf
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc_2.conf
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/f1.bb b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/f1.bb
index d45a4cff5..7b8fc592a 100644
--- a/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/f1.bb
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/f1.bb
@@ -1 +1 @@
-do_install[mcdepends] = "mc:mc1:mc2:a1:do_build"
+do_install[mcdepends] = "mc:mc-1:mc_2:a1:do_build"
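The renamed multiconfigs deliberately exercise a dash and an underscore in the mcdepends string, which takes the form mc:<from-mc>:<to-mc>:<recipe>:<task>. A quick check that plain colon-splitting still recovers every field with the new names:

    dep = "mc:mc-1:mc_2:a1:do_build"
    prefix, from_mc, to_mc, recipe, task = dep.split(":")
    assert (from_mc, to_mc, recipe, task) == ("mc-1", "mc_2", "a1", "do_build")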
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc1.bb b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc1.bb
index 17a181fff..eed69c805 100644
--- a/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc1.bb
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc1.bb
@@ -1,5 +1,5 @@
python () {
- if d.getVar("BB_CURRENT_MC") == "mc1":
- bb.fatal("Multiconfig is mc1")
+ if d.getVar("BB_CURRENT_MC") == "mc-1":
+ bb.fatal("Multiconfig is mc-1")
}
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc2.bb b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc2.bb
index cc69e7b82..3c172ef97 100644
--- a/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc2.bb
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc2.bb
@@ -1,4 +1,4 @@
python () {
- if d.getVar("BB_CURRENT_MC") == "mc2":
- bb.fatal("Multiconfig is mc2")
+ if d.getVar("BB_CURRENT_MC") == "mc_2":
+ bb.fatal("Multiconfig is mc_2")
}
diff --git a/poky/bitbake/lib/bb/tests/runqueue.py b/poky/bitbake/lib/bb/tests/runqueue.py
index d3d62b98f..3d51779d6 100644
--- a/poky/bitbake/lib/bb/tests/runqueue.py
+++ b/poky/bitbake/lib/bb/tests/runqueue.py
@@ -216,66 +216,66 @@ class RunQueueTests(unittest.TestCase):
def test_multiconfig_setscene_optimise(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
- "BBMULTICONFIG" : "mc1 mc2",
+ "BBMULTICONFIG" : "mc-1 mc_2",
"BB_SIGNATURE_HANDLER" : "basic"
}
- cmd = ["bitbake", "b1", "mc:mc1:b1", "mc:mc2:b1"]
+ cmd = ["bitbake", "b1", "mc:mc-1:b1", "mc:mc_2:b1"]
setscenetasks = ['package_write_ipk_setscene', 'package_write_rpm_setscene', 'packagedata_setscene',
'populate_sysroot_setscene', 'package_qa_setscene']
sstatevalid = ""
tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv)
expected = ['a1:' + x for x in self.alltasks] + ['b1:' + x for x in self.alltasks] + \
- ['mc1:b1:' + x for x in setscenetasks] + ['mc1:a1:' + x for x in setscenetasks] + \
- ['mc2:b1:' + x for x in setscenetasks] + ['mc2:a1:' + x for x in setscenetasks] + \
- ['mc1:b1:build', 'mc2:b1:build']
- for x in ['mc1:a1:package_qa_setscene', 'mc2:a1:package_qa_setscene', 'a1:build', 'a1:package_qa']:
+ ['mc-1:b1:' + x for x in setscenetasks] + ['mc-1:a1:' + x for x in setscenetasks] + \
+ ['mc_2:b1:' + x for x in setscenetasks] + ['mc_2:a1:' + x for x in setscenetasks] + \
+ ['mc-1:b1:build', 'mc_2:b1:build']
+ for x in ['mc-1:a1:package_qa_setscene', 'mc_2:a1:package_qa_setscene', 'a1:build', 'a1:package_qa']:
expected.remove(x)
self.assertEqual(set(tasks), set(expected))
def test_multiconfig_bbmask(self):
# This test validates that multiconfigs can independently mask off
# recipes they do not want with BBMASK. It works by having recipes
- # that will fail to parse for mc1 and mc2, then making each multiconfig
+ # that will fail to parse for mc-1 and mc_2, then making each multiconfig
# build the one that does parse. This ensures that the recipes are in
# each multiconfig's BBFILES, but each masks only the one that
# doesn't parse.
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
- "BBMULTICONFIG" : "mc1 mc2",
+ "BBMULTICONFIG" : "mc-1 mc_2",
"BB_SIGNATURE_HANDLER" : "basic",
"EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb",
}
- cmd = ["bitbake", "mc:mc1:fails-mc2", "mc:mc2:fails-mc1"]
+ cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"]
self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv)
def test_multiconfig_mcdepends(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
- "BBMULTICONFIG" : "mc1 mc2",
+ "BBMULTICONFIG" : "mc-1 mc_2",
"BB_SIGNATURE_HANDLER" : "TestMulticonfigDepends",
"EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb",
}
- tasks = self.run_bitbakecmd(["bitbake", "mc:mc1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
- expected = ["mc1:f1:%s" % t for t in self.alltasks] + \
- ["mc2:a1:%s" % t for t in self.alltasks]
+ tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
+ expected = ["mc-1:f1:%s" % t for t in self.alltasks] + \
+ ["mc_2:a1:%s" % t for t in self.alltasks]
self.assertEqual(set(tasks), set(expected))
# A rebuild does nothing
- tasks = self.run_bitbakecmd(["bitbake", "mc:mc1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
+ tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
self.assertEqual(set(tasks), set())
# Test that a signature change in the dependent task causes
# mcdepends to rebuild
- tasks = self.run_bitbakecmd(["bitbake", "mc:mc2:a1", "-c", "compile", "-f"], tempdir, "", extraenv=extraenv, cleanup=True)
- expected = ["mc2:a1:compile"]
+ tasks = self.run_bitbakecmd(["bitbake", "mc:mc_2:a1", "-c", "compile", "-f"], tempdir, "", extraenv=extraenv, cleanup=True)
+ expected = ["mc_2:a1:compile"]
self.assertEqual(set(tasks), set(expected))
rerun_tasks = self.alltasks[:]
for x in ("fetch", "unpack", "patch", "prepare_recipe_sysroot", "configure", "compile"):
rerun_tasks.remove(x)
- tasks = self.run_bitbakecmd(["bitbake", "mc:mc1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
- expected = ["mc1:f1:%s" % t for t in rerun_tasks] + \
- ["mc2:a1:%s" % t for t in rerun_tasks]
+ tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
+ expected = ["mc-1:f1:%s" % t for t in rerun_tasks] + \
+ ["mc_2:a1:%s" % t for t in rerun_tasks]
self.assertEqual(set(tasks), set(expected))
@unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
diff --git a/poky/bitbake/lib/bb/ui/buildinfohelper.py b/poky/bitbake/lib/bb/ui/buildinfohelper.py
index 82c62e332..43aa59284 100644
--- a/poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/poky/bitbake/lib/bb/ui/buildinfohelper.py
@@ -148,14 +148,14 @@ class ORMWrapper(object):
buildrequest = None
if brbe is not None:
# Toaster-triggered build
- logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
+ logger.debug("buildinfohelper: brbe is %s" % brbe)
br, _ = brbe.split(":")
buildrequest = BuildRequest.objects.get(pk=br)
prj = buildrequest.project
else:
# CLI build
prj = Project.objects.get_or_create_default_project()
- logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)
+ logger.debug("buildinfohelper: project is not specified, defaulting to %s" % prj)
if buildrequest is not None:
# reuse existing Build object
@@ -171,7 +171,7 @@ class ORMWrapper(object):
completed_on=now,
build_name='')
- logger.debug(1, "buildinfohelper: build is created %s" % build)
+ logger.debug("buildinfohelper: build is created %s" % build)
if buildrequest is not None:
buildrequest.build = build
@@ -906,7 +906,7 @@ class BuildInfoHelper(object):
self.project = None
- logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
+ logger.debug("buildinfohelper: Build info helper inited %s" % vars(self))
###################
@@ -1620,7 +1620,7 @@ class BuildInfoHelper(object):
# if we have a backlog of events, do our best to save them here
if len(self.internal_state['backlog']):
tempevent = self.internal_state['backlog'].pop()
- logger.debug(1, "buildinfohelper: Saving stored event %s "
+ logger.debug("buildinfohelper: Saving stored event %s "
% tempevent)
self.store_log_event(tempevent,cli_backlog)
else:
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index 5c775bd8a..b282d09ab 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -609,7 +609,7 @@ def filter_environment(good_vars):
os.environ["LC_ALL"] = "en_US.UTF-8"
if removed_vars:
- logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
+ logger.debug("Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
return removed_vars
@@ -1613,12 +1613,12 @@ def export_proxies(d):
def load_plugins(logger, plugins, pluginpath):
def load_plugin(name):
- logger.debug(1, 'Loading plugin %s' % name)
+ logger.debug('Loading plugin %s' % name)
spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
if spec:
return spec.loader.load_module()
- logger.debug(1, 'Loading plugins from %s...' % pluginpath)
+ logger.debug('Loading plugins from %s...' % pluginpath)
expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
for ext in python_extensions)