Diffstat (limited to 'poky/bitbake/lib')
-rw-r--r--  poky/bitbake/lib/bb/COW.py | 150
-rw-r--r--  poky/bitbake/lib/bb/__init__.py | 52
-rw-r--r--  poky/bitbake/lib/bb/build.py | 137
-rw-r--r--  poky/bitbake/lib/bb/cache.py | 238
-rw-r--r--  poky/bitbake/lib/bb/command.py | 65
-rw-r--r--  poky/bitbake/lib/bb/compat.py | 10
-rw-r--r--  poky/bitbake/lib/bb/cooker.py | 360
-rw-r--r--  poky/bitbake/lib/bb/cookerdata.py | 59
-rw-r--r--  poky/bitbake/lib/bb/daemonize.py | 2
-rw-r--r--  poky/bitbake/lib/bb/data.py | 6
-rw-r--r--  poky/bitbake/lib/bb/data_smart.py | 2
-rw-r--r--  poky/bitbake/lib/bb/event.py | 16
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py | 39
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py | 28
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py | 66
-rw-r--r--  poky/bitbake/lib/bb/fetch2/local.py | 15
-rw-r--r--  poky/bitbake/lib/bb/fetch2/osc.py | 3
-rw-r--r--  poky/bitbake/lib/bb/fetch2/perforce.py | 81
-rw-r--r--  poky/bitbake/lib/bb/fetch2/ssh.py | 7
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py | 5
-rwxr-xr-x  poky/bitbake/lib/bb/main.py | 18
-rw-r--r--  poky/bitbake/lib/bb/msg.py | 9
-rw-r--r--  poky/bitbake/lib/bb/namedtuple_with_abc.py | 14
-rw-r--r--  poky/bitbake/lib/bb/parse/ast.py | 10
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/BBHandler.py | 5
-rw-r--r--  poky/bitbake/lib/bb/persist_data.py | 8
-rw-r--r--  poky/bitbake/lib/bb/process.py | 4
-rw-r--r--  poky/bitbake/lib/bb/progress.py | 60
-rw-r--r--  poky/bitbake/lib/bb/pysh/pyshyacc.py | 1
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py | 47
-rw-r--r--  poky/bitbake/lib/bb/server/process.py | 282
-rw-r--r--  poky/bitbake/lib/bb/siggen.py | 112
-rw-r--r--  poky/bitbake/lib/bb/taskdata.py | 9
-rw-r--r--  poky/bitbake/lib/bb/tests/color.py | 95
-rw-r--r--  poky/bitbake/lib/bb/tests/cooker.py | 2
-rw-r--r--  poky/bitbake/lib/bb/tests/cow.py | 218
-rw-r--r--  poky/bitbake/lib/bb/tests/data.py | 1
-rw-r--r--  poky/bitbake/lib/bb/tests/event.py | 17
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py | 71
-rw-r--r--  poky/bitbake/lib/bb/tests/parse.py | 6
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf | 3
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc1.conf | 1
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc2.conf | 1
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/recipes/f1.bb | 1
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc1.bb | 5
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc2.bb | 4
-rw-r--r--  poky/bitbake/lib/bb/tests/runqueue.py | 45
-rw-r--r--  poky/bitbake/lib/bb/tests/siggen.py | 91
-rw-r--r--  poky/bitbake/lib/bb/tinfoil.py | 62
-rw-r--r--  poky/bitbake/lib/bb/ui/knotty.py | 23
-rw-r--r--  poky/bitbake/lib/bb/ui/ncurses.py | 2
-rw-r--r--  poky/bitbake/lib/bb/ui/taskexp.py | 20
-rw-r--r--  poky/bitbake/lib/bb/ui/teamcity.py | 2
-rw-r--r--  poky/bitbake/lib/bb/ui/uievent.py | 6
-rw-r--r--  poky/bitbake/lib/bb/utils.py | 74
-rw-r--r--  poky/bitbake/lib/bblayers/action.py | 22
-rw-r--r--  poky/bitbake/lib/bblayers/query.py | 16
-rw-r--r--  poky/bitbake/lib/hashserv/__init__.py | 22
-rw-r--r--  poky/bitbake/lib/hashserv/client.py | 43
-rw-r--r--  poky/bitbake/lib/hashserv/server.py | 105
-rw-r--r--  poky/bitbake/lib/hashserv/tests.py | 31
-rw-r--r--  poky/bitbake/lib/layerindexlib/__init__.py | 15
-rw-r--r--  poky/bitbake/lib/layerindexlib/cooker.py | 7
-rw-r--r--  poky/bitbake/lib/layerindexlib/restapi.py | 6
-rw-r--r--  poky/bitbake/lib/layerindexlib/tests/restapi.py | 2
-rw-r--r--  poky/bitbake/lib/ply/lex.py | 6
-rw-r--r--  poky/bitbake/lib/ply/yacc.py | 2
-rw-r--r--  poky/bitbake/lib/toaster/tests/functional/functional_helpers.py | 8
68 files changed, 2082 insertions, 873 deletions
diff --git a/poky/bitbake/lib/bb/COW.py b/poky/bitbake/lib/bb/COW.py
index bc20ce38e..23c22b65e 100644
--- a/poky/bitbake/lib/bb/COW.py
+++ b/poky/bitbake/lib/bb/COW.py
@@ -3,13 +3,14 @@
#
# Copyright (C) 2006 Tim Ansell
#
-#Please Note:
+# Please Note:
# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
# Assign a file to __warn__ to get warnings about slow operations.
#
import copy
+
ImmutableTypes = (
bool,
complex,
@@ -22,9 +23,11 @@ ImmutableTypes = (
MUTABLE = "__mutable__"
+
class COWMeta(type):
pass
+
class COWDictMeta(COWMeta):
__warn__ = False
__hasmutable__ = False
@@ -33,12 +36,15 @@ class COWDictMeta(COWMeta):
def __str__(cls):
# FIXME: I have magic numbers!
return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+
__repr__ = __str__
def cow(cls):
class C(cls):
__count__ = cls.__count__ + 1
+
return C
+
copy = cow
__call__ = cow
@@ -70,8 +76,9 @@ class COWDictMeta(COWMeta):
return value
__getmarker__ = []
+
def __getreadonly__(cls, key, default=__getmarker__):
- """\
+ """
Get a value (even if mutable) which you promise not to change.
"""
return cls.__getitem__(key, default, True)
@@ -138,24 +145,29 @@ class COWDictMeta(COWMeta):
def iterkeys(cls):
return cls.iter("keys")
+
def itervalues(cls, readonly=False):
if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
- print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
+ print("Warning: If you aren't going to change any of the values call with True.", file=cls.__warn__)
return cls.iter("values", readonly)
+
def iteritems(cls, readonly=False):
if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
- print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
+ print("Warning: If you aren't going to change any of the values call with True.", file=cls.__warn__)
return cls.iter("items", readonly)
+
class COWSetMeta(COWDictMeta):
def __str__(cls):
# FIXME: I have magic numbers!
- return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
+ return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+
__repr__ = __str__
def cow(cls):
class C(cls):
__count__ = cls.__count__ + 1
+
return C
def add(cls, value):
@@ -173,131 +185,11 @@ class COWSetMeta(COWDictMeta):
def iteritems(cls):
raise TypeError("sets don't have 'items'")
+
# These are the actual classes you use!
-class COWDictBase(object, metaclass = COWDictMeta):
+class COWDictBase(metaclass=COWDictMeta):
__count__ = 0
-class COWSetBase(object, metaclass = COWSetMeta):
- __count__ = 0
-if __name__ == "__main__":
- import sys
- COWDictBase.__warn__ = sys.stderr
- a = COWDictBase()
- print("a", a)
-
- a['a'] = 'a'
- a['b'] = 'b'
- a['dict'] = {}
-
- b = a.copy()
- print("b", b)
- b['c'] = 'b'
-
- print()
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems():
- print(x)
- print()
-
- b['dict']['a'] = 'b'
- b['a'] = 'c'
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems():
- print(x)
- print()
-
- try:
- b['dict2']
- except KeyError as e:
- print("Okay!")
-
- a['set'] = COWSetBase()
- a['set'].add("o1")
- a['set'].add("o1")
- a['set'].add("o2")
-
- print("a", a)
- for x in a['set'].itervalues():
- print(x)
- print("--")
- print("b", b)
- for x in b['set'].itervalues():
- print(x)
- print()
-
- b['set'].add('o3')
-
- print("a", a)
- for x in a['set'].itervalues():
- print(x)
- print("--")
- print("b", b)
- for x in b['set'].itervalues():
- print(x)
- print()
-
- a['set2'] = set()
- a['set2'].add("o1")
- a['set2'].add("o1")
- a['set2'].add("o2")
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- del b['b']
- try:
- print(b['b'])
- except KeyError:
- print("Yay! deleted key raises error")
-
- if 'b' in b:
- print("Boo!")
- else:
- print("Yay - has_key with delete works!")
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- b.__revertitem__('b')
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- b.__revertitem__('dict')
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
+class COWSetBase(metaclass=COWSetMeta):
+ __count__ = 0
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index b96466e65..888dd5ccc 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "1.46.0"
+__version__ = "1.47.0"
import sys
if sys.version_info < (3, 5, 0):
@@ -35,12 +35,14 @@ class NullHandler(logging.Handler):
def emit(self, record):
pass
-Logger = logging.getLoggerClass()
-class BBLogger(Logger):
- def __init__(self, name):
+class BBLoggerMixin(object):
+ def __init__(self, *args, **kwargs):
+ # Does nothing to allow calling super() from derived classes
+ pass
+
+ def setup_bblogger(self, name):
if name.split(".")[0] == "BitBake":
self.debug = self.bbdebug
- Logger.__init__(self, name)
def bbdebug(self, level, msg, *args, **kwargs):
loglevel = logging.DEBUG - level + 1
@@ -60,16 +62,56 @@ class BBLogger(Logger):
def verbnote(self, msg, *args, **kwargs):
return self.log(logging.INFO + 2, msg, *args, **kwargs)
+Logger = logging.getLoggerClass()
+class BBLogger(Logger, BBLoggerMixin):
+ def __init__(self, name, *args, **kwargs):
+ self.setup_bblogger(name)
+ super().__init__(name, *args, **kwargs)
logging.raiseExceptions = False
logging.setLoggerClass(BBLogger)
+class BBLoggerAdapter(logging.LoggerAdapter, BBLoggerMixin):
+ def __init__(self, logger, *args, **kwargs):
+ self.setup_bblogger(logger.name)
+ super().__init__(logger, *args, **kwargs)
+
+ if sys.version_info < (3, 6):
+ # These properties were added in Python 3.6. Add them in older versions
+ # for compatibility
+ @property
+ def manager(self):
+ return self.logger.manager
+
+ @manager.setter
+ def manager(self, value):
+ self.logger.manager = value
+
+ @property
+ def name(self):
+ return self.logger.name
+
+ def __repr__(self):
+ logger = self.logger
+ level = logging.getLevelName(logger.getEffectiveLevel())
+ return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
+
+logging.LoggerAdapter = BBLoggerAdapter
+
logger = logging.getLogger("BitBake")
logger.addHandler(NullHandler())
logger.setLevel(logging.DEBUG - 2)
mainlogger = logging.getLogger("BitBake.Main")
+class PrefixLoggerAdapter(logging.LoggerAdapter):
+ def __init__(self, prefix, logger):
+ super().__init__(logger, {})
+ self.__msg_prefix = prefix
+
+ def process(self, msg, kwargs):
+ return "%s%s" %(self.__msg_prefix, msg), kwargs
+
# This has to be imported after the setLoggerClass, as the import of bb.msg
# can result in construction of the various loggers.
import bb.msg
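
A usage sketch for the PrefixLoggerAdapter added above; the prefix string here is hypothetical, but cache.py below constructs it the same way:

    import logging
    from bb import PrefixLoggerAdapter

    logger = logging.getLogger("BitBake.Cache")
    cachelog = PrefixLoggerAdapter("Cache: mc1: ", logger)
    cachelog.info("Parsing %s", "foo.bb")
    # emitted as: "Cache: mc1: Parsing foo.bb"
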
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index 23b6ee455..974d2ff06 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -16,7 +16,9 @@ import os
import sys
import logging
import glob
+import itertools
import time
+import re
import stat
import bb
import bb.msg
@@ -27,6 +29,9 @@ from bb import data, event, utils
bblogger = logging.getLogger('BitBake')
logger = logging.getLogger('BitBake.Build')
+verboseShellLogging = False
+verboseStdoutLogging = False
+
__mtime_cache = {}
def cached_mtime_noerror(f):
@@ -303,20 +308,60 @@ def exec_func_python(func, d, runfile, cwd=None):
def shell_trap_code():
return '''#!/bin/sh\n
+__BITBAKE_LAST_LINE=0
+
# Emit a useful diagnostic if something fails:
-bb_exit_handler() {
+bb_sh_exit_handler() {
+ ret=$?
+ if [ "$ret" != 0 ]; then
+ echo "WARNING: exit code $ret from a shell command."
+ fi
+ exit $ret
+}
+
+bb_bash_exit_handler() {
ret=$?
- case $ret in
- 0) ;;
- *) case $BASH_VERSION in
- "") echo "WARNING: exit code $ret from a shell command.";;
- *) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from '$BASH_COMMAND'";;
- esac
- exit $ret
- esac
+ { set +x; } > /dev/null
+ trap "" DEBUG
+ if [ "$ret" != 0 ]; then
+ echo "WARNING: ${BASH_SOURCE[0]}:${__BITBAKE_LAST_LINE} exit $ret from '$1'"
+
+ echo "WARNING: Backtrace (BB generated script): "
+ for i in $(seq 1 $((${#FUNCNAME[@]} - 1))); do
+ if [ "$i" -eq 1 ]; then
+ echo -e "\t#$((i)): ${FUNCNAME[$i]}, ${BASH_SOURCE[$((i-1))]}, line ${__BITBAKE_LAST_LINE}"
+ else
+ echo -e "\t#$((i)): ${FUNCNAME[$i]}, ${BASH_SOURCE[$((i-1))]}, line ${BASH_LINENO[$((i-1))]}"
+ fi
+ done
+ fi
+ exit $ret
}
-trap 'bb_exit_handler' 0
-set -e
+
+bb_bash_debug_handler() {
+ local line=${BASH_LINENO[0]}
+ # For some reason the DEBUG trap trips with lineno=1 when scripts exit; ignore it
+ if [ "$line" -eq 1 ]; then
+ return
+ fi
+
+ # Track the line number of commands as they execute. This is so we can have access to the failing line number
+ # in the EXIT trap. See http://gnu-bash.2382.n7.nabble.com/trap-echo-quot-trap-exit-on-LINENO-quot-EXIT-gt-wrong-linenumber-td3666.html
+ if [ "${FUNCNAME[1]}" != "bb_bash_exit_handler" ]; then
+ __BITBAKE_LAST_LINE=$line
+ fi
+}
+
+case $BASH_VERSION in
+"") trap 'bb_sh_exit_handler' 0
+ set -e
+ ;;
+*) trap 'bb_bash_exit_handler "$BASH_COMMAND"' 0
+ trap '{ bb_bash_debug_handler; } 2>/dev/null' DEBUG
+ set -e
+ shopt -s extdebug
+ ;;
+esac
'''
def create_progress_handler(func, progress, logfile, d):
@@ -346,7 +391,7 @@ def create_progress_handler(func, progress, logfile, d):
cls_obj = functools.reduce(resolve, cls.split("."), bb.utils._context)
if not cls_obj:
# Fall-back on __builtins__
- cls_obj = functools.reduce(lambda x, y: x.get(y), cls.split("."), __builtins__)
+ cls_obj = functools.reduce(resolve, cls.split("."), __builtins__)
if cls_obj:
return cls_obj(d, outfile=logfile, otherargs=otherargs)
bb.warn('%s: unknown custom progress handler in task progress varflag value "%s", ignoring' % (func, cls))
@@ -371,7 +416,7 @@ def exec_func_shell(func, d, runfile, cwd=None):
bb.data.emit_func(func, script, d)
- if bb.msg.loggerVerboseLogs:
+ if verboseShellLogging or bb.utils.to_boolean(d.getVar("BB_VERBOSE_LOGS", False)):
script.write("set -x\n")
if cwd:
script.write("cd '%s'\n" % cwd)
@@ -391,14 +436,20 @@ exit $ret
if fakerootcmd:
cmd = [fakerootcmd, runfile]
- if bb.msg.loggerDefaultVerbose:
+ if verboseStdoutLogging:
logfile = LogTee(logger, StdoutNoopContextManager())
else:
logfile = StdoutNoopContextManager()
progress = d.getVarFlag(func, 'progress')
if progress:
- logfile = create_progress_handler(func, progress, logfile, d)
+ try:
+ logfile = create_progress_handler(func, progress, logfile, d)
+ except:
+ from traceback import format_exc
+ logger.error("Failed to create progress handler")
+ logger.error(format_exc())
+ raise
fifobuffer = bytearray()
def readfifo(data):
@@ -450,6 +501,62 @@ exit $ret
bb.debug(2, "Executing shell function %s" % func)
with open(os.devnull, 'r+') as stdin, logfile:
bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
+ except bb.process.ExecutionError as exe:
+ # Find the backtrace that the shell trap generated
+ backtrace_marker_regex = re.compile(r"WARNING: Backtrace \(BB generated script\)")
+ stdout_lines = (exe.stdout or "").split("\n")
+ backtrace_start_line = None
+ for i, line in enumerate(reversed(stdout_lines)):
+ if backtrace_marker_regex.search(line):
+ backtrace_start_line = len(stdout_lines) - i
+ break
+
+ # Read the backtrace frames, starting at the location we just found
+ backtrace_entry_regex = re.compile(r"#(?P<frameno>\d+): (?P<funcname>[^\s]+), (?P<file>.+?), line (?P<lineno>\d+)")
+ backtrace_frames = []
+ if backtrace_start_line:
+ for line in itertools.islice(stdout_lines, backtrace_start_line, None):
+ match = backtrace_entry_regex.search(line)
+ if match:
+ backtrace_frames.append(match.groupdict())
+
+ with open(runfile, "r") as script:
+ script_lines = [line.rstrip() for line in script.readlines()]
+
+ # For each backtrace frame, search backwards in the script (from the line number called out by the frame),
+ # to find the comment that emit_vars injected when it wrote the script. This will give us the metadata
+ # filename (e.g. .bb or .bbclass) and line number where the shell function was originally defined.
+ script_metadata_comment_regex = re.compile(r"# line: (?P<lineno>\d+), file: (?P<file>.+)")
+ better_frames = []
+ # Skip the very last frame since it's just the call to the shell task in the body of the script
+ for frame in backtrace_frames[:-1]:
+ # Check whether the frame corresponds to a function defined in the script vs external script.
+ if os.path.samefile(frame["file"], runfile):
+ # Search backwards from the frame lineno to locate the comment that BB injected
+ i = int(frame["lineno"]) - 1
+ while i >= 0:
+ match = script_metadata_comment_regex.match(script_lines[i])
+ if match:
+ # Calculate the relative line in the function itself
+ relative_line_in_function = int(frame["lineno"]) - i - 2
+ # Calculate line in the function as declared in the metadata
+ metadata_function_line = relative_line_in_function + int(match["lineno"])
+ better_frames.append("#{frameno}: {funcname}, {file}, line {lineno}".format(
+ frameno=frame["frameno"],
+ funcname=frame["funcname"],
+ file=match["file"],
+ lineno=metadata_function_line
+ ))
+ break
+ i -= 1
+ else:
+ better_frames.append("#{frameno}: {funcname}, {file}, line {lineno}".format(**frame))
+
+ if better_frames:
+ better_frames = ("\t{0}".format(frame) for frame in better_frames)
+ exe.extra_message = "\nBacktrace (metadata-relative locations):\n{0}".format("\n".join(better_frames))
+ raise
finally:
os.unlink(fifopath)
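
Illustrative only: the remapping above relies on the "# line: N, file: F" comments that bb.data.emit_func writes into each generated run script. A frame reported at script line L, with the nearest such comment at (0-based) script line i, maps back to metadata line (L - i - 2) + N. The file path and line numbers below are hypothetical:

    import re

    script_metadata_comment_regex = re.compile(r"# line: (?P<lineno>\d+), file: (?P<file>.+)")
    m = script_metadata_comment_regex.match("# line: 42, file: /path/to/base.bbclass")
    frame_lineno = 57      # hypothetical script-relative line from the backtrace frame
    comment_index = 50     # hypothetical 0-based script line holding the comment
    metadata_line = (frame_lineno - comment_index - 2) + int(m["lineno"])
    # -> 47: the failing line as declared in base.bbclass
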
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index d1be83617..9e0c931a0 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -19,16 +19,20 @@
import os
import logging
import pickle
-from collections import defaultdict
+from collections import defaultdict, Mapping
import bb.utils
+from bb import PrefixLoggerAdapter
import re
logger = logging.getLogger("BitBake.Cache")
-__cache_version__ = "152"
+__cache_version__ = "153"
-def getCacheFile(path, filename, data_hash):
- return os.path.join(path, filename + "." + data_hash)
+def getCacheFile(path, filename, mc, data_hash):
+ mcspec = ''
+ if mc:
+ mcspec = ".%s" % mc
+ return os.path.join(path, filename + mcspec + "." + data_hash)
# RecipeInfoCommon defines common data retrieving methods
# from meta data for caches. CoreRecipeInfo as well as other
@@ -324,7 +328,7 @@ class NoCache(object):
bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
return bb_data[virtual]
- def load_bbfile(self, bbfile, appends, virtonly = False):
+ def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
"""
Load and parse one .bb build file
Return the data and whether parsing resulted in the file being skipped
@@ -337,6 +341,10 @@ class NoCache(object):
datastores = parse_recipe(bb_data, bbfile, appends, mc)
return datastores
+ if mc is not None:
+ bb_data = self.databuilder.mcdata[mc].createCopy()
+ return parse_recipe(bb_data, bbfile, appends, mc)
+
bb_data = self.data.createCopy()
datastores = parse_recipe(bb_data, bbfile, appends)
@@ -354,14 +362,15 @@ class Cache(NoCache):
"""
BitBake Cache implementation
"""
-
- def __init__(self, databuilder, data_hash, caches_array):
+ def __init__(self, databuilder, mc, data_hash, caches_array):
super().__init__(databuilder)
data = databuilder.data
# Pass caches_array information into Cache Constructor
# It will be used later for deciding whether we
# need extra cache file dump/load support
+ self.mc = mc
+ self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
self.caches_array = caches_array
self.cachedir = data.getVar("CACHE")
self.clean = set()
@@ -374,31 +383,47 @@ class Cache(NoCache):
if self.cachedir in [None, '']:
self.has_cache = False
- logger.info("Not using a cache. "
- "Set CACHE = <directory> to enable.")
+ self.logger.info("Not using a cache. "
+ "Set CACHE = <directory> to enable.")
return
self.has_cache = True
- self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
- logger.debug(1, "Cache dir: %s", self.cachedir)
+ def getCacheFile(self, cachefile):
+ return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
+
+ def prepare_cache(self, progress):
+ if not self.has_cache:
+ return 0
+
+ loaded = 0
+
+ self.cachefile = self.getCacheFile("bb_cache.dat")
+
+ self.logger.debug(1, "Cache dir: %s", self.cachedir)
bb.utils.mkdirhier(self.cachedir)
cache_ok = True
if self.caches_array:
for cache_class in self.caches_array:
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- cache_ok = cache_ok and os.path.exists(cachefile)
+ cachefile = self.getCacheFile(cache_class.cachefile)
+ cache_exists = os.path.exists(cachefile)
+ self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)
+ cache_ok = cache_ok and cache_exists
cache_class.init_cacheData(self)
if cache_ok:
- self.load_cachefile()
+ loaded = self.load_cachefile(progress)
elif os.path.isfile(self.cachefile):
- logger.info("Out of date cache found, rebuilding...")
+ self.logger.info("Out of date cache found, rebuilding...")
else:
- logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
+ self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
# We don't use the symlink, its just for debugging convinience
- symlink = os.path.join(self.cachedir, "bb_cache.dat")
+ if self.mc:
+ symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
+ else:
+ symlink = os.path.join(self.cachedir, "bb_cache.dat")
+
if os.path.exists(symlink):
bb.utils.remove(symlink)
try:
@@ -406,22 +431,31 @@ class Cache(NoCache):
except OSError:
pass
- def load_cachefile(self):
- cachesize = 0
- previous_progress = 0
- previous_percent = 0
+ return loaded
+
+ def cachesize(self):
+ if not self.has_cache:
+ return 0
- # Calculate the correct cachesize of all those cache files
+ cachesize = 0
for cache_class in self.caches_array:
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- with open(cachefile, "rb") as cachefile:
- cachesize += os.fstat(cachefile.fileno()).st_size
+ cachefile = self.getCacheFile(cache_class.cachefile)
+ try:
+ with open(cachefile, "rb") as cachefile:
+ cachesize += os.fstat(cachefile.fileno()).st_size
+ except FileNotFoundError:
+ pass
+
+ return cachesize
- bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
+ def load_cachefile(self, progress):
+ cachesize = self.cachesize()
+ previous_progress = 0
+ previous_percent = 0
for cache_class in self.caches_array:
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- logger.debug(1, 'Loading cache file: %s' % cachefile)
+ cachefile = self.getCacheFile(cache_class.cachefile)
+ self.logger.debug(1, 'Loading cache file: %s' % cachefile)
with open(cachefile, "rb") as cachefile:
pickled = pickle.Unpickler(cachefile)
# Check cache version information
@@ -429,15 +463,15 @@ class Cache(NoCache):
cache_ver = pickled.load()
bitbake_ver = pickled.load()
except Exception:
- logger.info('Invalid cache, rebuilding...')
- return
+ self.logger.info('Invalid cache, rebuilding...')
+ return 0
if cache_ver != __cache_version__:
- logger.info('Cache version mismatch, rebuilding...')
- return
+ self.logger.info('Cache version mismatch, rebuilding...')
+ return 0
elif bitbake_ver != bb.__version__:
- logger.info('Bitbake version mismatch, rebuilding...')
- return
+ self.logger.info('Bitbake version mismatch, rebuilding...')
+ return 0
# Load the rest of the cache file
current_progress = 0
@@ -460,29 +494,17 @@ class Cache(NoCache):
self.depends_cache[key] = [value]
# only fire events on even percentage boundaries
current_progress = cachefile.tell() + previous_progress
- if current_progress > cachesize:
- # we might have calculated incorrect total size because a file
- # might've been written out just after we checked its size
- cachesize = current_progress
- current_percent = 100 * current_progress / cachesize
- if current_percent > previous_percent:
- previous_percent = current_percent
- bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
- self.data)
+ progress(cachefile.tell() + previous_progress)
previous_progress += current_progress
- # Note: depends cache number is corresponding to the parsing file numbers.
- # The same file has several caches, still regarded as one item in the cache
- bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
- len(self.depends_cache)),
- self.data)
+ return len(self.depends_cache)
def parse(self, filename, appends):
"""Parse the specified filename, returning the recipe information"""
- logger.debug(1, "Parsing %s", filename)
+ self.logger.debug(1, "Parsing %s", filename)
infos = []
- datastores = self.load_bbfile(filename, appends)
+ datastores = self.load_bbfile(filename, appends, mc=self.mc)
depends = []
variants = []
# Process the "real" fn last so we can store variants list
@@ -534,7 +556,7 @@ class Cache(NoCache):
cached, infos = self.load(fn, appends)
for virtualfn, info_array in infos:
if info_array[0].skipped:
- logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
+ self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
skipped += 1
else:
self.add_info(virtualfn, info_array, cacheData, not cached)
@@ -570,21 +592,21 @@ class Cache(NoCache):
# File isn't in depends_cache
if not fn in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", fn)
+ self.logger.debug(2, "%s is not cached", fn)
return False
mtime = bb.parse.cached_mtime_noerror(fn)
# Check file still exists
if mtime == 0:
- logger.debug(2, "Cache: %s no longer exists", fn)
+ self.logger.debug(2, "%s no longer exists", fn)
self.remove(fn)
return False
info_array = self.depends_cache[fn]
# Check the file's timestamp
if mtime != info_array[0].timestamp:
- logger.debug(2, "Cache: %s changed", fn)
+ self.logger.debug(2, "%s changed", fn)
self.remove(fn)
return False
@@ -595,14 +617,14 @@ class Cache(NoCache):
fmtime = bb.parse.cached_mtime_noerror(f)
# Check if file still exists
if old_mtime != 0 and fmtime == 0:
- logger.debug(2, "Cache: %s's dependency %s was removed",
- fn, f)
+ self.logger.debug(2, "%s's dependency %s was removed",
+ fn, f)
self.remove(fn)
return False
if (fmtime != old_mtime):
- logger.debug(2, "Cache: %s's dependency %s changed",
- fn, f)
+ self.logger.debug(2, "%s's dependency %s changed",
+ fn, f)
self.remove(fn)
return False
@@ -614,18 +636,18 @@ class Cache(NoCache):
# Have to be careful about spaces and colons in filenames
flist = self.filelist_regex.split(fl)
for f in flist:
- if not f or "*" in f:
+ if not f:
continue
f, exist = f.split(":")
if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
- logger.debug(2, "Cache: %s's file checksum list file %s changed",
- fn, f)
+ self.logger.debug(2, "%s's file checksum list file %s changed",
+ fn, f)
self.remove(fn)
return False
- if appends != info_array[0].appends:
- logger.debug(2, "Cache: appends for %s changed", fn)
- logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
+ if tuple(appends) != tuple(info_array[0].appends):
+ self.logger.debug(2, "appends for %s changed", fn)
+ self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
self.remove(fn)
return False
@@ -634,10 +656,10 @@ class Cache(NoCache):
virtualfn = variant2virtual(fn, cls)
self.clean.add(virtualfn)
if virtualfn not in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", virtualfn)
+ self.logger.debug(2, "%s is not cached", virtualfn)
invalid = True
elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
- logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
+ self.logger.debug(2, "Extra caches missing for %s?" % virtualfn)
invalid = True
# If any one of the variants is not present, mark as invalid for all
@@ -645,10 +667,10 @@ class Cache(NoCache):
for cls in info_array[0].variants:
virtualfn = variant2virtual(fn, cls)
if virtualfn in self.clean:
- logger.debug(2, "Cache: Removing %s from cache", virtualfn)
+ self.logger.debug(2, "Removing %s from cache", virtualfn)
self.clean.remove(virtualfn)
if fn in self.clean:
- logger.debug(2, "Cache: Marking %s as not clean", fn)
+ self.logger.debug(2, "Marking %s as not clean", fn)
self.clean.remove(fn)
return False
@@ -661,10 +683,10 @@ class Cache(NoCache):
Called from the parser in error cases
"""
if fn in self.depends_cache:
- logger.debug(1, "Removing %s from cache", fn)
+ self.logger.debug(1, "Removing %s from cache", fn)
del self.depends_cache[fn]
if fn in self.clean:
- logger.debug(1, "Marking %s as unclean", fn)
+ self.logger.debug(1, "Marking %s as unclean", fn)
self.clean.remove(fn)
def sync(self):
@@ -677,12 +699,13 @@ class Cache(NoCache):
return
if self.cacheclean:
- logger.debug(2, "Cache is clean, not saving.")
+ self.logger.debug(2, "Cache is clean, not saving.")
return
for cache_class in self.caches_array:
cache_class_name = cache_class.__name__
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+ cachefile = self.getCacheFile(cache_class.cachefile)
+ self.logger.debug(2, "Writing %s", cachefile)
with open(cachefile, "wb") as f:
p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
p.dump(__cache_version__)
@@ -701,8 +724,18 @@ class Cache(NoCache):
return bb.parse.cached_mtime_noerror(cachefile)
def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
+ if self.mc is not None:
+ (fn, cls, mc) = virtualfn2realfn(filename)
+ if mc:
+ self.logger.error("Unexpected multiconfig %s", filename)
+ return
+
+ vfn = realfn2virtual(fn, cls, self.mc)
+ else:
+ vfn = filename
+
if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
- cacheData.add_from_recipeinfo(filename, info_array)
+ cacheData.add_from_recipeinfo(vfn, info_array)
if watcher:
watcher(info_array[0].file_depends)
@@ -727,6 +760,65 @@ class Cache(NoCache):
info_array.append(cache_class(realfn, data))
self.add_info(file_name, info_array, cacheData, parsed)
+class MulticonfigCache(Mapping):
+ def __init__(self, databuilder, data_hash, caches_array):
+ def progress(p):
+ nonlocal current_progress
+ nonlocal previous_progress
+ nonlocal previous_percent
+ nonlocal cachesize
+
+ current_progress = previous_progress + p
+
+ if current_progress > cachesize:
+ # we might have calculated incorrect total size because a file
+ # might've been written out just after we checked its size
+ cachesize = current_progress
+ current_percent = 100 * current_progress / cachesize
+ if current_percent > previous_percent:
+ previous_percent = current_percent
+ bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
+ databuilder.data)
+
+
+ cachesize = 0
+ current_progress = 0
+ previous_progress = 0
+ previous_percent = 0
+ self.__caches = {}
+
+ for mc, mcdata in databuilder.mcdata.items():
+ self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)
+
+ cachesize += self.__caches[mc].cachesize()
+
+ bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
+ loaded = 0
+
+ for c in self.__caches.values():
+ loaded += c.prepare_cache(progress)
+ previous_progress = current_progress
+
+ # Note: depends cache number is corresponding to the parsing file numbers.
+ # The same file has several caches, still regarded as one item in the cache
+ bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)
+
+ def __len__(self):
+ return len(self.__caches)
+
+ def __getitem__(self, key):
+ return self.__caches[key]
+
+ def __contains__(self, key):
+ return key in self.__caches
+
+ def __iter__(self):
+ for k in self.__caches:
+ yield k
+
+ def keys(self):
+ return self.__caches.keys()
+
def init(cooker):
"""
diff --git a/poky/bitbake/lib/bb/command.py b/poky/bitbake/lib/bb/command.py
index 6abf38668..f8c6a03bb 100644
--- a/poky/bitbake/lib/bb/command.py
+++ b/poky/bitbake/lib/bb/command.py
@@ -54,13 +54,20 @@ class Command:
self.cooker = cooker
self.cmds_sync = CommandsSync()
self.cmds_async = CommandsAsync()
- self.remotedatastores = bb.remotedata.RemoteDatastores(cooker)
+ self.remotedatastores = None
# FIXME Add lock for this
self.currentAsyncCommand = None
def runCommand(self, commandline, ro_only = False):
command = commandline.pop(0)
+
+ # Ensure cooker is ready for commands
+ if command != "updateConfig" and command != "setFeatures":
+ self.cooker.init_configdata()
+ if not self.remotedatastores:
+ self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
+
if hasattr(CommandsSync, command):
# Can run synchronous commands straight away
command_method = getattr(self.cmds_sync, command)
@@ -84,7 +91,7 @@ class Command:
if command not in CommandsAsync.__dict__:
return None, "No such command"
self.currentAsyncCommand = (command, commandline)
- self.cooker.configuration.server_register_idlecallback(self.cooker.runCommands, self.cooker)
+ self.cooker.idleCallBackRegister(self.cooker.runCommands, self.cooker)
return True, None
def runAsyncCommand(self):
@@ -136,13 +143,8 @@ class Command:
self.cooker.finishcommand()
def reset(self):
- self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
-
-def split_mc_pn(pn):
- if pn.startswith("multiconfig:"):
- _, mc, pn = pn.split(":", 2)
- return (mc, pn)
- return ('', pn)
+ if self.remotedatastores:
+ self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
class CommandsSync:
"""
@@ -232,7 +234,11 @@ class CommandsSync:
def matchFile(self, command, params):
fMatch = params[0]
- return command.cooker.matchFile(fMatch)
+ try:
+ mc = params[1]
+ except IndexError:
+ mc = ''
+ return command.cooker.matchFile(fMatch, mc)
matchFile.needconfig = False
def getUIHandlerNum(self, command, params):
@@ -395,22 +401,38 @@ class CommandsSync:
def getSkippedRecipes(self, command, params):
# Return list sorted by reverse priority order
import bb.cache
- skipdict = OrderedDict(sorted(command.cooker.skiplist.items(),
- key=lambda x: (-command.cooker.collection.calc_bbfile_priority(bb.cache.virtualfn2realfn(x[0])[0]), x[0])))
+ def sortkey(x):
+ vfn, _ = x
+ realfn, _, mc = bb.cache.virtualfn2realfn(vfn)
+ return (-command.cooker.collections[mc].calc_bbfile_priority(realfn)[0], vfn)
+
+ skipdict = OrderedDict(sorted(command.cooker.skiplist.items(), key=sortkey))
return list(skipdict.items())
getSkippedRecipes.readonly = True
def getOverlayedRecipes(self, command, params):
- return list(command.cooker.collection.overlayed.items())
+ try:
+ mc = params[0]
+ except IndexError:
+ mc = ''
+ return list(command.cooker.collections[mc].overlayed.items())
getOverlayedRecipes.readonly = True
def getFileAppends(self, command, params):
fn = params[0]
- return command.cooker.collection.get_file_appends(fn)
+ try:
+ mc = params[1]
+ except IndexError:
+ mc = ''
+ return command.cooker.collections[mc].get_file_appends(fn)
getFileAppends.readonly = True
def getAllAppends(self, command, params):
- return command.cooker.collection.bbappends
+ try:
+ mc = params[0]
+ except IndexError:
+ mc = ''
+ return command.cooker.collections[mc].bbappends
getAllAppends.readonly = True
def findProviders(self, command, params):
@@ -422,7 +444,7 @@ class CommandsSync:
findProviders.readonly = True
def findBestProvider(self, command, params):
- (mc, pn) = split_mc_pn(params[0])
+ (mc, pn) = bb.runqueue.split_mc(params[0])
return command.cooker.findBestProvider(pn, mc)
findBestProvider.readonly = True
@@ -496,6 +518,7 @@ class CommandsSync:
for the recipe.
"""
fn = params[0]
+ mc = bb.runqueue.mc_from_tid(fn)
appends = params[1]
appendlist = params[2]
if len(params) > 3:
@@ -507,7 +530,7 @@ class CommandsSync:
if appendlist is not None:
appendfiles = appendlist
else:
- appendfiles = command.cooker.collection.get_file_appends(fn)
+ appendfiles = command.cooker.collections[mc].get_file_appends(fn)
else:
appendfiles = []
# We are calling bb.cache locally here rather than on the server,
@@ -517,7 +540,7 @@ class CommandsSync:
if config_data:
# We have to use a different function here if we're passing in a datastore
# NOTE: we took a copy above, so we don't do it here again
- envdata = bb.cache.parse_recipe(config_data, fn, appendfiles)['']
+ envdata = bb.cache.parse_recipe(config_data, fn, appendfiles, mc)['']
else:
# Use the standard path
parser = bb.cache.NoCache(command.cooker.databuilder)
@@ -708,10 +731,10 @@ class CommandsAsync:
"""
Find signature info files via the signature generator
"""
- pn = params[0]
+ (mc, pn) = bb.runqueue.split_mc(params[0])
taskname = params[1]
sigs = params[2]
- res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.data)
- bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.data)
+ res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc])
+ bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc])
command.finishAsyncCommand()
findSigInfo.needcache = False
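
For reference, a behavioural sketch of the bb.runqueue.split_mc helper that replaces the removed split_mc_pn above (note the prefix is now "mc:" rather than "multiconfig:"):

    def split_mc(pn):
        if pn.startswith("mc:"):
            _, mc, pn = pn.split(":", 2)
            return (mc, pn)
        return ('', pn)

    split_mc("mc:mc1:core-image-minimal")   # -> ('mc1', 'core-image-minimal')
    split_mc("zlib")                        # -> ('', 'zlib')
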
diff --git a/poky/bitbake/lib/bb/compat.py b/poky/bitbake/lib/bb/compat.py
deleted file mode 100644
index 49356681a..000000000
--- a/poky/bitbake/lib/bb/compat.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-"""Code pulled from future python versions, here for compatibility"""
-
-from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict
-from functools import total_ordering
-
-
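
With bb/compat.py gone, callers import the same names from the standard library directly (the equivalent imports, assuming Python 3):

    from collections import OrderedDict
    from collections.abc import MutableMapping, KeysView, ValuesView, ItemsView
    from functools import total_ordering
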
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index e527e2311..1f4cc1e96 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -148,15 +148,18 @@ class BBCooker:
Manages one bitbake build run
"""
- def __init__(self, configuration, featureSet=None):
+ def __init__(self, featureSet=None, idleCallBackRegister=None):
self.recipecaches = None
+ self.eventlog = None
self.skiplist = {}
self.featureset = CookerFeatures()
if featureSet:
for f in featureSet:
self.featureset.setFeature(f)
- self.configuration = configuration
+ self.configuration = bb.cookerdata.CookerConfiguration()
+
+ self.idleCallBackRegister = idleCallBackRegister
bb.debug(1, "BBCooker starting %s" % time.time())
sys.stdout.flush()
@@ -192,25 +195,13 @@ class BBCooker:
self.hashserv = None
self.hashservaddr = None
- self.initConfigurationData()
-
- bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
- sys.stdout.flush()
-
- # we log all events to a file if so directed
- if self.configuration.writeeventlog:
- # register the log file writer as UI Handler
- writer = EventWriter(self, self.configuration.writeeventlog)
- EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
- bb.event.register_UIHhandler(EventLogWriteHandler(writer))
-
self.inotify_modified_files = []
def _process_inotify_updates(server, cooker, abort):
cooker.process_inotify_updates()
return 1.0
- self.configuration.server_register_idlecallback(_process_inotify_updates, self)
+ self.idleCallBackRegister(_process_inotify_updates, self)
# TOSTOP must not be set or our children will hang when they output
try:
@@ -237,6 +228,13 @@ class BBCooker:
bb.debug(1, "BBCooker startup complete %s" % time.time())
sys.stdout.flush()
+ def init_configdata(self):
+ if not hasattr(self, "data"):
+ self.initConfigurationData()
+ bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
+ sys.stdout.flush()
+ self.handlePRServ()
+
def process_inotify_updates(self):
for n in [self.confignotifier, self.notifier]:
if n.check_events(timeout=0):
@@ -322,7 +320,7 @@ class BBCooker:
for feature in features:
self.featureset.setFeature(feature)
bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
- if (original_featureset != list(self.featureset)) and self.state != state.error:
+ if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
self.reset()
def initConfigurationData(self):
@@ -354,7 +352,7 @@ class BBCooker:
self.caches_array.append(getattr(module, cache_name))
except ImportError as exc:
logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
- sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)
+ raise bb.BBHandledException()
self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
self.databuilder.parseBaseConfiguration()
@@ -411,11 +409,6 @@ class BBCooker:
self.data.disableTracking()
def parseConfiguration(self):
- # Set log file verbosity
- verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
- if verboselogs:
- bb.msg.loggerVerboseLogs = True
-
# Change nice level if we're asked to
nice = self.data.getVar("BB_NICE_LEVEL")
if nice:
@@ -449,7 +442,28 @@ class BBCooker:
logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
clean = False
- setattr(self.configuration, o, options[o])
+ if hasattr(self.configuration, o):
+ setattr(self.configuration, o, options[o])
+
+ if self.configuration.writeeventlog:
+ if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
+ bb.event.unregister_UIHhandler(self.eventlog[1])
+ if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
+ # we log all events to a file if so directed
+ # register the log file writer as UI Handler
+ writer = EventWriter(self, self.configuration.writeeventlog)
+ EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
+ self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
+
+ bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
+ bb.msg.loggerDefaultDomains = self.configuration.debug_domains
+
+ if hasattr(self, "data"):
+ origenv = bb.data.init()
+ for k in environment:
+ origenv.setVar(k, environment[k])
+ self.data.setVar("BB_ORIGENV", origenv)
+
for k in bb.utils.approved_variables():
if k in environment and k not in self.configuration.env:
logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
@@ -465,6 +479,10 @@ class BBCooker:
logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
self.configuration.env[k] = environment[k]
clean = False
+
+ # Now update all the variables not in the datastore to match
+ self.configuration.env = environment
+
if not clean:
logger.debug(1, "Base environment change, triggering reparse")
self.reset()
@@ -525,7 +543,7 @@ class BBCooker:
self.parseConfiguration()
fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
- fn = self.matchFile(fn)
+ fn = self.matchFile(fn, mc)
fn = bb.cache.realfn2virtual(fn, cls, mc)
elif len(pkgs_to_build) == 1:
mc = mc_base(pkgs_to_build[0])
@@ -541,8 +559,8 @@ class BBCooker:
if fn:
try:
- bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
- envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
+ bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
+ envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
except Exception as e:
parselog.exception("Unable to read %s", fn)
raise
@@ -626,6 +644,7 @@ class BBCooker:
current = 0
runlist = []
for k in fulltargetlist:
+ origk = k
mc = ""
if k.startswith("mc:"):
mc = k.split(":")[1]
@@ -635,6 +654,10 @@ class BBCooker:
k2 = k.split(":do_")
k = k2[0]
ktask = k2[1]
+
+ if mc not in self.multiconfigs:
+ bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))
+
taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
current += 1
if not ktask.startswith("do_"):
@@ -670,7 +693,7 @@ class BBCooker:
l = k.split(':')
depmc = l[2]
if depmc not in self.multiconfigs:
- bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
+ bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
else:
logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
@@ -929,26 +952,33 @@ class BBCooker:
logger.info("Task dependencies saved to 'task-depends.dot'")
def show_appends_with_no_recipes(self):
+ appends_without_recipes = {}
# Determine which bbappends haven't been applied
-
- # First get list of recipes, including skipped
- recipefns = list(self.recipecaches[''].pkg_fn.keys())
- recipefns.extend(self.skiplist.keys())
-
- # Work out list of bbappends that have been applied
- applied_appends = []
- for fn in recipefns:
- applied_appends.extend(self.collection.get_file_appends(fn))
-
- appends_without_recipes = []
- for _, appendfn in self.collection.bbappends:
- if not appendfn in applied_appends:
- appends_without_recipes.append(appendfn)
-
- if appends_without_recipes:
- msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
- warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
- False) or "no"
+ for mc in self.multiconfigs:
+ # First get list of recipes, including skipped
+ recipefns = list(self.recipecaches[mc].pkg_fn.keys())
+ recipefns.extend(self.skiplist.keys())
+
+ # Work out list of bbappends that have been applied
+ applied_appends = []
+ for fn in recipefns:
+ applied_appends.extend(self.collections[mc].get_file_appends(fn))
+
+ appends_without_recipes[mc] = []
+ for _, appendfn in self.collections[mc].bbappends:
+ if not appendfn in applied_appends:
+ appends_without_recipes[mc].append(appendfn)
+
+ msgs = []
+ for mc in sorted(appends_without_recipes.keys()):
+ if appends_without_recipes[mc]:
+ msgs.append('No recipes in %s available for:\n %s' % (mc if mc else 'default',
+ '\n '.join(appends_without_recipes[mc])))
+
+ if msgs:
+ msg = "\n".join(msgs)
+ warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
+ False) or "no"
if warn_only.lower() in ("1", "yes", "true"):
bb.warn(msg)
else:
@@ -1097,7 +1127,7 @@ class BBCooker:
from bb import shell
except ImportError:
parselog.exception("Interactive mode not available")
- sys.exit(1)
+ raise bb.BBHandledException()
else:
shell.start( self )
@@ -1249,15 +1279,15 @@ class BBCooker:
if siggen_cache:
bb.parse.siggen.checksum_cache.mtime_cache.clear()
- def matchFiles(self, bf):
+ def matchFiles(self, bf, mc=''):
"""
Find the .bb files which match the expression in 'buildfile'.
"""
if bf.startswith("/") or bf.startswith("../"):
bf = os.path.abspath(bf)
- self.collection = CookerCollectFiles(self.bbfile_config_priorities)
- filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
+ self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
+ filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
try:
os.stat(bf)
bf = os.path.abspath(bf)
@@ -1270,12 +1300,12 @@ class BBCooker:
matches.append(f)
return matches
- def matchFile(self, buildfile):
+ def matchFile(self, buildfile, mc=''):
"""
Find the .bb file which matches the expression in 'buildfile'.
Raise an error if multiple files
"""
- matches = self.matchFiles(buildfile)
+ matches = self.matchFiles(buildfile, mc)
if len(matches) != 1:
if matches:
msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
@@ -1316,14 +1346,14 @@ class BBCooker:
task = "do_%s" % task
fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
- fn = self.matchFile(fn)
+ fn = self.matchFile(fn, mc)
self.buildSetVars()
self.reset_mtime_caches()
- bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
+ bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
- infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
+ infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
infos = dict(infos)
fn = bb.cache.realfn2virtual(fn, cls, mc)
@@ -1411,7 +1441,7 @@ class BBCooker:
return True
return retval
- self.configuration.server_register_idlecallback(buildFileIdle, rq)
+ self.idleCallBackRegister(buildFileIdle, rq)
def buildTargets(self, targets, task):
"""
@@ -1482,7 +1512,7 @@ class BBCooker:
if 'universe' in targets:
rq.rqdata.warn_multi_bb = True
- self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
+ self.idleCallBackRegister(buildTargetsIdle, rq)
def getAllKeysWithFlags(self, flaglist):
@@ -1533,6 +1563,7 @@ class BBCooker:
if self.state in (state.shutdown, state.forceshutdown, state.error):
if hasattr(self.parser, 'shutdown'):
self.parser.shutdown(clean=False, force = True)
+ self.parser.final_cleanup()
raise bb.BBHandledException()
if self.state != state.parsing:
@@ -1552,14 +1583,24 @@ class BBCooker:
for dep in self.configuration.extra_assume_provided:
self.recipecaches[mc].ignored_dependencies.add(dep)
- self.collection = CookerCollectFiles(self.bbfile_config_priorities)
- (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
+ self.collections = {}
+
+ mcfilelist = {}
+ total_masked = 0
+ searchdirs = set()
+ for mc in self.multiconfigs:
+ self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
+ (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
+
+ mcfilelist[mc] = filelist
+ total_masked += masked
+ searchdirs |= set(search)
# Add inotify watches for directories searched for bb/bbappend files
for dirent in searchdirs:
self.add_filewatch([[dirent]], dirs=True)
- self.parser = CookerParser(self, filelist, masked)
+ self.parser = CookerParser(self, mcfilelist, total_masked)
self.parsecache_valid = True
self.state = state.parsing
@@ -1571,7 +1612,7 @@ class BBCooker:
self.show_appends_with_no_recipes()
self.handlePrefProviders()
for mc in self.multiconfigs:
- self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
+ self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
self.state = state.running
# Send an event listing all stamps reachable after parsing
@@ -1592,7 +1633,7 @@ class BBCooker:
raise NothingToBuild
ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
- for pkg in pkgs_to_build:
+ for pkg in pkgs_to_build.copy():
if pkg in ignore:
parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
if pkg.startswith("multiconfig:"):
@@ -1630,17 +1671,16 @@ class BBCooker:
return pkgs_to_build
def pre_serve(self):
- # We now are in our own process so we can call this here.
- # PRServ exits if its parent process exits
- self.handlePRServ()
return
def post_serve(self):
+ self.shutdown(force=True)
prserv.serv.auto_shutdown()
if self.hashserv:
self.hashserv.process.terminate()
self.hashserv.process.join()
- bb.event.fire(CookerExit(), self.data)
+ if hasattr(self, "data"):
+ bb.event.fire(CookerExit(), self.data)
def shutdown(self, force = False):
if force:
@@ -1650,6 +1690,7 @@ class BBCooker:
if self.parser:
self.parser.shutdown(clean=not force, force=force)
+ self.parser.final_cleanup()
def finishcommand(self):
self.state = state.initial
@@ -1663,8 +1704,9 @@ class BBCooker:
self.finishcommand()
self.extraconfigdata = {}
self.command.reset()
- self.databuilder.reset()
- self.data = self.databuilder.data
+ if hasattr(self, "data"):
+ self.databuilder.reset()
+ self.data = self.databuilder.data
self.parsecache_valid = False
self.baseconfig_valid = False
@@ -1679,21 +1721,19 @@ class CookerExit(bb.event.Event):
class CookerCollectFiles(object):
- def __init__(self, priorities):
+ def __init__(self, priorities, mc=''):
+ self.mc = mc
self.bbappends = []
# Priorities is a list of tupples, with the second element as the pattern.
# We need to sort the list with the longest pattern first, and so on to
# the shortest. This allows nested layers to be properly evaluated.
self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
- def calc_bbfile_priority( self, filename, matched = None ):
+ def calc_bbfile_priority(self, filename):
for _, _, regex, pri in self.bbfile_config_priorities:
if regex.match(filename):
- if matched is not None:
- if not regex in matched:
- matched.add(regex)
- return pri
- return 0
+ return pri, regex
+ return 0, None
def get_bbfiles(self):
"""Get list of default .bb files by reading out the current directory"""
@@ -1723,10 +1763,10 @@ class CookerCollectFiles(object):
collectlog.debug(1, "collecting .bb files")
files = (config.getVar( "BBFILES") or "").split()
- config.setVar("BBFILES", " ".join(files))
# Sort files by priority
- files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
+ files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
+ config.setVar("BBFILES_PRIORITIZED", " ".join(files))
if not len(files):
files = self.get_bbfiles()
@@ -1846,43 +1886,67 @@ class CookerCollectFiles(object):
(bbappend, filename) = b
if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
filelist.append(filename)
- return filelist
+ return tuple(filelist)
- def collection_priorities(self, pkgfns, d):
+ def collection_priorities(self, pkgfns, fns, d):
+ # Return the priorities of the entries in pkgfns
+ # Also check that all the regexes in self.bbfile_config_priorities are used
+ # (but to do that we need to ensure skipped recipes aren't counted, nor
+ # collections in BBFILE_PATTERN_IGNORE_EMPTY)
priorities = {}
+ seen = set()
+ matched = set()
+
+ matched_regex = set()
+ unmatched_regex = set()
+ for _, _, regex, _ in self.bbfile_config_priorities:
+ unmatched_regex.add(regex)
# Calculate priorities for each file
- matched = set()
for p in pkgfns:
realfn, cls, mc = bb.cache.virtualfn2realfn(p)
- priorities[p] = self.calc_bbfile_priority(realfn, matched)
-
- unmatched = set()
- for _, _, regex, pri in self.bbfile_config_priorities:
- if not regex in matched:
- unmatched.add(regex)
-
- # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
- def find_bbappend_match(regex):
+ priorities[p], regex = self.calc_bbfile_priority(realfn)
+ if regex in unmatched_regex:
+ matched_regex.add(regex)
+ unmatched_regex.remove(regex)
+ seen.add(realfn)
+ if regex:
+ matched.add(realfn)
+
+ if unmatched_regex:
+ # Account for bbappend files
for b in self.bbappends:
(bbfile, append) = b
- if regex.match(append):
- # If the bbappend is matched by already "matched set", return False
- for matched_regex in matched:
- if matched_regex.match(append):
- return False
- return True
- return False
+ seen.add(append)
+
+ # Account for skipped recipes
+ seen.update(fns)
- for unmatch in unmatched.copy():
- if find_bbappend_match(unmatch):
- unmatched.remove(unmatch)
+ seen.difference_update(matched)
+
+ def already_matched(fn):
+ for regex in matched_regex:
+ if regex.match(fn):
+ return True
+ return False
+
+ for unmatch in unmatched_regex.copy():
+ for fn in seen:
+ if unmatch.match(fn):
+ # If the bbappend or file was already matched by another regex, skip it
+ # e.g. for a layer within a layer, the outer regex could match, the inner
+ # regex may match nothing and we should warn about that
+ if already_matched(fn):
+ continue
+ unmatched_regex.remove(unmatch)
+ break
for collection, pattern, regex, _ in self.bbfile_config_priorities:
- if regex in unmatched:
+ if regex in unmatched_regex:
if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
- collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
+ collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
+ collection, pattern))
return priorities
@@ -1931,7 +1995,8 @@ class Parser(multiprocessing.Process):
except queue.Empty:
pass
else:
- self.results.cancel_join_thread()
+ self.results.close()
+ self.results.join_thread()
break
if pending:
@@ -1940,6 +2005,8 @@ class Parser(multiprocessing.Process):
try:
job = self.jobs.pop()
except IndexError:
+ self.results.close()
+ self.results.join_thread()
break
result = self.parse(*job)
# Clear the siggen cache after parsing to control memory usage, its huge
@@ -1949,7 +2016,7 @@ class Parser(multiprocessing.Process):
except queue.Full:
pending.append(result)
- def parse(self, filename, appends):
+ def parse(self, mc, cache, filename, appends):
try:
origfilter = bb.event.LogHandler.filter
# Record the filename we're parsing into any events generated
@@ -1963,7 +2030,7 @@ class Parser(multiprocessing.Process):
bb.event.set_class_handlers(self.handlers.copy())
bb.event.LogHandler.filter = parse_filter
- return True, self.bb_cache.parse(filename, appends)
+ return True, mc, cache.parse(filename, appends)
except Exception as exc:
tb = sys.exc_info()[2]
exc.recipe = filename
@@ -1978,8 +2045,8 @@ class Parser(multiprocessing.Process):
bb.event.LogHandler.filter = origfilter
class CookerParser(object):
- def __init__(self, cooker, filelist, masked):
- self.filelist = filelist
+ def __init__(self, cooker, mcfilelist, masked):
+ self.mcfilelist = mcfilelist
self.cooker = cooker
self.cfgdata = cooker.data
self.cfghash = cooker.data_hash
@@ -1993,28 +2060,31 @@ class CookerParser(object):
self.skipped = 0
self.virtuals = 0
- self.total = len(filelist)
self.current = 0
self.process_names = []
- self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
- self.fromcache = []
- self.willparse = []
- for filename in self.filelist:
- appends = self.cooker.collection.get_file_appends(filename)
- if not self.bb_cache.cacheValid(filename, appends):
- self.willparse.append((filename, appends))
- else:
- self.fromcache.append((filename, appends))
- self.toparse = self.total - len(self.fromcache)
+ self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
+ self.fromcache = set()
+ self.willparse = set()
+ for mc in self.cooker.multiconfigs:
+ for filename in self.mcfilelist[mc]:
+ appends = self.cooker.collections[mc].get_file_appends(filename)
+ if not self.bb_caches[mc].cacheValid(filename, appends):
+ self.willparse.add((mc, self.bb_caches[mc], filename, appends))
+ else:
+ self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
+
+ self.total = len(self.fromcache) + len(self.willparse)
+ self.toparse = len(self.willparse)
self.progress_chunk = int(max(self.toparse / 100, 1))
self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
- multiprocessing.cpu_count()), len(self.willparse))
+ multiprocessing.cpu_count()), self.toparse)
self.start()
self.haveshutdown = False
+ self.syncthread = None
def start(self):
self.results = self.load_cached()
@@ -2022,7 +2092,9 @@ class CookerParser(object):
if self.toparse:
bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
def init():
- Parser.bb_cache = self.bb_cache
+ signal.signal(signal.SIGTERM, signal.SIG_DFL)
+ signal.signal(signal.SIGHUP, signal.SIG_DFL)
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
bb.utils.set_process_name(multiprocessing.current_process().name)
multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
@@ -2032,7 +2104,7 @@ class CookerParser(object):
def chunkify(lst,n):
return [lst[i::n] for i in range(n)]
- self.jobs = chunkify(self.willparse, self.num_processes)
+ self.jobs = chunkify(list(self.willparse), self.num_processes)
for i in range(0, self.num_processes):
parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
@@ -2056,12 +2128,9 @@ class CookerParser(object):
self.total)
bb.event.fire(event, self.cfgdata)
- for process in self.processes:
- self.parser_quit.put(None)
- else:
- self.parser_quit.cancel_join_thread()
- for process in self.processes:
- self.parser_quit.put(None)
+
+ for process in self.processes:
+ self.parser_quit.put(None)
# Clean up the queue before calling process.join(), otherwise there might be
# deadlocks.
@@ -2078,9 +2147,17 @@ class CookerParser(object):
else:
process.join()
- sync = threading.Thread(target=self.bb_cache.sync)
+ self.parser_quit.close()
+ # Allow data left in the cancel queue to be discarded
+ self.parser_quit.cancel_join_thread()
+
+ def sync_caches():
+ for c in self.bb_caches.values():
+ c.sync()
+
+ sync = threading.Thread(target=sync_caches, name="SyncThread")
+ self.syncthread = sync
sync.start()
- multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
bb.codeparser.parser_cache_savemerge()
bb.fetch.fetcher_parse_done()
if self.cooker.configuration.profile:
@@ -2094,10 +2171,14 @@ class CookerParser(object):
bb.utils.process_profilelog(profiles, pout = pout)
print("Processed parsing statistics saved to %s" % (pout))
+ def final_cleanup(self):
+ if self.syncthread:
+ self.syncthread.join()
+
def load_cached(self):
- for filename, appends in self.fromcache:
- cached, infos = self.bb_cache.load(filename, appends)
- yield not cached, infos
+ for mc, cache, filename, appends in self.fromcache:
+ cached, infos = cache.load(filename, appends)
+ yield not cached, mc, infos
def parse_generator(self):
while True:
@@ -2119,7 +2200,7 @@ class CookerParser(object):
result = []
parsed = None
try:
- parsed, result = next(self.results)
+ parsed, mc, result = next(self.results)
except StopIteration:
self.shutdown()
return False
@@ -2175,13 +2256,16 @@ class CookerParser(object):
if info_array[0].skipped:
self.skipped += 1
self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
- (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
- self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
+ self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
parsed=parsed, watcher = self.cooker.add_filewatch)
return True
def reparse(self, filename):
- infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
- for vfn, info_array in infos:
- (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
- self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
+ to_reparse = set()
+ for mc in self.cooker.multiconfigs:
+ to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
+
+ for mc, filename, appends in to_reparse:
+ infos = self.bb_caches[mc].parse(filename, appends)
+ for vfn, info_array in infos:
+ self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
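
The CookerParser rework above keys everything by multiconfig: willparse entries become (mc, cache, filename, appends) tuples and chunkify() deals them round-robin to the parser processes. A minimal standalone sketch of that distribution, using placeholder filenames rather than the real tuples:

    def chunkify(lst, n):
        # element i of lst goes to sublist i % n
        return [lst[i::n] for i in range(n)]

    jobs = ["a.bb", "b.bb", "c.bb", "d.bb", "e.bb"]
    print(chunkify(jobs, 2))   # [['a.bb', 'c.bb', 'e.bb'], ['b.bb', 'd.bb']]
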
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index 472423fdc..91cc4347f 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -58,11 +58,18 @@ class ConfigParameters(object):
def updateToServer(self, server, environment):
options = {}
for o in ["abort", "force", "invalidate_stamp",
- "verbose", "debug", "dry_run", "dump_signatures",
- "debug_domains", "extra_assume_provided", "profile",
- "prefile", "postfile", "server_timeout"]:
+ "dry_run", "dump_signatures",
+ "extra_assume_provided", "profile",
+ "prefile", "postfile", "server_timeout",
+ "nosetscene", "setsceneonly", "skipsetscene",
+ "runall", "runonly", "writeeventlog"]:
options[o] = getattr(self.options, o)
+ options['build_verbose_shell'] = self.options.verbose
+ options['build_verbose_stdout'] = self.options.verbose
+ options['default_loglevel'] = bb.msg.loggerDefaultLogLevel
+ options['debug_domains'] = bb.msg.loggerDefaultDomains
+
ret, error = server.runCommand(["updateConfig", options, environment, sys.argv])
if error:
raise Exception("Unable to update the server configuration with local parameters: %s" % error)
@@ -111,11 +118,11 @@ class CookerConfiguration(object):
"""
def __init__(self):
- self.debug_domains = []
+ self.debug_domains = bb.msg.loggerDefaultDomains
+ self.default_loglevel = bb.msg.loggerDefaultLogLevel
self.extra_assume_provided = []
self.prefile = []
self.postfile = []
- self.debug = 0
self.cmd = None
self.abort = True
self.force = False
@@ -125,34 +132,21 @@ class CookerConfiguration(object):
self.skipsetscene = False
self.invalidate_stamp = False
self.dump_signatures = []
+ self.build_verbose_shell = False
+ self.build_verbose_stdout = False
self.dry_run = False
self.tracking = False
- self.xmlrpcinterface = []
- self.server_timeout = None
self.writeeventlog = False
- self.server_only = False
self.limited_deps = False
self.runall = []
self.runonly = []
self.env = {}
- def setConfigParameters(self, parameters):
- for key in self.__dict__.keys():
- if key in parameters.options.__dict__:
- setattr(self, key, parameters.options.__dict__[key])
- self.env = parameters.environment.copy()
-
- def setServerRegIdleCallback(self, srcb):
- self.server_register_idlecallback = srcb
-
def __getstate__(self):
state = {}
for key in self.__dict__.keys():
- if key == "server_register_idlecallback":
- state[key] = None
- else:
- state[key] = getattr(self, key)
+ state[key] = getattr(self, key)
return state
def __setstate__(self,state):
@@ -170,7 +164,7 @@ def catch_parse_error(func):
import traceback
parselog.critical(traceback.format_exc())
parselog.critical("Unable to parse %s: %s" % (fn, exc))
- sys.exit(1)
+ raise bb.BBHandledException()
except bb.data_smart.ExpansionError as exc:
import traceback
@@ -182,10 +176,10 @@ def catch_parse_error(func):
if not fn.startswith(bbdir):
break
parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
- sys.exit(1)
+ raise bb.BBHandledException()
except bb.parse.ParseError as exc:
parselog.critical(str(exc))
- sys.exit(1)
+ raise bb.BBHandledException()
return wrapped
@catch_parse_error
@@ -306,13 +300,13 @@ class CookerDataBuilder(object):
self.data_hash = data_hash.hexdigest()
except (SyntaxError, bb.BBHandledException):
- raise bb.BBHandledException
+ raise bb.BBHandledException()
except bb.data_smart.ExpansionError as e:
logger.error(str(e))
- raise bb.BBHandledException
+ raise bb.BBHandledException()
except Exception:
logger.exception("Error parsing configuration files")
- raise bb.BBHandledException
+ raise bb.BBHandledException()
# Create a copy so we can reset at a later date when UIs disconnect
self.origdata = self.data
@@ -361,7 +355,7 @@ class CookerDataBuilder(object):
for layer in broken_layers:
parselog.critical(" %s", layer)
parselog.critical("Please check BBLAYERS in %s" % (layerconf))
- sys.exit(1)
+ raise bb.BBHandledException()
for layer in layers:
parselog.debug(2, "Adding layer %s", layer)
@@ -387,10 +381,13 @@ class CookerDataBuilder(object):
invalid.append(entry)
continue
l, f = parts
- if l in collections:
+ invert = l[0] == "!"
+ if invert:
+ l = l[1:]
+ if (l in collections and not invert) or (l not in collections and invert):
data.appendVar("BBFILES", " " + f)
if invalid:
- bb.fatal("BBFILES_DYNAMIC entries must be of the form <collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))
+ bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))
layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
collections_tmp = collections[:]
@@ -430,7 +427,7 @@ class CookerDataBuilder(object):
handlerfn = data.getVarFlag(var, "filename", False)
if not handlerfn:
parselog.critical("Undefined event handler function '%s'" % var)
- sys.exit(1)
+ raise bb.BBHandledException()
handlerln = int(data.getVarFlag(var, "lineno", False))
bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)
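
The BBFILES_DYNAMIC hunk above adds a "!" prefix that inverts the collection test. A minimal sketch of the selection logic, with hypothetical collection and entry values:

    collections = {"meta-foo"}
    entries = ["meta-foo:recipes-foo/*.bb", "!meta-bar:recipes-fallback/*.bb"]

    for entry in entries:
        l, f = entry.split(":", 1)
        invert = l.startswith("!")
        if invert:
            l = l[1:]
        # append when the collection is present (normal) or absent (inverted)
        if (l in collections and not invert) or (l not in collections and invert):
            print("BBFILES += " + f)

Both entries match here: meta-foo is enabled and meta-bar is not, so the fallback pattern is pulled in.
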
diff --git a/poky/bitbake/lib/bb/daemonize.py b/poky/bitbake/lib/bb/daemonize.py
index f01e6ec7c..c187fcfc6 100644
--- a/poky/bitbake/lib/bb/daemonize.py
+++ b/poky/bitbake/lib/bb/daemonize.py
@@ -14,6 +14,8 @@ import sys
import io
import traceback
+import bb
+
def createDaemon(function, logfile):
"""
Detach a process from the controlling terminal and run it in the
diff --git a/poky/bitbake/lib/bb/data.py b/poky/bitbake/lib/bb/data.py
index b0683c518..97022853c 100644
--- a/poky/bitbake/lib/bb/data.py
+++ b/poky/bitbake/lib/bb/data.py
@@ -161,6 +161,12 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
return True
if func:
+ # Write a comment indicating where the shell function came from (line number and filename) to make it easier
+ # for the user to diagnose task failures. This comment is also used by build.py to determine the metadata
+ # location of shell functions.
+ o.write("# line: {0}, file: {1}\n".format(
+ d.getVarFlag(var, "lineno", False),
+ d.getVarFlag(var, "filename", False)))
# NOTE: should probably check for unbalanced {} within the var
val = val.rstrip('\n')
o.write("%s() {\n%s\n}\n" % (varExpanded, val))
diff --git a/poky/bitbake/lib/bb/data_smart.py b/poky/bitbake/lib/bb/data_smart.py
index 7f1b6dcb4..c559102cf 100644
--- a/poky/bitbake/lib/bb/data_smart.py
+++ b/poky/bitbake/lib/bb/data_smart.py
@@ -189,7 +189,7 @@ class IncludeHistory(object):
if self.current.parent:
self.current = self.current.parent
else:
- bb.warn("Include log: Tried to finish '%s' at top level." % filename)
+ bb.warn("Include log: Tried to finish '%s' at top level." % self.filename)
return False
def emit(self, o, level = 0):
diff --git a/poky/bitbake/lib/bb/event.py b/poky/bitbake/lib/bb/event.py
index 0e6d9b296..694b47052 100644
--- a/poky/bitbake/lib/bb/event.py
+++ b/poky/bitbake/lib/bb/event.py
@@ -10,17 +10,17 @@ BitBake build tools.
# SPDX-License-Identifier: GPL-2.0-only
#
-import sys
-import pickle
-import logging
-import atexit
-import traceback
import ast
+import atexit
+import collections
+import logging
+import pickle
+import sys
import threading
+import traceback
-import bb.utils
-import bb.compat
import bb.exceptions
+import bb.utils
# This is the pid for which we should generate the event. This is set when
# the runqueue forks off.
@@ -56,7 +56,7 @@ def set_class_handlers(h):
_handlers = h
def clean_class_handlers():
- return bb.compat.OrderedDict()
+ return collections.OrderedDict()
# Internal
_handlers = clean_class_handlers()
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index eb112f069..551bfb70f 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -1195,8 +1195,6 @@ def get_checksum_file_list(d):
paths = ud.method.localpaths(ud, d)
for f in paths:
pth = ud.decodedurl
- if '*' in pth:
- f = os.path.join(os.path.abspath(f), pth)
if f.startswith(dl_dir):
# The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
if os.path.exists(f):
@@ -1365,9 +1363,6 @@ class FetchMethod(object):
# We cannot compute checksums for directories
if os.path.isdir(urldata.localpath):
return False
- if urldata.localpath.find("*") != -1:
- return False
-
return True
def recommends_checksum(self, urldata):
@@ -1430,11 +1425,6 @@ class FetchMethod(object):
iterate = False
file = urldata.localpath
- # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
- # but it must be corrected back for local files copying
- if urldata.basename == '*' and file.endswith('/.'):
- file = '%s/%s' % (file.rstrip('/.'), urldata.path)
-
try:
unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
except ValueError as exc:
@@ -1530,7 +1520,7 @@ class FetchMethod(object):
if urlpath.find("/") != -1:
destdir = urlpath.rsplit("/", 1)[0] + '/'
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
- cmd = 'cp -fpPRH %s %s' % (file, destdir)
+ cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
if not cmd:
return
@@ -1613,10 +1603,15 @@ class FetchMethod(object):
"""
if os.path.exists(ud.localpath):
return True
- if ud.localpath.find("*") != -1:
- return True
return False
+ def implicit_urldata(self, ud, d):
+ """
+ Get a list of FetchData objects for any implicit URLs that will also
+ be downloaded when we fetch the given URL.
+ """
+ return []
+
class Fetch(object):
def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
if localonly and cache:
@@ -1842,6 +1837,24 @@ class Fetch(object):
if ud.lockfile:
bb.utils.unlockfile(lf)
+ def expanded_urldata(self, urls=None):
+ """
+ Get an expanded list of FetchData objects covering both the given
+ URLs and any additional implicit URLs that are added automatically by
+ the appropriate FetchMethod.
+ """
+
+ if not urls:
+ urls = self.urls
+
+ urldata = []
+ for url in urls:
+ ud = self.ud[url]
+ urldata.append(ud)
+ urldata += ud.method.implicit_urldata(ud, self.d)
+
+ return urldata
+
class FetchConnectionCache(object):
"""
A class which represents a container for socket connections.
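
A brief usage sketch of the new expanded_urldata() API, assuming a populated datastore d; with the gitsm fetcher the result also covers the submodules' implicit URLs:

    import bb.fetch2

    fetch = bb.fetch2.Fetch(d.getVar("SRC_URI").split(), d)
    for ud in fetch.expanded_urldata():
        # explicit URLs plus any implicit ones contributed by the method
        print(ud.url)
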
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 5b3793a70..07064c694 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -236,7 +236,7 @@ class Git(FetchMethod):
ud.unresolvedrev[name] = ud.revisions[name]
ud.revisions[name] = self.latest_revision(ud, d, name)
- gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
+ gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_'))
if gitsrcname.startswith('.'):
gitsrcname = gitsrcname[1:]
@@ -342,7 +342,7 @@ class Git(FetchMethod):
# We do this since git will use a "-l" option automatically for local urls where possible
if repourl.startswith("file://"):
repourl = repourl[7:]
- clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
+ clone_cmd = "LANG=C %s clone --bare --mirror \"%s\" %s --progress" % (ud.basecmd, repourl, ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -354,8 +354,8 @@ class Git(FetchMethod):
if "origin" in output:
runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
- runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
- fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
+ runfetchcmd("%s remote add --mirror=fetch origin \"%s\"" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
+ fetch_cmd = "LANG=C %s fetch -f --progress \"%s\" refs/*:refs/*" % (ud.basecmd, repourl)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -475,6 +475,9 @@ class Git(FetchMethod):
need_lfs = ud.parm.get("lfs", "1") == "1"
+ if not need_lfs:
+ ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd
+
source_found = False
source_error = []
@@ -501,12 +504,12 @@ class Git(FetchMethod):
raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
repourl = self._get_repo_url(ud)
- runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
+ runfetchcmd("%s remote set-url origin \"%s\"" % (ud.basecmd, repourl), d, workdir=destdir)
if self._contains_lfs(ud, d, destdir):
if need_lfs and not self._find_git_lfs(d):
raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
- else:
+ elif not need_lfs:
bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
if not ud.nocheckout:
@@ -563,8 +566,15 @@ class Git(FetchMethod):
"""
Check if the repository has 'lfs' (large file) content
"""
- cmd = "%s grep lfs HEAD:.gitattributes | wc -l" % (
- ud.basecmd)
+
+ if not ud.nobranch:
+ branchname = ud.branches[ud.names[0]]
+ else:
+ branchname = "master"
+
+ cmd = "%s grep lfs origin/%s:.gitattributes | wc -l" % (
+ ud.basecmd, ud.branches[ud.names[0]])
+
try:
output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
if int(output) > 0:
@@ -613,7 +623,7 @@ class Git(FetchMethod):
d.setVar('_BB_GIT_IN_LSREMOTE', '1')
try:
repourl = self._get_repo_url(ud)
- cmd = "%s ls-remote %s %s" % \
+ cmd = "%s ls-remote \"%s\" %s" % \
(ud.basecmd, repourl, search)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, cmd, repourl)
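
The lfs=0 handling above works by prefixing the git command with GIT_LFS_SKIP_SMUDGE=1 so checkouts skip the LFS smudge filter. A rough sketch of the effect, with a hypothetical stand-in for the FetchData object:

    class UD:                      # hypothetical stand-in for FetchData
        basecmd = "git"
        parm = {"lfs": "0"}        # SRC_URI carried ;lfs=0

    ud = UD()
    need_lfs = ud.parm.get("lfs", "1") == "1"
    if not need_lfs:
        ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd
    print(ud.basecmd)              # GIT_LFS_SKIP_SMUDGE=1 git
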
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index e7083001d..d6e5c5c05 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -143,12 +143,43 @@ class GitSM(Git):
try:
# Check for the nugget dropped by the download operation
known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
- (ud.basecmd), d, workdir=ud.clonedir)
+ (ud.basecmd), d, workdir=ud.clonedir)
- if ud.revisions[ud.names[0]] not in known_srcrevs.split():
- return True
+ if ud.revisions[ud.names[0]] in known_srcrevs.split():
+ return False
except bb.fetch2.FetchError:
- # No srcrev nuggets, so this is new and needs to be updated
+ pass
+
+ need_update_list = []
+ def need_update_submodule(ud, url, module, modpath, workdir, d):
+ url += ";bareclone=1;nobranch=1"
+
+ try:
+ newfetch = Fetch([url], d, cache=False)
+ new_ud = newfetch.ud[url]
+ if new_ud.method.need_update(new_ud, d):
+ need_update_list.append(modpath)
+ except Exception as e:
+ logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
+ need_update_list.append(modpath)
+
+ # If we're using a shallow mirror tarball it needs to be unpacked
+ # temporarily so that we can examine the .gitmodules file
+ if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
+ tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+ runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
+ self.process_submodules(ud, tmpdir, need_update_submodule, d)
+ shutil.rmtree(tmpdir)
+ else:
+ self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
+ if len(need_update_list) == 0:
+ # We already have the required commits of all submodules. Drop
+ # a nugget so we don't need to check again.
+ runfetchcmd("%s config --add bitbake.srcrev %s" % \
+ (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
+
+ if len(need_update_list) > 0:
+ logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
return True
return False
@@ -163,9 +194,6 @@ class GitSM(Git):
try:
newfetch = Fetch([url], d, cache=False)
newfetch.download()
- # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
- runfetchcmd("%s config --add bitbake.srcrev %s" % \
- (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=workdir)
except Exception as e:
logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
raise
@@ -181,6 +209,9 @@ class GitSM(Git):
shutil.rmtree(tmpdir)
else:
self.process_submodules(ud, ud.clonedir, download_submodule, d)
+ # Drop a nugget for the srcrev we've fetched (used by need_update)
+ runfetchcmd("%s config --add bitbake.srcrev %s" % \
+ (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
def unpack(self, ud, destdir, d):
def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -223,3 +254,24 @@ class GitSM(Git):
# up the configuration and checks out the files. The main project config should remain
# unmodified, and no download from the internet should occur.
runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+
+ def implicit_urldata(self, ud, d):
+ import shutil, subprocess, tempfile
+
+ urldata = []
+ def add_submodule(ud, url, module, modpath, workdir, d):
+ url += ";bareclone=1;nobranch=1"
+ newfetch = Fetch([url], d, cache=False)
+ urldata.extend(newfetch.expanded_urldata())
+
+ # If we're using a shallow mirror tarball it needs to be unpacked
+ # temporarily so that we can examine the .gitmodules file
+ if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
+ tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+ subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
+ self.process_submodules(ud, tmpdir, add_submodule, d)
+ shutil.rmtree(tmpdir)
+ else:
+ self.process_submodules(ud, ud.clonedir, add_submodule, d)
+
+ return urldata
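
The reworked need_update() above only records the bitbake.srcrev nugget once every submodule commit is known to be present. A condensed sketch of the decision flow, with hypothetical arguments standing in for the fetcher internals:

    def need_update(revision, known_srcrevs, missing_submodules):
        if revision in known_srcrevs:     # nugget already recorded, fast path
            return False
        if missing_submodules:
            return True
        known_srcrevs.add(revision)       # drop the nugget so the walk is skipped next time
        return False

    print(need_update("abc123", set(), missing_submodules=False))   # False
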
diff --git a/poky/bitbake/lib/bb/fetch2/local.py b/poky/bitbake/lib/bb/fetch2/local.py
index 01d9ff9f8..25d4557db 100644
--- a/poky/bitbake/lib/bb/fetch2/local.py
+++ b/poky/bitbake/lib/bb/fetch2/local.py
@@ -17,7 +17,7 @@ import os
import urllib.request, urllib.parse, urllib.error
import bb
import bb.utils
-from bb.fetch2 import FetchMethod, FetchError
+from bb.fetch2 import FetchMethod, FetchError, ParameterError
from bb.fetch2 import logger
class Local(FetchMethod):
@@ -33,6 +33,8 @@ class Local(FetchMethod):
ud.basename = os.path.basename(ud.decodedurl)
ud.basepath = ud.decodedurl
ud.needdonestamp = False
+ if "*" in ud.decodedurl:
+ raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url)
return
def localpath(self, urldata, d):
@@ -55,12 +57,6 @@ class Local(FetchMethod):
logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
newpath, hist = bb.utils.which(filespath, path, history=True)
searched.extend(hist)
- if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
- # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
- newpath, hist = bb.utils.which(filespath, ".", history=True)
- searched.extend(hist)
- logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
- return searched
if not os.path.exists(newpath):
dldirfile = os.path.join(d.getVar("DL_DIR"), path)
logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
@@ -70,8 +66,6 @@ class Local(FetchMethod):
return searched
def need_update(self, ud, d):
- if ud.url.find("*") != -1:
- return False
if os.path.exists(ud.localpath):
return False
return True
@@ -95,9 +89,6 @@ class Local(FetchMethod):
"""
Check the status of the url
"""
- if urldata.localpath.find("*") != -1:
- logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
- return True
if os.path.exists(urldata.localpath):
return True
return False
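
With glob support removed, a wildcard in a file:// URL now fails fast in urldata_init rather than being partially handled downstream. A sketch of the new check (ValueError stands in for bb.fetch2.ParameterError to keep this self-contained):

    decodedurl = "*.patch"     # hypothetical file:// SRC_URI path
    if "*" in decodedurl:
        raise ValueError("file:// urls using globbing are no longer supported. "
                         "Please place the files in a directory and reference that instead.")
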
diff --git a/poky/bitbake/lib/bb/fetch2/osc.py b/poky/bitbake/lib/bb/fetch2/osc.py
index 8f091efd0..3a6cd2951 100644
--- a/poky/bitbake/lib/bb/fetch2/osc.py
+++ b/poky/bitbake/lib/bb/fetch2/osc.py
@@ -8,12 +8,15 @@ Based on the svn "Fetch" implementation.
"""
import logging
+import os
import bb
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import MissingParameterError
from bb.fetch2 import runfetchcmd
+logger = logging.getLogger(__name__)
+
class Osc(FetchMethod):
"""Class to fetch a module or modules from Opensuse build server
repositories."""
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index f57c2a4f5..6f3c95b6c 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -1,6 +1,20 @@
"""
BitBake 'Fetch' implementation for perforce
+Supported SRC_URI options are:
+
+- module
+ The top-level location to fetch while preserving the remote paths
+
+ The value of module can point to either a directory or a file. The result,
+ in both cases, is that the fetcher will preserve all file paths starting
+ from the module path. That is, the top-level directory in the module value
+ will also be the top-level directory in P4DIR.
+
+- remotepath
+ If the value "keep" is given, the full depot location of each file is
+ preserved in P4DIR. This option overrides the effect of the module option.
+
"""
# Copyright (C) 2003, 2004 Chris Larson
@@ -17,6 +31,36 @@ from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
+class PerforceProgressHandler (bb.progress.BasicProgressHandler):
+ """
+ Implements basic progress information for perforce, based on the number of
+ files to be downloaded.
+
+ The p4 print command prints one line per file, so it can be used to "count"
+ the number of files already completed and give an indication of progress.
+ """
+ def __init__(self, d, num_files):
+ self._num_files = num_files
+ self._count = 0
+ super(PerforceProgressHandler, self).__init__(d)
+
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(-1)
+
+ def write(self, string):
+ self._count = self._count + 1
+
+ percent = int(100.0 * float(self._count) / float(self._num_files))
+
+ # In case something goes wrong, we try to preserve our sanity
+ if percent > 100:
+ percent = 100
+
+ self.update(percent)
+
+ super(PerforceProgressHandler, self).write(string)
+
class Perforce(FetchMethod):
""" Class to fetch from perforce repositories """
def supports(self, ud, d):
@@ -58,14 +102,33 @@ class Perforce(FetchMethod):
logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
if not ud.host:
raise FetchError('Could not determine P4PORT from P4CONFIG')
-
+
+ # Fetcher options
+ ud.module = ud.parm.get('module')
+ ud.keepremotepath = (ud.parm.get('remotepath', '') == 'keep')
+
if ud.path.find('/...') >= 0:
ud.pathisdir = True
else:
ud.pathisdir = False
+ # Avoid using the "/..." syntax in SRC_URI when a module value is given
+ if ud.pathisdir and ud.module:
+ raise FetchError('SRC_URI depot path cannot end in /... when a module value is given')
+
cleanedpath = ud.path.replace('/...', '').replace('/', '.')
cleanedhost = ud.host.replace(':', '.')
+
+ # Merge the path and module into the final depot location
+ if ud.module:
+ if ud.module.find('/') == 0:
+ raise FetchError('module cannot begin with /')
+ ud.path = os.path.join(ud.path, ud.module)
+
+ # Append the module path to the local pkg name
+ cleanedmodule = ud.module.replace('/...', '').replace('/', '.')
+ cleanedpath += '--%s' % cleanedmodule
+
ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)
ud.setup_revisions(d)
@@ -95,10 +158,20 @@ class Perforce(FetchMethod):
pathnrev = '%s' % (ud.path)
if depot_filename:
- if ud.pathisdir: # Remove leading path to obtain filename
+ if ud.keepremotepath:
+ # preserve everything, remove the leading //
+ filename = depot_filename.lstrip('/')
+ elif ud.module:
+ # remove everything up to the module path
+ modulepath = ud.module.rstrip('/...')
+ filename = depot_filename[depot_filename.rfind(modulepath):]
+ elif ud.pathisdir:
+ # Remove leading (visible) path to obtain the filepath
filename = depot_filename[len(ud.path)-1:]
else:
+ # Remove everything except the filename
filename = depot_filename[depot_filename.rfind('/'):]
+
filename = filename[:filename.find('#')] # Remove trailing '#rev'
if command == 'changes':
@@ -150,10 +223,12 @@ class Perforce(FetchMethod):
bb.utils.remove(ud.pkgdir, True)
bb.utils.mkdirhier(ud.pkgdir)
+ progresshandler = PerforceProgressHandler(d, len(filelist))
+
for afile in filelist:
p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
bb.fetch2.check_network_access(d, p4fetchcmd, ud.url)
- runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)
+ runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir, log=progresshandler)
runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
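
The three filename cases in the perforce hunk above can be condensed as follows; this is a simplified sketch with hypothetical depot paths, not the fetcher code itself:

    def local_name(depot_filename, path, module=None, keepremotepath=False):
        if keepremotepath:
            # remotepath=keep: preserve the full depot location, minus the leading //
            return depot_filename.lstrip('/')
        if module:
            # module=...: keep everything from the module path onwards
            return depot_filename[depot_filename.rfind(module):]
        # default: keep only what lies below the depot path
        return depot_filename[len(path) - 1:]

    print(local_name("//depot/proj/src/a.c", "//depot/proj/", keepremotepath=True))  # depot/proj/src/a.c
    print(local_name("//depot/proj/src/a.c", "//depot/proj/", module="src"))         # src/a.c
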
diff --git a/poky/bitbake/lib/bb/fetch2/ssh.py b/poky/bitbake/lib/bb/fetch2/ssh.py
index 5e982ecf3..2c8557e1f 100644
--- a/poky/bitbake/lib/bb/fetch2/ssh.py
+++ b/poky/bitbake/lib/bb/fetch2/ssh.py
@@ -31,8 +31,7 @@ IETF secsh internet draft:
#
import re, os
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import runfetchcmd
+from bb.fetch2 import check_network_access, FetchMethod, ParameterError, runfetchcmd
__pattern__ = re.compile(r'''
@@ -65,7 +64,7 @@ class SSH(FetchMethod):
def urldata_init(self, urldata, d):
if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
- raise bb.fetch2.ParameterError(
+ raise ParameterError(
"Invalid protocol - if you wish to fetch from a git " +
"repository using ssh, you need to use " +
"git:// prefix with protocol=ssh", urldata.url)
@@ -105,7 +104,7 @@ class SSH(FetchMethod):
dldir
)
- bb.fetch2.check_network_access(d, cmd, urldata.url)
+ check_network_access(d, cmd, urldata.url)
runfetchcmd(cmd, d)
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index f7d1de26b..e6d9f528d 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -208,10 +208,7 @@ class Wget(FetchMethod):
fetch.connection_cache.remove_connection(h.host, h.port)
raise urllib.error.URLError(err)
else:
- try:
- r = h.getresponse(buffering=True)
- except TypeError: # buffering kw not supported
- r = h.getresponse()
+ r = h.getresponse()
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
diff --git a/poky/bitbake/lib/bb/main.py b/poky/bitbake/lib/bb/main.py
index af2880f8d..7990195ea 100755
--- a/poky/bitbake/lib/bb/main.py
+++ b/poky/bitbake/lib/bb/main.py
@@ -344,8 +344,6 @@ def bitbake_main(configParams, configuration):
except:
pass
- configuration.setConfigParameters(configParams)
-
if configParams.server_only and configParams.remote_server:
raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
("the BBSERVER environment variable" if "BBSERVER" in os.environ \
@@ -357,13 +355,13 @@ def bitbake_main(configParams, configuration):
if "BBDEBUG" in os.environ:
level = int(os.environ["BBDEBUG"])
- if level > configuration.debug:
- configuration.debug = level
+ if level > configParams.debug:
+ configParams.debug = level
- bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
- configuration.debug_domains)
+ bb.msg.init_msgconfig(configParams.verbose, configParams.debug,
+ configParams.debug_domains)
- server_connection, ui_module = setup_bitbake(configParams, configuration)
+ server_connection, ui_module = setup_bitbake(configParams)
# No server connection
if server_connection is None:
if configParams.status_only:
@@ -390,7 +388,7 @@ def bitbake_main(configParams, configuration):
return 1
-def setup_bitbake(configParams, configuration, extrafeatures=None):
+def setup_bitbake(configParams, extrafeatures=None):
# Ensure logging messages get sent to the UI as events
handler = bb.event.LogHandler()
if not configParams.status_only:
@@ -431,11 +429,11 @@ def setup_bitbake(configParams, configuration, extrafeatures=None):
logger.info("bitbake server is not running.")
lock.close()
return None, None
- # we start a server with a given configuration
+ # we start a server with a given featureset
logger.info("Starting bitbake server...")
# Clear the event queue since we already displayed messages
bb.event.ui_queue = []
- server = bb.server.process.BitBakeServer(lock, sockname, configuration, featureset)
+ server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface)
else:
logger.info("Reconnecting to bitbake server...")
diff --git a/poky/bitbake/lib/bb/msg.py b/poky/bitbake/lib/bb/msg.py
index c0b344e32..6f17b6acc 100644
--- a/poky/bitbake/lib/bb/msg.py
+++ b/poky/bitbake/lib/bb/msg.py
@@ -14,6 +14,7 @@ import sys
import copy
import logging
import logging.config
+import os
from itertools import groupby
import bb
import bb.event
@@ -146,18 +147,12 @@ class LogFilterLTLevel(logging.Filter):
#
loggerDefaultLogLevel = BBLogFormatter.NOTE
-loggerDefaultVerbose = False
-loggerVerboseLogs = False
loggerDefaultDomains = {}
def init_msgconfig(verbose, debug, debug_domains=None):
"""
Set default verbosity and debug levels config the logger
"""
- bb.msg.loggerDefaultVerbose = verbose
- if verbose:
- bb.msg.loggerVerboseLogs = True
-
if debug:
bb.msg.loggerDefaultLogLevel = BBLogFormatter.DEBUG - debug + 1
elif verbose:
@@ -280,7 +275,7 @@ def setLoggingConfig(defaultconfig, userconfigfile=None):
logconfig = copy.deepcopy(defaultconfig)
if userconfigfile:
- with open(userconfigfile, 'r') as f:
+ with open(os.path.normpath(userconfigfile), 'r') as f:
if userconfigfile.endswith('.yml') or userconfigfile.endswith('.yaml'):
import yaml
userconfig = yaml.load(f)
diff --git a/poky/bitbake/lib/bb/namedtuple_with_abc.py b/poky/bitbake/lib/bb/namedtuple_with_abc.py
index 646aed6ff..e46dbf084 100644
--- a/poky/bitbake/lib/bb/namedtuple_with_abc.py
+++ b/poky/bitbake/lib/bb/namedtuple_with_abc.py
@@ -61,17 +61,9 @@ class _NamedTupleABCMeta(ABCMeta):
return ABCMeta.__new__(mcls, name, bases, namespace)
-exec(
- # Python 2.x metaclass declaration syntax
- """class _NamedTupleABC(object):
- '''The abstract base class + mix-in for named tuples.'''
- __metaclass__ = _NamedTupleABCMeta
- _fields = abstractproperty()""" if version_info[0] < 3 else
- # Python 3.x metaclass declaration syntax
- """class _NamedTupleABC(metaclass=_NamedTupleABCMeta):
- '''The abstract base class + mix-in for named tuples.'''
- _fields = abstractproperty()"""
-)
+class _NamedTupleABC(metaclass=_NamedTupleABCMeta):
+ '''The abstract base class + mix-in for named tuples.'''
+ _fields = abstractproperty()
_namedtuple.abc = _NamedTupleABC
diff --git a/poky/bitbake/lib/bb/parse/ast.py b/poky/bitbake/lib/bb/parse/ast.py
index 785aa974e..0714296af 100644
--- a/poky/bitbake/lib/bb/parse/ast.py
+++ b/poky/bitbake/lib/bb/parse/ast.py
@@ -244,12 +244,14 @@ class AddTaskNode(AstNode):
bb.build.addtask(self.func, self.before, self.after, data)
class DelTaskNode(AstNode):
- def __init__(self, filename, lineno, func):
+ def __init__(self, filename, lineno, tasks):
AstNode.__init__(self, filename, lineno)
- self.func = func
+ self.tasks = tasks
def eval(self, data):
- bb.build.deltask(self.func, data)
+ tasks = data.expand(self.tasks).split()
+ for task in tasks:
+ bb.build.deltask(task, data)
class BBHandlerNode(AstNode):
def __init__(self, filename, lineno, fns):
@@ -305,7 +307,7 @@ def handleAddTask(statements, filename, lineno, m):
statements.append(AddTaskNode(filename, lineno, func, before, after))
def handleDelTask(statements, filename, lineno, m):
- func = m.group("func")
+ func = m.group(1)
if func is None:
return
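
deltask now takes a whitespace-separated, expandable list of tasks rather than a single literal name. A sketch of the new eval behaviour, faking the datastore expansion:

    tasks_value = "${EXTRA_DELTASKS} do_package"                  # as written in a recipe
    expanded = tasks_value.replace("${EXTRA_DELTASKS}",
                                   "do_fetch do_unpack")          # stand-in for data.expand()
    for task in expanded.split():
        print("deltask", task)     # each named task is removed individually
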
diff --git a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 6e216effb..215f940b6 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -26,7 +26,7 @@ __func_start_regexp__ = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(
__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
-__deltask_regexp__ = re.compile(r"deltask\s+(?P<func>\w+)(?P<ignores>.*)")
+__deltask_regexp__ = re.compile(r"deltask\s+(.+)")
__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
@@ -238,9 +238,6 @@ def feeder(lineno, s, fn, root, statements, eof=False):
m = __deltask_regexp__.match(s)
if m:
- # Check and warn "for deltask task1 task2"
- if m.group('ignores'):
- logger.warning('deltask ignored: "%s"' % m.group('ignores'))
ast.handleDelTask(statements, fn, lineno, m)
return
diff --git a/poky/bitbake/lib/bb/persist_data.py b/poky/bitbake/lib/bb/persist_data.py
index 7357ab2d4..5f4fbe350 100644
--- a/poky/bitbake/lib/bb/persist_data.py
+++ b/poky/bitbake/lib/bb/persist_data.py
@@ -12,14 +12,14 @@ currently, providing a key/value store accessed by 'domain'.
#
import collections
+import contextlib
+import functools
import logging
import os.path
+import sqlite3
import sys
import warnings
-from bb.compat import total_ordering
from collections import Mapping
-import sqlite3
-import contextlib
sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -28,7 +28,7 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
logger = logging.getLogger("BitBake.PersistData")
-@total_ordering
+@functools.total_ordering
class SQLTable(collections.MutableMapping):
class _Decorators(object):
@staticmethod
diff --git a/poky/bitbake/lib/bb/process.py b/poky/bitbake/lib/bb/process.py
index 2dc472a86..7c3995cce 100644
--- a/poky/bitbake/lib/bb/process.py
+++ b/poky/bitbake/lib/bb/process.py
@@ -7,6 +7,7 @@ import signal
import subprocess
import errno
import select
+import bb
logger = logging.getLogger('BitBake.Process')
@@ -41,6 +42,7 @@ class ExecutionError(CmdError):
self.exitcode = exitcode
self.stdout = stdout
self.stderr = stderr
+ self.extra_message = None
def __str__(self):
message = ""
@@ -51,7 +53,7 @@ class ExecutionError(CmdError):
if message:
message = ":\n" + message
return (CmdError.__str__(self) +
- " with exit code %s" % self.exitcode + message)
+ " with exit code %s" % self.exitcode + message + (self.extra_message or ""))
class Popen(subprocess.Popen):
defaults = {
diff --git a/poky/bitbake/lib/bb/progress.py b/poky/bitbake/lib/bb/progress.py
index 9c755b7f7..d051ba019 100644
--- a/poky/bitbake/lib/bb/progress.py
+++ b/poky/bitbake/lib/bb/progress.py
@@ -14,7 +14,27 @@ import bb.event
import bb.build
from bb.build import StdoutNoopContextManager
-class ProgressHandler(object):
+
+# from https://stackoverflow.com/a/14693789/221061
+ANSI_ESCAPE_REGEX = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')
+
+
+def filter_color(string):
+ """
+ Filter ANSI escape codes out of |string|, return new string
+ """
+ return ANSI_ESCAPE_REGEX.sub('', string)
+
+
+def filter_color_n(string):
+ """
+ Filter ANSI escape codes out of |string|, returns tuple of
+ (new string, # of ANSI codes removed)
+ """
+ return ANSI_ESCAPE_REGEX.subn('', string)
+
+
+class ProgressHandler:
"""
Base class that can pretend to be a file object well enough to be
used to build objects to intercept console output and determine the
@@ -55,6 +75,7 @@ class ProgressHandler(object):
self._lastevent = ts
self._progress = progress
+
class LineFilterProgressHandler(ProgressHandler):
"""
A ProgressHandler variant that provides the ability to filter out
@@ -66,7 +87,7 @@ class LineFilterProgressHandler(ProgressHandler):
"""
def __init__(self, d, outfile=None):
self._linebuffer = ''
- super(LineFilterProgressHandler, self).__init__(d, outfile)
+ super().__init__(d, outfile)
def write(self, string):
self._linebuffer += string
@@ -80,41 +101,44 @@ class LineFilterProgressHandler(ProgressHandler):
lbreakpos = line.rfind('\r') + 1
if lbreakpos:
line = line[lbreakpos:]
- if self.writeline(line):
- super(LineFilterProgressHandler, self).write(line)
+ if self.writeline(filter_color(line)):
+ super().write(line)
def writeline(self, line):
return True
+
class BasicProgressHandler(ProgressHandler):
def __init__(self, d, regex=r'(\d+)%', outfile=None):
- super(BasicProgressHandler, self).__init__(d, outfile)
+ super().__init__(d, outfile)
self._regex = re.compile(regex)
# Send an initial progress event so the bar gets shown
self._fire_progress(0)
def write(self, string):
- percs = self._regex.findall(string)
+ percs = self._regex.findall(filter_color(string))
if percs:
progress = int(percs[-1])
self.update(progress)
- super(BasicProgressHandler, self).write(string)
+ super().write(string)
+
class OutOfProgressHandler(ProgressHandler):
def __init__(self, d, regex, outfile=None):
- super(OutOfProgressHandler, self).__init__(d, outfile)
+ super().__init__(d, outfile)
self._regex = re.compile(regex)
# Send an initial progress event so the bar gets shown
self._fire_progress(0)
def write(self, string):
- nums = self._regex.findall(string)
+ nums = self._regex.findall(filter_color(string))
if nums:
progress = (float(nums[-1][0]) / float(nums[-1][1])) * 100
self.update(progress)
- super(OutOfProgressHandler, self).write(string)
+ super().write(string)
+
-class MultiStageProgressReporter(object):
+class MultiStageProgressReporter:
"""
Class which allows reporting progress without the caller
having to know where they are in the overall sequence. Useful
@@ -199,6 +223,7 @@ class MultiStageProgressReporter(object):
value is considered to be out of stage_total, otherwise it should
be a percentage value from 0 to 100.
"""
+ progress = None
if self._stage_total:
stage_progress = (float(stage_progress) / self._stage_total) * 100
if self._stage < 0:
@@ -207,9 +232,10 @@ class MultiStageProgressReporter(object):
progress = self._base_progress + (stage_progress * self._stage_weights[self._stage])
else:
progress = self._base_progress
- if progress > 100:
- progress = 100
- self._fire_progress(progress)
+ if progress:
+ if progress > 100:
+ progress = 100
+ self._fire_progress(progress)
def finish(self):
if self._finished:
@@ -230,6 +256,7 @@ class MultiStageProgressReporter(object):
out.append('Up to finish: %d' % stage_weight)
bb.warn('Stage times:\n %s' % '\n '.join(out))
+
class MultiStageProcessProgressReporter(MultiStageProgressReporter):
"""
Version of MultiStageProgressReporter intended for use with
@@ -238,7 +265,7 @@ class MultiStageProcessProgressReporter(MultiStageProgressReporter):
def __init__(self, d, processname, stage_weights, debug=False):
self._processname = processname
self._started = False
- MultiStageProgressReporter.__init__(self, d, stage_weights, debug)
+ super().__init__(d, stage_weights, debug)
def start(self):
if not self._started:
@@ -255,13 +282,14 @@ class MultiStageProcessProgressReporter(MultiStageProgressReporter):
MultiStageProgressReporter.finish(self)
bb.event.fire(bb.event.ProcessFinished(self._processname), self._data)
+
class DummyMultiStageProcessProgressReporter(MultiStageProgressReporter):
"""
MultiStageProcessProgressReporter that takes the calls and does nothing
with them (to avoid a bunch of "if progress_reporter:" checks)
"""
def __init__(self):
- MultiStageProcessProgressReporter.__init__(self, "", None, [])
+ super().__init__(None, [])
def _fire_progress(self, taskprogress, rate=None):
pass
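
Filtering ANSI escapes before matching means the progress regexes still fire when a tool colourises its output. A quick self-contained check of the filter_color() helper added above:

    import re

    ANSI_ESCAPE_REGEX = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')

    def filter_color(string):
        return ANSI_ESCAPE_REGEX.sub('', string)

    print(filter_color('\x1b[32m 42%\x1b[0m complete'))   # " 42% complete"
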
diff --git a/poky/bitbake/lib/bb/pysh/pyshyacc.py b/poky/bitbake/lib/bb/pysh/pyshyacc.py
index de565dc9a..924860a6f 100644
--- a/poky/bitbake/lib/bb/pysh/pyshyacc.py
+++ b/poky/bitbake/lib/bb/pysh/pyshyacc.py
@@ -570,6 +570,7 @@ def p_linebreak(p):
def p_separator_op(p):
"""separator_op : COMMA
+ | COMMA COMMA
| AMP"""
p[0] = p[1]
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 16f076f3b..28bdadb45 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -46,6 +46,12 @@ def split_tid(tid):
(mc, fn, taskname, _) = split_tid_mcfn(tid)
return (mc, fn, taskname)
+def split_mc(n):
+ if n.startswith("mc:"):
+ _, mc, n = n.split(":", 2)
+ return (mc, n)
+ return ('', n)
+
def split_tid_mcfn(tid):
if tid.startswith('mc:'):
elems = tid.split(':')
@@ -370,7 +376,7 @@ class RunQueueData:
self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or ""
self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
- self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData)
+ self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData, targets)
self.setscenewhitelist_checked = False
self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1")
self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter()
@@ -1184,8 +1190,9 @@ class RunQueueData:
return len(self.runtaskentries)
def prepare_task_hash(self, tid):
- bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches[mc_from_tid(tid)])
- self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches[mc_from_tid(tid)])
+ dc = bb.parse.siggen.get_data_caches(self.dataCaches, mc_from_tid(tid))
+ bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, dc)
+ self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, dc)
self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
def dump_data(self):
@@ -1256,8 +1263,8 @@ class RunQueue:
"fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
"logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
- "logdefaultverbose" : bb.msg.loggerDefaultVerbose,
- "logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
+ "build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
+ "build_verbose_stdout" : self.cooker.configuration.build_verbose_stdout,
"logdefaultdomain" : bb.msg.loggerDefaultDomains,
"prhost" : self.cooker.prhost,
"buildname" : self.cfgData.getVar("BUILDNAME"),
@@ -1557,7 +1564,8 @@ class RunQueue:
def rq_dump_sigfn(self, fn, options):
bb_cache = bb.cache.NoCache(self.cooker.databuilder)
- the_data = bb_cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn))
+ mc = bb.runqueue.mc_from_tid(fn)
+ the_data = bb_cache.loadDataFull(fn, self.cooker.collections[mc].get_file_appends(fn))
siggen = bb.parse.siggen
dataCaches = self.rqdata.dataCaches
siggen.dump_sigfn(fn, dataCaches, options)
@@ -2042,10 +2050,10 @@ class RunQueueExecute:
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not mc in self.rq.fakeworker:
self.rq.start_fakeworker(self, mc)
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
self.rq.worker[mc].process.stdin.flush()
self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2129,10 +2137,10 @@ class RunQueueExecute:
self.rq.state = runQueueFailed
self.stats.taskFailed()
return True
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
self.rq.worker[mc].process.stdin.flush()
self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2298,7 +2306,8 @@ class RunQueueExecute:
if len(self.rqdata.runtaskentries[p].depends) and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
continue
orighash = self.rqdata.runtaskentries[tid].hash
- newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches[mc_from_tid(tid)])
+ dc = bb.parse.siggen.get_data_caches(self.rqdata.dataCaches, mc_from_tid(tid))
+ newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, dc)
origuni = self.rqdata.runtaskentries[tid].unihash
newuni = bb.parse.siggen.get_unihash(tid)
# FIXME, need to check it can come from sstate at all for determinism?
@@ -2958,7 +2967,12 @@ class runQueuePipe():
while index != -1 and self.queue.startswith(b"<event>"):
try:
event = pickle.loads(self.queue[7:index])
- except ValueError as e:
+ except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e:
+ if isinstance(e, pickle.UnpicklingError) and "truncated" in str(e):
+ # The pickled data could contain "</event>" so search for the next occurrence
+ # and retry the unpickle; this should be the only way an unpickle error occurs
+ index = self.queue.find(b"</event>", index + 1)
+ continue
bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[7:index]))
bb.event.fire_from_worker(event, self.d)
if isinstance(event, taskUniHashUpdate):
@@ -2970,7 +2984,7 @@ class runQueuePipe():
while index != -1 and self.queue.startswith(b"<exitcode>"):
try:
task, status = pickle.loads(self.queue[10:index])
- except ValueError as e:
+ except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e:
bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index]))
self.rqexec.runqueue_process_waitpid(task, status)
found = True
@@ -2985,16 +2999,15 @@ class runQueuePipe():
print("Warning, worker left partial message: %s" % self.queue)
self.input.close()
-def get_setscene_enforce_whitelist(d):
+def get_setscene_enforce_whitelist(d, targets):
if d.getVar('BB_SETSCENE_ENFORCE') != '1':
return None
whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split()
outlist = []
for item in whitelist[:]:
if item.startswith('%:'):
- for target in sys.argv[1:]:
- if not target.startswith('-'):
- outlist.append(target.split(':')[0] + ':' + item.split(':')[1])
+ for (mc, target, task, fn) in targets:
+ outlist.append(target + ':' + item.split(':')[1])
else:
outlist.append(item)
return outlist
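
The new split_mc() helper peels a multiconfig prefix off a target name, which the whitelist rework above depends on. Its behaviour in isolation:

    def split_mc(n):
        if n.startswith("mc:"):
            _, mc, n = n.split(":", 2)
            return (mc, n)
        return ('', n)

    print(split_mc("mc:mc1:core-image-minimal"))   # ('mc1', 'core-image-minimal')
    print(split_mc("busybox"))                     # ('', 'busybox')
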
diff --git a/poky/bitbake/lib/bb/server/process.py b/poky/bitbake/lib/bb/server/process.py
index 69aae626e..c7cb34f0c 100644
--- a/poky/bitbake/lib/bb/server/process.py
+++ b/poky/bitbake/lib/bb/server/process.py
@@ -25,6 +25,7 @@ import subprocess
import errno
import re
import datetime
+import pickle
import bb.server.xmlrpcserver
from bb import daemonize
from multiprocessing import queues
@@ -34,12 +35,15 @@ logger = logging.getLogger('BitBake')
class ProcessTimeout(SystemExit):
pass
-class ProcessServer(multiprocessing.Process):
+def serverlog(msg):
+ print(str(os.getpid()) + " " + datetime.datetime.now().strftime('%H:%M:%S.%f') + " " + msg)
+ sys.stdout.flush()
+
+class ProcessServer():
profile_filename = "profile.log"
profile_processed_filename = "profile.log.processed"
- def __init__(self, lock, sock, sockname):
- multiprocessing.Process.__init__(self)
+ def __init__(self, lock, lockname, sock, sockname, server_timeout, xmlrpcinterface):
self.command_channel = False
self.command_channel_reply = False
self.quit = False
@@ -47,16 +51,22 @@ class ProcessServer(multiprocessing.Process):
self.next_heartbeat = time.time()
self.event_handle = None
+ self.hadanyui = False
self.haveui = False
- self.lastui = False
+ self.maxuiwait = 30
self.xmlrpc = False
self._idlefuns = {}
self.bitbake_lock = lock
+ self.bitbake_lock_name = lockname
self.sock = sock
self.sockname = sockname
+ self.server_timeout = server_timeout
+ self.timeout = self.server_timeout
+ self.xmlrpcinterface = xmlrpcinterface
+
def register_idle_function(self, function, data):
"""Register a function to be called while the server is idle"""
assert hasattr(function, '__call__')
@@ -67,22 +77,7 @@ class ProcessServer(multiprocessing.Process):
if self.xmlrpcinterface[0]:
self.xmlrpc = bb.server.xmlrpcserver.BitBakeXMLRPCServer(self.xmlrpcinterface, self.cooker, self)
- print("Bitbake XMLRPC server address: %s, server port: %s" % (self.xmlrpc.host, self.xmlrpc.port))
-
- heartbeat_event = self.cooker.data.getVar('BB_HEARTBEAT_EVENT')
- if heartbeat_event:
- try:
- self.heartbeat_seconds = float(heartbeat_event)
- except:
- bb.warn('Ignoring invalid BB_HEARTBEAT_EVENT=%s, must be a float specifying seconds.' % heartbeat_event)
-
- self.timeout = self.server_timeout or self.cooker.data.getVar('BB_SERVER_TIMEOUT')
- try:
- if self.timeout:
- self.timeout = float(self.timeout)
- except:
- bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout)
-
+ serverlog("Bitbake XMLRPC server address: %s, server port: %s" % (self.xmlrpc.host, self.xmlrpc.port))
try:
self.bitbake_lock.seek(0)
@@ -93,7 +88,7 @@ class ProcessServer(multiprocessing.Process):
self.bitbake_lock.write("%s\n" % (os.getpid()))
self.bitbake_lock.flush()
except Exception as e:
- print("Error writing to lock file: %s" % str(e))
+ serverlog("Error writing to lock file: %s" % str(e))
pass
if self.cooker.configuration.profile:
@@ -107,7 +102,7 @@ class ProcessServer(multiprocessing.Process):
prof.dump_stats("profile.log")
bb.utils.process_profilelog("profile.log")
- print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
+ serverlog("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
else:
ret = self.main()
@@ -126,10 +121,11 @@ class ProcessServer(multiprocessing.Process):
fds = [self.sock]
if self.xmlrpc:
fds.append(self.xmlrpc)
- print("Entering server connection loop")
+ seendata = False
+ serverlog("Entering server connection loop")
def disconnect_client(self, fds):
- print("Disconnecting Client")
+ serverlog("Disconnecting Client")
if self.controllersock:
fds.remove(self.controllersock)
self.controllersock.close()
@@ -147,30 +143,31 @@ class ProcessServer(multiprocessing.Process):
self.haveui = False
ready = select.select(fds,[],[],0)[0]
if newconnections:
- print("Starting new client")
+ serverlog("Starting new client")
conn = newconnections.pop(-1)
fds.append(conn)
self.controllersock = conn
elif self.timeout is None and not ready:
- print("No timeout, exiting.")
+ serverlog("No timeout, exiting.")
self.quit = True
+ self.lastui = time.time()
while not self.quit:
if self.sock in ready:
while select.select([self.sock],[],[],0)[0]:
controllersock, address = self.sock.accept()
if self.controllersock:
- print("Queuing %s (%s)" % (str(ready), str(newconnections)))
+ serverlog("Queuing %s (%s)" % (str(ready), str(newconnections)))
newconnections.append(controllersock)
else:
- print("Accepting %s (%s)" % (str(ready), str(newconnections)))
+ serverlog("Accepting %s (%s)" % (str(ready), str(newconnections)))
self.controllersock = controllersock
fds.append(controllersock)
if self.controllersock in ready:
try:
- print("Processing Client")
+ serverlog("Processing Client")
ui_fds = recvfds(self.controllersock, 3)
- print("Connecting Client")
+ serverlog("Connecting Client")
# Where to write events to
writer = ConnectionWriter(ui_fds[0])
@@ -187,13 +184,21 @@ class ProcessServer(multiprocessing.Process):
self.command_channel_reply = writer
self.haveui = True
+ self.hadanyui = True
except (EOFError, OSError):
disconnect_client(self, fds)
- if not self.timeout == -1.0 and not self.haveui and self.lastui and self.timeout and \
+ if not self.timeout == -1.0 and not self.haveui and self.timeout and \
(self.lastui + self.timeout) < time.time():
- print("Server timeout, exiting.")
+ serverlog("Server timeout, exiting.")
+ self.quit = True
+
+ # If we don't see a UI connection within maxuiwait, it's unlikely we're going to see
+ # one. We have had issues with processes hanging indefinitely so timing out UI-less
+ # servers is useful.
+ if not self.hadanyui and not self.xmlrpc and not self.timeout and (self.lastui + self.maxuiwait) < time.time():
+ serverlog("No UI connection within max timeout, exiting to avoid infinite loop.")
self.quit = True
if self.command_channel in ready:
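A minimal sketch of the UI-less watchdog check above, pulled out as a standalone function (the helper itself is hypothetical; the names mirror the server attributes):

    import time

    def should_exit_uiless(hadanyui, xmlrpc, timeout, lastui, maxuiwait=30):
        # Only fires when no UI ever connected, no XMLRPC interface is
        # configured, no explicit timeout is set, and the grace period passed
        return (not hadanyui and not xmlrpc and not timeout
                and (lastui + maxuiwait) < time.time())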
@@ -208,22 +213,45 @@ class ProcessServer(multiprocessing.Process):
self.quit = True
continue
try:
- print("Running command %s" % command)
+ serverlog("Running command %s" % command)
self.command_channel_reply.send(self.cooker.command.runCommand(command))
+ serverlog("Command Completed")
except Exception as e:
logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e)))
if self.xmlrpc in ready:
self.xmlrpc.handle_requests()
+ if not seendata and hasattr(self.cooker, "data"):
+ heartbeat_event = self.cooker.data.getVar('BB_HEARTBEAT_EVENT')
+ if heartbeat_event:
+ try:
+ self.heartbeat_seconds = float(heartbeat_event)
+ except:
+ bb.warn('Ignoring invalid BB_HEARTBEAT_EVENT=%s, must be a float specifying seconds.' % heartbeat_event)
+
+ self.timeout = self.server_timeout or self.cooker.data.getVar('BB_SERVER_TIMEOUT')
+ try:
+ if self.timeout:
+ self.timeout = float(self.timeout)
+ except:
+ bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout)
+ seendata = True
+
ready = self.idle_commands(.1, fds)
- print("Exiting")
+ if len(threading.enumerate()) != 1:
+ serverlog("More than one thread left?: " + str(threading.enumerate()))
+
+ serverlog("Exiting")
# Remove the socket file so we don't get any more connections, avoiding races
- os.unlink(self.sockname)
+ try:
+ os.unlink(self.sockname)
+ except:
+ pass
self.sock.close()
- try:
+ try:
self.cooker.shutdown(True)
self.cooker.notifier.stop()
self.cooker.confignotifier.stop()
@@ -232,41 +260,73 @@ class ProcessServer(multiprocessing.Process):
self.cooker.post_serve()
+ # Flush logs before we release the lock
+ sys.stdout.flush()
+ sys.stderr.flush()
+
# Finally release the lockfile but warn about other processes holding it open
lock = self.bitbake_lock
- lockfile = lock.name
+ lockfile = self.bitbake_lock_name
+
+ def get_lock_contents(lockfile):
+ try:
+ with open(lockfile, "r") as f:
+ return f.readlines()
+ except FileNotFoundError:
+ return None
+
+ lockcontents = get_lock_contents(lockfile)
+ serverlog("Original lockfile contents: " + str(lockcontents))
+
lock.close()
lock = None
while not lock:
- with bb.utils.timeout(3):
- lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
- if lock:
- # We hold the lock so we can remove the file (hide stale pid data)
- bb.utils.remove(lockfile)
- bb.utils.unlockfile(lock)
- return
-
+ i = 0
+ lock = None
+ while not lock and i < 30:
+ lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False)
if not lock:
- # Some systems may not have lsof available
- procs = None
+ newlockcontents = get_lock_contents(lockfile)
+ if newlockcontents != lockcontents:
+ # A new server was started and the lockfile contents changed; we can exit
+ serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents))
+ return
+ time.sleep(0.1)
+ i += 1
+ if lock:
+ # We hold the lock so we can remove the file (hide stale pid data)
+ # via unlockfile.
+ bb.utils.unlockfile(lock)
+ serverlog("Exiting as we could obtain the lock")
+ return
+
+ if not lock:
+ # Some systems may not have lsof available
+ procs = None
+ try:
+ procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ # File was deleted?
+ continue
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ if procs is None:
+ # Fall back to fuser if lsof is unavailable
try:
- procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
+ procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ # File was deleted?
+ continue
except OSError as e:
if e.errno != errno.ENOENT:
raise
- if procs is None:
- # Fall back to fuser if lsof is unavailable
- try:
- procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
-
- msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
- if procs:
- msg += ":\n%s" % str(procs)
- print(msg)
+
+ msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
+ if procs:
+ msg += ":\n%s" % str(procs.decode("utf-8"))
+ serverlog(msg)
def idle_commands(self, delay, fds=None):
nextsleep = delay
@@ -304,8 +364,9 @@ class ProcessServer(multiprocessing.Process):
self.next_heartbeat += self.heartbeat_seconds
if self.next_heartbeat <= now:
self.next_heartbeat = now + self.heartbeat_seconds
- heartbeat = bb.event.HeartbeatEvent(now)
- bb.event.fire(heartbeat, self.cooker.data)
+ if hasattr(self.cooker, "data"):
+ heartbeat = bb.event.HeartbeatEvent(now)
+ bb.event.fire(heartbeat, self.cooker.data)
if nextsleep and now + nextsleep > self.next_heartbeat:
# Shorten timeout so that we wake up in time for
# the heartbeat.
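The scheduling intent here can be shown as a small standalone sketch (a hypothetical helper; the real code updates nextsleep inline):

    import time

    def clamp_sleep(nextsleep, next_heartbeat):
        # Never sleep past the next scheduled heartbeat
        now = time.time()
        if nextsleep and now + nextsleep > next_heartbeat:
            nextsleep = max(0, next_heartbeat - now)
        return nextsleep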
@@ -331,8 +392,15 @@ class ServerCommunicator():
def runCommand(self, command):
self.connection.send(command)
if not self.recv.poll(30):
- raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server")
- return self.recv.get()
+ logger.info("No reply from server in 30s")
+ if not self.recv.poll(30):
+ raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
+ ret, exc = self.recv.get()
+ # Should probably turn all exception strings in exc back into real exceptions?
+ # For now, at least handle BBHandledException
+ if exc and ("BBHandledException" in exc or "SystemExit" in exc):
+ raise bb.BBHandledException()
+ return ret, exc
def updateFeatureSet(self, featureset):
_, error = self.runCommand(["setFeatures", featureset])
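The two-stage wait above can be summarised as a standalone sketch (a hypothetical helper; ProcessTimeout is the SystemExit subclass defined earlier in this file):

    class ProcessTimeout(SystemExit):
        pass

    def wait_for_reply(recv, logger):
        # Log a notice after 30s of silence; give up entirely at 60s
        if not recv.poll(30):
            logger.info("No reply from server in 30s")
            if not recv.poll(30):
                raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
        return recv.get()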
@@ -365,39 +433,26 @@ class BitBakeProcessServerConnection(object):
self.connection.recv.close()
return
+start_log_format = '--- Starting bitbake server pid %s at %s ---'
+start_log_datetime_format = '%Y-%m-%d %H:%M:%S.%f'
+
class BitBakeServer(object):
- start_log_format = '--- Starting bitbake server pid %s at %s ---'
- start_log_datetime_format = '%Y-%m-%d %H:%M:%S.%f'
- def __init__(self, lock, sockname, configuration, featureset):
+ def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface):
- self.configuration = configuration
+ self.server_timeout = server_timeout
+ self.xmlrpcinterface = xmlrpcinterface
self.featureset = featureset
self.sockname = sockname
self.bitbake_lock = lock
self.readypipe, self.readypipein = os.pipe()
- # Create server control socket
- if os.path.exists(sockname):
- os.unlink(sockname)
-
# Place the log in the builddirectory alongside the lock file
logfile = os.path.join(os.path.dirname(self.bitbake_lock.name), "bitbake-cookerdaemon.log")
+ self.logfile = logfile
- self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- # AF_UNIX has path length issues so chdir here to workaround
- cwd = os.getcwd()
- try:
- os.chdir(os.path.dirname(sockname))
- self.sock.bind(os.path.basename(sockname))
- finally:
- os.chdir(cwd)
- self.sock.listen(1)
-
- os.set_inheritable(self.sock.fileno(), True)
startdatetime = datetime.datetime.now()
bb.daemonize.createDaemon(self._startServer, logfile)
- self.sock.close()
self.bitbake_lock.close()
os.close(self.readypipein)
@@ -416,7 +471,7 @@ class BitBakeServer(object):
ready.close()
bb.error("Unable to start bitbake server (%s)" % str(r))
if os.path.exists(logfile):
- logstart_re = re.compile(self.start_log_format % ('([0-9]+)', '([0-9-]+ [0-9:.]+)'))
+ logstart_re = re.compile(start_log_format % ('([0-9]+)', '([0-9-]+ [0-9:.]+)'))
started = False
lines = []
lastlines = []
@@ -426,9 +481,9 @@ class BitBakeServer(object):
lines.append(line)
else:
lastlines.append(line)
- res = logstart_re.match(line.rstrip())
+ res = logstart_re.search(line.rstrip())
if res:
- ldatetime = datetime.datetime.strptime(res.group(2), self.start_log_datetime_format)
+ ldatetime = datetime.datetime.strptime(res.group(2), start_log_datetime_format)
if ldatetime >= startdatetime:
started = True
lines.append(line)
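The switch from match() to search() matters because serverlog() prefixes every line with a pid and timestamp, so the start marker no longer sits at column zero. A minimal self-contained check of that assumption:

    import datetime
    import os
    import re

    start_log_format = '--- Starting bitbake server pid %s at %s ---'
    start_log_datetime_format = '%Y-%m-%d %H:%M:%S.%f'

    marker = start_log_format % (os.getpid(),
                                 datetime.datetime.now().strftime(start_log_datetime_format))
    line = "12345 10:00:00.000000 " + marker  # as emitted via serverlog()
    logstart_re = re.compile(start_log_format % ('([0-9]+)', '([0-9-]+ [0-9:.]+)'))
    assert logstart_re.search(line) and not logstart_re.match(line)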
@@ -449,26 +504,53 @@ class BitBakeServer(object):
ready.close()
def _startServer(self):
- print(self.start_log_format % (os.getpid(), datetime.datetime.now().strftime(self.start_log_datetime_format)))
- sys.stdout.flush()
-
- server = ProcessServer(self.bitbake_lock, self.sock, self.sockname)
- self.configuration.setServerRegIdleCallback(server.register_idle_function)
os.close(self.readypipe)
- writer = ConnectionWriter(self.readypipein)
+ os.set_inheritable(self.bitbake_lock.fileno(), True)
+ os.set_inheritable(self.readypipein, True)
+ serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server")
+ os.execl(sys.executable, "bitbake-server", serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
+
+def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface):
+
+ import bb.cookerdata
+ import bb.cooker
+
+ serverlog(start_log_format % (os.getpid(), datetime.datetime.now().strftime(start_log_datetime_format)))
+
+ try:
+ bitbake_lock = os.fdopen(lockfd, "w")
+
+ # Create server control socket
+ if os.path.exists(sockname):
+ os.unlink(sockname)
+
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ # AF_UNIX has path length issues so chdir here to workaround
+ cwd = os.getcwd()
+ try:
+ os.chdir(os.path.dirname(sockname))
+ sock.bind(os.path.basename(sockname))
+ finally:
+ os.chdir(cwd)
+ sock.listen(1)
+
+ server = ProcessServer(bitbake_lock, lockname, sock, sockname, server_timeout, xmlrpcinterface)
+ writer = ConnectionWriter(readypipeinfd)
try:
- self.cooker = bb.cooker.BBCooker(self.configuration, self.featureset)
+ featureset = []
+ cooker = bb.cooker.BBCooker(featureset, server.register_idle_function)
except bb.BBHandledException:
return None
writer.send("r")
writer.close()
- server.cooker = self.cooker
- server.server_timeout = self.configuration.server_timeout
- server.xmlrpcinterface = self.configuration.xmlrpcinterface
- print("Started bitbake server pid %d" % os.getpid())
- sys.stdout.flush()
+ server.cooker = cooker
+ serverlog("Started bitbake server pid %d" % os.getpid())
- server.start()
+ server.run()
+ finally:
+ # Flush any messages/errors to the logfile before exit
+ sys.stdout.flush()
+ sys.stderr.flush()
def connectProcessServer(sockname, featureset):
# Connect to socket
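The shutdown path above encodes a small handoff protocol: the dying server polls for the lock, but if the lockfile contents change under it, a new server has already taken over and it can simply exit. A minimal sketch of that idea (a hypothetical helper; bb.utils.lockfile/unlockfile are the calls used above):

    import time
    import bb.utils

    def wait_for_lock_or_successor(lockfile, old_contents, read_contents, attempts=30):
        for _ in range(attempts):
            lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False)
            if lock:
                # We owned the lock; releasing it clears stale pid data
                bb.utils.unlockfile(lock)
                return "released"
            if read_contents(lockfile) != old_contents:
                # A successor rewrote the lockfile; safe to exit
                return "new-server"
            time.sleep(0.1)
        return "still-held"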
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 4c8d81c5d..86e0e16f3 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -14,6 +14,7 @@ import simplediff
from bb.checksum import FileChecksumCache
from bb import runqueue
import hashserv
+import hashserv.client
logger = logging.getLogger('BitBake.SigGen')
hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
@@ -38,6 +39,11 @@ class SignatureGenerator(object):
"""
name = "noop"
+ # If the derived class supports multiconfig datacaches, set this to True.
+ # The default is False for backward compatibility with derived signature
+ # generators that do not understand multiconfig caches.
+ supports_multiconfig_datacaches = False
+
def __init__(self, data):
self.basehash = {}
self.taskhash = {}
@@ -58,10 +64,10 @@ class SignatureGenerator(object):
def get_unihash(self, tid):
return self.taskhash[tid]
- def prep_taskhash(self, tid, deps, dataCache):
+ def prep_taskhash(self, tid, deps, dataCaches):
return
- def get_taskhash(self, tid, deps, dataCache):
+ def get_taskhash(self, tid, deps, dataCaches):
self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest()
return self.taskhash[tid]
@@ -105,6 +111,38 @@ class SignatureGenerator(object):
def set_setscene_tasks(self, setscene_tasks):
return
+ @classmethod
+ def get_data_caches(cls, dataCaches, mc):
+ """
+ This function returns the datacaches that should be passed to signature
+ generator functions. If the signature generator supports multiconfig
+ caches, the entire dictionary of data caches is sent, otherwise a
+ special proxy is sent that supports both index access to all
+ multiconfigs, and also direct access for the default multiconfig.
+
+ The proxy class allows code in this class itself to always use
+ multiconfig-aware code (to ease maintenance), but derived classes that
+ are unaware of multiconfig data caches can still access the default
+ multiconfig as expected.
+
+ Do not override this function in derived classes; it will be removed in
+ the future when support for multiconfig data caches is mandatory.
+ """
+ class DataCacheProxy(object):
+ def __init__(self):
+ pass
+
+ def __getitem__(self, key):
+ return dataCaches[key]
+
+ def __getattr__(self, name):
+ return getattr(dataCaches[mc], name)
+
+ if cls.supports_multiconfig_datacaches:
+ return dataCaches
+
+ return DataCacheProxy()
+
class SignatureGeneratorBasic(SignatureGenerator):
"""
"""
@@ -200,7 +238,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.lookupcache = {}
self.taskdeps = {}
- def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
+ def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
# Return True if we should keep the dependency, False to drop it
# We only manipulate the dependencies for packages not in the whitelist
if self.twl and not self.twl.search(recipename):
@@ -218,37 +256,40 @@ class SignatureGeneratorBasic(SignatureGenerator):
pass
return taint
- def prep_taskhash(self, tid, deps, dataCache):
+ def prep_taskhash(self, tid, deps, dataCaches):
(mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
- self.basehash[tid] = dataCache.basetaskhash[tid]
+ self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
self.runtaskdeps[tid] = []
self.file_checksum_values[tid] = []
- recipename = dataCache.pkg_fn[fn]
+ recipename = dataCaches[mc].pkg_fn[fn]
self.tidtopn[tid] = recipename
for dep in sorted(deps, key=clean_basepath):
- (depmc, _, deptaskname, depfn) = bb.runqueue.split_tid_mcfn(dep)
- if mc != depmc:
+ (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
+ depname = dataCaches[depmc].pkg_fn[depmcfn]
+ if not self.supports_multiconfig_datacaches and mc != depmc:
+ # If the signature generator doesn't understand multiconfig
+ # data caches, any dependency not in the same multiconfig must
+ # be skipped for backward compatibility
continue
- depname = dataCache.pkg_fn[depfn]
- if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
+ if not self.rundep_check(fn, recipename, task, dep, depname, dataCaches):
continue
if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
self.runtaskdeps[tid].append(dep)
- if task in dataCache.file_checksums[fn]:
+ if task in dataCaches[mc].file_checksums[fn]:
if self.checksum_cache:
- checksums = self.checksum_cache.get_checksums(dataCache.file_checksums[fn][task], recipename, self.localdirsexclude)
+ checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
else:
- checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename, self.localdirsexclude)
+ checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
for (f,cs) in checksums:
self.file_checksum_values[tid].append((f,cs))
- taskdep = dataCache.task_deps[fn]
+ taskdep = dataCaches[mc].task_deps[fn]
if 'nostamp' in taskdep and task in taskdep['nostamp']:
# Nostamp tasks need an implicit taint so that they force any dependent tasks to run
if tid in self.taints and self.taints[tid].startswith("nostamp:"):
@@ -259,14 +300,14 @@ class SignatureGeneratorBasic(SignatureGenerator):
taint = str(uuid.uuid4())
self.taints[tid] = "nostamp:" + taint
- taint = self.read_taint(fn, task, dataCache.stamp[fn])
+ taint = self.read_taint(fn, task, dataCaches[mc].stamp[fn])
if taint:
self.taints[tid] = taint
logger.warning("%s is tainted from a forced run" % tid)
return
- def get_taskhash(self, tid, deps, dataCache):
+ def get_taskhash(self, tid, deps, dataCaches):
data = self.basehash[tid]
for dep in self.runtaskdeps[tid]:
@@ -317,7 +358,8 @@ class SignatureGeneratorBasic(SignatureGenerator):
else:
sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]
- bb.utils.mkdirhier(os.path.dirname(sigfile))
+ with bb.utils.umask(0o002):
+ bb.utils.mkdirhier(os.path.dirname(sigfile))
data = {}
data['task'] = task
@@ -640,6 +682,12 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
self.server = data.getVar('BB_HASHSERVE')
self.method = "sstate_output_hash"
+#
+# Dummy class used for bitbake-selftest
+#
+class SignatureGeneratorTestMulticonfigDepends(SignatureGeneratorBasicHash):
+ name = "TestMulticonfigDepends"
+ supports_multiconfig_datacaches = True
def dump_this_task(outfile, d):
import bb.parse
@@ -699,16 +747,26 @@ def list_inline_diff(oldlist, newlist, colors=None):
ret.append(item)
return '[%s]' % (', '.join(ret))
-def clean_basepath(a):
- mc = None
- if a.startswith("mc:"):
- _, mc, a = a.split(":", 2)
- b = a.rsplit("/", 2)[1] + '/' + a.rsplit("/", 2)[2]
- if a.startswith("virtual:"):
- b = b + ":" + a.rsplit(":", 1)[0]
- if mc:
- b = b + ":mc:" + mc
- return b
+def clean_basepath(basepath):
+ basepath, dir, recipe_task = basepath.rsplit("/", 2)
+ cleaned = dir + '/' + recipe_task
+
+ if basepath[0] == '/':
+ return cleaned
+
+ if basepath.startswith("mc:"):
+ mc, mc_name, basepath = basepath.split(":", 2)
+ mc_suffix = ':mc:' + mc_name
+ else:
+ mc_suffix = ''
+
+ # The mc prefix has now been removed from basepath. Whatever came next, if present,
+ # is the first suffix; ':/' (the start of the recipe path) marks the end of it.
+ # Something like 'virtual:a[:b[:c]]:/path...' (b and c being optional)
+ if basepath[0] != '/':
+ cleaned += ':' + basepath.split(':/', 1)[0]
+
+ return cleaned + mc_suffix
def clean_basepaths(a):
b = {}
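The proxy described in get_data_caches() can be illustrated with a self-contained sketch (SimpleNamespace objects stand in for real recipe data caches; the real proxy captures dataCaches and mc from the enclosing scope instead of taking constructor arguments):

    from types import SimpleNamespace

    class DataCacheProxy:
        def __init__(self, dataCaches, mc):
            self._caches = dataCaches
            self._mc = mc

        def __getitem__(self, key):
            # Multiconfig-aware access: proxy[mc].pkg_fn[...]
            return self._caches[key]

        def __getattr__(self, name):
            # Legacy access falls through to the default multiconfig
            return getattr(self._caches[self._mc], name)

    caches = {"": SimpleNamespace(pkg_fn={"x.bb": "x"}),
              "mc1": SimpleNamespace(pkg_fn={})}
    proxy = DataCacheProxy(caches, "")
    assert proxy.pkg_fn["x.bb"] == "x"   # old, non-multiconfig-aware style
    assert proxy["mc1"].pkg_fn == {}     # new, indexed multiconfig style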
diff --git a/poky/bitbake/lib/bb/taskdata.py b/poky/bitbake/lib/bb/taskdata.py
index d13a12498..ffbaf362e 100644
--- a/poky/bitbake/lib/bb/taskdata.py
+++ b/poky/bitbake/lib/bb/taskdata.py
@@ -21,8 +21,13 @@ def re_match_strings(target, strings):
Whether or not the string 'target' matches
any one string of the strings which can be regular expression string
"""
- return any(name == target or re.match(name, target)
- for name in strings)
+ for name in strings:
+ if name.startswith("^") or name.endswith("$"):
+ if re.match(name, target):
+ return True
+ elif name == target:
+ return True
+ return False
class TaskEntry:
def __init__(self):
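The behavioural change is that bare names no longer act as implicit regex prefixes; only entries that look like regexes (leading '^' or trailing '$') are treated as such. A self-contained illustration (function body copied from the patched re_match_strings above):

    import re

    def re_match_strings(target, strings):
        for name in strings:
            if name.startswith("^") or name.endswith("$"):
                if re.match(name, target):
                    return True
            elif name == target:
                return True
        return False

    assert re_match_strings("gcc", ["gcc"])              # exact match still works
    assert not re_match_strings("gcc-runtime", ["gcc"])  # no implicit prefix match
    assert re_match_strings("gcc-runtime", ["^gcc"])     # explicit regex still matches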
diff --git a/poky/bitbake/lib/bb/tests/color.py b/poky/bitbake/lib/bb/tests/color.py
new file mode 100644
index 000000000..bf03750c6
--- /dev/null
+++ b/poky/bitbake/lib/bb/tests/color.py
@@ -0,0 +1,95 @@
+#
+# BitBake Test for ANSI color code filtering
+#
+# Copyright (C) 2020 Agilent Technologies, Inc.
+# Author: Chris Laplante <chris.laplante@agilent.com>
+#
+# SPDX-License-Identifier: MIT
+#
+
+import unittest
+import bb.progress
+import bb.data
+import bb.event
+from bb.progress import filter_color, filter_color_n
+import io
+import re
+
+
+class ProgressWatcher:
+ def __init__(self):
+ self._reports = []
+
+ def handle_event(self, event):
+ self._reports.append((event.progress, event.rate))
+
+ def reports(self):
+ return self._reports
+
+
+class ColorCodeTests(unittest.TestCase):
+ def setUp(self):
+ self.d = bb.data.init()
+ self._progress_watcher = ProgressWatcher()
+ bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event)
+
+ def tearDown(self):
+ bb.event.remove("bb.build.TaskProgress", None)
+
+ def test_filter_color(self):
+ input_string = "~~~~~~~~~~~~^~~~~~~~"
+ filtered = filter_color(input_string)
+ self.assertEqual(filtered, "~~~~~~~~~~~~^~~~~~~~")
+
+ def test_filter_color_n(self):
+ input_string = "~~~~~~~~~~~~^~~~~~~~"
+ filtered, code_count = filter_color_n(input_string)
+ self.assertEqual(filtered, "~~~~~~~~~~~~^~~~~~~~")
+ self.assertEqual(code_count, 4)
+
+ def test_LineFilterProgressHandler_color_filtering(self):
+ class CustomProgressHandler(bb.progress.LineFilterProgressHandler):
+ PROGRESS_REGEX = re.compile(r"Progress: (?P<progress>\d+)%")
+
+ def writeline(self, line):
+ match = self.PROGRESS_REGEX.match(line)
+ if match:
+ self.update(int(match.group("progress")))
+ return False
+ return True
+
+ buffer = io.StringIO()
+ handler = CustomProgressHandler(self.d, buffer)
+ handler.write("Program output!\n")
+ handler.write("More output!\n")
+ handler.write("Progress: 10%\n") # 10%
+ handler.write("Even more\n")
+ handler.write("Progress: 50%\n") # 50%
+ handler.write("Progress: 60%\n") # 60%
+ handler.write("Progress: 100%\n") # 100%
+
+ expected = [(10, None), (50, None), (60, None), (100, None)]
+ self.assertEqual(self._progress_watcher.reports(), expected)
+
+ self.assertEqual(buffer.getvalue(), "Program output!\nMore output!\nEven more\n")
+
+ def test_BasicProgressHandler_color_filtering(self):
+ buffer = io.StringIO()
+ handler = bb.progress.BasicProgressHandler(self.d, outfile=buffer)
+ handler.write("1%\n") # 1%
+ handler.write("2%\n") # 2%
+ handler.write("10%\n") # 10%
+ handler.write("100%\n") # 100%
+
+ expected = [(0, None), (1, None), (2, None), (10, None), (100, None)]
+ self.assertListEqual(self._progress_watcher.reports(), expected)
+
+ def test_OutOfProgressHandler_color_filtering(self):
+ buffer = io.StringIO()
+ handler = bb.progress.OutOfProgressHandler(self.d, r'(\d+) of (\d+)', outfile=buffer)
+ handler.write("Text text 1 of 5") # 1/5
+ handler.write("Text text 3 of 5") # 3/5
+ handler.write("Text text 5 of 5") # 5/5
+
+ expected = [(0, None), (20.0, None), (60.0, None), (100.0, None)]
+ self.assertListEqual(self._progress_watcher.reports(), expected)
diff --git a/poky/bitbake/lib/bb/tests/cooker.py b/poky/bitbake/lib/bb/tests/cooker.py
index 74c903f01..c82d4b7b8 100644
--- a/poky/bitbake/lib/bb/tests/cooker.py
+++ b/poky/bitbake/lib/bb/tests/cooker.py
@@ -60,7 +60,7 @@ class CookerTest(unittest.TestCase):
log_handler = LogHandler()
logger.addHandler(log_handler)
collection = bb.cooker.CookerCollectFiles(bbfile_config_priorities)
- collection.collection_priorities(pkgfns, self.d)
+ collection.collection_priorities(pkgfns, pkgfns, self.d)
logger.removeHandler(log_handler)
# Should be empty (no generated messages)
diff --git a/poky/bitbake/lib/bb/tests/cow.py b/poky/bitbake/lib/bb/tests/cow.py
index bf6e79fce..75142649c 100644
--- a/poky/bitbake/lib/bb/tests/cow.py
+++ b/poky/bitbake/lib/bb/tests/cow.py
@@ -4,9 +4,17 @@
# SPDX-License-Identifier: GPL-2.0-only
#
# Copyright 2006 Holger Freyther <freyther@handhelds.org>
+# Copyright (C) 2020 Agilent Technologies, Inc.
#
+import io
+import re
+import sys
import unittest
+import contextlib
+import collections
+
+from bb.COW import COWDictBase, COWSetBase, COWDictMeta, COWSetMeta
class COWTestCase(unittest.TestCase):
@@ -14,11 +22,61 @@ class COWTestCase(unittest.TestCase):
Test case for the COW module from mithro
"""
+ def setUp(self):
+ self._track_warnings = False
+ self._warning_file = io.StringIO()
+ self._unhandled_warnings = collections.deque()
+ COWDictBase.__warn__ = self._warning_file
+
+ def tearDown(self):
+ COWDictBase.__warn__ = sys.stderr
+ if self._track_warnings:
+ self._checkAllWarningsRead()
+
+ def trackWarnings(self):
+ self._track_warnings = True
+
+ def _collectWarnings(self):
+ self._warning_file.seek(0)
+ for warning in self._warning_file:
+ self._unhandled_warnings.append(warning.rstrip("\n"))
+ self._warning_file.truncate(0)
+ self._warning_file.seek(0)
+
+ def _checkAllWarningsRead(self):
+ self._collectWarnings()
+ self.assertSequenceEqual(self._unhandled_warnings, [])
+
+ @contextlib.contextmanager
+ def checkReportsWarning(self, expected_warning):
+ self._checkAllWarningsRead()
+ yield
+ self._collectWarnings()
+ warning = self._unhandled_warnings.popleft()
+ self.assertEqual(warning, expected_warning)
+
+ def checkStrOutput(self, obj, expected_levels, expected_keys):
+ if obj.__class__ is COWDictMeta:
+ expected_class_name = "COWDict"
+ elif obj.__class__ is COWSetMeta:
+ expected_class_name = "COWSet"
+ else:
+ self.fail("obj is of unknown type {0}".format(type(obj)))
+ s = str(obj)
+ regex = re.compile(r"<(\w+) Level: (\d+) Current Keys: (\d+)>")
+ match = regex.match(s)
+ self.assertIsNotNone(match, "bad str output: '{0}'".format(s))
+ class_name = match.group(1)
+ self.assertEqual(class_name, expected_class_name)
+ levels = int(match.group(2))
+ self.assertEqual(levels, expected_levels, "wrong # levels in str: '{0}'".format(s))
+ keys = int(match.group(3))
+ self.assertEqual(keys, expected_keys, "wrong # keys in str: '{0}'".format(s))
+
def testGetSet(self):
"""
Test get and set
"""
- from bb.COW import COWDictBase
a = COWDictBase.copy()
self.assertEqual(False, 'a' in a)
@@ -27,16 +85,14 @@ class COWTestCase(unittest.TestCase):
a['b'] = 'b'
self.assertEqual(True, 'a' in a)
self.assertEqual(True, 'b' in a)
- self.assertEqual('a', a['a'] )
- self.assertEqual('b', a['b'] )
+ self.assertEqual('a', a['a'])
+ self.assertEqual('b', a['b'])
def testCopyCopy(self):
"""
Test the copy of copies
"""
- from bb.COW import COWDictBase
-
# create two COW dict 'instances'
b = COWDictBase.copy()
c = COWDictBase.copy()
@@ -94,30 +150,168 @@ class COWTestCase(unittest.TestCase):
self.assertEqual(False, 'e' in b_2)
def testCow(self):
- from bb.COW import COWDictBase
+ self.trackWarnings()
+
c = COWDictBase.copy()
c['123'] = 1027
c['other'] = 4711
- c['d'] = { 'abc' : 10, 'bcd' : 20 }
+ c['d'] = {'abc': 10, 'bcd': 20}
copy = c.copy()
self.assertEqual(1027, c['123'])
self.assertEqual(4711, c['other'])
- self.assertEqual({'abc':10, 'bcd':20}, c['d'])
+ self.assertEqual({'abc': 10, 'bcd': 20}, c['d'])
self.assertEqual(1027, copy['123'])
self.assertEqual(4711, copy['other'])
- self.assertEqual({'abc':10, 'bcd':20}, copy['d'])
+ with self.checkReportsWarning("Warning: Doing a copy because d is a mutable type."):
+ self.assertEqual({'abc': 10, 'bcd': 20}, copy['d'])
# cow it now
copy['123'] = 1028
copy['other'] = 4712
copy['d']['abc'] = 20
-
self.assertEqual(1027, c['123'])
self.assertEqual(4711, c['other'])
- self.assertEqual({'abc':10, 'bcd':20}, c['d'])
+ self.assertEqual({'abc': 10, 'bcd': 20}, c['d'])
self.assertEqual(1028, copy['123'])
self.assertEqual(4712, copy['other'])
- self.assertEqual({'abc':20, 'bcd':20}, copy['d'])
+ self.assertEqual({'abc': 20, 'bcd': 20}, copy['d'])
+
+ def testOriginalTestSuite(self):
+ # This test suite is a port of the original one from COW.py
+ self.trackWarnings()
+
+ a = COWDictBase.copy()
+ self.checkStrOutput(a, 1, 0)
+
+ a['a'] = 'a'
+ a['b'] = 'b'
+ a['dict'] = {}
+ self.checkStrOutput(a, 1, 4) # 4th member is dict__mutable__
+
+ b = a.copy()
+ self.checkStrOutput(b, 2, 0)
+ b['c'] = 'b'
+ self.checkStrOutput(b, 2, 1)
+
+ with self.checkReportsWarning("Warning: If you aren't going to change any of the values call with True."):
+ self.assertListEqual(list(a.iteritems()),
+ [('a', 'a'),
+ ('b', 'b'),
+ ('dict', {})
+ ])
+
+ with self.checkReportsWarning("Warning: If you aren't going to change any of the values call with True."):
+ b_gen = b.iteritems()
+ self.assertTupleEqual(next(b_gen), ('a', 'a'))
+ self.assertTupleEqual(next(b_gen), ('b', 'b'))
+ self.assertTupleEqual(next(b_gen), ('c', 'b'))
+ with self.checkReportsWarning("Warning: Doing a copy because dict is a mutable type."):
+ self.assertTupleEqual(next(b_gen), ('dict', {}))
+ with self.assertRaises(StopIteration):
+ next(b_gen)
+
+ b['dict']['a'] = 'b'
+ b['a'] = 'c'
+
+ self.checkStrOutput(a, 1, 4)
+ self.checkStrOutput(b, 2, 3)
+
+ with self.checkReportsWarning("Warning: If you aren't going to change any of the values call with True."):
+ self.assertListEqual(list(a.iteritems()),
+ [('a', 'a'),
+ ('b', 'b'),
+ ('dict', {})
+ ])
+
+ with self.checkReportsWarning("Warning: If you aren't going to change any of the values call with True."):
+ b_gen = b.iteritems()
+ self.assertTupleEqual(next(b_gen), ('a', 'c'))
+ self.assertTupleEqual(next(b_gen), ('b', 'b'))
+ self.assertTupleEqual(next(b_gen), ('c', 'b'))
+ self.assertTupleEqual(next(b_gen), ('dict', {'a': 'b'}))
+ with self.assertRaises(StopIteration):
+ next(b_gen)
+
+ with self.assertRaises(KeyError):
+ print(b["dict2"])
+
+ a['set'] = COWSetBase()
+ a['set'].add("o1")
+ a['set'].add("o1")
+ a['set'].add("o2")
+ self.assertSetEqual(set(a['set'].itervalues()), {"o1", "o2"})
+ self.assertSetEqual(set(b['set'].itervalues()), {"o1", "o2"})
+
+ b['set'].add('o3')
+ self.assertSetEqual(set(a['set'].itervalues()), {"o1", "o2"})
+ self.assertSetEqual(set(b['set'].itervalues()), {"o1", "o2", "o3"})
+
+ a['set2'] = set()
+ a['set2'].add("o1")
+ a['set2'].add("o1")
+ a['set2'].add("o2")
+
+ # We don't expect 'a' to change anymore
+ def check_a():
+ with self.checkReportsWarning("Warning: If you aren't going to change any of the values call with True."):
+ a_gen = a.iteritems()
+ self.assertTupleEqual(next(a_gen), ('a', 'a'))
+ self.assertTupleEqual(next(a_gen), ('b', 'b'))
+ self.assertTupleEqual(next(a_gen), ('dict', {}))
+ self.assertTupleEqual(next(a_gen), ('set2', {'o1', 'o2'}))
+ a_sub_set = next(a_gen)
+ self.assertEqual(a_sub_set[0], 'set')
+ self.checkStrOutput(a_sub_set[1], 1, 2)
+ self.assertSetEqual(set(a_sub_set[1].itervalues()), {'o1', 'o2'})
+
+ check_a()
+
+ b_gen = b.iteritems(readonly=True)
+ self.assertTupleEqual(next(b_gen), ('a', 'c'))
+ self.assertTupleEqual(next(b_gen), ('b', 'b'))
+ self.assertTupleEqual(next(b_gen), ('c', 'b'))
+ self.assertTupleEqual(next(b_gen), ('dict', {'a': 'b'}))
+ self.assertTupleEqual(next(b_gen), ('set2', {'o1', 'o2'}))
+ b_sub_set = next(b_gen)
+ self.assertEqual(b_sub_set[0], 'set')
+ self.checkStrOutput(b_sub_set[1], 2, 1)
+ self.assertSetEqual(set(b_sub_set[1].itervalues()), {'o1', 'o2', 'o3'})
+
+ del b['b']
+ with self.assertRaises(KeyError):
+ print(b['b'])
+ self.assertFalse('b' in b)
+
+ check_a()
+
+ b.__revertitem__('b')
+ check_a()
+ self.assertEqual(b['b'], 'b')
+ self.assertTrue('b' in b)
+
+ b.__revertitem__('dict')
+ check_a()
+
+ b_gen = b.iteritems(readonly=True)
+ self.assertTupleEqual(next(b_gen), ('a', 'c'))
+ self.assertTupleEqual(next(b_gen), ('b', 'b'))
+ self.assertTupleEqual(next(b_gen), ('c', 'b'))
+ self.assertTupleEqual(next(b_gen), ('dict', {}))
+ self.assertTupleEqual(next(b_gen), ('set2', {'o1', 'o2'}))
+ b_sub_set = next(b_gen)
+ self.assertEqual(b_sub_set[0], 'set')
+ self.checkStrOutput(b_sub_set[1], 2, 1)
+ self.assertSetEqual(set(b_sub_set[1].itervalues()), {'o1', 'o2', 'o3'})
+
+ self.checkStrOutput(a, 1, 6)
+ self.checkStrOutput(b, 2, 3)
+
+ def testSetMethods(self):
+ s = COWSetBase()
+ with self.assertRaises(TypeError):
+ print(s.iteritems())
+ with self.assertRaises(TypeError):
+ print(s.iterkeys())
diff --git a/poky/bitbake/lib/bb/tests/data.py b/poky/bitbake/lib/bb/tests/data.py
index 5f195047d..1d4a64b10 100644
--- a/poky/bitbake/lib/bb/tests/data.py
+++ b/poky/bitbake/lib/bb/tests/data.py
@@ -12,6 +12,7 @@ import bb
import bb.data
import bb.parse
import logging
+import os
class LogRecord():
def __enter__(self):
diff --git a/poky/bitbake/lib/bb/tests/event.py b/poky/bitbake/lib/bb/tests/event.py
index 9229b63d4..9ca7e9bc8 100644
--- a/poky/bitbake/lib/bb/tests/event.py
+++ b/poky/bitbake/lib/bb/tests/event.py
@@ -6,17 +6,18 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-import unittest
-import bb
-import logging
-import bb.compat
-import bb.event
+import collections
import importlib
+import logging
+import pickle
import threading
import time
-import pickle
+import unittest
from unittest.mock import Mock
from unittest.mock import call
+
+import bb
+import bb.event
from bb.msg import BBLogFormatter
@@ -75,7 +76,7 @@ class EventHandlingTest(unittest.TestCase):
def _create_test_handlers(self):
""" Method used to create a test handler ordered dictionary """
- test_handlers = bb.compat.OrderedDict()
+ test_handlers = collections.OrderedDict()
test_handlers["handler1"] = self._test_process.handler1
test_handlers["handler2"] = self._test_process.handler2
return test_handlers
@@ -96,7 +97,7 @@ class EventHandlingTest(unittest.TestCase):
def test_clean_class_handlers(self):
""" Test clean_class_handlers method """
- cleanDict = bb.compat.OrderedDict()
+ cleanDict = collections.OrderedDict()
self.assertEqual(cleanDict,
bb.event.clean_class_handlers())
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index d0c161a7f..5a4db9ca4 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -223,6 +223,21 @@ class URITest(unittest.TestCase):
'query': {},
'relative': False
},
+ "git://tfs-example.org:22/tfs/example%20path/example.git": {
+ 'uri': 'git://tfs-example.org:22/tfs/example%20path/example.git',
+ 'scheme': 'git',
+ 'hostname': 'tfs-example.org',
+ 'port': 22,
+ 'hostport': 'tfs-example.org:22',
+ 'path': '/tfs/example path/example.git',
+ 'userinfo': '',
+ 'username': '',
+ 'password': '',
+ 'params': {},
+ 'query': {},
+ 'relative': False
+ },
"http://somesite.net;someparam=1": {
'uri': 'http://somesite.net;someparam=1',
'scheme': 'http',
@@ -584,6 +599,7 @@ class FetcherLocalTest(FetcherTest):
touch(os.path.join(self.localsrcdir, 'dir', 'd'))
os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir'))
touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e'))
+ touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device'))
self.d.setVar("FILESPATH", self.localsrcdir)
def fetchUnpack(self, uris):
@@ -601,9 +617,13 @@ class FetcherLocalTest(FetcherTest):
tree = self.fetchUnpack(['file://a', 'file://dir/c'])
self.assertEqual(tree, ['a', 'dir/c'])
+ def test_local_backslash(self):
+ tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device'])
+ self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device'])
+
def test_local_wildcard(self):
- tree = self.fetchUnpack(['file://a', 'file://dir/*'])
- self.assertEqual(tree, ['a', 'dir/c', 'dir/d', 'dir/subdir/e'])
+ with self.assertRaises(bb.fetch2.ParameterError):
+ tree = self.fetchUnpack(['file://a', 'file://dir/*'])
def test_local_dir(self):
tree = self.fetchUnpack(['file://a', 'file://dir'])
@@ -1031,7 +1051,7 @@ class SVNTest(FetcherTest):
bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
# Github will emulate SVN. Use this to check if we're downloading...
- bb.process.run("svn propset svn:externals 'bitbake http://github.com/openembedded/bitbake' .",
+ bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
bb.process.run("svn commit --non-interactive -m 'Add external'",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1152,10 +1172,12 @@ class FetchLatestVersionTest(FetcherTest):
("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
: "1.99.4",
# version pattern "vX.Y"
- ("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
+ # mirror of git.infradead.org since network issues interfered with testing
+ ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
: "1.5.0",
# version pattern "pkg_name-X.Y"
- ("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
+ # mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing
+ ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
: "1.0",
# version pattern "pkg_name-vX.Y.Z"
("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
@@ -1169,7 +1191,8 @@ class FetchLatestVersionTest(FetcherTest):
("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
- ("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
+ # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
+ ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
: "0.4.3",
("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
: "11.0.0",
@@ -1261,9 +1284,7 @@ class FetchLatestVersionTest(FetcherTest):
class FetchCheckStatusTest(FetcherTest):
- test_wget_uris = ["http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2",
- "http://www.cups.org/",
- "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
+ test_wget_uris = ["http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
"https://yoctoproject.org/",
@@ -2079,6 +2100,38 @@ class GitLfsTest(FetcherTest):
shutil.rmtree(self.gitdir, ignore_errors=True)
fetcher.unpack(self.d.getVar('WORKDIR'))
+class GitURLWithSpacesTest(FetcherTest):
+ test_git_urls = {
+ "git://tfs-example.org:22/tfs/example%20path/example.git" : {
+ 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git',
+ 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git',
+ 'path': '/tfs/example path/example.git'
+ },
+ "git://tfs-example.org:22/tfs/example%20path/example%20repo.git" : {
+ 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git',
+ 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git',
+ 'path': '/tfs/example path/example repo.git'
+ }
+ }
+
+ def test_urls(self):
+
+ # Set fake SRCREV to stop git fetcher from trying to contact non-existent git repo
+ self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
+
+ for test_git_url, ref in self.test_git_urls.items():
+
+ fetcher = bb.fetch.Fetch([test_git_url], self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+
+ self.assertEqual(ud.url, ref['url'])
+ self.assertEqual(ud.path, ref['path'])
+ self.assertEqual(ud.localfile, os.path.join(self.dldir, "git2", ref['gitsrcname']))
+ self.assertEqual(ud.localpath, os.path.join(self.dldir, "git2", ref['gitsrcname']))
+ self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock'))
+ self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname']))
+ self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz'))
+
class NPMTest(FetcherTest):
def skipIfNoNpm():
import shutil
diff --git a/poky/bitbake/lib/bb/tests/parse.py b/poky/bitbake/lib/bb/tests/parse.py
index 9afd1b208..9e21e1842 100644
--- a/poky/bitbake/lib/bb/tests/parse.py
+++ b/poky/bitbake/lib/bb/tests/parse.py
@@ -178,7 +178,10 @@ python () {
addtask do_patch after do_foo after do_unpack before do_configure before do_compile
addtask do_fetch do_patch
-deltask do_fetch do_patch
+MYVAR = "do_patch"
+EMPTYVAR = ""
+deltask do_fetch ${MYVAR} ${EMPTYVAR}
+deltask ${EMPTYVAR}
"""
def test_parse_addtask_deltask(self):
import sys
@@ -189,6 +192,5 @@ deltask do_fetch do_patch
self.assertTrue("addtask contained multiple 'before' keywords" in stdout)
self.assertTrue("addtask contained multiple 'after' keywords" in stdout)
self.assertTrue('addtask ignored: " do_patch"' in stdout)
- self.assertTrue('deltask ignored: " do_patch"' in stdout)
#self.assertTrue('dependent task do_foo for do_patch does not exist' in stdout)
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
index 5e451fc2c..efebf001a 100644
--- a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
@@ -1,7 +1,8 @@
CACHE = "${TOPDIR}/cache"
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
COREBASE := "${@os.path.normpath(os.path.dirname(d.getVar('FILE')+'/../../'))}"
-BBFILES = "${COREBASE}/recipes/*.bb"
+EXTRA_BBFILES ?= ""
+BBFILES = "${COREBASE}/recipes/*.bb ${EXTRA_BBFILES}"
PROVIDES = "${PN}"
PN = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[0]}"
PF = "${BB_CURRENT_MC}:${PN}"
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc1.conf b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc1.conf
index ecf23e1c7..f34b8dccc 100644
--- a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc1.conf
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc1.conf
@@ -1 +1,2 @@
TMPDIR = "${TOPDIR}/mc1/"
+BBMASK += "recipes/fails-mc/fails-mc1.bb"
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc2.conf b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc2.conf
index eef338e4c..c3360fc5c 100644
--- a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc2.conf
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/multiconfig/mc2.conf
@@ -1 +1,2 @@
TMPDIR = "${TOPDIR}/mc2/"
+BBMASK += "recipes/fails-mc/fails-mc2.bb"
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/f1.bb b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/f1.bb
new file mode 100644
index 000000000..d45a4cff5
--- /dev/null
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/f1.bb
@@ -0,0 +1 @@
+do_install[mcdepends] = "mc:mc1:mc2:a1:do_build"
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc1.bb b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc1.bb
new file mode 100644
index 000000000..17a181fff
--- /dev/null
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc1.bb
@@ -0,0 +1,5 @@
+python () {
+ if d.getVar("BB_CURRENT_MC") == "mc1":
+ bb.fatal("Multiconfig is mc1")
+}
+
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc2.bb b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc2.bb
new file mode 100644
index 000000000..cc69e7b82
--- /dev/null
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/recipes/fails-mc/fails-mc2.bb
@@ -0,0 +1,4 @@
+python () {
+ if d.getVar("BB_CURRENT_MC") == "mc2":
+ bb.fatal("Multiconfig is mc2")
+}
diff --git a/poky/bitbake/lib/bb/tests/runqueue.py b/poky/bitbake/lib/bb/tests/runqueue.py
index 4ba12a077..d3d62b98f 100644
--- a/poky/bitbake/lib/bb/tests/runqueue.py
+++ b/poky/bitbake/lib/bb/tests/runqueue.py
@@ -232,6 +232,51 @@ class RunQueueTests(unittest.TestCase):
expected.remove(x)
self.assertEqual(set(tasks), set(expected))
+ def test_multiconfig_bbmask(self):
+ # This test validates that multiconfigs can independently mask off
+ # recipes they do not want with BBMASK. It works by having recipes
+ # that will fail to parse for mc1 and mc2, then making each multiconfig
+ # build the one that does parse. This ensures that the recipes are in
+ # each multiconfig's BBFILES, but each is masking only the one that
+ # doesn't parse.
+ with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
+ extraenv = {
+ "BBMULTICONFIG" : "mc1 mc2",
+ "BB_SIGNATURE_HANDLER" : "basic",
+ "EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb",
+ }
+ cmd = ["bitbake", "mc:mc1:fails-mc2", "mc:mc2:fails-mc1"]
+ self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv)
+
+ def test_multiconfig_mcdepends(self):
+ with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
+ extraenv = {
+ "BBMULTICONFIG" : "mc1 mc2",
+ "BB_SIGNATURE_HANDLER" : "TestMulticonfigDepends",
+ "EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb",
+ }
+ tasks = self.run_bitbakecmd(["bitbake", "mc:mc1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
+ expected = ["mc1:f1:%s" % t for t in self.alltasks] + \
+ ["mc2:a1:%s" % t for t in self.alltasks]
+ self.assertEqual(set(tasks), set(expected))
+
+ # A rebuild does nothing
+ tasks = self.run_bitbakecmd(["bitbake", "mc:mc1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
+ self.assertEqual(set(tasks), set())
+
+ # Test that a signature change in the dependent task causes
+ # mcdepends to rebuild
+ tasks = self.run_bitbakecmd(["bitbake", "mc:mc2:a1", "-c", "compile", "-f"], tempdir, "", extraenv=extraenv, cleanup=True)
+ expected = ["mc2:a1:compile"]
+ self.assertEqual(set(tasks), set(expected))
+
+ rerun_tasks = self.alltasks[:]
+ for x in ("fetch", "unpack", "patch", "prepare_recipe_sysroot", "configure", "compile"):
+ rerun_tasks.remove(x)
+ tasks = self.run_bitbakecmd(["bitbake", "mc:mc1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
+ expected = ["mc1:f1:%s" % t for t in rerun_tasks] + \
+ ["mc2:a1:%s" % t for t in rerun_tasks]
+ self.assertEqual(set(tasks), set(expected))
@unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
def test_hashserv_single(self):
diff --git a/poky/bitbake/lib/bb/tests/siggen.py b/poky/bitbake/lib/bb/tests/siggen.py
new file mode 100644
index 000000000..c21ab4e4f
--- /dev/null
+++ b/poky/bitbake/lib/bb/tests/siggen.py
@@ -0,0 +1,91 @@
+#
+# BitBake Test for lib/bb/siggen.py
+#
+# Copyright (C) 2020 Jean-François Dagenais
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import unittest
+import logging
+import bb
+import time
+
+logger = logging.getLogger('BitBake.TestSiggen')
+
+import bb.siggen
+
+class SiggenTest(unittest.TestCase):
+
+ def test_clean_basepath_simple_target_basepath(self):
+ basepath = '/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
+ expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask'
+
+ actual_cleaned = bb.siggen.clean_basepath(basepath)
+
+ self.assertEqual(actual_cleaned, expected_cleaned)
+
+ def test_clean_basepath_basic_virtual_basepath(self):
+ basepath = 'virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
+ expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something'
+
+ actual_cleaned = bb.siggen.clean_basepath(basepath)
+
+ self.assertEqual(actual_cleaned, expected_cleaned)
+
+ def test_clean_basepath_mc_basepath(self):
+ basepath = 'mc:somemachine:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
+ expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:mc:somemachine'
+
+ actual_cleaned = bb.siggen.clean_basepath(basepath)
+
+ self.assertEqual(actual_cleaned, expected_cleaned)
+
+ def test_clean_basepath_virtual_long_prefix_basepath(self):
+ basepath = 'virtual:something:A:B:C:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
+ expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:A:B:C'
+
+ actual_cleaned = bb.siggen.clean_basepath(basepath)
+
+ self.assertEqual(actual_cleaned, expected_cleaned)
+
+ def test_clean_basepath_mc_virtual_basepath(self):
+ basepath = 'mc:somemachine:virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
+ expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:mc:somemachine'
+
+ actual_cleaned = bb.siggen.clean_basepath(basepath)
+
+ self.assertEqual(actual_cleaned, expected_cleaned)
+
+ def test_clean_basepath_mc_virtual_long_prefix_basepath(self):
+ basepath = 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
+ expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:C:B:A:mc:X'
+
+ actual_cleaned = bb.siggen.clean_basepath(basepath)
+
+ self.assertEqual(actual_cleaned, expected_cleaned)
+
+
+ # def test_clean_basepath_performance(self):
+ # input_basepaths = [
+ # 'mc:X:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
+ # 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
+ # 'virtual:something:C:B:A:/different/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
+ # 'virtual:something:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
+ # '/this/is/most/common/input/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
+ # '/and/should/be/tested/with/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
+ # '/more/weight/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
+ # ]
+
+ # time_start = time.time()
+
+ # i = 2000000
+ # while i >= 0:
+ # for basepath in input_basepaths:
+ # bb.siggen.clean_basepath(basepath)
+ # i -= 1
+
+ # elapsed = time.time() - time_start
+ # print('{} ({}s)'.format(self.id(), round(elapsed, 3)))
+
+ # self.assertTrue(False)
diff --git a/poky/bitbake/lib/bb/tinfoil.py b/poky/bitbake/lib/bb/tinfoil.py
index 8c9b6b8ca..2fb1bb7d2 100644
--- a/poky/bitbake/lib/bb/tinfoil.py
+++ b/poky/bitbake/lib/bb/tinfoil.py
@@ -22,7 +22,6 @@ import bb.taskdata
import bb.utils
import bb.command
import bb.remotedata
-from bb.cookerdata import CookerConfiguration
from bb.main import setup_bitbake, BitBakeConfigParameters
import bb.fetch2
@@ -117,15 +116,16 @@ class TinfoilCookerAdapter:
class TinfoilCookerCollectionAdapter:
""" cooker.collection adapter """
- def __init__(self, tinfoil):
+ def __init__(self, tinfoil, mc=''):
self.tinfoil = tinfoil
+ self.mc = mc
def get_file_appends(self, fn):
- return self.tinfoil.get_file_appends(fn)
+ return self.tinfoil.get_file_appends(fn, self.mc)
def __getattr__(self, name):
if name == 'overlayed':
- return self.tinfoil.get_overlayed_recipes()
+ return self.tinfoil.get_overlayed_recipes(self.mc)
elif name == 'bbappends':
- return self.tinfoil.run_command('getAllAppends')
+ return self.tinfoil.run_command('getAllAppends', self.mc)
else:
raise AttributeError("%s instance has no attribute '%s'" % (self.__class__.__name__, name))
@@ -185,10 +185,11 @@ class TinfoilCookerAdapter:
def __init__(self, tinfoil):
self.tinfoil = tinfoil
- self.collection = self.TinfoilCookerCollectionAdapter(tinfoil)
+ self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split()
+ self.collections = {}
self.recipecaches = {}
- self.recipecaches[''] = self.TinfoilRecipeCacheAdapter(tinfoil)
- for mc in (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split():
+ for mc in self.multiconfigs:
+ self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc)
self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc)
self._cache = {}
def __getattr__(self, name):
@@ -379,18 +380,13 @@ class Tinfoil:
if not config_params:
config_params = TinfoilConfigParameters(config_only=config_only, quiet=quiet)
- cookerconfig = CookerConfiguration()
- cookerconfig.setConfigParameters(config_params)
-
if not config_only:
# Disable local loggers because the UI module is going to set up its own
for handler in self.localhandlers:
self.logger.handlers.remove(handler)
self.localhandlers = []
- self.server_connection, ui_module = setup_bitbake(config_params,
- cookerconfig,
- extrafeatures)
+ self.server_connection, ui_module = setup_bitbake(config_params, extrafeatures)
self.ui_module = ui_module
@@ -492,11 +488,11 @@ class Tinfoil:
raise Exception('Not connected to server (did you call .prepare()?)')
return self.server_connection.events.waitEvent(timeout)
- def get_overlayed_recipes(self):
+ def get_overlayed_recipes(self, mc=''):
"""
Find recipes which are overlayed (i.e. where recipes exist in multiple layers)
"""
- return defaultdict(list, self.run_command('getOverlayedRecipes'))
+ return defaultdict(list, self.run_command('getOverlayedRecipes', mc))
def get_skipped_recipes(self):
"""
@@ -534,11 +530,11 @@ class Tinfoil:
raise bb.providers.NoProvider('Unable to find any recipe file matching "%s"' % pn)
return best[3]
- def get_file_appends(self, fn):
+ def get_file_appends(self, fn, mc=''):
"""
Find the bbappends for a recipe file
"""
- return self.run_command('getFileAppends', fn)
+ return self.run_command('getFileAppends', fn, mc)
def all_recipes(self, mc='', sort=True):
"""
@@ -736,7 +732,7 @@ class Tinfoil:
continue
if helper.eventHandler(event):
if isinstance(event, bb.build.TaskFailedSilent):
- logger.warning("Logfile for failed setscene task is %s" % event.logfile)
+ self.logger.warning("Logfile for failed setscene task is %s" % event.logfile)
elif isinstance(event, bb.build.TaskFailed):
bb.ui.knotty.print_event_log(event, includelogs, loglines, termfilter)
continue
@@ -810,18 +806,22 @@ class Tinfoil:
prepare() has been called, or use a with... block when you create
the tinfoil object which will ensure that it gets called.
"""
- if self.server_connection:
- self.run_command('clientComplete')
- _server_connections.remove(self.server_connection)
- bb.event.ui_queue = []
- self.server_connection.terminate()
- self.server_connection = None
-
- # Restore logging handlers to how it looked when we started
- if self.oldhandlers:
- for handler in self.logger.handlers:
- if handler not in self.oldhandlers:
- self.logger.handlers.remove(handler)
+ try:
+ if self.server_connection:
+ try:
+ self.run_command('clientComplete')
+ finally:
+ _server_connections.remove(self.server_connection)
+ bb.event.ui_queue = []
+ self.server_connection.terminate()
+ self.server_connection = None
+
+ finally:
+ # Restore logging handlers to how it looked when we started
+ if self.oldhandlers:
+ for handler in self.logger.handlers:
+ if handler not in self.oldhandlers:
+ self.logger.handlers.remove(handler)
def _reconvert_type(self, obj, origtypename):
"""
diff --git a/poky/bitbake/lib/bb/ui/knotty.py b/poky/bitbake/lib/bb/ui/knotty.py
index 87e873d64..a91e4fd15 100644
--- a/poky/bitbake/lib/bb/ui/knotty.py
+++ b/poky/bitbake/lib/bb/ui/knotty.py
@@ -144,7 +144,7 @@ class TerminalFilter(object):
pass
if not cr:
try:
- cr = (env['LINES'], env['COLUMNS'])
+ cr = (os.environ['LINES'], os.environ['COLUMNS'])
except:
cr = (25, 80)
return cr
@@ -380,14 +380,27 @@ _evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.Lo
"bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent",
"bb.build.TaskProgress", "bb.event.ProcessStarted", "bb.event.ProcessProgress", "bb.event.ProcessFinished"]
+def drain_events_errorhandling(eventHandler):
+ # We don't have logging set up, but we still need to show any events we see before exiting
+ event = True
+ logger = bb.msg.logger_create('bitbake', sys.stdout)
+ while event:
+ event = eventHandler.waitEvent(0)
+ if isinstance(event, logging.LogRecord):
+ logger.handle(event)
+
def main(server, eventHandler, params, tf = TerminalFilter):
- if not params.observe_only:
- params.updateToServer(server, os.environ.copy())
+ try:
+ if not params.observe_only:
+ params.updateToServer(server, os.environ.copy())
- includelogs, loglines, consolelogfile, logconfigfile = _log_settings_from_server(server, params.observe_only)
+ includelogs, loglines, consolelogfile, logconfigfile = _log_settings_from_server(server, params.observe_only)
- loglevel, _ = bb.msg.constructLogOptions()
+ loglevel, _ = bb.msg.constructLogOptions()
+ except bb.BBHandledException:
+ drain_events_errorhandling(eventHandler)
+ return 1
if params.options.quiet == 0:
console_loglevel = loglevel
diff --git a/poky/bitbake/lib/bb/ui/ncurses.py b/poky/bitbake/lib/bb/ui/ncurses.py
index da4fbeabb..cf1c876a5 100644
--- a/poky/bitbake/lib/bb/ui/ncurses.py
+++ b/poky/bitbake/lib/bb/ui/ncurses.py
@@ -48,6 +48,8 @@ import bb
import xmlrpc.client
from bb.ui import uihelper
+logger = logging.getLogger(__name__)
+
parsespin = itertools.cycle( r'|/-\\' )
X = 0
diff --git a/poky/bitbake/lib/bb/ui/taskexp.py b/poky/bitbake/lib/bb/ui/taskexp.py
index 8fff24423..2b246710c 100644
--- a/poky/bitbake/lib/bb/ui/taskexp.py
+++ b/poky/bitbake/lib/bb/ui/taskexp.py
@@ -8,9 +8,16 @@
#
import sys
-import gi
-gi.require_version('Gtk', '3.0')
-from gi.repository import Gtk, Gdk, GObject
+
+try:
+    import gi
+    gi.require_version('Gtk', '3.0')
+    from gi.repository import Gtk, Gdk, GObject
+except ValueError:
+    sys.exit("FATAL: Gtk version needs to be 3.0")
+except ImportError:
+    sys.exit("FATAL: Gtk ui could not load the required gi python module")
+
import threading
from xmlrpc import client
import bb
@@ -51,7 +58,12 @@ class PackageReverseDepView(Gtk.TreeView):
self.current = None
self.filter_model = model.filter_new()
self.filter_model.set_visible_func(self._filter)
- self.sort_model = self.filter_model.sort_new_with_model()
+        # The introspected API was fixed, but we can't rely on a pygobject new enough to hide this.
+        # https://gitlab.gnome.org/GNOME/pygobject/-/commit/9cdbc56fbac4db2de78dc080934b8f0a7efc892a
+        if hasattr(Gtk.TreeModelSort, "new_with_model"):
+            self.sort_model = Gtk.TreeModelSort.new_with_model(self.filter_model)
+        else:
+            self.sort_model = self.filter_model.sort_new_with_model()
self.sort_model.set_sort_column_id(COL_DEP_PARENT, Gtk.SortType.ASCENDING)
self.set_model(self.sort_model)
self.append_column(Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PARENT))
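The hasattr() check is plain feature detection: pygobject builds that include the linked fix expose the corrected constructor Gtk.TreeModelSort.new_with_model(), while older ones only offer sort_new_with_model() on the child model. The same fallback as a standalone sketch, assuming Gtk 3 introspection data is installed:

    import gi
    gi.require_version('Gtk', '3.0')
    from gi.repository import Gtk

    def make_sort_model(child_model):
        # Prefer the corrected introspected constructor when pygobject has it,
        # otherwise fall back to the older method on the child model
        if hasattr(Gtk.TreeModelSort, "new_with_model"):
            return Gtk.TreeModelSort.new_with_model(child_model)
        return child_model.sort_new_with_model()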
diff --git a/poky/bitbake/lib/bb/ui/teamcity.py b/poky/bitbake/lib/bb/ui/teamcity.py
index 1854292fa..fca46c287 100644
--- a/poky/bitbake/lib/bb/ui/teamcity.py
+++ b/poky/bitbake/lib/bb/ui/teamcity.py
@@ -167,8 +167,6 @@ def main(server, eventHandler, params):
forcelevel = bb.msg.BBLogFormatter.ERROR
else:
forcelevel = bb.msg.BBLogFormatter.WARNING
- bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut, forcelevel)
- bb.msg.addDefaultlogFilter(errconsole, bb.msg.BBLogFilterStdErr)
console.setFormatter(format)
errconsole.setFormatter(format)
if not bb.msg.has_console_handler(logger):
diff --git a/poky/bitbake/lib/bb/ui/uievent.py b/poky/bitbake/lib/bb/ui/uievent.py
index 13d0d4a04..8607d0523 100644
--- a/poky/bitbake/lib/bb/ui/uievent.py
+++ b/poky/bitbake/lib/bb/ui/uievent.py
@@ -11,9 +11,13 @@ server and queue them for the UI to process. This process must be used to avoid
client/server deadlocks.
"""
-import socket, threading, pickle, collections
+import collections, logging, pickle, socket, threading
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
+import bb
+
+logger = logging.getLogger(__name__)
+
class BBUIEventQueue:
def __init__(self, BBServer, clientinfo=("localhost, 0")):
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index 5f5767c1d..f73d31fb7 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -402,8 +402,8 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception
(t, value, tb) = sys.exc_info()
try:
_print_exception(t, value, tb, realfile, text, context)
- except Exception as e:
- logger.error("Exception handler error: %s" % str(e))
+ except Exception as e2:
+ logger.error("Exception handler error: %s" % str(e2))
e = bb.BBHandledException(e)
raise e
@@ -433,20 +433,6 @@ def fileslocked(files):
for lock in locks:
bb.utils.unlockfile(lock)
-@contextmanager
-def timeout(seconds):
- def timeout_handler(signum, frame):
- pass
-
- original_handler = signal.signal(signal.SIGALRM, timeout_handler)
-
- try:
- signal.alarm(seconds)
- yield
- finally:
- signal.alarm(0)
- signal.signal(signal.SIGALRM, original_handler)
-
def lockfile(name, shared=False, retry=True, block=False):
"""
Use the specified file as a lock file, return when the lock has
@@ -580,7 +566,6 @@ def preserved_envvars_exported():
'PATH',
'PWD',
'SHELL',
- 'TERM',
'USER',
'LC_ALL',
'BBSERVER',
@@ -959,6 +944,17 @@ def which(path, item, direction = 0, history = False, executable=False):
return "", hist
return ""
+@contextmanager
+def umask(new_mask):
+    """
+    Context manager to set the umask to a specific mask, and restore it afterwards.
+    """
+    current_mask = os.umask(new_mask)
+    try:
+        yield
+    finally:
+        os.umask(current_mask)
+
def to_boolean(string, default=None):
if not string:
return default
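The new context manager makes saving and restoring the process umask exception-safe. A usage sketch (the file path is hypothetical):

    import bb.utils

    # Create a file that must not be group/other accessible regardless of the
    # caller's umask; the previous mask is restored even if the write raises
    with bb.utils.umask(0o077):
        with open('/tmp/example-secret.conf', 'w') as f:
            f.write('token = example\n')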
@@ -1086,21 +1082,20 @@ def process_profilelog(fn, pout = None):
# Either call with a list of filenames and set pout or a filename and optionally pout.
if not pout:
pout = fn + '.processed'
- pout = open(pout, 'w')
-
- import pstats
- if isinstance(fn, list):
- p = pstats.Stats(*fn, stream=pout)
- else:
- p = pstats.Stats(fn, stream=pout)
- p.sort_stats('time')
- p.print_stats()
- p.print_callers()
- p.sort_stats('cumulative')
- p.print_stats()
- pout.flush()
- pout.close()
+    with open(pout, 'w') as pout:
+        import pstats
+        if isinstance(fn, list):
+            p = pstats.Stats(*fn, stream=pout)
+        else:
+            p = pstats.Stats(fn, stream=pout)
+        p.sort_stats('time')
+        p.print_stats()
+        p.print_callers()
+        p.sort_stats('cumulative')
+        p.print_stats()
+
+        pout.flush()
#
# Was present to work around multiprocessing pool bugs in python < 2.7.3
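Moving the pstats output into a with block ensures the .processed file is closed even if pstats raises. The core of what gets written for a single profile, sketched with hypothetical filenames:

    import pstats

    with open('profile.log.processed', 'w') as out:
        p = pstats.Stats('profile.log', stream=out)
        p.sort_stats('time')
        p.print_stats()
        p.print_callers()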
@@ -1473,14 +1468,20 @@ def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
return (notadded, notremoved)
-
-def get_file_layer(filename, d):
- """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
+def get_collection_res(d):
collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
collection_res = {}
for collection in collections:
collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or ''
+ return collection_res
+
+
+def get_file_layer(filename, d, collection_res={}):
+ """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
+ if not collection_res:
+ collection_res = get_collection_res(d)
+
def path_to_layer(path):
# Use longest path so we handle nested layers
matchlen = 0
@@ -1492,12 +1493,13 @@ def get_file_layer(filename, d):
return match
result = None
- bbfiles = (d.getVar('BBFILES') or '').split()
+ bbfiles = (d.getVar('BBFILES_PRIORITIZED') or '').split()
bbfilesmatch = False
for bbfilesentry in bbfiles:
- if fnmatch.fnmatch(filename, bbfilesentry):
+ if fnmatch.fnmatchcase(filename, bbfilesentry):
bbfilesmatch = True
result = path_to_layer(bbfilesentry)
+ break
if not bbfilesmatch:
# Probably a bbclass
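Splitting get_collection_res() out of get_file_layer() lets callers hoist the BBFILE_PATTERN regex construction out of a loop. A sketch of the intended caching pattern, assuming d and recipe_files come from an existing tinfoil/cooker session:

    import bb.utils

    # Build the per-collection patterns once...
    collection_res = bb.utils.get_collection_res(d)

    # ...then reuse them for every lookup instead of rebuilding each call
    for fn in recipe_files:
        layer = bb.utils.get_file_layer(fn, d, collection_res)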
diff --git a/poky/bitbake/lib/bblayers/action.py b/poky/bitbake/lib/bblayers/action.py
index d6459d661..5b78195ad 100644
--- a/poky/bitbake/lib/bblayers/action.py
+++ b/poky/bitbake/lib/bblayers/action.py
@@ -143,11 +143,12 @@ build results (as the layer priority order has effectively changed).
applied_appends = []
for layer in layers:
- overlayed = []
- for f in self.tinfoil.cooker.collection.overlayed.keys():
- for of in self.tinfoil.cooker.collection.overlayed[f]:
- if of.startswith(layer):
- overlayed.append(of)
+            overlayed = set()
+            for mc in self.tinfoil.cooker.multiconfigs:
+                for f in self.tinfoil.cooker.collections[mc].overlayed.keys():
+                    for of in self.tinfoil.cooker.collections[mc].overlayed[f]:
+                        if of.startswith(layer):
+                            overlayed.add(of)
logger.plain('Copying files from %s...' % layer )
for root, dirs, files in os.walk(layer):
@@ -174,14 +175,21 @@ build results (as the layer priority order has effectively changed).
logger.warning('Overwriting file %s', fdest)
bb.utils.copyfile(f1full, fdest)
if ext == '.bb':
- for append in self.tinfoil.cooker.collection.get_file_appends(f1full):
+ appends = set()
+ for mc in self.tinfoil.cooker.multiconfigs:
+ appends |= set(self.tinfoil.cooker.collections[mc].get_file_appends(f1full))
+ for append in appends:
if layer_path_match(append):
logger.plain(' Applying append %s to %s' % (append, fdest))
self.apply_append(append, fdest)
applied_appends.append(append)
# Take care of when some layers are excluded and yet we have included bbappends for those recipes
- for b in self.tinfoil.cooker.collection.bbappends:
+ bbappends = set()
+ for mc in self.tinfoil.cooker.multiconfigs:
+ bbappends |= set(self.tinfoil.cooker.collections[mc].bbappends)
+
+ for b in bbappends:
(recipename, appendname) = b
if appendname not in applied_appends:
first_append = None
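Because the cooker now tracks one recipe collection per multiconfig, the single cooker.collection attribute is gone; unions are built with sets so an append seen under several multiconfigs is applied only once. The aggregation step in isolation (cooker is assumed to be a live tinfoil cooker):

    bbappends = set()
    for mc in cooker.multiconfigs:   # e.g. ['', 'mc1', 'mc2']
        bbappends |= set(cooker.collections[mc].bbappends)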
diff --git a/poky/bitbake/lib/bblayers/query.py b/poky/bitbake/lib/bblayers/query.py
index e2cc31053..f5e3c8474 100644
--- a/poky/bitbake/lib/bblayers/query.py
+++ b/poky/bitbake/lib/bblayers/query.py
@@ -21,6 +21,10 @@ def plugin_init(plugins):
class QueryPlugin(LayerPlugin):
+ def __init__(self):
+ super(QueryPlugin, self).__init__()
+ self.collection_res = {}
+
def do_show_layers(self, args):
"""show current configured layers."""
logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority"))
@@ -222,7 +226,6 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
multilayer = True
if prov[0] != pref[0]:
same_ver = False
-
if (multilayer or not show_overlayed_only) and (same_ver or not show_same_ver_only):
if not items_listed:
logger.plain('=== %s ===' % title)
@@ -243,8 +246,13 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
else:
return '?'
+    def get_collection_res(self):
+        if not self.collection_res:
+            self.collection_res = bb.utils.get_collection_res(self.tinfoil.config_data)
+        return self.collection_res
+
def get_file_layerdir(self, filename):
- layer = bb.utils.get_file_layer(filename, self.tinfoil.config_data)
+ layer = bb.utils.get_file_layer(filename, self.tinfoil.config_data, self.get_collection_res())
return self.bbfile_collections.get(layer, None)
def remove_layer_prefix(self, f):
@@ -320,12 +328,12 @@ Lists recipes with the bbappends that apply to them as subitems.
def get_appends_for_files(self, filenames):
appended, notappended = [], []
for filename in filenames:
- _, cls, _ = bb.cache.virtualfn2realfn(filename)
+ _, cls, mc = bb.cache.virtualfn2realfn(filename)
if cls:
continue
basename = os.path.basename(filename)
- appends = self.tinfoil.cooker.collection.get_file_appends(basename)
+ appends = self.tinfoil.cooker.collections[mc].get_file_appends(basename)
if appends:
appended.append((basename, list(appends)))
else:
diff --git a/poky/bitbake/lib/hashserv/__init__.py b/poky/bitbake/lib/hashserv/__init__.py
index c3318620f..f95e8f43f 100644
--- a/poky/bitbake/lib/hashserv/__init__.py
+++ b/poky/bitbake/lib/hashserv/__init__.py
@@ -6,12 +6,20 @@
from contextlib import closing
import re
import sqlite3
+import itertools
+import json
UNIX_PREFIX = "unix://"
ADDR_TYPE_UNIX = 0
ADDR_TYPE_TCP = 1
+# The Python async server defaults to a 64K receive buffer, so we hardcode our
+# maximum chunk size. It would be better if the client and server reported
+# their maximum chunk sizes to each other, but that would add a round-trip
+# delay to connection setup, so I'd rather not do that unless it is necessary.
+DEFAULT_MAX_CHUNK = 32 * 1024
def setup_database(database, sync=True):
db = sqlite3.connect(database)
@@ -66,6 +74,20 @@ def parse_address(addr):
return (ADDR_TYPE_TCP, (host, int(port)))
+def chunkify(msg, max_chunk):
+    if len(msg) < max_chunk - 1:
+        yield ''.join((msg, "\n"))
+    else:
+        yield ''.join((json.dumps({
+            'chunk-stream': None
+        }), "\n"))
+
+        args = [iter(msg)] * (max_chunk - 1)
+        for m in map(''.join, itertools.zip_longest(*args, fillvalue='')):
+            yield ''.join(itertools.chain(m, "\n"))
+        yield "\n"
+
+
def create_server(addr, dbname, *, sync=True):
from . import server
db = setup_database(dbname, sync=sync)
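The framing chunkify() produces is line oriented: a small message is a single JSON line, while a large one becomes a {"chunk-stream": null} marker line, the JSON split into newline-terminated slices of at most max_chunk - 1 characters, and a blank line as terminator. A round-trip sketch; the dechunk() helper is hypothetical and only mirrors what the client and server readers do:

    import json
    from hashserv import chunkify, DEFAULT_MAX_CHUNK

    def dechunk(lines):
        # A marker line means "join chunk lines until a blank line";
        # anything else is already a complete message
        it = iter(lines)
        first = next(it).rstrip('\n')
        if json.loads(first) != {'chunk-stream': None}:
            return json.loads(first)
        parts = []
        for l in it:
            l = l.rstrip('\n')
            if not l:
                break
            parts.append(l)
        return json.loads(''.join(parts))

    msg = json.dumps({'report': {'outhash_siginfo': '0' * 100000}})
    assert dechunk(list(chunkify(msg, DEFAULT_MAX_CHUNK))) == json.loads(msg)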
diff --git a/poky/bitbake/lib/hashserv/client.py b/poky/bitbake/lib/hashserv/client.py
index 46085d641..a29af836d 100644
--- a/poky/bitbake/lib/hashserv/client.py
+++ b/poky/bitbake/lib/hashserv/client.py
@@ -7,6 +7,7 @@ import json
import logging
import socket
import os
+from . import chunkify, DEFAULT_MAX_CHUNK
logger = logging.getLogger('hashserv.client')
@@ -25,6 +26,7 @@ class Client(object):
self.reader = None
self.writer = None
self.mode = self.MODE_NORMAL
+ self.max_chunk = DEFAULT_MAX_CHUNK
def connect_tcp(self, address, port):
def connect_sock():
@@ -58,7 +60,7 @@ class Client(object):
self.reader = self._socket.makefile('r', encoding='utf-8')
self.writer = self._socket.makefile('w', encoding='utf-8')
- self.writer.write('OEHASHEQUIV 1.0\n\n')
+ self.writer.write('OEHASHEQUIV 1.1\n\n')
self.writer.flush()
# Restore mode if the socket is being re-created
@@ -91,18 +93,35 @@ class Client(object):
count += 1
def send_message(self, msg):
+        def get_line():
+            line = self.reader.readline()
+            if not line:
+                raise HashConnectionError('Connection closed')
+
+            if not line.endswith('\n'):
+                raise HashConnectionError('Bad message %r' % line)
+
+            return line
+
def proc():
- self.writer.write('%s\n' % json.dumps(msg))
+ for c in chunkify(json.dumps(msg), self.max_chunk):
+ self.writer.write(c)
self.writer.flush()
- l = self.reader.readline()
- if not l:
- raise HashConnectionError('Connection closed')
+ l = get_line()
- if not l.endswith('\n'):
- raise HashConnectionError('Bad message %r' % message)
+ m = json.loads(l)
+ if 'chunk-stream' in m:
+ lines = []
+ while True:
+ l = get_line().rstrip('\n')
+ if not l:
+ break
+ lines.append(l)
- return json.loads(l)
+ m = json.loads(''.join(lines))
+
+ return m
return self._send_wrapper(proc)
@@ -155,6 +174,14 @@ class Client(object):
m['unihash'] = unihash
return self.send_message({'report-equiv': m})
+ def get_taskhash(self, method, taskhash, all_properties=False):
+ self._set_mode(self.MODE_NORMAL)
+ return self.send_message({'get': {
+ 'taskhash': taskhash,
+ 'method': method,
+ 'all': all_properties
+ }})
+
def get_stats(self):
self._set_mode(self.MODE_NORMAL)
return self.send_message({'get-stats': None})
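get_taskhash() uses the protocol 1.1 form of the 'get' request: with all_properties=True the server answers from ALL_QUERY and returns every column of the matching row, including outhash_siginfo. A usage sketch; the address, method name and hash are hypothetical:

    from hashserv.client import Client

    client = Client('localhost:8686')
    row = client.get_taskhash('TestMethod',
                              'c665584ee6817aa99edfc77a44dd853828279370',
                              all_properties=True)
    print(row['unihash'], len(row['outhash_siginfo']))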
diff --git a/poky/bitbake/lib/hashserv/server.py b/poky/bitbake/lib/hashserv/server.py
index cc7e48233..81050715e 100644
--- a/poky/bitbake/lib/hashserv/server.py
+++ b/poky/bitbake/lib/hashserv/server.py
@@ -13,6 +13,7 @@ import os
import signal
import socket
import time
+from . import chunkify, DEFAULT_MAX_CHUNK
logger = logging.getLogger('hashserv.server')
@@ -107,12 +108,29 @@ class Stats(object):
return {k: getattr(self, k) for k in ('num', 'total_time', 'max_time', 'average', 'stdev')}
+class ClientError(Exception):
+ pass
+
class ServerClient(object):
+ FAST_QUERY = 'SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
+ ALL_QUERY = 'SELECT * FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
+
def __init__(self, reader, writer, db, request_stats):
self.reader = reader
self.writer = writer
self.db = db
self.request_stats = request_stats
+ self.max_chunk = DEFAULT_MAX_CHUNK
+
+ self.handlers = {
+ 'get': self.handle_get,
+ 'report': self.handle_report,
+ 'report-equiv': self.handle_equivreport,
+ 'get-stream': self.handle_get_stream,
+ 'get-stats': self.handle_get_stats,
+ 'reset-stats': self.handle_reset_stats,
+ 'chunk-stream': self.handle_chunk,
+ }
async def process_requests(self):
try:
@@ -125,7 +143,11 @@ class ServerClient(object):
return
(proto_name, proto_version) = protocol.decode('utf-8').rstrip().split()
- if proto_name != 'OEHASHEQUIV' or proto_version != '1.0':
+ if proto_name != 'OEHASHEQUIV':
+ return
+
+ proto_version = tuple(int(v) for v in proto_version.split('.'))
+ if proto_version < (1, 0) or proto_version > (1, 1):
return
# Read headers. Currently, no headers are implemented, so look for
@@ -140,40 +162,34 @@ class ServerClient(object):
break
# Handle messages
- handlers = {
- 'get': self.handle_get,
- 'report': self.handle_report,
- 'report-equiv': self.handle_equivreport,
- 'get-stream': self.handle_get_stream,
- 'get-stats': self.handle_get_stats,
- 'reset-stats': self.handle_reset_stats,
- }
-
while True:
d = await self.read_message()
if d is None:
break
-
- for k in handlers.keys():
- if k in d:
- logger.debug('Handling %s' % k)
- if 'stream' in k:
- await handlers[k](d[k])
- else:
- with self.request_stats.start_sample() as self.request_sample, \
- self.request_sample.measure():
- await handlers[k](d[k])
- break
- else:
- logger.warning("Unrecognized command %r" % d)
- break
-
+ await self.dispatch_message(d)
await self.writer.drain()
+ except ClientError as e:
+ logger.error(str(e))
finally:
self.writer.close()
+    async def dispatch_message(self, msg):
+        for k in self.handlers.keys():
+            if k in msg:
+                logger.debug('Handling %s' % k)
+                if 'stream' in k:
+                    await self.handlers[k](msg[k])
+                else:
+                    with self.request_stats.start_sample() as self.request_sample, \
+                            self.request_sample.measure():
+                        await self.handlers[k](msg[k])
+                return
+
+        raise ClientError("Unrecognized command %r" % msg)
+
def write_message(self, msg):
- self.writer.write(('%s\n' % json.dumps(msg)).encode('utf-8'))
+ for c in chunkify(json.dumps(msg), self.max_chunk):
+ self.writer.write(c.encode('utf-8'))
async def read_message(self):
l = await self.reader.readline()
@@ -191,14 +207,38 @@ class ServerClient(object):
logger.error('Bad message from client: %r' % message)
raise e
+    async def handle_chunk(self, request):
+        lines = []
+        try:
+            while True:
+                l = await self.reader.readline()
+                l = l.rstrip(b"\n").decode("utf-8")
+                if not l:
+                    break
+                lines.append(l)
+
+            msg = json.loads(''.join(lines))
+        except (json.JSONDecodeError, UnicodeDecodeError) as e:
+            logger.error('Bad message from client: %r' % ''.join(lines))
+            raise e
+
+        if 'chunk-stream' in msg:
+            raise ClientError("Nested chunks are not allowed")
+
+        await self.dispatch_message(msg)
+
async def handle_get(self, request):
method = request['method']
taskhash = request['taskhash']
- row = self.query_equivalent(method, taskhash)
+ if request.get('all', False):
+ row = self.query_equivalent(method, taskhash, self.ALL_QUERY)
+ else:
+ row = self.query_equivalent(method, taskhash, self.FAST_QUERY)
+
if row is not None:
logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
- d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
+ d = {k: row[k] for k in row.keys()}
self.write_message(d)
else:
@@ -228,7 +268,7 @@ class ServerClient(object):
(method, taskhash) = l.split()
#logger.debug('Looking up %s %s' % (method, taskhash))
- row = self.query_equivalent(method, taskhash)
+ row = self.query_equivalent(method, taskhash, self.FAST_QUERY)
if row is not None:
msg = ('%s\n' % row['unihash']).encode('utf-8')
#logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
@@ -328,7 +368,7 @@ class ServerClient(object):
# Fetch the unihash that will be reported for the taskhash. If the
# unihash matches, it means this row was inserted (or the mapping
# was already valid)
- row = self.query_equivalent(data['method'], data['taskhash'])
+ row = self.query_equivalent(data['method'], data['taskhash'], self.FAST_QUERY)
if row['unihash'] == data['unihash']:
logger.info('Adding taskhash equivalence for %s with unihash %s',
@@ -354,12 +394,11 @@ class ServerClient(object):
self.request_stats.reset()
self.write_message(d)
- def query_equivalent(self, method, taskhash):
+ def query_equivalent(self, method, taskhash, query):
# This is part of the inner loop and must be as fast as possible
try:
cursor = self.db.cursor()
- cursor.execute('SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1',
- {'method': method, 'taskhash': taskhash})
+ cursor.execute(query, {'method': method, 'taskhash': taskhash})
return cursor.fetchone()
except:
cursor.close()
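Parsing the advertised version into a tuple of ints is what lets the server accept a range instead of the single literal '1.0': tuple comparison orders versions lexicographically. The check in isolation:

    def version_ok(proto_version, low=(1, 0), high=(1, 1)):
        v = tuple(int(x) for x in proto_version.split('.'))
        return low <= v <= high

    assert version_ok('1.0') and version_ok('1.1')
    assert not version_ok('0.9') and not version_ok('2.0')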
diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py
index a5472a996..4566f2473 100644
--- a/poky/bitbake/lib/hashserv/tests.py
+++ b/poky/bitbake/lib/hashserv/tests.py
@@ -9,10 +9,12 @@ from . import create_server, create_client
import hashlib
import logging
import multiprocessing
+import os
import sys
import tempfile
import threading
import unittest
+import socket
class TestHashEquivalenceServer(object):
@@ -99,6 +101,29 @@ class TestHashEquivalenceServer(object):
result = self.client.get_unihash(self.METHOD, taskhash)
self.assertEqual(result, unihash)
+    def test_huge_message(self):
+        # Test that a message bigger than the chunk size round-trips intact
+        taskhash = 'c665584ee6817aa99edfc77a44dd853828279370'
+        outhash = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44'
+        unihash = '90e9bc1d1f094c51824adca7f8ea79a048d68824'
+
+        result = self.client.get_unihash(self.METHOD, taskhash)
+        self.assertIsNone(result, msg='Found unexpected task, %r' % result)
+
+        siginfo = "0" * (self.client.max_chunk * 4)
+
+        result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash, {
+            'outhash_siginfo': siginfo
+        })
+        self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+        result = self.client.get_taskhash(self.METHOD, taskhash, True)
+        self.assertEqual(result['taskhash'], taskhash)
+        self.assertEqual(result['unihash'], unihash)
+        self.assertEqual(result['method'], self.METHOD)
+        self.assertEqual(result['outhash'], outhash)
+        self.assertEqual(result['outhash_siginfo'], siginfo)
+
def test_stress(self):
def query_server(failures):
client = Client(self.server.address)
@@ -139,4 +164,8 @@ class TestHashEquivalenceUnixServer(TestHashEquivalenceServer, unittest.TestCase
class TestHashEquivalenceTCPServer(TestHashEquivalenceServer, unittest.TestCase):
def get_server_addr(self):
- return "localhost:0"
+        # Some hosts make the asyncio module misbehave when IPv6 is not enabled.
+        # If IPv6 is enabled it should be safe to use localhost directly; in the
+        # general case it is more reliable to resolve the IP address explicitly.
+        return socket.gethostbyname("localhost") + ":0"
+
diff --git a/poky/bitbake/lib/layerindexlib/__init__.py b/poky/bitbake/lib/layerindexlib/__init__.py
index 77196b408..45157b668 100644
--- a/poky/bitbake/lib/layerindexlib/__init__.py
+++ b/poky/bitbake/lib/layerindexlib/__init__.py
@@ -7,6 +7,7 @@ import datetime
import logging
import imp
+import os
from collections import OrderedDict
from layerindexlib.plugin import LayerIndexPluginUrlError
@@ -70,7 +71,7 @@ class LayerIndex():
if self.__class__ != newIndex.__class__ or \
other.__class__ != newIndex.__class__:
- raise TypeException("Can not add different types.")
+ raise TypeError("Can not add different types.")
for indexEnt in self.indexes:
newIndex.indexes.append(indexEnt)
@@ -266,8 +267,8 @@ will write out the individual elements split by layer and related components.
logger.debug(1, "Store not implemented in %s" % plugin.type)
pass
else:
- logger.debug(1, "No plugins support %s" % url)
- raise LayerIndexException("No plugins support %s" % url)
+ logger.debug(1, "No plugins support %s" % indexURI)
+ raise LayerIndexException("No plugins support %s" % indexURI)
def is_empty(self):
@@ -657,7 +658,7 @@ class LayerIndexObj():
if obj.id in self._index[indexname]:
if self._index[indexname][obj.id] == obj:
continue
- raise LayerIndexError('Conflict adding object %s(%s) to index' % (indexname, obj.id))
+ raise LayerIndexException('Conflict adding object %s(%s) to index' % (indexname, obj.id))
self._index[indexname][obj.id] = obj
def add_raw_element(self, indexname, objtype, rawobjs):
@@ -842,11 +843,11 @@ class LayerIndexObj():
def _resolve_dependencies(layerbranches, ignores, dependencies, invalid):
for layerbranch in layerbranches:
- if ignores and layerBranch.layer.name in ignores:
+ if ignores and layerbranch.layer.name in ignores:
continue
- for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerBranch.id]:
- deplayerbranch = layerDependency.dependency_layerBranch
+ for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]:
+ deplayerbranch = layerdependency.dependency_layerBranch
if ignores and deplayerbranch.layer.name in ignores:
continue
diff --git a/poky/bitbake/lib/layerindexlib/cooker.py b/poky/bitbake/lib/layerindexlib/cooker.py
index 65b23d087..21ec438a2 100644
--- a/poky/bitbake/lib/layerindexlib/cooker.py
+++ b/poky/bitbake/lib/layerindexlib/cooker.py
@@ -4,6 +4,7 @@
#
import logging
+import os
from collections import defaultdict
@@ -73,7 +74,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin):
d = self.layerindex.data
if not branches:
- raise LayerIndexFetchError("No branches specified for _load_bblayers!")
+ raise layerindexlib.LayerIndexFetchError("No branches specified for _load_bblayers!")
index = layerindexlib.LayerIndexObj()
@@ -202,7 +203,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin):
try:
depDict = bb.utils.explode_dep_versions2(deps)
except bb.utils.VersionStringException as vse:
- bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
+ bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (collection, str(vse)))
for dep, oplist in list(depDict.items()):
# We need to search ourselves, so use the _ version...
@@ -268,7 +269,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin):
layer = bb.utils.get_file_layer(realfn[0], self.config_data)
- depBranchId = collection_layerbranch[layer]
+ depBranchId = collection[layer]
recipeId += 1
recipe = layerindexlib.Recipe(index, None)
diff --git a/poky/bitbake/lib/layerindexlib/restapi.py b/poky/bitbake/lib/layerindexlib/restapi.py
index 21fd14414..7023f42f2 100644
--- a/poky/bitbake/lib/layerindexlib/restapi.py
+++ b/poky/bitbake/lib/layerindexlib/restapi.py
@@ -5,9 +5,13 @@
import logging
import json
+import os
+
from urllib.parse import unquote
from urllib.parse import urlparse
+import bb
+
import layerindexlib
import layerindexlib.plugin
@@ -163,7 +167,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
parsed = _get_json_response(apiurl=up_stripped.geturl(), username=username, password=password, retry=False)
logger.debug(1, "%s: retry successful.")
else:
- raise LayerIndexFetchError('%s: Connection reset by peer. Is there a firewall blocking your connection?' % apiurl)
+ raise layerindexlib.LayerIndexFetchError('%s: Connection reset by peer. Is there a firewall blocking your connection?' % apiurl)
return parsed
diff --git a/poky/bitbake/lib/layerindexlib/tests/restapi.py b/poky/bitbake/lib/layerindexlib/tests/restapi.py
index e5ccafe5c..4646d01f9 100644
--- a/poky/bitbake/lib/layerindexlib/tests/restapi.py
+++ b/poky/bitbake/lib/layerindexlib/tests/restapi.py
@@ -112,7 +112,7 @@ class LayerIndexWebRestApiTest(LayersTest):
break
else:
self.logger.debug(1, "meta-python was not found")
- self.assetTrue(False)
+ raise self.failureException
# Only check the first element...
break
diff --git a/poky/bitbake/lib/ply/lex.py b/poky/bitbake/lib/ply/lex.py
index 267ec100f..182f2e837 100644
--- a/poky/bitbake/lib/ply/lex.py
+++ b/poky/bitbake/lib/ply/lex.py
@@ -705,11 +705,7 @@ class LexerReflect(object):
# Sort the functions by line number
for f in self.funcsym.values():
- if sys.version_info[0] < 3:
- f.sort(lambda x,y: cmp(func_code(x[1]).co_firstlineno,func_code(y[1]).co_firstlineno))
- else:
- # Python 3.0
- f.sort(key=lambda x: func_code(x[1]).co_firstlineno)
+ f.sort(key=lambda x: func_code(x[1]).co_firstlineno)
# Sort the strings by regular expression length
for s in self.strsym.values():
diff --git a/poky/bitbake/lib/ply/yacc.py b/poky/bitbake/lib/ply/yacc.py
index 561784f2f..46e7dc96f 100644
--- a/poky/bitbake/lib/ply/yacc.py
+++ b/poky/bitbake/lib/ply/yacc.py
@@ -1205,7 +1205,7 @@ class Production(object):
# Precompute the list of productions immediately following. Hack. Remove later
try:
- p.lr_after = Prodnames[p.prod[n+1]]
+ p.lr_after = self.Prodnames[p.prod[n+1]]
except (IndexError,KeyError):
p.lr_after = []
try:
diff --git a/poky/bitbake/lib/toaster/tests/functional/functional_helpers.py b/poky/bitbake/lib/toaster/tests/functional/functional_helpers.py
index 455c408e9..5c4ea7179 100644
--- a/poky/bitbake/lib/toaster/tests/functional/functional_helpers.py
+++ b/poky/bitbake/lib/toaster/tests/functional/functional_helpers.py
@@ -75,7 +75,7 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
try:
table_element = self.get_table_element(table_id)
element = table_element.find_element_by_link_text(link_text)
- except NoSuchElementException as e:
+ except self.NoSuchElementException:
print('no element found')
raise
return element
@@ -86,7 +86,7 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
element_xpath = "//*[@id='" + table_id + "']"
try:
element = self.driver.find_element_by_xpath(element_xpath)
- except NoSuchElementException as e:
+ except self.NoSuchElementException:
raise
return element
row = coordinate[0]
@@ -96,7 +96,7 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]"
try:
element = self.driver.find_element_by_xpath(element_xpath)
- except NoSuchElementException as e:
+ except self.NoSuchElementException:
return False
return element
#now we are looking for an element with specified X and Y
@@ -105,6 +105,6 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]/td[" + str(column) + "]"
try:
element = self.driver.find_element_by_xpath(element_xpath)
- except NoSuchElementException as e:
+ except self.NoSuchElementException:
return False
return element