Diffstat (limited to 'poky/bitbake/lib/bb')
-rw-r--r--  poky/bitbake/lib/bb/build.py                     | 82
-rw-r--r--  poky/bitbake/lib/bb/cache.py                     |  3
-rw-r--r--  poky/bitbake/lib/bb/command.py                   | 10
-rw-r--r--  poky/bitbake/lib/bb/cooker.py                    | 12
-rw-r--r--  poky/bitbake/lib/bb/cookerdata.py                |  7
-rw-r--r--  poky/bitbake/lib/bb/data.py                      |  2
-rw-r--r--  poky/bitbake/lib/bb/data_smart.py                | 18
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py           | 10
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py                | 21
-rw-r--r--  poky/bitbake/lib/bb/fetch2/svn.py                | 10
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py               |  2
-rwxr-xr-x  poky/bitbake/lib/bb/main.py                      |  7
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/BBHandler.py  |  3
-rw-r--r--  poky/bitbake/lib/bb/persist_data.py              | 55
-rw-r--r--  poky/bitbake/lib/bb/process.py                   |  3
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py                  | 52
-rw-r--r--  poky/bitbake/lib/bb/server/process.py            |  2
-rw-r--r--  poky/bitbake/lib/bb/siggen.py                    | 14
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py               | 61
-rw-r--r--  poky/bitbake/lib/bb/ui/knotty.py                 |  2
-rw-r--r--  poky/bitbake/lib/bb/ui/uihelper.py               |  4
21 files changed, 230 insertions, 150 deletions
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index 6ce8f1e6d..7e4ab9f64 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -295,9 +295,13 @@ def exec_func_python(func, d, runfile, cwd=None):
lineno = int(d.getVarFlag(func, "lineno", False))
bb.methodpool.insert_method(func, text, fn, lineno - 1)
- comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
- utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
+ comp = utils.better_compile(code, func, "exec_func_python() autogenerated")
+ utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated")
finally:
+ # We want any stdout/stderr to be printed before any other log messages to make debugging
+ # more accurate. In some cases we seem to lose stdout/stderr entirely in logging tests without this.
+ sys.stdout.flush()
+ sys.stderr.flush()
bb.debug(2, "Python function %s finished" % func)
if cwd and olddir:
@@ -682,47 +686,51 @@ def _exec_task(fn, task, d, quieterr):
try:
try:
event.fire(TaskStarted(task, fn, logfn, flags, localdata), localdata)
- except (bb.BBHandledException, SystemExit):
- return 1
- try:
for func in (prefuncs or '').split():
exec_func(func, localdata)
exec_func(task, localdata)
for func in (postfuncs or '').split():
exec_func(func, localdata)
- except bb.BBHandledException:
- event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata)
- return 1
- except Exception as exc:
- if quieterr:
- event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
- else:
- errprinted = errchk.triggered
- logger.error(str(exc))
- event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
- return 1
- finally:
- sys.stdout.flush()
- sys.stderr.flush()
-
- bblogger.removeHandler(handler)
-
- # Restore the backup fds
- os.dup2(osi[0], osi[1])
- os.dup2(oso[0], oso[1])
- os.dup2(ose[0], ose[1])
-
- # Close the backup fds
- os.close(osi[0])
- os.close(oso[0])
- os.close(ose[0])
+ finally:
+ # Need to flush and close the logs before sending events where the
+ # UI may try to look at the logs.
+ sys.stdout.flush()
+ sys.stderr.flush()
+
+ bblogger.removeHandler(handler)
+
+ # Restore the backup fds
+ os.dup2(osi[0], osi[1])
+ os.dup2(oso[0], oso[1])
+ os.dup2(ose[0], ose[1])
+
+ # Close the backup fds
+ os.close(osi[0])
+ os.close(oso[0])
+ os.close(ose[0])
+
+ logfile.close()
+ if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
+ logger.debug2("Zero size logfn %s, removing", logfn)
+ bb.utils.remove(logfn)
+ bb.utils.remove(loglink)
+ except bb.BBHandledException:
+ event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata)
+ return 1
+ except (Exception, SystemExit) as exc:
+ if quieterr:
+ event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
+ else:
+ errprinted = errchk.triggered
+ # If the output is already on stdout, we've printed the information in the
+ # logs once already so don't duplicate
+ if verboseStdoutLogging:
+ errprinted = True
+ logger.error(repr(exc))
+ event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
+ return 1
- logfile.close()
- if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
- logger.debug2("Zero size logfn %s, removing", logfn)
- bb.utils.remove(logfn)
- bb.utils.remove(loglink)
event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata)
if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
@@ -1030,6 +1038,8 @@ def tasksbetween(task_start, task_end, d):
def follow_chain(task, endtask, chain=None):
if not chain:
chain = []
+ if task in chain:
+ bb.fatal("Circular task dependencies as %s depends on itself via the chain %s" % (task, " -> ".join(chain)))
chain.append(task)
for othertask in tasks:
if othertask == task:
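The new guard in follow_chain() turns an infinite recursion on self-referential task dependencies into a clear fatal error that prints the offending chain. A minimal standalone sketch of the same guard, using a hypothetical dependency map rather than BitBake's task metadata:

    # Hypothetical dependency map containing a cycle: do_a -> do_b -> do_c -> do_a
    deps = {"do_a": ["do_b"], "do_b": ["do_c"], "do_c": ["do_a"]}

    def follow(task, chain=()):
        # A task reappearing in the chain walked so far means a circular dependency.
        if task in chain:
            raise RuntimeError("Circular task dependencies as %s depends on itself via the chain %s"
                               % (task, " -> ".join(chain)))
        for dep in deps.get(task, []):
            follow(dep, chain + (task,))

    follow("do_a")  # raises with chain: do_a -> do_b -> do_c
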
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 73bc6e966..4e08c100a 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -19,7 +19,8 @@
import os
import logging
import pickle
-from collections import defaultdict, Mapping
+from collections import defaultdict
+from collections.abc import Mapping
import bb.utils
from bb import PrefixLoggerAdapter
import re
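The import moves here (and the matching ones in data_smart.py and persist_data.py below) are Python 3.10 preparation: the abstract base classes were only ever aliases in the collections module, those aliases have been removed, so they must come from collections.abc. A minimal sketch of the modern form, unrelated to BitBake's own classes:

    # collections.Mapping stopped working in Python 3.10; collections.abc has
    # provided the ABCs since Python 3.3.
    from collections.abc import Mapping

    class ReadOnlyView(Mapping):
        def __init__(self, data):
            self._data = dict(data)
        def __getitem__(self, key):
            return self._data[key]
        def __iter__(self):
            return iter(self._data)
        def __len__(self):
            return len(self._data)
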
diff --git a/poky/bitbake/lib/bb/command.py b/poky/bitbake/lib/bb/command.py
index a81dcb136..ec8688522 100644
--- a/poky/bitbake/lib/bb/command.py
+++ b/poky/bitbake/lib/bb/command.py
@@ -667,6 +667,16 @@ class CommandsAsync:
command.finishAsyncCommand()
findFilesMatchingInDir.needcache = False
+ def testCookerCommandEvent(self, command, params):
+ """
+ Dummy command used by OEQA selftest to test tinfoil without IO
+ """
+ pattern = params[0]
+
+ command.cooker.testCookerCommandEvent(pattern)
+ command.finishAsyncCommand()
+ testCookerCommandEvent.needcache = False
+
def findConfigFilePath(self, command, params):
"""
Find the path of the requested configuration file
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index db991702e..af794b4c4 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -389,7 +389,12 @@ class BBCooker:
if not self.hashserv:
dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
- self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
+ self.hashserv = hashserv.create_server(
+ self.hashservaddr,
+ dbfile,
+ sync=False,
+ upstream=self.data.getVar("BB_HASHSERVE_UPSTREAM") or None,
+ )
self.hashserv.serve_as_process()
self.data.setVar("BB_HASHSERVE", self.hashservaddr)
self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
@@ -1063,6 +1068,11 @@ class BBCooker:
if matches:
bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+ def testCookerCommandEvent(self, filepattern):
+ # Dummy command used by OEQA selftest to test tinfoil without IO
+ matches = ["A", "B"]
+ bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+
def findProviders(self, mc=''):
return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index 1c1e008c6..ba657c03b 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -291,6 +291,8 @@ class CookerDataBuilder(object):
multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
for config in multiconfig:
+ if config[0].isdigit():
+ bb.fatal("Multiconfig name '%s' is invalid as multiconfigs cannot start with a digit" % config)
mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
bb.event.fire(bb.event.ConfigParsed(), mcdata)
self.mcdata[config] = mcdata
@@ -342,6 +344,9 @@ class CookerDataBuilder(object):
layers = (data.getVar('BBLAYERS') or "").split()
broken_layers = []
+ if not layers:
+ bb.fatal("The bblayers.conf file doesn't contain any BBLAYERS definition")
+
data = bb.data.createCopy(data)
approved = bb.utils.approved_variables()
@@ -396,6 +401,8 @@ class CookerDataBuilder(object):
if c in collections_tmp:
bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
+ if compat and not layerseries:
+ bb.fatal("No core layer found to work with layer '%s'. Missing entry in bblayers.conf?" % c)
if compat and not (compat & layerseries):
bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
% (c, " ".join(layerseries), " ".join(compat)))
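These additions turn three previously cryptic failures into explicit errors: a multiconfig name starting with a digit, a bblayers.conf with no BBLAYERS set, and a layer declaring LAYERSERIES_COMPAT when no core layer has populated the supported series. A compressed sketch of the series compatibility test with made-up series names (not the real parsing code):

    # Hypothetical values: the core layer supports "honister", the layer under
    # test only claims compatibility with older series.
    layerseries = {"honister"}
    compat = set("gatesgarth hardknott".split())

    if compat and not layerseries:
        raise SystemExit("No core layer found to work with this layer. Missing entry in bblayers.conf?")
    if compat and not (compat & layerseries):
        raise SystemExit("Layer is not compatible with the core layer which only supports "
                         "these series: %s (layer is compatible with %s)"
                         % (" ".join(layerseries), " ".join(compat)))
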
diff --git a/poky/bitbake/lib/bb/data.py b/poky/bitbake/lib/bb/data.py
index 97022853c..9d18b1e2b 100644
--- a/poky/bitbake/lib/bb/data.py
+++ b/poky/bitbake/lib/bb/data.py
@@ -226,7 +226,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
deps = newdeps
seen |= deps
newdeps = set()
- for dep in deps:
+ for dep in sorted(deps):
if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
emit_var(dep, o, d, False) and o.write('\n')
newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep))
diff --git a/poky/bitbake/lib/bb/data_smart.py b/poky/bitbake/lib/bb/data_smart.py
index 65528c6ae..8d235da12 100644
--- a/poky/bitbake/lib/bb/data_smart.py
+++ b/poky/bitbake/lib/bb/data_smart.py
@@ -17,7 +17,7 @@ BitBake build tools.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import copy, re, sys, traceback
-from collections import MutableMapping
+from collections.abc import MutableMapping
import logging
import hashlib
import bb, bb.codeparser
@@ -151,6 +151,7 @@ class ExpansionError(Exception):
self.expression = expression
self.variablename = varname
self.exception = exception
+ self.varlist = [varname or expression or ""]
if varname:
if expression:
self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
@@ -160,8 +161,14 @@ class ExpansionError(Exception):
self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
Exception.__init__(self, self.msg)
self.args = (varname, expression, exception)
+
+ def addVar(self, varname):
+ if varname:
+ self.varlist.append(varname)
+
def __str__(self):
- return self.msg
+ chain = "\nThe variable dependency chain for the failure is: " + " -> ".join(self.varlist)
+ return self.msg + chain
class IncludeHistory(object):
def __init__(self, parent = None, filename = '[TOP LEVEL]'):
@@ -403,14 +410,17 @@ class DataSmart(MutableMapping):
s = __expand_python_regexp__.sub(varparse.python_sub, s)
except SyntaxError as e:
# Likely unmatched brackets, just don't expand the expression
- if e.msg != "EOL while scanning string literal":
+ if e.msg != "EOL while scanning string literal" and not e.msg.startswith("unterminated string literal"):
raise
if s == olds:
break
- except ExpansionError:
+ except ExpansionError as e:
+ e.addVar(varname)
raise
except bb.parse.SkipRecipe:
raise
+ except bb.BBHandledException:
+ raise
except Exception as exc:
tb = sys.exc_info()[2]
raise ExpansionError(varname, s, exc).with_traceback(tb) from exc
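The varlist/addVar() additions let every enclosing expansion record its variable name as an ExpansionError propagates upwards, so the final message shows the whole chain rather than just the innermost variable. A rough standalone sketch of that pattern with a toy expander (not BitBake's real expansion code):

    class ExpansionError(Exception):
        def __init__(self, varname, msg):
            super().__init__(msg)
            self.msg = msg
            self.varlist = [varname]
        def addVar(self, varname):
            self.varlist.append(varname)
        def __str__(self):
            return self.msg + "\nThe variable dependency chain for the failure is: " + " -> ".join(self.varlist)

    def expand(name, values):
        try:
            value = values[name]
            return expand(value[2:-1], values) if value.startswith("${") else value
        except ExpansionError as e:
            e.addVar(name)   # each level appends itself on the way up
            raise
        except Exception:
            raise ExpansionError(name, "Failure expanding variable %s" % name)

    # expand("A", {"A": "${B}", "B": "${C}"}) fails with chain: C -> B -> A
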
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 914fa5c02..ee29d89b1 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -466,7 +466,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
# Kill parameters, they make no sense for mirror tarballs
uri_decoded[5] = {}
elif ud.localpath and ud.method.supports_checksum(ud):
- basename = os.path.basename(ud.localpath)
+ basename = os.path.basename(uri_decoded[loc])
if basename and not result_decoded[loc].endswith(basename):
result_decoded[loc] = os.path.join(result_decoded[loc], basename)
else:
@@ -754,6 +754,11 @@ def get_srcrev(d, method_name='sortable_revision'):
that fetcher provides a method with the given name and the same signature as sortable_revision.
"""
+ recursion = d.getVar("__BBINSRCREV")
+ if recursion:
+ raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
+ d.setVar("__BBINSRCREV", True)
+
scms = []
fetcher = Fetch(d.getVar('SRC_URI').split(), d)
urldata = fetcher.ud
@@ -768,6 +773,7 @@ def get_srcrev(d, method_name='sortable_revision'):
autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
if len(rev) > 10:
rev = rev[:10]
+ d.delVar("__BBINSRCREV")
if autoinc:
return "AUTOINC+" + rev
return rev
@@ -802,6 +808,7 @@ def get_srcrev(d, method_name='sortable_revision'):
if seenautoinc:
format = "AUTOINC+" + format
+ d.delVar("__BBINSRCREV")
return format
def localpath(url, d):
@@ -827,6 +834,7 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH',
'DBUS_SESSION_BUS_ADDRESS',
'P4CONFIG',
'SSL_CERT_FILE',
+ 'AWS_PROFILE',
'AWS_ACCESS_KEY_ID',
'AWS_SECRET_ACCESS_KEY',
'AWS_DEFAULT_REGION']
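The __BBINSRCREV variable acts as a re-entrancy guard: if get_srcrev() is entered again while a first call is still expanding SRC_URI (a recursive reference, typically through ${SRCPV}), it now fails with a readable FetchError instead of recursing until Python's stack limit. A generic sketch of the same guard, assuming a plain dict in place of the datastore and a hypothetical compute_revision callable:

    def get_srcrev(d, compute_revision):
        # d: assumed dict-like store; compute_revision: hypothetical callable that
        # may indirectly re-enter get_srcrev() via variable expansion.
        if d.get("__BBINSRCREV"):
            raise RuntimeError("There are recursive references in fetcher variables, likely through SRC_URI")
        d["__BBINSRCREV"] = True
        rev = compute_revision(d)
        # The real change clears the flag on the successful return paths only;
        # it is not wrapped in try/finally.
        del d["__BBINSRCREV"]
        return rev
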
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 488f4c741..e8ddf2c76 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -68,6 +68,7 @@ import subprocess
import tempfile
import bb
import bb.progress
+from contextlib import contextmanager
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
@@ -418,6 +419,20 @@ class Git(FetchMethod):
bb.utils.remove(tmpdir, recurse=True)
def build_mirror_data(self, ud, d):
+
+ # Create as a temp file and move atomically into position to avoid races
+ @contextmanager
+ def create_atomic(filename):
+ fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
+ try:
+ yield tfile
+ umask = os.umask(0o666)
+ os.umask(umask)
+ os.chmod(tfile, (0o666 & ~umask))
+ os.rename(tfile, filename)
+ finally:
+ os.close(fd)
+
if ud.shallow and ud.write_shallow_tarballs:
if not os.path.exists(ud.fullshallow):
if os.path.islink(ud.fullshallow):
@@ -428,7 +443,8 @@ class Git(FetchMethod):
self.clone_shallow_local(ud, shallowclone, d)
logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
+ with create_atomic(ud.fullshallow) as tfile:
+ runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
runfetchcmd("touch %s.done" % ud.fullshallow, d)
finally:
bb.utils.remove(tempdir, recurse=True)
@@ -437,7 +453,8 @@ class Git(FetchMethod):
os.unlink(ud.fullmirror)
logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
+ with create_atomic(ud.fullmirror) as tfile:
+ runfetchcmd("tar -czf %s ." % tfile, d, workdir=ud.clonedir)
runfetchcmd("touch %s.done" % ud.fullmirror, d)
def clone_shallow_local(self, ud, dest, d):
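Writing the mirror tarball under a temporary name and renaming it into place means concurrent fetchers never observe a partially written ud.fullmirror/ud.fullshallow, since a rename within the same directory is atomic on POSIX filesystems (hence mkstemp with dir= next to the target). A cut-down sketch of the helper and how the tar step uses it, assuming a runfetchcmd-like runner:

    import os
    import tempfile
    from contextlib import contextmanager

    @contextmanager
    def create_atomic(filename):
        # Temp file in the same directory so the final rename cannot cross filesystems.
        fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
        try:
            yield tfile                       # caller writes the content to tfile
            umask = os.umask(0o666)           # read the current umask back
            os.umask(umask)
            os.chmod(tfile, 0o666 & ~umask)   # mkstemp creates 0600; widen per umask
            os.rename(tfile, filename)        # atomic publish
        finally:
            os.close(fd)

    # with create_atomic("/path/to/mirror.tar.gz") as tfile:
    #     run("tar -czf %s ." % tfile, workdir=clonedir)   # hypothetical runner
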
diff --git a/poky/bitbake/lib/bb/fetch2/svn.py b/poky/bitbake/lib/bb/fetch2/svn.py
index 80102b44f..d40e4d290 100644
--- a/poky/bitbake/lib/bb/fetch2/svn.py
+++ b/poky/bitbake/lib/bb/fetch2/svn.py
@@ -57,7 +57,12 @@ class Svn(FetchMethod):
if 'rev' in ud.parm:
ud.revision = ud.parm['rev']
- ud.localfile = d.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision))
+ # Whether to use the @REV peg-revision syntax in the svn command or not
+ ud.pegrevision = True
+ if 'nopegrevision' in ud.parm:
+ ud.pegrevision = False
+
+ ud.localfile = d.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ["0", "1"][ud.pegrevision]))
def _buildsvncommand(self, ud, d, command):
"""
@@ -98,7 +103,8 @@ class Svn(FetchMethod):
if ud.revision:
options.append("-r %s" % ud.revision)
- suffix = "@%s" % (ud.revision)
+ if ud.pegrevision:
+ suffix = "@%s" % (ud.revision)
if command == "fetch":
transportuser = ud.parm.get("transportuser", "")
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 9a49e64a0..349891e85 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -69,7 +69,7 @@ class Wget(FetchMethod):
"""
Check to see if a given url can be fetched with wget.
"""
- return ud.type in ['http', 'https', 'ftp']
+ return ud.type in ['http', 'https', 'ftp', 'ftps']
def recommends_checksum(self, urldata):
return True
diff --git a/poky/bitbake/lib/bb/main.py b/poky/bitbake/lib/bb/main.py
index 06bad495a..639fc1885 100755
--- a/poky/bitbake/lib/bb/main.py
+++ b/poky/bitbake/lib/bb/main.py
@@ -112,13 +112,6 @@ def _showwarning(message, category, filename, lineno, file=None, line=None):
warnlog.warning(s)
warnings.showwarning = _showwarning
-warnings.filterwarnings("ignore")
-warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
-warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
-warnings.filterwarnings("ignore", category=ImportWarning)
-warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
-warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
-
def create_bitbake_parser():
parser = optparse.OptionParser(
diff --git a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 152ef6ab7..ee9bd760c 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -19,9 +19,6 @@ from . import ConfHandler
from .. import resolve_file, ast, logger, ParseError
from .ConfHandler import include, init
-# For compatibility
-bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
-
__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
diff --git a/poky/bitbake/lib/bb/persist_data.py b/poky/bitbake/lib/bb/persist_data.py
index c6a209fb3..49c9a0d51 100644
--- a/poky/bitbake/lib/bb/persist_data.py
+++ b/poky/bitbake/lib/bb/persist_data.py
@@ -12,14 +12,14 @@ currently, providing a key/value store accessed by 'domain'.
#
import collections
+import collections.abc
import contextlib
import functools
import logging
import os.path
import sqlite3
import sys
-import warnings
-from collections import Mapping
+from collections.abc import Mapping
sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -29,7 +29,7 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
logger = logging.getLogger("BitBake.PersistData")
@functools.total_ordering
-class SQLTable(collections.MutableMapping):
+class SQLTable(collections.abc.MutableMapping):
class _Decorators(object):
@staticmethod
def retry(*, reconnect=True):
@@ -238,55 +238,6 @@ class SQLTable(collections.MutableMapping):
def has_key(self, key):
return key in self
-
-class PersistData(object):
- """Deprecated representation of the bitbake persistent data store"""
- def __init__(self, d):
- warnings.warn("Use of PersistData is deprecated. Please use "
- "persist(domain, d) instead.",
- category=DeprecationWarning,
- stacklevel=2)
-
- self.data = persist(d)
- logger.debug("Using '%s' as the persistent data cache",
- self.data.filename)
-
- def addDomain(self, domain):
- """
- Add a domain (pending deprecation)
- """
- return self.data[domain]
-
- def delDomain(self, domain):
- """
- Removes a domain and all the data it contains
- """
- del self.data[domain]
-
- def getKeyValues(self, domain):
- """
- Return a list of key + value pairs for a domain
- """
- return list(self.data[domain].items())
-
- def getValue(self, domain, key):
- """
- Return the value of a key for a domain
- """
- return self.data[domain][key]
-
- def setValue(self, domain, key, value):
- """
- Sets the value of a key for a domain
- """
- self.data[domain][key] = value
-
- def delValue(self, domain, key):
- """
- Deletes a key/value pair
- """
- del self.data[domain][key]
-
def persist(domain, d):
"""Convenience factory for SQLTable objects based upon metadata"""
import bb.utils
diff --git a/poky/bitbake/lib/bb/process.py b/poky/bitbake/lib/bb/process.py
index 7c3995cce..d5a1775fc 100644
--- a/poky/bitbake/lib/bb/process.py
+++ b/poky/bitbake/lib/bb/process.py
@@ -181,5 +181,8 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
stderr = stderr.decode("utf-8")
if pipe.returncode != 0:
+ if log:
+ # Don't duplicate the output in the exception if logging it
+ raise ExecutionError(cmd, pipe.returncode, None, None)
raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
return stdout, stderr
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 25e012125..10511a09d 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -85,15 +85,19 @@ class RunQueueStats:
"""
Holds statistics on the tasks handled by the associated runQueue
"""
- def __init__(self, total):
+ def __init__(self, total, setscene_total):
self.completed = 0
self.skipped = 0
self.failed = 0
self.active = 0
+ self.setscene_active = 0
+ self.setscene_covered = 0
+ self.setscene_notcovered = 0
+ self.setscene_total = setscene_total
self.total = total
def copy(self):
- obj = self.__class__(self.total)
+ obj = self.__class__(self.total, self.setscene_total)
obj.__dict__.update(self.__dict__)
return obj
@@ -112,6 +116,13 @@ class RunQueueStats:
def taskActive(self):
self.active = self.active + 1
+ def updateCovered(self, covered, notcovered):
+ self.setscene_covered = covered
+ self.setscene_notcovered = notcovered
+
+ def updateActiveSetscene(self, active):
+ self.setscene_active = active
+
# These values indicate the next step due to be run in the
# runQueue state machine
runQueuePrepare = 2
@@ -1735,8 +1746,7 @@ class RunQueueExecute:
self.holdoff_need_update = True
self.sqdone = False
- self.stats = RunQueueStats(len(self.rqdata.runtaskentries))
- self.sq_stats = RunQueueStats(len(self.rqdata.runq_setscene_tids))
+ self.stats = RunQueueStats(len(self.rqdata.runtaskentries), len(self.rqdata.runq_setscene_tids))
for mc in rq.worker:
rq.worker[mc].pipe.setrunqueueexec(self)
@@ -1787,6 +1797,7 @@ class RunQueueExecute:
else:
self.sq_task_complete(task)
self.sq_live.remove(task)
+ self.stats.updateActiveSetscene(len(self.sq_live))
else:
if status != 0:
self.task_fail(task, status, fakerootlog=fakerootlog)
@@ -1820,7 +1831,7 @@ class RunQueueExecute:
def finish(self):
self.rq.state = runQueueCleanUp
- active = self.stats.active + self.sq_stats.active
+ active = self.stats.active + len(self.sq_live)
if active > 0:
bb.event.fire(runQueueExitWait(active), self.cfgData)
self.rq.read_workers()
@@ -1853,7 +1864,7 @@ class RunQueueExecute:
return valid
def can_start_task(self):
- active = self.stats.active + self.sq_stats.active
+ active = self.stats.active + len(self.sq_live)
can_start = active < self.number_tasks
return can_start
@@ -1904,6 +1915,12 @@ class RunQueueExecute:
self.setbuildable(revdep)
logger.debug("Marking task %s as buildable", revdep)
+ for t in self.sq_deferred.copy():
+ if self.sq_deferred[t] == task:
+ logger.debug2("Deferred task %s now buildable" % t)
+ del self.sq_deferred[t]
+ update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
+
def task_complete(self, task):
self.stats.taskCompleted()
bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
@@ -1950,7 +1967,7 @@ class RunQueueExecute:
err = False
if not self.sqdone:
logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
- completeevent = sceneQueueComplete(self.sq_stats, self.rq)
+ completeevent = sceneQueueComplete(self.stats, self.rq)
bb.event.fire(completeevent, self.cfgData)
if self.sq_deferred:
logger.error("Scenequeue had deferred entries: %s" % pprint.pformat(self.sq_deferred))
@@ -2061,7 +2078,7 @@ class RunQueueExecute:
self.sq_task_failoutright(task)
return True
- startevent = sceneQueueTaskStarted(task, self.sq_stats, self.rq)
+ startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
bb.event.fire(startevent, self.cfgData)
taskdepdata = self.sq_build_taskdepdata(task)
@@ -2082,7 +2099,7 @@ class RunQueueExecute:
self.build_stamps2.append(self.build_stamps[task])
self.sq_running.add(task)
self.sq_live.add(task)
- self.sq_stats.taskActive()
+ self.stats.updateActiveSetscene(len(self.sq_live))
if self.can_start_task():
return True
@@ -2172,7 +2189,7 @@ class RunQueueExecute:
if self.can_start_task():
return True
- if self.stats.active > 0 or self.sq_stats.active > 0:
+ if self.stats.active > 0 or len(self.sq_live) > 0:
self.rq.read_workers()
return self.rq.active_fds()
@@ -2180,7 +2197,8 @@ class RunQueueExecute:
if self.sq_deferred:
tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0])
logger.warning("Runqeueue deadlocked on deferred tasks, forcing task %s" % tid)
- self.sq_task_failoutright(tid)
+ if tid not in self.runq_complete:
+ self.sq_task_failoutright(tid)
return True
if len(self.failed_tids) != 0:
@@ -2457,6 +2475,7 @@ class RunQueueExecute:
self.sq_task_failoutright(tid)
if changed:
+ self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
self.holdoff_need_update = True
def scenequeue_updatecounters(self, task, fail=False):
@@ -2490,6 +2509,7 @@ class RunQueueExecute:
new.add(dep)
next = new
+ self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
self.holdoff_need_update = True
def sq_task_completeoutright(self, task):
@@ -2513,13 +2533,11 @@ class RunQueueExecute:
self.rq.state = runQueueCleanUp
def sq_task_complete(self, task):
- self.sq_stats.taskCompleted()
- bb.event.fire(sceneQueueTaskCompleted(task, self.sq_stats, self.rq), self.cfgData)
+ bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
self.sq_task_completeoutright(task)
def sq_task_fail(self, task, result):
- self.sq_stats.taskFailed()
- bb.event.fire(sceneQueueTaskFailed(task, self.sq_stats, result, self), self.cfgData)
+ bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
self.scenequeue_notcovered.add(task)
self.scenequeue_updatecounters(task, True)
self.sq_check_taskfail(task)
@@ -2527,8 +2545,6 @@ class RunQueueExecute:
def sq_task_failoutright(self, task):
self.sq_running.add(task)
self.sq_buildable.add(task)
- self.sq_stats.taskSkipped()
- self.sq_stats.taskCompleted()
self.scenequeue_notcovered.add(task)
self.scenequeue_updatecounters(task, True)
@@ -2536,8 +2552,6 @@ class RunQueueExecute:
self.sq_running.add(task)
self.sq_buildable.add(task)
self.sq_task_completeoutright(task)
- self.sq_stats.taskSkipped()
- self.sq_stats.taskCompleted()
def sq_build_taskdepdata(self, task):
def getsetscenedeps(tid):
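Most of the churn above follows from dropping the separate sq_stats object: the count of in-flight setscene tasks is now simply len(self.sq_live), and the single stats object carries setscene_covered/notcovered/active/total so the sceneQueue* events can reuse it. A toy illustration of the scheduler's can_start_task() arithmetic with invented numbers:

    # Invented snapshot; in the real code these come from RunQueueExecute state.
    stats_active = 4                                     # ordinary tasks currently executing
    sq_live = {"recipe-a:do_package_setscene",
               "recipe-b:do_populate_sysroot_setscene"}  # running setscene tasks
    number_tasks = 8                                     # roughly BB_NUMBER_THREADS

    active = stats_active + len(sq_live)                 # replaces stats.active + sq_stats.active
    print(active < number_tasks)                         # True: room to start another task
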
diff --git a/poky/bitbake/lib/bb/server/process.py b/poky/bitbake/lib/bb/server/process.py
index b593830cc..8fdcc66dc 100644
--- a/poky/bitbake/lib/bb/server/process.py
+++ b/poky/bitbake/lib/bb/server/process.py
@@ -661,7 +661,7 @@ class BBUIEventQueue:
self.reader = ConnectionReader(readfd)
self.t = threading.Thread()
- self.t.setDaemon(True)
+ self.t.daemon = True
self.t.run = self.startCallbackHandler
self.t.start()
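threading.Thread.setDaemon() is a legacy setter (the daemon attribute has been the preferred form for a long time and the setter warns on newer Python), so the attribute is assigned directly. Minimal illustration, unrelated to the event queue above:

    import threading

    t = threading.Thread(target=lambda: None)
    t.daemon = True      # preferred over the deprecated t.setDaemon(True)
    t.start()
    t.join()
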
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 3f9fe5064..625a9cf3b 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -864,21 +864,21 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
if changed:
- for dep in changed:
+ for dep in sorted(changed):
output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
if added:
- for dep in added:
+ for dep in sorted(added):
output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
if removed:
- for dep in removed:
+ for dep in sorted(removed):
output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep))
changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
if changed:
- for dep in changed:
+ for dep in sorted(changed):
oldval = a_data['varvals'][dep]
newval = b_data['varvals'][dep]
if newval and oldval and ('\n' in oldval or '\n' in newval):
@@ -948,7 +948,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
b = b_data['runtaskhashes']
changed, added, removed = dict_diff(a, b)
if added:
- for dep in added:
+ for dep in sorted(added):
bdep_found = False
if removed:
for bdep in removed:
@@ -958,7 +958,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
if not bdep_found:
output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (clean_basepath(dep), b[dep]))
if removed:
- for dep in removed:
+ for dep in sorted(removed):
adep_found = False
if added:
for adep in added:
@@ -968,7 +968,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
if not adep_found:
output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (clean_basepath(dep), a[dep]))
if changed:
- for dep in changed:
+ for dep in sorted(changed):
if not collapsed:
output.append(color_format("{color_title}Hash for dependent task %s changed{color_default} from %s to %s") % (clean_basepath(dep), a[dep], b[dep]))
if callable(recursecb):
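Sorting the changed/added/removed sets (and the dependency sets in data.py's emit_func() above) makes signature-comparison output deterministic, since Python's set iteration order for strings varies between runs with hash randomization. A minimal illustration:

    changed = {"SRC_URI", "PV", "DEPENDS"}
    for dep in sorted(changed):
        print("Dependency on variable %s was changed" % dep)
    # Always prints DEPENDS, PV, SRC_URI in that order, whatever the set's internal order.
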
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 9291ce4a0..af292a216 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -867,6 +867,27 @@ class FetcherNetworkTest(FetcherTest):
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
@skipIfNoNetwork()
+ def test_fetch_specify_downloadfilename(self):
+ fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
+ fetcher.download()
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
+
+ @skipIfNoNetwork()
+ def test_fetch_premirror_specify_downloadfilename_regex_uri(self):
+ self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake/")
+ fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
+ fetcher.download()
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
+
+ @skipIfNoNetwork()
+ # BZ13039
+ def test_fetch_premirror_specify_downloadfilename_specific_uri(self):
+ self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake http://downloads.yoctoproject.org/releases/bitbake")
+ fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
+ fetcher.download()
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
+
+ @skipIfNoNetwork()
def gitfetcher(self, url1, url2):
def checkrevision(self, fetcher):
fetcher.unpack(self.unpackdir)
@@ -1232,7 +1253,7 @@ class FetchLatestVersionTest(FetcherTest):
("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
: "1.0",
# version pattern "pkg_name-vX.Y.Z"
- ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
+ ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
: "1.4.0",
# combination version pattern
("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
@@ -1244,13 +1265,13 @@ class FetchLatestVersionTest(FetcherTest):
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
# mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
- ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
+ ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))")
: "0.4.3",
- ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
+ ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))")
: "11.0.0",
- ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+ ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
: "1.3.59",
- ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+ ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
: "3.82+dbg0.9",
}
@@ -1290,11 +1311,11 @@ class FetchLatestVersionTest(FetcherTest):
#
# http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
# https://github.com/apple/cups/releases
- ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
+ ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
: "2.0.0",
# http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
# http://ftp.debian.org/debian/pool/main/d/db5.3/
- ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
+ ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
: "5.3.10",
}
@@ -2257,9 +2278,10 @@ class NPMTest(FetcherTest):
fetcher.download()
self.assertTrue(os.path.exists(ud.localpath))
# Setup the mirror
+ pkgname = os.path.basename(ud.proxy.urls[0].split(';')[0])
mirrordir = os.path.join(self.tempdir, 'mirror')
bb.utils.mkdirhier(mirrordir)
- os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
+ os.replace(ud.localpath, os.path.join(mirrordir, pkgname))
self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
# Fetch again
@@ -2269,6 +2291,27 @@ class NPMTest(FetcherTest):
@skipIfNoNpm()
@skipIfNoNetwork()
+ def test_npm_premirrors_with_specified_filename(self):
+ url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+ # Fetch once to get a tarball
+ fetcher = bb.fetch.Fetch([url], self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+ fetcher.download()
+ self.assertTrue(os.path.exists(ud.localpath))
+ # Setup the mirror
+ mirrordir = os.path.join(self.tempdir, 'mirror')
+ bb.utils.mkdirhier(mirrordir)
+ mirrorfilename = os.path.join(mirrordir, os.path.basename(ud.localpath))
+ os.replace(ud.localpath, mirrorfilename)
+ self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s\n' % mirrorfilename)
+ self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
+ # Fetch again
+ self.assertFalse(os.path.exists(ud.localpath))
+ fetcher.download()
+ self.assertTrue(os.path.exists(ud.localpath))
+
+ @skipIfNoNpm()
+ @skipIfNoNetwork()
def test_npm_mirrors(self):
# Fetch once to get a tarball
url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
@@ -2329,7 +2372,7 @@ class NPMTest(FetcherTest):
@skipIfNoNpm()
@skipIfNoNetwork()
def test_npm_registry_alternate(self):
- url = 'npm://registry.freajs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+ url = 'npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0'
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
fetcher.unpack(self.unpackdir)
diff --git a/poky/bitbake/lib/bb/ui/knotty.py b/poky/bitbake/lib/bb/ui/knotty.py
index 65ff2727d..8df745d13 100644
--- a/poky/bitbake/lib/bb/ui/knotty.py
+++ b/poky/bitbake/lib/bb/ui/knotty.py
@@ -753,7 +753,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
continue
if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
- logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring))
+ logger.info("Running setscene task %d of %d (%s)" % (event.stats.setscene_covered + event.stats.setscene_active + event.stats.setscene_notcovered + 1, event.stats.setscene_total, event.taskstring))
continue
if isinstance(event, bb.runqueue.runQueueTaskStarted):
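With the merged stats object, finished setscene tasks are accounted for as either covered or notcovered, so the "Running setscene task N of M" numerator becomes covered + active + notcovered + 1. A small worked example with made-up counter values:

    # Made-up snapshot of the combined stats during the setscene phase.
    setscene_covered = 40      # setscene tasks that succeeded (real task can be skipped)
    setscene_notcovered = 5    # setscene tasks that failed or were skipped
    setscene_active = 3        # setscene tasks currently running
    setscene_total = 120

    current = setscene_covered + setscene_active + setscene_notcovered + 1
    print("Running setscene task %d of %d" % (current, setscene_total))   # 49 of 120
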
diff --git a/poky/bitbake/lib/bb/ui/uihelper.py b/poky/bitbake/lib/bb/ui/uihelper.py
index 48d808ae2..52fdae3fe 100644
--- a/poky/bitbake/lib/bb/ui/uihelper.py
+++ b/poky/bitbake/lib/bb/ui/uihelper.py
@@ -49,8 +49,8 @@ class BBUIHelper:
tid = event._fn + ":" + event._task
removetid(event.pid, tid)
self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
- elif isinstance(event, bb.runqueue.runQueueTaskStarted):
- self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
+ elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
+ self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + event.stats.setscene_active + 1
self.tasknumber_total = event.stats.total
self.needUpdate = True
elif isinstance(event, bb.build.TaskProgress):