Diffstat (limited to 'poky/bitbake')
-rw-r--r--  poky/bitbake/README                                                        |  2
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst     |  3
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst      | 28
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst      | 13
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst | 93
-rw-r--r--  poky/bitbake/lib/bb/__init__.py                                            |  2
-rw-r--r--  poky/bitbake/lib/bb/build.py                                               | 17
-rw-r--r--  poky/bitbake/lib/bb/cache.py                                               |  3
-rw-r--r--  poky/bitbake/lib/bb/codeparser.py                                          |  6
-rw-r--r--  poky/bitbake/lib/bb/cooker.py                                              | 34
-rw-r--r--  poky/bitbake/lib/bb/event.py                                               | 31
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py                                     |  4
-rw-r--r--  poky/bitbake/lib/bb/fetch2/az.py                                           | 93
-rw-r--r--  poky/bitbake/lib/bb/providers.py                                           | 84
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py                                            | 67
-rw-r--r--  poky/bitbake/lib/bb/tests/color.py                                         |  2
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py                                         |  2
-rw-r--r--  poky/bitbake/lib/bb/tinfoil.py                                             |  2
-rw-r--r--  poky/bitbake/lib/bblayers/query.py                                         |  2
19 files changed, 380 insertions, 108 deletions
diff --git a/poky/bitbake/README b/poky/bitbake/README
index 479c37658..96e6007e7 100644
--- a/poky/bitbake/README
+++ b/poky/bitbake/README
@@ -11,7 +11,7 @@ For information about Bitbake, see the OpenEmbedded website:
Bitbake plain documentation can be found under the doc directory or its integrated
html version at the Yocto Project website:
- http://yoctoproject.org/documentation
+ https://docs.yoctoproject.org
Contributing
------------
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
index d74e768f6..56abf7735 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
@@ -244,7 +244,8 @@ want upstream. Here is an example: ::
BBFILE_COLLECTIONS = "upstream local"
BBFILE_PATTERN_upstream = "^/stuff/openembedded/"
BBFILE_PATTERN_local = "^/stuff/openembedded.modified/"
- BBFILE_PRIORITY_upstream = "5" BBFILE_PRIORITY_local = "10"
+ BBFILE_PRIORITY_upstream = "5"
+ BBFILE_PRIORITY_local = "10"
.. note::
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
index 6760b1082..e9a5f336d 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
@@ -624,6 +624,34 @@ Here are some example URLs: ::
SRC_URI = "repo://REPOROOT;protocol=git;branch=some_branch;manifest=my_manifest.xml"
SRC_URI = "repo://REPOROOT;protocol=file;branch=some_branch;manifest=my_manifest.xml"
+.. _az-fetcher:
+
+Az Fetcher (``az://``)
+--------------------------
+
+This submodule fetches data from an
+`Azure Storage account <https://docs.microsoft.com/en-us/azure/storage/>`__.
+It inherits its functionality from the HTTP wget fetcher, but modifies its
+behavior to accommodate the use of a
+`Shared Access Signature (SAS) <https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview>`__
+for non-public data.
+
+This behavior is controlled by the following variable:
+
+- :term:`AZ_SAS`: The Azure Storage Shared Access Signature provides secure,
+  delegated access to resources. If this variable is set, the Az Fetcher
+  uses it when fetching artifacts from the cloud.
+
+You can specify the AZ_SAS variable as shown below: ::
+
+ AZ_SAS = "se=2021-01-01&sp=r&sv=2018-11-09&sr=c&skoid=<skoid>&sig=<signature>"
+
+Here is an example URL: ::
+
+ SRC_URI = "az://<azure-storage-account>.blob.core.windows.net/<foo_container>/<bar_file>"
+
+The fetcher can also be used in mirror definitions set through the
+:term:`PREMIRRORS` variable, as in the sketch below.
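+
+For instance, a hypothetical mirror entry of this shape (the storage account
+and container names are placeholders) would make BitBake try the Azure
+Storage account before the upstream location: ::
+
+    PREMIRRORS_prepend = "\
+        https://.*/.* az://<azure-storage-account>.blob.core.windows.net/<foo_container>/ \n \
+    "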
+
Other Fetchers
--------------
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
index 7ea68ade7..d4190c26e 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
@@ -1296,6 +1296,17 @@ For more information on task dependencies, see the
See the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:variable flags`" section for information
on variable flags you can use with tasks.
+.. note::
+
+ While it's infrequent, it's possible to define multiple tasks as
+ dependencies when calling ``addtask``. For example, here's a snippet
+ from the OpenEmbedded class file ``package_tar.bbclass``::
+
+ addtask package_write_tar before do_build after do_packagedata do_package
+
+ Note how the ``package_write_tar`` task has to wait until both
+ ``do_packagedata`` and ``do_package`` complete.
+
Deleting a Task
---------------
@@ -1569,7 +1580,7 @@ might have an interest in viewing:
events when each of the workers parse the base configuration or if
the server changes configuration and reparses. Any given datastore
only has one such event executed against it, however. If
- ```BB_INVALIDCONF`` <#>`__ is set in the datastore by the event
+ :term:`BB_INVALIDCONF` is set in the datastore by the event
handler, the configuration is reparsed and a new event triggered,
allowing the metadata to update configuration.
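As an illustration (a hedged sketch, not part of the manual; the
``conf_is_valid`` check is a hypothetical placeholder), a ``ConfigParsed``
handler could request such a reparse like this: ::

    addhandler check_conf_handler
    check_conf_handler[eventmask] = "bb.event.ConfigParsed"
    python check_conf_handler() {
        if not conf_is_valid(e.data):
            e.data.setVar("BB_INVALIDCONF", True)
    }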
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
index 6469f9d1a..1528b0406 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
@@ -39,6 +39,19 @@ overview of their function and contents.
when specified allows for the Git binary from the host to be used
rather than building ``git-native``.
+ :term:`AZ_SAS`
+ Azure Storage Shared Access Signature, used with the
+ :ref:`Azure Storage fetcher <bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`.
+ This variable can be defined so that the fetcher uses it to authenticate
+ and gain access to non-public artifacts.
+ ::
+
+ AZ_SAS = ""se=2021-01-01&sp=r&sv=2018-11-09&sr=c&skoid=<skoid>&sig=<signature>""
+
+ For more information see Microsoft's Azure Storage documentation at
+ https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview
+
+
:term:`B`
The directory in which BitBake executes functions during a recipe's
build process.
@@ -257,45 +270,6 @@ overview of their function and contents.
``my-recipe.bb`` is executing, the ``BB_FILENAME`` variable contains
"/foo/path/my-recipe.bb".
- :term:`BBFILES_DYNAMIC`
- Activates content depending on presence of identified layers. You
- identify the layers by the collections that the layers define.
-
- Use the ``BBFILES_DYNAMIC`` variable to avoid ``.bbappend`` files whose
- corresponding ``.bb`` file is in a layer that attempts to modify other
- layers through ``.bbappend`` but does not want to introduce a hard
- dependency on those other layers.
-
- Additionally you can prefix the rule with "!" to add ``.bbappend`` and
- ``.bb`` files in case a layer is not present. Use this avoid hard
- dependency on those other layers.
-
- Use the following form for ``BBFILES_DYNAMIC``: ::
-
- collection_name:filename_pattern
-
- The following example identifies two collection names and two filename
- patterns: ::
-
- BBFILES_DYNAMIC += "\
- clang-layer:${LAYERDIR}/bbappends/meta-clang/*/*/*.bbappend \
- core:${LAYERDIR}/bbappends/openembedded-core/meta/*/*/*.bbappend \
- "
-
- When the collection name is prefixed with "!" it will add the file pattern in case
- the layer is absent: ::
-
- BBFILES_DYNAMIC += "\
- !clang-layer:${LAYERDIR}/backfill/meta-clang/*/*/*.bb \
- "
-
- This next example shows an error message that occurs because invalid
- entries are found, which cause parsing to abort: ::
-
- ERROR: BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:
- /work/my-layer/bbappends/meta-security-isafw/*/*/*.bbappend
- /work/my-layer/bbappends/openembedded-core/meta/*/*/*.bbappend
-
:term:`BB_GENERATE_MIRROR_TARBALLS`
Causes tarballs of the Git repositories, including the Git metadata,
to be placed in the :term:`DL_DIR` directory. Anyone
@@ -671,6 +645,45 @@ overview of their function and contents.
For details on the syntax, see the documentation by following the
previous link.
+ :term:`BBFILES_DYNAMIC`
+ Activates content depending on the presence of identified layers. You
+ identify the layers by the collections that the layers define.
+
+ Use the ``BBFILES_DYNAMIC`` variable to avoid ``.bbappend`` files whose
+ corresponding ``.bb`` file is in a layer that attempts to modify other
+ layers through ``.bbappend`` but does not want to introduce a hard
+ dependency on those other layers.
+
+ Additionally you can prefix the rule with "!" to add ``.bbappend`` and
+ ``.bb`` files in case a layer is not present. Use this to avoid a hard
+ dependency on those other layers.
+
+ Use the following form for ``BBFILES_DYNAMIC``: ::
+
+ collection_name:filename_pattern
+
+ The following example identifies two collection names and two filename
+ patterns: ::
+
+ BBFILES_DYNAMIC += "\
+ clang-layer:${LAYERDIR}/bbappends/meta-clang/*/*/*.bbappend \
+ core:${LAYERDIR}/bbappends/openembedded-core/meta/*/*/*.bbappend \
+ "
+
+ When the collection name is prefixed with "!", the file pattern is added
+ when the layer is absent: ::
+
+ BBFILES_DYNAMIC += "\
+ !clang-layer:${LAYERDIR}/backfill/meta-clang/*/*/*.bb \
+ "
+
+ This next example shows an error message that occurs because invalid
+ entries are found, which cause parsing to abort: ::
+
+ ERROR: BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:
+ /work/my-layer/bbappends/meta-security-isafw/*/*/*.bbappend
+ /work/my-layer/bbappends/openembedded-core/meta/*/*/*.bbappend
+
:term:`BBINCLUDED`
Contains a space-separated list of all files that BitBake's
parser included during parsing of the current file.
@@ -1303,6 +1316,8 @@ overview of their function and contents.
- ``svn://`` : Fetches files from a Subversion (``svn``) revision
control repository.
+ - ``az://`` : Fetches files from an Azure Storage account using HTTPS.
+
Here are some additional options worth mentioning:
- ``unpack`` : Controls whether or not to unpack the file if it is
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index 84a9051c1..afce5ccb8 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -58,7 +58,7 @@ class BBLoggerMixin(object):
if not bb.event.worker_pid:
if self.name in bb.msg.loggerDefaultDomains and loglevel > (bb.msg.loggerDefaultDomains[self.name]):
return
- if loglevel > bb.msg.loggerDefaultLogLevel:
+ if loglevel < bb.msg.loggerDefaultLogLevel:
return
return self.log(loglevel, msg, *args, **kwargs)
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index f4f897e41..b2715fc53 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -854,6 +854,23 @@ def make_stamp(task, d, file_name = None):
file_name = d.getVar('BB_FILENAME')
bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
+def find_stale_stamps(task, d, file_name=None):
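+ """
+ Return the list of stamp files for 'task' which match the clean mask
+ but not the current (or setscene) stamp, i.e. stamps which are stale.
+ """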
+ current = stamp_internal(task, d, file_name)
+ current2 = stamp_internal(task + "_setscene", d, file_name)
+ cleanmask = stamp_cleanmask_internal(task, d, file_name)
+ found = []
+ for mask in cleanmask:
+ for name in glob.glob(mask):
+ if "sigdata" in name or "sigbasedata" in name:
+ continue
+ if name.endswith('.taint'):
+ continue
+ if name == current or name == current2:
+ continue
+ logger.debug2("Stampfile %s does not match %s or %s" % (name, current, current2))
+ found.append(name)
+ return found
+
def del_stamp(task, d, file_name = None):
"""
Removes a stamp for a given task
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index aea2b8bc1..27eb27179 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -126,6 +126,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
+ self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)
@@ -163,6 +164,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.fakerootenv = {}
cachedata.fakerootnoenv = {}
cachedata.fakerootdirs = {}
+ cachedata.fakerootlogs = {}
cachedata.extradepsfunc = {}
def add_cacheData(self, cachedata, fn):
@@ -231,6 +233,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.fakerootenv[fn] = self.fakerootenv
cachedata.fakerootnoenv[fn] = self.fakerootnoenv
cachedata.fakerootdirs[fn] = self.fakerootdirs
+ cachedata.fakerootlogs[fn] = self.fakerootlogs
cachedata.extradepsfunc[fn] = self.extradepsfunc
def virtualfn2realfn(virtualfn):
diff --git a/poky/bitbake/lib/bb/codeparser.py b/poky/bitbake/lib/bb/codeparser.py
index 25a7ac69d..0cec452c0 100644
--- a/poky/bitbake/lib/bb/codeparser.py
+++ b/poky/bitbake/lib/bb/codeparser.py
@@ -212,9 +212,9 @@ class PythonParser():
funcstr = codegen.to_source(func)
argstr = codegen.to_source(arg)
except TypeError:
- self.log.debug(2, 'Failed to convert function and argument to source form')
+ self.log.debug2('Failed to convert function and argument to source form')
else:
- self.log.debug(1, self.unhandled_message % (funcstr, argstr))
+ self.log.debug(self.unhandled_message % (funcstr, argstr))
def visit_Call(self, node):
name = self.called_node_name(node.func)
@@ -450,7 +450,7 @@ class ShellParser():
cmd = word[1]
if cmd.startswith("$"):
- self.log.debug(1, self.unhandled_template % cmd)
+ self.log.debug(self.unhandled_template % cmd)
elif cmd == "eval":
command = " ".join(word for _, word in words[1:])
self._parse_shell(command)
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index f4ab797ed..39e10e613 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -502,22 +502,30 @@ class BBCooker:
def showVersions(self):
- (latest_versions, preferred_versions) = self.findProviders()
+ (latest_versions, preferred_versions, required) = self.findProviders()
- logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
- logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
+ logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
+ logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")
for p in sorted(self.recipecaches[''].pkg_pn):
- pref = preferred_versions[p]
+ preferred = preferred_versions[p]
latest = latest_versions[p]
+ requiredstr = ""
+ preferredstr = ""
+ if required[p]:
+ if preferred[0] is not None:
+ requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
+ else:
+ bb.fatal("REQUIRED_VERSION of package %s not available" % p)
+ else:
+ preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
- prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
- if pref == latest:
- prefstr = ""
+ if preferred == latest:
+ preferredstr = ""
- logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
+ logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)
def showEnvironment(self, buildfile=None, pkgs_to_build=None):
"""
@@ -1063,10 +1071,16 @@ class BBCooker:
if pn in self.recipecaches[mc].providers:
filenames = self.recipecaches[mc].providers[pn]
eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
- filename = eligible[0]
+ if eligible is not None:
+ filename = eligible[0]
+ else:
+ filename = None
return None, None, None, filename
elif pn in self.recipecaches[mc].pkg_pn:
- return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
+ (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
+ if required and preferred_file is None:
+ return None, None, None, None
+ return (latest, latest_f, preferred_ver, preferred_file)
else:
return None, None, None, None
diff --git a/poky/bitbake/lib/bb/event.py b/poky/bitbake/lib/bb/event.py
index 23e1f3187..0454c7533 100644
--- a/poky/bitbake/lib/bb/event.py
+++ b/poky/bitbake/lib/bb/event.py
@@ -118,7 +118,7 @@ def fire_class_handlers(event, d):
if _eventfilter:
if not _eventfilter(name, handler, event, d):
continue
- if d and not name in (d.getVar("__BBHANDLERS_MC") or []):
+ if d is not None and not name in (d.getVar("__BBHANDLERS_MC") or set()):
continue
execute_handler(name, handler, event, d)
@@ -232,12 +232,16 @@ noop = lambda _: None
def register(name, handler, mask=None, filename=None, lineno=None, data=None):
"""Register an Event handler"""
- if data and data.getVar("BB_CURRENT_MC"):
+ if data is not None and data.getVar("BB_CURRENT_MC"):
mc = data.getVar("BB_CURRENT_MC")
name = '%s%s' % (mc.replace('-', '_'), name)
# already registered
if name in _handlers:
+ if data is not None:
+ bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
+ bbhands_mc.add(name)
+ data.setVar("__BBHANDLERS_MC", bbhands_mc)
return AlreadyRegistered
if handler is not None:
@@ -274,16 +278,16 @@ def register(name, handler, mask=None, filename=None, lineno=None, data=None):
_event_handler_map[m] = {}
_event_handler_map[m][name] = True
- if data:
- bbhands_mc = (data.getVar("__BBHANDLERS_MC") or [])
- bbhands_mc.append(name)
+ if data is not None:
+ bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
+ bbhands_mc.add(name)
data.setVar("__BBHANDLERS_MC", bbhands_mc)
return Registered
def remove(name, handler, data=None):
"""Remove an Event handler"""
- if data:
+ if data is not None:
if data.getVar("BB_CURRENT_MC"):
mc = data.getVar("BB_CURRENT_MC")
name = '%s%s' % (mc.replace('-', '_'), name)
@@ -295,8 +299,8 @@ def remove(name, handler, data=None):
if name in _event_handler_map[event]:
_event_handler_map[event].pop(name)
- if data:
- bbhands_mc = (data.getVar("__BBHANDLERS_MC") or [])
+ if data is not None:
+ bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
if name in bbhands_mc:
bbhands_mc.remove(name)
data.setVar("__BBHANDLERS_MC", bbhands_mc)
@@ -666,6 +670,17 @@ class ReachableStamps(Event):
Event.__init__(self)
self.stamps = stamps
+class StaleSetSceneTasks(Event):
+ """
+ An event listing setscene tasks which are 'stale' and will
+ be rerun. The metadata may use this event to clean up stale data.
+ 'tasks' is a mapping of tasks to their matching stale stamps.
+ """
+
+ def __init__(self, tasks):
+ Event.__init__(self)
+ self.tasks = tasks
+
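+# A hypothetical metadata consumer of this event (a sketch only; handler
+# name and log message are illustrative, not part of this patch):
+#
+#   addhandler stale_setscene_cleanup
+#   stale_setscene_cleanup[eventmask] = "bb.event.StaleSetSceneTasks"
+#   python stale_setscene_cleanup() {
+#       for tid, stamps in e.tasks.items():
+#           bb.note("Stale stamps for %s: %s" % (tid, ", ".join(stamps)))
+#   }
+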
class FilesMatchingFound(Event):
"""
Event when a list of files matching the supplied pattern has
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 19169d780..cf0201c49 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -1243,7 +1243,7 @@ class FetchData(object):
if checksum_name in self.parm:
checksum_expected = self.parm[checksum_name]
- elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
+ elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az"]:
checksum_expected = None
else:
checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
@@ -1908,6 +1908,7 @@ from . import repo
from . import clearcase
from . import npm
from . import npmsw
+from . import az
methods.append(local.Local())
methods.append(wget.Wget())
@@ -1927,3 +1928,4 @@ methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
methods.append(npmsw.NpmShrinkWrap())
+methods.append(az.Az())
diff --git a/poky/bitbake/lib/bb/fetch2/az.py b/poky/bitbake/lib/bb/fetch2/az.py
new file mode 100644
index 000000000..3ccc594c2
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/az.py
@@ -0,0 +1,93 @@
+"""
+BitBake 'Fetch' Azure Storage implementation
+
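+Fetches files from an Azure Storage account over HTTPS, e.g.:
+
+    SRC_URI = "az://<azure-storage-account>.blob.core.windows.net/<foo_container>/<bar_file>"
+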
+"""
+
+# Copyright (C) 2021 Alejandro Hernandez Samaniego
+#
+# Based on bb.fetch2.wget:
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import shlex
+import os
+import bb
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+from bb.fetch2.wget import Wget
+
+
+class Az(Wget):
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched from Azure Storage
+ """
+ return ud.type in ['az']
+
+
+ def checkstatus(self, fetch, ud, d, try_again=True):
+
+ # checkstatus discards parameters either way, so we need to do this before adding the SAS
+ ud.url = ud.url.replace('az://','https://').split(';')[0]
+
+ az_sas = d.getVar('AZ_SAS')
+ if az_sas and az_sas not in ud.url:
+ ud.url += az_sas
+
+ return Wget.checkstatus(self, fetch, ud, d, try_again)
+
+ # Override download method, include retries
+ def download(self, ud, d, retries=3):
+ """Fetch urls"""
+
+ # If we're reaching the account transaction limit we might be refused a
+ # connection; retrying allows us to avoid false negatives since the limit
+ # changes over time
+ fetchcmd = self.basecmd + ' --retry-connrefused --waitretry=5'
+
+ # We need to provide a localpath to avoid wget using the SAS
+ # ud.localfile either has the downloadfilename or ud.path
+ localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
+ bb.utils.mkdirhier(os.path.dirname(localpath))
+ fetchcmd += " -O %s" % shlex.quote(localpath)
+
+
+ if ud.user and ud.pswd:
+ fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
+
+ # Check if a Shared Access Signature was given and use it
+ az_sas = d.getVar('AZ_SAS')
+
+ if az_sas:
+ azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas)
+ else:
+ azuri = '%s%s%s' % ('https://', ud.host, ud.path)
+
+ if os.path.exists(ud.localpath):
+ # The file exists, but we didn't complete it... trying again.
+ fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % azuri)
+ else:
+ fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % azuri)
+
+ try:
+ self._runwget(ud, d, fetchcmd, False)
+ except FetchError as e:
+ # Azure sometimes fails the handshake when using wget after some stress,
+ # producing a FetchError from the fetcher; if the artifact exists,
+ # retrying should succeed
+ if retries > 0 and 'Unable to establish SSL connection' in str(e):
+ logger.debug2('Unable to establish SSL connection: Retries remaining: %s, Retrying...' % retries)
+ self.download(ud, d, retries - 1)
+
+ # Sanity check since wget can pretend it succeeded when it didn't
+ # Also, this used to happen if sourceforge sent us to the mirror page
+ if not os.path.exists(ud.localpath):
+ raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (azuri, ud.localpath), azuri)
+
+ if os.path.getsize(ud.localpath) == 0:
+ os.remove(ud.localpath)
+ raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (azuri), azuri)
+
+ return True
diff --git a/poky/bitbake/lib/bb/providers.py b/poky/bitbake/lib/bb/providers.py
index b5a6cd009..3ec11a40e 100644
--- a/poky/bitbake/lib/bb/providers.py
+++ b/poky/bitbake/lib/bb/providers.py
@@ -38,16 +38,17 @@ def findProviders(cfgData, dataCache, pkg_pn = None):
localdata = data.createCopy(cfgData)
bb.data.expandKeys(localdata)
+ required = {}
preferred_versions = {}
latest_versions = {}
for pn in pkg_pn:
- (last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn)
+ (last_ver, last_file, pref_ver, pref_file, req) = findBestProvider(pn, localdata, dataCache, pkg_pn)
preferred_versions[pn] = (pref_ver, pref_file)
latest_versions[pn] = (last_ver, last_file)
+ required[pn] = req
- return (latest_versions, preferred_versions)
-
+ return (latest_versions, preferred_versions, required)
def allProviders(dataCache):
"""
@@ -59,7 +60,6 @@ def allProviders(dataCache):
all_providers[pn].append((ver, fn))
return all_providers
-
def sortPriorities(pn, dataCache, pkg_pn = None):
"""
Reorder pkg_pn by file priority and default preference
@@ -87,6 +87,21 @@ def sortPriorities(pn, dataCache, pkg_pn = None):
return tmp_pn
+def versionVariableMatch(cfgData, keyword, pn):
+ """
+ Return the value of the <keyword>_VERSION variable if set.
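+ The lookup order is <keyword>_VERSION_pn-<pn>, then <keyword>_VERSION_<pn>,
+ then plain <keyword>_VERSION.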
+ """
+
+ # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
+ # hence we do this manually rather than use OVERRIDES
+ ver = cfgData.getVar("%s_VERSION_pn-%s" % (keyword, pn))
+ if not ver:
+ ver = cfgData.getVar("%s_VERSION_%s" % (keyword, pn))
+ if not ver:
+ ver = cfgData.getVar("%s_VERSION" % keyword)
+
+ return ver
+
def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
"""
Check if the version pe,pv,pr is the preferred one.
@@ -102,19 +117,28 @@ def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
"""
- Find the first provider in pkg_pn with a PREFERRED_VERSION set.
+ Find the first provider in pkg_pn with REQUIRED_VERSION or PREFERRED_VERSION set.
"""
preferred_file = None
preferred_ver = None
+ required = False
- # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
- # hence we do this manually rather than use OVERRIDES
- preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn)
- if not preferred_v:
- preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn)
- if not preferred_v:
- preferred_v = cfgData.getVar("PREFERRED_VERSION")
+ required_v = versionVariableMatch(cfgData, "REQUIRED", pn)
+ preferred_v = versionVariableMatch(cfgData, "PREFERRED", pn)
+
+ itemstr = ""
+ if item:
+ itemstr = " (for item %s)" % item
+
+ if required_v is not None:
+ if preferred_v is not None:
+ logger.warn("REQUIRED_VERSION and PREFERRED_VERSION for package %s%s are both set using REQUIRED_VERSION %s", pn, itemstr, required_v)
+ else:
+ logger.debug("REQUIRED_VERSION is set for package %s%s", pn, itemstr)
+ # REQUIRED_VERSION always takes precedence over PREFERRED_VERSION
+ preferred_v = required_v
+ required = True
if preferred_v:
m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v)
@@ -147,11 +171,9 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
pv_str = preferred_v
if not (preferred_e is None):
pv_str = '%s:%s' % (preferred_e, pv_str)
- itemstr = ""
- if item:
- itemstr = " (for item %s)" % item
if preferred_file is None:
- logger.warn("preferred version %s of %s not available%s", pv_str, pn, itemstr)
+ if not required:
+ logger.warn("preferred version %s of %s not available%s", pv_str, pn, itemstr)
available_vers = []
for file_set in pkg_pn:
for f in file_set:
@@ -164,11 +186,15 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
if available_vers:
available_vers.sort()
logger.warn("versions of %s available: %s", pn, ' '.join(available_vers))
+ if required:
+ logger.error("required version %s of %s not available%s", pv_str, pn, itemstr)
else:
- logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
-
- return (preferred_ver, preferred_file)
+ if required:
+ logger.debug("selecting %s as REQUIRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
+ else:
+ logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
+ return (preferred_ver, preferred_file, required)
def findLatestProvider(pn, cfgData, dataCache, file_set):
"""
@@ -189,7 +215,6 @@ def findLatestProvider(pn, cfgData, dataCache, file_set):
return (latest, latest_f)
-
def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
"""
If there is a PREFERRED_VERSION, find the highest-priority bbfile
@@ -198,17 +223,16 @@ def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
"""
sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
- # Find the highest priority provider with a PREFERRED_VERSION set
- (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
+ # Find the highest priority provider with a REQUIRED_VERSION or PREFERRED_VERSION set
+ (preferred_ver, preferred_file, required) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
# Find the latest version of the highest priority provider
(latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
- if preferred_file is None:
+ if not required and preferred_file is None:
preferred_file = latest_f
preferred_ver = latest
- return (latest, latest_f, preferred_ver, preferred_file)
-
+ return (latest, latest_f, preferred_ver, preferred_file, required)
def _filterProviders(providers, item, cfgData, dataCache):
"""
@@ -234,10 +258,13 @@ def _filterProviders(providers, item, cfgData, dataCache):
logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
- # First add PREFERRED_VERSIONS
+ # First add REQUIRED_VERSIONS or PREFERRED_VERSIONS
for pn in sorted(pkg_pn):
sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
- preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
+ preferred_ver, preferred_file, required = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
+ if required and preferred_file is None:
+ return eligible
+ preferred_versions[pn] = (preferred_ver, preferred_file)
if preferred_versions[pn][1]:
eligible.append(preferred_versions[pn][1])
@@ -249,7 +276,6 @@ def _filterProviders(providers, item, cfgData, dataCache):
eligible.append(preferred_versions[pn][1])
if not eligible:
- logger.error("no eligible providers for %s", item)
return eligible
# If pn == item, give it a slight default preference
@@ -266,7 +292,6 @@ def _filterProviders(providers, item, cfgData, dataCache):
return eligible
-
def filterProviders(providers, item, cfgData, dataCache):
"""
Take a list of providers and filter/reorder according to the
@@ -388,7 +413,6 @@ def getRuntimeProviders(dataCache, rdepend):
return rproviders
-
def buildWorldTargetList(dataCache, task=None):
"""
Build package list for "bitbake world"
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 54ef245a6..80d7f6ca6 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -1242,6 +1242,7 @@ class RunQueue:
magic = "decafbad"
if self.cooker.configuration.profile:
magic = "decafbadbad"
+ fakerootlogs = None
if fakeroot:
magic = magic + "beef"
mcdata = self.cooker.databuilder.mcdata[mc]
@@ -1251,10 +1252,11 @@ class RunQueue:
for key, value in (var.split('=') for var in fakerootenv):
env[key] = value
worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+ fakerootlogs = self.rqdata.dataCaches[mc].fakerootlogs
else:
worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
bb.utils.nonblockingfd(worker.stdout)
- workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
+ workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs)
workerdata = {
"taskdeps" : self.rqdata.dataCaches[mc].task_deps,
@@ -1772,7 +1774,7 @@ class RunQueueExecute:
self.sqdata = SQData()
build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self)
- def runqueue_process_waitpid(self, task, status):
+ def runqueue_process_waitpid(self, task, status, fakerootlog=None):
# self.build_stamps[pid] may not exist when use shared work directory.
if task in self.build_stamps:
@@ -1787,7 +1789,7 @@ class RunQueueExecute:
self.sq_live.remove(task)
else:
if status != 0:
- self.task_fail(task, status)
+ self.task_fail(task, status, fakerootlog=fakerootlog)
else:
self.task_complete(task)
return True
@@ -1908,14 +1910,31 @@ class RunQueueExecute:
self.task_completeoutright(task)
self.runq_tasksrun.add(task)
- def task_fail(self, task, exitcode):
+ def task_fail(self, task, exitcode, fakerootlog=None):
"""
Called when a task has failed
Updates the state engine with the failure
"""
self.stats.taskFailed()
self.failed_tids.append(task)
- bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
+
+ fakeroot_log = ""
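+ # Scan the pseudo log backwards, collecting lines from the most recent
+ # session (back to the last server start) and flagging any
+ # mismatch/error/fatal messages found in them.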
+ if fakerootlog and os.path.exists(fakerootlog):
+ with open(fakerootlog) as fakeroot_log_file:
+ fakeroot_failed = False
+ for line in reversed(fakeroot_log_file.readlines()):
+ for fakeroot_error in ['mismatch', 'error', 'fatal']:
+ if fakeroot_error in line.lower():
+ fakeroot_failed = True
+ if 'doing new pid setup and server start' in line:
+ break
+ fakeroot_log = line + fakeroot_log
+
+ if not fakeroot_failed:
+ fakeroot_log = None
+
+ bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=fakeroot_log), self.cfgData)
+
if self.rqdata.taskData[''].abort:
self.rq.state = runQueueCleanUp
@@ -1943,6 +1962,10 @@ class RunQueueExecute:
logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks))
err = True
+ for tid in self.scenequeue_covered.intersection(self.scenequeue_notcovered):
+ # No task should end up in both covered and notcovered; that is a bug.
+ logger.error("Setscene task %s in both covered and notcovered." % tid)
+
for tid in self.rqdata.runq_setscene_tids:
if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered:
err = True
@@ -2431,6 +2454,9 @@ class RunQueueExecute:
for dep in sorted(self.sqdata.sq_deps[task]):
if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]:
+ if dep in self.scenequeue_covered or dep in self.scenequeue_notcovered:
+ # the dependency may already have been processed, e.g. by a noexec setscene task
+ continue
logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
self.sq_task_failoutright(dep)
continue
@@ -2755,6 +2781,20 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True)
+ # Compute a list of 'stale' sstate tasks where the current hash does not match the one
+ # in any stamp files. Pass the list out to metadata as an event.
+ found = {}
+ for tid in rqdata.runq_setscene_tids:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ stamps = bb.build.find_stale_stamps(taskname, rqdata.dataCaches[mc], taskfn)
+ if stamps:
+ if mc not in found:
+ found[mc] = {}
+ found[mc][tid] = stamps
+ for mc in found:
+ event = bb.event.StaleSetSceneTasks(found[mc])
+ bb.event.fire(event, cooker.databuilder.mcdata[mc])
+
def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True):
tocheck = set()
@@ -2876,12 +2916,16 @@ class runQueueTaskFailed(runQueueEvent):
"""
Event notifying a task failed
"""
- def __init__(self, task, stats, exitcode, rq):
+ def __init__(self, task, stats, exitcode, rq, fakeroot_log=None):
runQueueEvent.__init__(self, task, stats, rq)
self.exitcode = exitcode
+ self.fakeroot_log = fakeroot_log
def __str__(self):
- return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode)
+ if self.fakeroot_log:
+ return "Task (%s) failed with exit code '%s' \nPseudo log:\n%s" % (self.taskstring, self.exitcode, self.fakeroot_log)
+ else:
+ return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode)
class sceneQueueTaskFailed(sceneQueueEvent):
"""
@@ -2933,7 +2977,7 @@ class runQueuePipe():
"""
Abstraction for a pipe between a worker thread and the server
"""
- def __init__(self, pipein, pipeout, d, rq, rqexec):
+ def __init__(self, pipein, pipeout, d, rq, rqexec, fakerootlogs=None):
self.input = pipein
if pipeout:
pipeout.close()
@@ -2942,6 +2986,7 @@ class runQueuePipe():
self.d = d
self.rq = rq
self.rqexec = rqexec
+ self.fakerootlogs = fakerootlogs
def setrunqueueexec(self, rqexec):
self.rqexec = rqexec
@@ -2987,7 +3032,11 @@ class runQueuePipe():
task, status = pickle.loads(self.queue[10:index])
except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e:
bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index]))
- self.rqexec.runqueue_process_waitpid(task, status)
+ (_, _, _, taskfn) = split_tid_mcfn(task)
+ fakerootlog = None
+ if self.fakerootlogs and taskfn and taskfn in self.fakerootlogs:
+ fakerootlog = self.fakerootlogs[taskfn]
+ self.rqexec.runqueue_process_waitpid(task, status, fakerootlog=fakerootlog)
found = True
self.queue = self.queue[index+11:]
index = self.queue.find(b"</exitcode>")
diff --git a/poky/bitbake/lib/bb/tests/color.py b/poky/bitbake/lib/bb/tests/color.py
index bf03750c6..88dd27800 100644
--- a/poky/bitbake/lib/bb/tests/color.py
+++ b/poky/bitbake/lib/bb/tests/color.py
@@ -31,7 +31,7 @@ class ColorCodeTests(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
self._progress_watcher = ProgressWatcher()
- bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event)
+ bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event, data=self.d)
def tearDown(self):
bb.event.remove("bb.build.TaskProgress", None)
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 7b2dac7b8..ddf6e9743 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -1345,7 +1345,7 @@ class FetchCheckStatusTest(FetcherTest):
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
"https://yoctoproject.org/",
- "https://yoctoproject.org/documentation",
+ "https://docs.yoctoproject.org",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
"ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
diff --git a/poky/bitbake/lib/bb/tinfoil.py b/poky/bitbake/lib/bb/tinfoil.py
index 763c32981..796a98f05 100644
--- a/poky/bitbake/lib/bb/tinfoil.py
+++ b/poky/bitbake/lib/bb/tinfoil.py
@@ -440,7 +440,7 @@ class Tinfoil:
to initialise Tinfoil and use it with config_only=True first and
then conditionally call this function to parse recipes later.
"""
- config_params = TinfoilConfigParameters(config_only=False)
+ config_params = TinfoilConfigParameters(config_only=False, quiet=self.quiet)
self.run_actions(config_params)
self.recipes_parsed = True
diff --git a/poky/bitbake/lib/bblayers/query.py b/poky/bitbake/lib/bblayers/query.py
index f5e3c8474..947422a72 100644
--- a/poky/bitbake/lib/bblayers/query.py
+++ b/poky/bitbake/lib/bblayers/query.py
@@ -128,7 +128,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
sys.exit(1)
pkg_pn = self.tinfoil.cooker.recipecaches[mc].pkg_pn
- (latest_versions, preferred_versions) = self.tinfoil.find_providers(mc)
+ (latest_versions, preferred_versions, required_versions) = self.tinfoil.find_providers(mc)
allproviders = self.tinfoil.get_all_providers(mc)
# Ensure we list skipped recipes