author     Brad Bishop <bradleyb@fuzziesquirrel.com>    2019-04-05 22:28:33 +0300
committer  Brad Bishop <bradleyb@fuzziesquirrel.com>    2019-04-05 22:31:28 +0300
commit     193236933b0f4ab91b1625b64e2187e2db4e0e8f (patch)
tree       e12769d7c76d8b0517d6de3d3c72189753d253ed /poky/bitbake
parent     bd93df9478f2f56ffcbc8cb88f1709c735dcd85b (diff)
download   openbmc-193236933b0f4ab91b1625b64e2187e2db4e0e8f.tar.xz
reset upstream subtrees to HEAD
Reset the following subtrees on HEAD:

  poky: 8217b477a1(master)
  meta-xilinx: 64aa3d35ae(master)
  meta-openembedded: 0435c9e193(master)
  meta-raspberrypi: 490a4441ac(master)
  meta-security: cb6d1c85ee(master)

Squashed patches:
  meta-phosphor: drop systemd 239 patches
  meta-phosphor: mrw-api: use correct install path

Change-Id: I268e2646d9174ad305630c6bbd3fbc1a6105f43d
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
Diffstat (limited to 'poky/bitbake')
-rw-r--r--  poky/bitbake/.gitattributes  2
-rwxr-xr-x  poky/bitbake/bin/bitbake-hashserv  67
-rwxr-xr-x  poky/bitbake/bin/bitbake-selftest  3
-rwxr-xr-x  poky/bitbake/bin/bitbake-worker  9
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml  78
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml  48
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml  28
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml  2
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml  78
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml  422
-rw-r--r--  poky/bitbake/lib/bb/build.py  3
-rw-r--r--  poky/bitbake/lib/bb/cache.py  2
-rw-r--r--  poky/bitbake/lib/bb/codeparser.py  9
-rw-r--r--  poky/bitbake/lib/bb/cooker.py  55
-rw-r--r--  poky/bitbake/lib/bb/cookerdata.py  4
-rw-r--r--  poky/bitbake/lib/bb/data.py  4
-rw-r--r--  poky/bitbake/lib/bb/data_smart.py  20
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py  28
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py  22
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py  20
-rw-r--r--  poky/bitbake/lib/bb/fetch2/hg.py  2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/npm.py  2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py  77
-rwxr-xr-x  poky/bitbake/lib/bb/main.py  5
-rw-r--r--  poky/bitbake/lib/bb/monitordisk.py  12
-rw-r--r--  poky/bitbake/lib/bb/parse/ast.py  2
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/BBHandler.py  27
-rw-r--r--  poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py  2
-rw-r--r--  poky/bitbake/lib/bb/persist_data.py  222
-rw-r--r--  poky/bitbake/lib/bb/providers.py  4
-rw-r--r--  poky/bitbake/lib/bb/pysh/builtin.py  710
-rw-r--r--  poky/bitbake/lib/bb/pysh/interp.py  1367
-rw-r--r--  poky/bitbake/lib/bb/pysh/lsprof.py  116
-rw-r--r--  poky/bitbake/lib/bb/pysh/pysh.py  167
-rw-r--r--  poky/bitbake/lib/bb/pysh/pyshlex.py  5
-rw-r--r--  poky/bitbake/lib/bb/pysh/pyshyacc.py  17
-rw-r--r--  poky/bitbake/lib/bb/pysh/sherrors.py  26
-rw-r--r--  poky/bitbake/lib/bb/pysh/subprocess_fix.py  77
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py  92
-rw-r--r--  poky/bitbake/lib/bb/siggen.py  28
-rw-r--r--  poky/bitbake/lib/bb/tests/codeparser.py  7
-rw-r--r--  poky/bitbake/lib/bb/tests/data.py  9
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py  27
-rw-r--r--  poky/bitbake/lib/bb/tests/persist_data.py  142
-rw-r--r--  poky/bitbake/lib/bb/tests/utils.py  4
-rw-r--r--  poky/bitbake/lib/bb/utils.py  47
-rw-r--r--  poky/bitbake/lib/bs4/dammit.py  12
-rw-r--r--  poky/bitbake/lib/bs4/element.py  22
-rw-r--r--  poky/bitbake/lib/hashserv/__init__.py  152
-rw-r--r--  poky/bitbake/lib/hashserv/tests.py  141
50 files changed, 1324 insertions, 3103 deletions
diff --git a/poky/bitbake/.gitattributes b/poky/bitbake/.gitattributes
new file mode 100644
index 000000000..e4f8f62fc
--- /dev/null
+++ b/poky/bitbake/.gitattributes
@@ -0,0 +1,2 @@
+*min.js binary
+*min.css binary
diff --git a/poky/bitbake/bin/bitbake-hashserv b/poky/bitbake/bin/bitbake-hashserv
new file mode 100755
index 000000000..c49397b73
--- /dev/null
+++ b/poky/bitbake/bin/bitbake-hashserv
@@ -0,0 +1,67 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2018 Garmin Ltd.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+import os
+import sys
+import logging
+import argparse
+import sqlite3
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),'lib'))
+
+import hashserv
+
+VERSION = "1.0.0"
+
+DEFAULT_HOST = ''
+DEFAULT_PORT = 8686
+
+def main():
+ parser = argparse.ArgumentParser(description='HTTP Equivalence Reference Server. Version=%s' % VERSION)
+ parser.add_argument('--address', default=DEFAULT_HOST, help='Bind address (default "%(default)s")')
+ parser.add_argument('--port', type=int, default=DEFAULT_PORT, help='Bind port (default %(default)d)')
+ parser.add_argument('--prefix', default='', help='HTTP path prefix (default "%(default)s")')
+ parser.add_argument('--database', default='./hashserv.db', help='Database file (default "%(default)s")')
+ parser.add_argument('--log', default='WARNING', help='Set logging level')
+
+ args = parser.parse_args()
+
+ logger = logging.getLogger('hashserv')
+
+ level = getattr(logging, args.log.upper(), None)
+ if not isinstance(level, int):
+ raise ValueError('Invalid log level: %s' % args.log)
+
+ logger.setLevel(level)
+ console = logging.StreamHandler()
+ console.setLevel(level)
+ logger.addHandler(console)
+
+ db = sqlite3.connect(args.database)
+
+ server = hashserv.create_server((args.address, args.port), db, args.prefix)
+ server.serve_forever()
+ return 0
+
+if __name__ == '__main__':
+ try:
+ ret = main()
+ except Exception:
+ ret = 1
+ import traceback
+ traceback.print_exc()
+ sys.exit(ret)
+
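The new script above is a thin command-line wrapper around the hashserv module added later in this commit (poky/bitbake/lib/hashserv/__init__.py). As a minimal sketch (not part of the patch), the equivalent programmatic use, assuming lib/ is on sys.path and using only the create_server()/serve_forever() calls shown above, would be:

    import sqlite3
    import hashserv   # provided by poky/bitbake/lib/hashserv in this commit

    # Same defaults as the script: local hashserv.db, all interfaces, port 8686,
    # empty HTTP path prefix.
    db = sqlite3.connect('./hashserv.db')
    server = hashserv.create_server(('', 8686), db, '')
    server.serve_forever()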
diff --git a/poky/bitbake/bin/bitbake-selftest b/poky/bitbake/bin/bitbake-selftest
index cfa7ac539..99f1af910 100755
--- a/poky/bitbake/bin/bitbake-selftest
+++ b/poky/bitbake/bin/bitbake-selftest
@@ -22,6 +22,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib
import unittest
try:
import bb
+ import hashserv
import layerindexlib
except RuntimeError as exc:
sys.exit(str(exc))
@@ -33,7 +34,9 @@ tests = ["bb.tests.codeparser",
"bb.tests.event",
"bb.tests.fetch",
"bb.tests.parse",
+ "bb.tests.persist_data",
"bb.tests.utils",
+ "hashserv.tests",
"layerindexlib.tests.layerindexobj",
"layerindexlib.tests.restapi",
"layerindexlib.tests.cooker"]
diff --git a/poky/bitbake/bin/bitbake-worker b/poky/bitbake/bin/bitbake-worker
index e925054b7..a9e997e1f 100755
--- a/poky/bitbake/bin/bitbake-worker
+++ b/poky/bitbake/bin/bitbake-worker
@@ -136,7 +136,7 @@ def sigterm_handler(signum, frame):
os.killpg(0, signal.SIGTERM)
sys.exit()
-def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
+def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
# We need to setup the environment BEFORE the fork, since
# a fork() or exec*() activates PSEUDO...
@@ -234,7 +234,8 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append
ret = 0
the_data = bb_cache.loadDataFull(fn, appends)
- the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
+ the_data.setVar('BB_TASKHASH', taskhash)
+ the_data.setVar('BB_UNIHASH', unihash)
bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
@@ -425,10 +426,10 @@ class BitbakeWorker(object):
sys.exit(0)
def handle_runtask(self, data):
- fn, task, taskname, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
+ fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
- pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
+ pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
self.build_pids[pid] = task
self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
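With this change, fork_off_task() receives the task hash and unified hash explicitly and exports them to the task datastore as BB_TASKHASH and BB_UNIHASH. A hypothetical recipe task (not part of this patch, shown only to illustrate the new variables) could then read them like any other datastore value:

    python do_report_hashes() {
        # BB_TASKHASH and BB_UNIHASH are set by fork_off_task() before the task runs
        bb.plain("taskhash=%s unihash=%s" % (d.getVar('BB_TASKHASH'), d.getVar('BB_UNIHASH')))
    }
    addtask report_hashes after do_compile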
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml
index f1caaecd2..46dafeee3 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.xml
@@ -31,7 +31,7 @@
<para>
Prior to executing BitBake, you should take advantage of available
parallel thread execution on your build host by setting the
- <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
+ <link linkend='var-bb-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
variable in your project's <filename>local.conf</filename>
configuration file.
</para>
@@ -87,9 +87,9 @@
<para>
The <filename>layer.conf</filename> files are used to
construct key variables such as
- <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+ <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
and
- <link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
+ <link linkend='var-bb-BBFILES'><filename>BBFILES</filename></link>.
<filename>BBPATH</filename> is used to search for
configuration and class files under the
<filename>conf</filename> and <filename>classes</filename>
@@ -117,19 +117,19 @@
at certain variables, including:
<itemizedlist>
<listitem><para>
- <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
+ <link linkend='var-bb-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
</para></listitem>
<listitem><para>
- <link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
+ <link linkend='var-bb-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
</para></listitem>
<listitem><para>
- <link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
+ <link linkend='var-bb-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
</para></listitem>
<listitem><para>
- <link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>
+ <link linkend='var-bb-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>
</para></listitem>
<listitem><para>
- <link linkend='var-BITBAKE_UI'><filename>BITBAKE_UI</filename></link>
+ <link linkend='var-bb-BITBAKE_UI'><filename>BITBAKE_UI</filename></link>
</para></listitem>
</itemizedlist>
The first four variables in this list relate to how BitBake treats shell
@@ -156,7 +156,7 @@
BitBake first searches the current working directory for an
optional <filename>conf/bblayers.conf</filename> configuration file.
This file is expected to contain a
- <link linkend='var-BBLAYERS'><filename>BBLAYERS</filename></link>
+ <link linkend='var-bb-BBLAYERS'><filename>BBLAYERS</filename></link>
variable that is a space-delimited list of 'layer' directories.
Recall that if BitBake cannot find a <filename>bblayers.conf</filename>
file, then it is assumed the user has set the <filename>BBPATH</filename>
@@ -166,10 +166,10 @@
<para>
For each directory (layer) in this list, a <filename>conf/layer.conf</filename>
file is located and parsed with the
- <link linkend='var-LAYERDIR'><filename>LAYERDIR</filename></link>
+ <link linkend='var-bb-LAYERDIR'><filename>LAYERDIR</filename></link>
variable being set to the directory where the layer was found.
The idea is these files automatically set up
- <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+ <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
and other variables correctly for a given build directory.
</para>
@@ -189,7 +189,7 @@
depending on the environment variables previously
mentioned or set in the configuration files.
The
- "<link linkend='ref-variables-glos'>Variables Glossary</link>"
+ "<link linkend='ref-bb-variables-glos'>Variables Glossary</link>"
chapter presents a full list of variables.
</para>
@@ -204,7 +204,7 @@
<para>
The <filename>base.bbclass</filename> file is always included.
Other classes that are specified in the configuration using the
- <link linkend='var-INHERIT'><filename>INHERIT</filename></link>
+ <link linkend='var-bb-INHERIT'><filename>INHERIT</filename></link>
variable are also included.
BitBake searches for class files in a
<filename>classes</filename> subdirectory under
@@ -270,7 +270,7 @@
<para>
During the configuration phase, BitBake will have set
- <link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
+ <link linkend='var-bb-BBFILES'><filename>BBFILES</filename></link>.
BitBake now uses it to construct a list of recipes to parse,
along with any append files (<filename>.bbappend</filename>)
to apply.
@@ -292,7 +292,7 @@
Any inherit statements cause BitBake to find and
then parse class files (<filename>.bbclass</filename>)
using
- <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+ <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
as the search path.
Finally, BitBake parses in order any append files found in
<filename>BBFILES</filename>.
@@ -303,8 +303,8 @@
pieces of metadata.
For example, in <filename>bitbake.conf</filename> the recipe
name and version are used to set the variables
- <link linkend='var-PN'><filename>PN</filename></link> and
- <link linkend='var-PV'><filename>PV</filename></link>:
+ <link linkend='var-bb-PN'><filename>PN</filename></link> and
+ <link linkend='var-bb-PV'><filename>PV</filename></link>:
<literallayout class='monospaced'>
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
@@ -336,7 +336,7 @@
recipe information.
The validity of this cache is determined by first computing a
checksum of the base configuration data (see
- <link linkend='var-BB_HASHCONFIG_WHITELIST'><filename>BB_HASHCONFIG_WHITELIST</filename></link>)
+ <link linkend='var-bb-BB_HASHCONFIG_WHITELIST'><filename>BB_HASHCONFIG_WHITELIST</filename></link>)
and then checking if the checksum matches.
If that checksum matches what is in the cache and the recipe
and class files have not changed, Bitbake is able to use
@@ -384,9 +384,9 @@
the recipe can be known.
Each recipe's <filename>PROVIDES</filename> list is created
implicitly through the recipe's
- <link linkend='var-PN'><filename>PN</filename></link> variable
+ <link linkend='var-bb-PN'><filename>PN</filename></link> variable
and explicitly through the recipe's
- <link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>
+ <link linkend='var-bb-PROVIDES'><filename>PROVIDES</filename></link>
variable, which is optional.
</para>
@@ -427,7 +427,7 @@
PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
</literallayout>
The default
- <link linkend='var-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>
+ <link linkend='var-bb-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>
is the provider with the same name as the target.
Bitbake iterates through each target it needs to build and
resolves them and their dependencies using this process.
@@ -439,10 +439,10 @@
BitBake defaults to the highest version of a provider.
Version comparisons are made using the same method as Debian.
You can use the
- <link linkend='var-PREFERRED_VERSION'><filename>PREFERRED_VERSION</filename></link>
+ <link linkend='var-bb-PREFERRED_VERSION'><filename>PREFERRED_VERSION</filename></link>
variable to specify a particular version.
You can influence the order by using the
- <link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
+ <link linkend='var-bb-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
variable.
</para>
@@ -464,7 +464,7 @@
BitBake defaults to selecting the most recent
version, unless otherwise specified.
If the recipe in question has a
- <link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
+ <link linkend='var-bb-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
set lower than the other recipes (default is 0), then
it will not be selected.
This allows the person or persons maintaining
@@ -475,9 +475,9 @@
<para>
If the first recipe is named <filename>a_1.1.bb</filename>, then the
- <link linkend='var-PN'><filename>PN</filename></link> variable
+ <link linkend='var-bb-PN'><filename>PN</filename></link> variable
will be set to “a”, and the
- <link linkend='var-PV'><filename>PV</filename></link>
+ <link linkend='var-bb-PV'><filename>PV</filename></link>
variable will be set to 1.1.
</para>
@@ -532,11 +532,11 @@
<para>
Dependencies are defined through several variables.
You can find information about variables BitBake uses in
- the <link linkend='ref-variables-glos'>Variables Glossary</link>
+ the <link linkend='ref-bb-variables-glos'>Variables Glossary</link>
near the end of this manual.
At a basic level, it is sufficient to know that BitBake uses the
- <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link> and
- <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link> variables when
+ <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link> and
+ <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link> variables when
calculating dependencies.
</para>
@@ -560,7 +560,7 @@
<para>
The build now starts with BitBake forking off threads up to the limit set in the
- <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
+ <link linkend='var-bb-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
variable.
BitBake continues to fork threads as long as there are tasks ready to run,
those tasks have all their dependencies met, and the thread threshold has not been
@@ -574,7 +574,7 @@
<para>
As each task completes, a timestamp is written to the directory specified by the
- <link linkend='var-STAMP'><filename>STAMP</filename></link> variable.
+ <link linkend='var-bb-STAMP'><filename>STAMP</filename></link> variable.
On subsequent runs, BitBake looks in the build directory within
<filename>tmp/stamps</filename> and does not rerun
tasks that are already completed unless a timestamp is found to be invalid.
@@ -618,7 +618,7 @@
<para>
Tasks can be either a shell task or a Python task.
For shell tasks, BitBake writes a shell script to
- <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
+ <filename>${</filename><link linkend='var-bb-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
and then executes the script.
The generated shell script contains all the exported variables,
and the shell functions with all variables expanded.
@@ -645,10 +645,10 @@
behavior:
<itemizedlist>
<listitem><para>
- <link linkend='var-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
+ <link linkend='var-bb-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
</para></listitem>
<listitem><para>
- <link linkend='var-BB_SCHEDULERS'><filename>BB_SCHEDULERS</filename></link>
+ <link linkend='var-bb-BB_SCHEDULERS'><filename>BB_SCHEDULERS</filename></link>
</para></listitem>
</itemizedlist>
It is possible to have functions run before and after a task's main
@@ -684,7 +684,7 @@
The simplistic approach for excluding the working directory is to set
it to some fixed value and create the checksum for the "run" script.
BitBake goes one step better and uses the
- <link linkend='var-BB_HASHBASE_WHITELIST'><filename>BB_HASHBASE_WHITELIST</filename></link>
+ <link linkend='var-bb-BB_HASHBASE_WHITELIST'><filename>BB_HASHBASE_WHITELIST</filename></link>
variable to define a list of variables that should never be included
when generating the signatures.
</para>
@@ -795,7 +795,7 @@
This results in any metadata change that changes the task hash, automatically
causing the task to be run again.
This removes the need to bump
- <link linkend='var-PR'><filename>PR</filename></link>
+ <link linkend='var-bb-PR'><filename>PR</filename></link>
values, and changes to metadata automatically ripple across the build.
</para>
@@ -884,7 +884,7 @@
<para>
BitBake first calls the function defined by the
- <link linkend='var-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>
+ <link linkend='var-bb-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>
variable with a list of tasks and corresponding
hashes it wants to build.
This function is designed to be fast and returns a list
@@ -908,7 +908,7 @@
For example, it is pointless to obtain a compiler if you
already have the compiled binary.
To handle this, BitBake calls the
- <link linkend='var-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>
+ <link linkend='var-bb-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>
function for each successful setscene task to know whether or not it needs
to obtain the dependencies of that task.
</para>
@@ -916,7 +916,7 @@
<para>
Finally, after all the setscene tasks have executed, BitBake calls the
function listed in
- <link linkend='var-BB_SETSCENE_VERIFY_FUNCTION2'><filename>BB_SETSCENE_VERIFY_FUNCTION2</filename></link>
+ <link linkend='var-bb-BB_SETSCENE_VERIFY_FUNCTION2'><filename>BB_SETSCENE_VERIFY_FUNCTION2</filename></link>
with the list of tasks BitBake thinks has been "covered".
The metadata can then ensure that this list is correct and can
inform BitBake that it wants specific tasks to be run regardless
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml
index 92b2c3d1b..3acd7c403 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml
@@ -44,7 +44,7 @@
</literallayout>
This code sets up an instance of the fetch class.
The instance uses a space-separated list of URLs from the
- <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+ <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>
variable and then calls the <filename>download</filename>
method to download the files.
</para>
@@ -78,7 +78,7 @@
<listitem><para><emphasis>Pre-mirror Sites:</emphasis>
BitBake first uses pre-mirrors to try and find source files.
These locations are defined using the
- <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
+ <link linkend='var-bb-PREMIRRORS'><filename>PREMIRRORS</filename></link>
variable.
</para></listitem>
<listitem><para><emphasis>Source URI:</emphasis>
@@ -88,7 +88,7 @@
<listitem><para><emphasis>Mirror Sites:</emphasis>
If fetch failures occur, BitBake next uses mirror locations as
defined by the
- <link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
+ <link linkend='var-bb-MIRRORS'><filename>MIRRORS</filename></link>
variable.
</para></listitem>
</itemizedlist>
@@ -144,7 +144,7 @@
Any source files that are not local (i.e.
downloaded from the Internet) are placed into the download
directory, which is specified by the
- <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+ <link linkend='var-bb-DL_DIR'><filename>DL_DIR</filename></link>
variable.
</para>
@@ -184,11 +184,11 @@
<para>
If
- <link linkend='var-BB_STRICT_CHECKSUM'><filename>BB_STRICT_CHECKSUM</filename></link>
+ <link linkend='var-bb-BB_STRICT_CHECKSUM'><filename>BB_STRICT_CHECKSUM</filename></link>
is set, any download without a checksum triggers an
error message.
The
- <link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
+ <link linkend='var-bb-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
variable can be used to make any attempted network access a fatal
error, which is useful for checking that mirrors are complete
as well as other things.
@@ -265,11 +265,11 @@
The filename you specify within the URL can be
either an absolute or relative path to a file.
If the filename is relative, the contents of the
- <link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
+ <link linkend='var-bb-FILESPATH'><filename>FILESPATH</filename></link>
variable is used in the same way
<filename>PATH</filename> is used to find executables.
If the file cannot be found, it is assumed that it is available in
- <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+ <link linkend='var-bb-DL_DIR'><filename>DL_DIR</filename></link>
by the time the <filename>download()</filename> method is called.
</para>
@@ -304,7 +304,7 @@
allows the name of the downloaded file to be specified.
Specifying the name of the downloaded file is useful
for avoiding collisions in
- <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+ <link linkend='var-bb-DL_DIR'><filename>DL_DIR</filename></link>
when dealing with multiple files that have the same name.
</para>
@@ -355,7 +355,7 @@
A special value of "now" causes the checkout to
be updated on every build.
</para></listitem>
- <listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
+ <listitem><para><emphasis><link linkend='var-bb-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
Specifies where a temporary checkout is saved.
The location is often <filename>DL_DIR/cvs</filename>.
</para></listitem>
@@ -395,7 +395,7 @@
<listitem><para><emphasis>"date":</emphasis>
Specifies a date.
If no "date" is specified, the
- <link linkend='var-SRCDATE'><filename>SRCDATE</filename></link>
+ <link linkend='var-bb-SRCDATE'><filename>SRCDATE</filename></link>
of the configuration is used to checkout a specific date.
The special value of "now" causes the checkout to be
updated on every build.
@@ -406,7 +406,7 @@
to which the module is unpacked.
You are forcing the module into a special
directory relative to
- <link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
+ <link linkend='var-bb-CVSDIR'><filename>CVSDIR</filename></link>.
</para></listitem>
<listitem><para><emphasis>"rsh"</emphasis>
Used in conjunction with the "method" parameter.
@@ -448,7 +448,7 @@
<filename>FETCHCMD_svn</filename>, which defaults
to "svn".
The fetcher's temporary working directory is set by
- <link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
+ <link linkend='var-bb-SVNDIR'><filename>SVNDIR</filename></link>,
which is usually <filename>DL_DIR/svn</filename>.
</para>
@@ -509,7 +509,7 @@
source control system.
The fetcher works by creating a bare clone of the
remote into
- <link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
+ <link linkend='var-bb-GITDIR'><filename>GITDIR</filename></link>,
which is usually <filename>DL_DIR/git2</filename>.
This bare clone is then cloned into the work directory during the
unpack stage when a specific tree is checked out.
@@ -612,7 +612,7 @@
This fetcher submodule inherits from the
<link linkend='git-fetcher'>Git fetcher</link> and extends
that fetcher's behavior by fetching a repository's submodules.
- <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+ <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>
is passed to the Git fetcher as described in the
"<link linkend='git-fetcher'>Git Fetcher (<filename>git://</filename>)</link>"
section.
@@ -647,9 +647,9 @@
<para>
To use this fetcher, make sure your recipe has proper
- <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
- <link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
- <link linkend='var-PV'><filename>PV</filename></link> settings.
+ <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>,
+ <link linkend='var-bb-SRCREV'><filename>SRCREV</filename></link>, and
+ <link linkend='var-bb-PV'><filename>PV</filename></link> settings.
Here is an example:
<literallayout class='monospaced'>
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
@@ -734,15 +734,15 @@
<filename>FETCHCMD_p4</filename>, which defaults
to "p4".
The fetcher's temporary working directory is set by
- <link linkend='var-P4DIR'><filename>P4DIR</filename></link>,
+ <link linkend='var-bb-P4DIR'><filename>P4DIR</filename></link>,
which defaults to "DL_DIR/p4".
</para>
<para>
To use this fetcher, make sure your recipe has proper
- <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
- <link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
- <link linkend='var-PV'><filename>PV</filename></link> values.
+ <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>,
+ <link linkend='var-bb-SRCREV'><filename>SRCREV</filename></link>, and
+ <link linkend='var-bb-PV'><filename>PV</filename></link> values.
The p4 executable is able to use the config file defined by your
system's <filename>P4CONFIG</filename> environment variable in
order to define the Perforce server URL and port, username, and
@@ -793,9 +793,9 @@
<filename>google-repo</filename> source control system.
The fetcher works by initiating and syncing sources of the
repository into
- <link linkend='var-REPODIR'><filename>REPODIR</filename></link>,
+ <link linkend='var-bb-REPODIR'><filename>REPODIR</filename></link>,
which is usually
- <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link><filename>/repo</filename>.
+ <link linkend='var-bb-DL_DIR'><filename>DL_DIR</filename></link><filename>/repo</filename>.
</para>
<para>
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml
index 9076f0fcd..39066e4b1 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml
@@ -194,7 +194,7 @@
<para>
When you run BitBake, it begins looking for metadata files.
The
- <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+ <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
variable is what tells BitBake where to look for those files.
<filename>BBPATH</filename> is not set and you need to set it.
Without <filename>BBPATH</filename>, Bitbake cannot
@@ -273,14 +273,14 @@
some editor to create the <filename>bitbake.conf</filename>
so that it contains the following:
<literallayout class='monospaced'>
- <link linkend='var-PN'>PN</link> = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+ <link linkend='var-bb-PN'>PN</link> = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
</literallayout>
<literallayout class='monospaced'>
- TMPDIR = "${<link linkend='var-TOPDIR'>TOPDIR</link>}/tmp"
- <link linkend='var-CACHE'>CACHE</link> = "${TMPDIR}/cache"
- <link linkend='var-STAMP'>STAMP</link> = "${TMPDIR}/${PN}/stamps"
- <link linkend='var-T'>T</link> = "${TMPDIR}/${PN}/work"
- <link linkend='var-B'>B</link> = "${TMPDIR}/${PN}"
+ TMPDIR = "${<link linkend='var-bb-TOPDIR'>TOPDIR</link>}/tmp"
+ <link linkend='var-bb-CACHE'>CACHE</link> = "${TMPDIR}/cache"
+ <link linkend='var-bb-STAMP'>STAMP</link> = "${TMPDIR}/${PN}/stamps"
+ <link linkend='var-bb-T'>T</link> = "${TMPDIR}/${PN}/work"
+ <link linkend='var-bb-B'>B</link> = "${TMPDIR}/${PN}"
</literallayout>
<note>
Without a value for <filename>PN</filename>, the
@@ -402,12 +402,12 @@
Move to the <filename>conf</filename> directory and create a
<filename>layer.conf</filename> file that has the following:
<literallayout class='monospaced'>
- BBPATH .= ":${<link linkend='var-LAYERDIR'>LAYERDIR</link>}"
+ BBPATH .= ":${<link linkend='var-bb-LAYERDIR'>LAYERDIR</link>}"
- <link linkend='var-BBFILES'>BBFILES</link> += "${LAYERDIR}/*.bb"
+ <link linkend='var-bb-BBFILES'>BBFILES</link> += "${LAYERDIR}/*.bb"
- <link linkend='var-BBFILE_COLLECTIONS'>BBFILE_COLLECTIONS</link> += "mylayer"
- <link linkend='var-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR_RE}/"
+ <link linkend='var-bb-BBFILE_COLLECTIONS'>BBFILE_COLLECTIONS</link> += "mylayer"
+ <link linkend='var-bb-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR_RE}/"
</literallayout>
For information on these variables, click the links
to go to the definitions in the glossary.</para>
@@ -416,9 +416,9 @@
a recipe file named <filename>printhello.bb</filename> that
has the following:
<literallayout class='monospaced'>
- <link linkend='var-DESCRIPTION'>DESCRIPTION</link> = "Prints Hello World"
- <link linkend='var-PN'>PN</link> = 'printhello'
- <link linkend='var-PV'>PV</link> = '1'
+ <link linkend='var-bb-DESCRIPTION'>DESCRIPTION</link> = "Prints Hello World"
+ <link linkend='var-bb-PN'>PN</link> = 'printhello'
+ <link linkend='var-bb-PV'>PV</link> = '1'
python do_build() {
bb.plain("********************");
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml
index f7d312a32..02058a6f6 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml
@@ -781,7 +781,7 @@
target, you must also enable BitBake to perform multiple
configuration builds.
Enabling is accomplished by setting the
- <link linkend='var-BBMULTICONFIG'><filename>BBMULTICONFIG</filename></link>
+ <link linkend='var-bb-BBMULTICONFIG'><filename>BBMULTICONFIG</filename></link>
variable in the <filename>local.conf</filename>
configuration file.
As an example, suppose you had configuration files
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
index d91f437d3..199ab2309 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
@@ -595,7 +595,7 @@
<para>
BitBake uses
- <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+ <link linkend='var-bb-OVERRIDES'><filename>OVERRIDES</filename></link>
to control what variables are overridden after BitBake
parses recipes and configuration files.
This section describes how you can use
@@ -705,7 +705,7 @@
<para>Internally, this is implemented by prepending
the task (e.g. "task-compile:") to the value of
- <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+ <link linkend='var-bb-OVERRIDES'><filename>OVERRIDES</filename></link>
for the local datastore of the <filename>do_compile</filename>
task.</para>
@@ -868,7 +868,7 @@
<para>
BitBake uses the
- <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+ <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>
variable to locate needed include and class files.
Additionally, BitBake searches the current directory for
<filename>include</filename> and <filename>require</filename>
@@ -1086,7 +1086,7 @@
<para>
When creating a configuration file (<filename>.conf</filename>),
you can use the
- <link linkend='var-INHERIT'><filename>INHERIT</filename></link>
+ <link linkend='var-bb-INHERIT'><filename>INHERIT</filename></link>
configuration directive to inherit a class.
BitBake only supports this directive when used within
a configuration file.
@@ -1370,7 +1370,7 @@
</para></listitem>
<listitem><para>
BitBake-style Python functions generate a separate
- <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.</filename><replaceable>function-name</replaceable><filename>.</filename><replaceable>pid</replaceable>
+ <filename>${</filename><link linkend='var-bb-T'><filename>T</filename></link><filename>}/run.</filename><replaceable>function-name</replaceable><filename>.</filename><replaceable>pid</replaceable>
script that is executed to run the function, and also
generate a log file in
<filename>${T}/log.</filename><replaceable>function-name</replaceable><filename>.</filename><replaceable>pid</replaceable>
@@ -1773,7 +1773,7 @@
things exported or listed in its whitelist to ensure that the build
environment is reproducible and consistent.
You can prevent this "cleaning" by setting the
- <link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
+ <link linkend='var-bb-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
variable.
</note>
Consequently, if you do want something to get passed into the
@@ -1783,9 +1783,9 @@
Tell BitBake to load what you want from the environment
into the datastore.
You can do so through the
- <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
+ <link linkend='var-bb-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
and
- <link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
+ <link linkend='var-bb-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
variables.
For example, assume you want to prevent the build system from
accessing your <filename>$HOME/.ccache</filename>
@@ -1824,7 +1824,7 @@
from the original execution environment.
Bitbake saves a copy of the original environment into
a special variable named
- <link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>.
+ <link linkend='var-bb-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>.
</para>
<para>
@@ -1883,7 +1883,7 @@
<listitem><para><emphasis><filename>[depends]</filename>:</emphasis>
Controls inter-task dependencies.
See the
- <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+ <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
@@ -1891,7 +1891,7 @@
<listitem><para><emphasis><filename>[deptask]</filename>:</emphasis>
Controls task build-time dependencies.
See the
- <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+ <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='build-dependencies'>Build Dependencies</link>"
section for more information.
@@ -1937,7 +1937,7 @@
of cores but certain tasks need to be rate-limited due to various
kinds of resource constraints (e.g. to avoid network throttling).
<filename>number_threads</filename> works similarly to the
- <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
+ <link linkend='var-bb-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
variable but is task-specific.</para>
<para>Set the value globally.
@@ -1971,9 +1971,9 @@
<listitem><para><emphasis><filename>[rdepends]</filename>:</emphasis>
Controls inter-task runtime dependencies.
See the
- <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+ <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>
variable, the
- <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+ <link linkend='var-bb-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variable, and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
@@ -1981,9 +1981,9 @@
<listitem><para><emphasis><filename>[rdeptask]</filename>:</emphasis>
Controls task runtime dependencies.
See the
- <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+ <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>
variable, the
- <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+ <link linkend='var-bb-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variable, and the
"<link linkend='runtime-dependencies'>Runtime Dependencies</link>"
section for more information.
@@ -1996,9 +1996,9 @@
<listitem><para><emphasis><filename>[recrdeptask]</filename>:</emphasis>
Controls task recursive runtime dependencies.
See the
- <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+ <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>
variable, the
- <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+ <link linkend='var-bb-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variable, and the
"<link linkend='recursive-dependencies'>Recursive Dependencies</link>"
section for more information.
@@ -2127,7 +2127,7 @@
Any given datastore only has one such event executed
against it, however.
If
- <link linkende='var-BB_INVALIDCONF'><filename>BB_INVALIDCONF</filename></link>
+ <link linkende='var-bb-BB_INVALIDCONF'><filename>BB_INVALIDCONF</filename></link>
is set in the datastore by the event handler, the
configuration is reparsed and a new event triggered,
allowing the metadata to update configuration.
@@ -2256,17 +2256,17 @@
from a single recipe file multiple incarnations of that
recipe file where all incarnations are buildable.
These features are enabled through the
- <link linkend='var-BBCLASSEXTEND'><filename>BBCLASSEXTEND</filename></link>
+ <link linkend='var-bb-BBCLASSEXTEND'><filename>BBCLASSEXTEND</filename></link>
and
- <link linkend='var-BBVERSIONS'><filename>BBVERSIONS</filename></link>
+ <link linkend='var-bb-BBVERSIONS'><filename>BBVERSIONS</filename></link>
variables.
<note>
The mechanism for this class extension is extremely
specific to the implementation.
Usually, the recipe's
- <link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>,
- <link linkend='var-PN'><filename>PN</filename></link>, and
- <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+ <link linkend='var-bb-PROVIDES'><filename>PROVIDES</filename></link>,
+ <link linkend='var-bb-PN'><filename>PN</filename></link>, and
+ <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
variables would need to be modified by the extension class.
For specific examples, see the OE-Core
<filename>native</filename>, <filename>nativesdk</filename>,
@@ -2287,7 +2287,7 @@
project from a single recipe file.
You can also specify conditional metadata
(using the
- <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+ <link linkend='var-bb-OVERRIDES'><filename>OVERRIDES</filename></link>
mechanism) for a single version, or an optionally named range of versions.
Here is an example:
<literallayout class='monospaced'>
@@ -2306,7 +2306,7 @@
into overrides, but it is also made available for the metadata to use
in the variable that defines the base recipe versions for use in
<filename>file://</filename> search paths
- (<link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>).
+ (<link linkend='var-bb-FILESPATH'><filename>FILESPATH</filename></link>).
</para></listitem>
</itemizedlist>
</para>
@@ -2408,7 +2408,7 @@
<para>
BitBake uses the
- <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+ <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
variable to manage build time dependencies.
The <filename>[deptask]</filename> varflag for tasks
signifies the task of each
@@ -2429,9 +2429,9 @@
<para>
BitBake uses the
- <link linkend='var-PACKAGES'><filename>PACKAGES</filename></link>,
- <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>, and
- <link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
+ <link linkend='var-bb-PACKAGES'><filename>PACKAGES</filename></link>,
+ <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>, and
+ <link linkend='var-bb-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variables to manage runtime dependencies.
</para>
@@ -2686,7 +2686,7 @@
<para>
These checksums are stored in
- <link linkend='var-STAMP'><filename>STAMP</filename></link>.
+ <link linkend='var-bb-STAMP'><filename>STAMP</filename></link>.
You can examine the checksums using the following BitBake command:
<literallayout class='monospaced'>
$ bitbake-dumpsigs
@@ -2708,44 +2708,44 @@
The following list describes related variables:
<itemizedlist>
<listitem><para>
- <link linkend='var-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>:
+ <link linkend='var-bb-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>:
Specifies the name of the function to call during
the "setscene" part of the task's execution in order
to validate the list of task hashes.
</para></listitem>
<listitem><para>
- <link linkend='var-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>:
+ <link linkend='var-bb-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>:
Specifies a function BitBake calls that determines
whether BitBake requires a setscene dependency to
be met.
</para></listitem>
<listitem><para>
- <link linkend='var-BB_SETSCENE_VERIFY_FUNCTION2'><filename>BB_SETSCENE_VERIFY_FUNCTION2</filename></link>:
+ <link linkend='var-bb-BB_SETSCENE_VERIFY_FUNCTION2'><filename>BB_SETSCENE_VERIFY_FUNCTION2</filename></link>:
Specifies a function to call that verifies the list of
planned task execution before the main task execution
happens.
</para></listitem>
<listitem><para>
- <link linkend='var-BB_STAMP_POLICY'><filename>BB_STAMP_POLICY</filename></link>:
+ <link linkend='var-bb-BB_STAMP_POLICY'><filename>BB_STAMP_POLICY</filename></link>:
Defines the mode for comparing timestamps of stamp files.
</para></listitem>
<listitem><para>
- <link linkend='var-BB_STAMP_WHITELIST'><filename>BB_STAMP_WHITELIST</filename></link>:
+ <link linkend='var-bb-BB_STAMP_WHITELIST'><filename>BB_STAMP_WHITELIST</filename></link>:
Lists stamp files that are looked at when the stamp policy
is "whitelist".
</para></listitem>
<listitem><para>
- <link linkend='var-BB_TASKHASH'><filename>BB_TASKHASH</filename></link>:
+ <link linkend='var-bb-BB_TASKHASH'><filename>BB_TASKHASH</filename></link>:
Within an executing task, this variable holds the hash
of the task as returned by the currently enabled
signature generator.
</para></listitem>
<listitem><para>
- <link linkend='var-STAMP'><filename>STAMP</filename></link>:
+ <link linkend='var-bb-STAMP'><filename>STAMP</filename></link>:
The base path to create stamp files.
</para></listitem>
<listitem><para>
- <link linkend='var-STAMPCLEAN'><filename>STAMPCLEAN</filename></link>:
+ <link linkend='var-bb-STAMPCLEAN'><filename>STAMPCLEAN</filename></link>:
Again, the base path to create stamp files but can use wildcards
for matching a range of files for clean operations.
</para></listitem>
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml
index a84b2bc99..aca6741c2 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml
@@ -3,7 +3,7 @@
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
<!-- Dummy chapter -->
-<chapter id='ref-variables-glos'>
+<chapter id='ref-bb-variables-glos'>
<title>Variables Glossary</title>
@@ -34,29 +34,29 @@
</itemizedlist>
</note>
-<glossary id='ref-variables-glossary'>
+<glossary id='ref-bb-variables-glossary'>
<para>
- <link linkend='var-ASSUME_PROVIDED'>A</link>
- <link linkend='var-B'>B</link>
- <link linkend='var-CACHE'>C</link>
- <link linkend='var-DEFAULT_PREFERENCE'>D</link>
- <link linkend='var-EXCLUDE_FROM_WORLD'>E</link>
- <link linkend='var-FAKEROOT'>F</link>
- <link linkend='var-GITDIR'>G</link>
- <link linkend='var-HGDIR'>H</link>
-<!-- <link linkend='var-ICECC_DISABLED'>I</link> -->
+ <link linkend='var-bb-ASSUME_PROVIDED'>A</link>
+ <link linkend='var-bb-B'>B</link>
+ <link linkend='var-bb-CACHE'>C</link>
+ <link linkend='var-bb-DEFAULT_PREFERENCE'>D</link>
+ <link linkend='var-bb-EXCLUDE_FROM_WORLD'>E</link>
+ <link linkend='var-bb-FAKEROOT'>F</link>
+ <link linkend='var-bb-GITDIR'>G</link>
+ <link linkend='var-bb-HGDIR'>H</link>
+ <link linkend='var-bb-INHERIT'>I</link>
<!-- <link linkend='var-glossary-j'>J</link> -->
<!-- <link linkend='var-KARCH'>K</link> -->
- <link linkend='var-LAYERDEPENDS'>L</link>
- <link linkend='var-MIRRORS'>M</link>
+ <link linkend='var-bb-LAYERDEPENDS'>L</link>
+ <link linkend='var-bb-MIRRORS'>M</link>
<!-- <link linkend='var-glossary-n'>N</link> -->
- <link linkend='var-OVERRIDES'>O</link>
- <link linkend='var-P4DIR'>P</link>
+ <link linkend='var-bb-OVERRIDES'>O</link>
+ <link linkend='var-bb-P4DIR'>P</link>
<!-- <link linkend='var-QMAKE_PROFILES'>Q</link> -->
- <link linkend='var-RDEPENDS'>R</link>
- <link linkend='var-SECTION'>S</link>
- <link linkend='var-T'>T</link>
+ <link linkend='var-bb-RDEPENDS'>R</link>
+ <link linkend='var-bb-SECTION'>S</link>
+ <link linkend='var-bb-T'>T</link>
<!-- <link linkend='var-UBOOT_CONFIG'>U</link> -->
<!-- <link linkend='var-glossary-v'>V</link> -->
<!-- <link linkend='var-WARN_QA'>W</link> -->
@@ -65,13 +65,13 @@
<!-- <link linkend='var-glossary-z'>Z</link>-->
</para>
- <glossdiv id='var-glossary-a'><title>A</title>
+ <glossdiv id='var-bb-glossary-a'><title>A</title>
- <glossentry id='var-ASSUME_PROVIDED'><glossterm>ASSUME_PROVIDED</glossterm>
+ <glossentry id='var-bb-ASSUME_PROVIDED'><glossterm>ASSUME_PROVIDED</glossterm>
<glossdef>
<para>
Lists recipe names
- (<link linkend='var-PN'><filename>PN</filename></link>
+ (<link linkend='var-bb-PN'><filename>PN</filename></link>
values) BitBake does not attempt to build.
Instead, BitBake assumes these recipes have already been
built.
@@ -91,9 +91,9 @@
</glossdiv>
- <glossdiv id='var-glossary-b'><title>B</title>
+ <glossdiv id='var-bb-glossary-b'><title>B</title>
- <glossentry id='var-B'><glossterm>B</glossterm>
+ <glossentry id='var-bb-B'><glossterm>B</glossterm>
<glossdef>
<para>
The directory in which BitBake executes functions
@@ -102,7 +102,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_ALLOWED_NETWORKS'><glossterm>BB_ALLOWED_NETWORKS</glossterm>
+ <glossentry id='var-bb-BB_ALLOWED_NETWORKS'><glossterm>BB_ALLOWED_NETWORKS</glossterm>
<glossdef>
<para>
Specifies a space-delimited list of hosts that the fetcher
@@ -111,7 +111,7 @@
<itemizedlist>
<listitem><para>
This host list is only used if
- <link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
+ <link linkend='var-bb-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
is either not set or set to "0".
</para></listitem>
<listitem><para>
@@ -151,13 +151,13 @@
</itemizedlist>
Using <filename>BB_ALLOWED_NETWORKS</filename> in
conjunction with
- <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
+ <link linkend='var-bb-PREMIRRORS'><filename>PREMIRRORS</filename></link>
is very useful.
Adding the host you want to use to
<filename>PREMIRRORS</filename> results in the source code
being fetched from an allowed location and avoids raising
an error when a host that is not allowed is in a
- <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+ <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>
statement.
This is because the fetcher does not attempt to use the
host listed in <filename>SRC_URI</filename> after a
@@ -167,7 +167,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_CONSOLELOG'><glossterm>BB_CONSOLELOG</glossterm>
+ <glossentry id='var-bb-BB_CONSOLELOG'><glossterm>BB_CONSOLELOG</glossterm>
<glossdef>
<para>
Specifies the path to a log file into which BitBake's user
@@ -176,7 +176,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_CURRENTTASK'><glossterm>BB_CURRENTTASK</glossterm>
+ <glossentry id='var-bb-BB_CURRENTTASK'><glossterm>BB_CURRENTTASK</glossterm>
<glossdef>
<para>
Contains the name of the currently running task.
@@ -186,7 +186,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_DANGLINGAPPENDS_WARNONLY'><glossterm>BB_DANGLINGAPPENDS_WARNONLY</glossterm>
+ <glossentry id='var-bb-BB_DANGLINGAPPENDS_WARNONLY'><glossterm>BB_DANGLINGAPPENDS_WARNONLY</glossterm>
<glossdef>
<para>
Defines how BitBake handles situations where an append
@@ -208,7 +208,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_DEFAULT_TASK'><glossterm>BB_DEFAULT_TASK</glossterm>
+ <glossentry id='var-bb-BB_DEFAULT_TASK'><glossterm>BB_DEFAULT_TASK</glossterm>
<glossdef>
<para>
The default task to use when none is specified (e.g.
@@ -219,7 +219,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_DISKMON_DIRS'><glossterm>BB_DISKMON_DIRS</glossterm>
+ <glossentry id='var-bb-BB_DISKMON_DIRS'><glossterm>BB_DISKMON_DIRS</glossterm>
<glossdef>
<para>
Monitors disk space and available inodes during the build
@@ -245,7 +245,7 @@
build when a threshold is broken.
Subsequent warnings are issued as
defined by the
- <link linkend='var-BB_DISKMON_WARNINTERVAL'>BB_DISKMON_WARNINTERVAL</link> variable,
+ <link linkend='var-bb-BB_DISKMON_WARNINTERVAL'>BB_DISKMON_WARNINTERVAL</link> variable,
which must be defined.
&lt;dir&gt; is:
@@ -275,7 +275,7 @@
BB_DISKMON_DIRS = "ABORT,${TMPDIR},,100K"
</literallayout>
The first example works only if you also set
- the <link linkend='var-BB_DISKMON_WARNINTERVAL'><filename>BB_DISKMON_WARNINTERVAL</filename></link> variable.
+ the <link linkend='var-bb-BB_DISKMON_WARNINTERVAL'><filename>BB_DISKMON_WARNINTERVAL</filename></link> variable.
This example causes the build system to immediately
abort when either the disk space in <filename>${TMPDIR}</filename> drops
below 1 Gbyte or the available free inodes drops below
@@ -309,7 +309,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_DISKMON_WARNINTERVAL'><glossterm>BB_DISKMON_WARNINTERVAL</glossterm>
+ <glossentry id='var-bb-BB_DISKMON_WARNINTERVAL'><glossterm>BB_DISKMON_WARNINTERVAL</glossterm>
<glossdef>
<para>
Defines the disk space and free inode warning intervals.
@@ -319,7 +319,7 @@
If you are going to use the
<filename>BB_DISKMON_WARNINTERVAL</filename> variable, you must
also use the
- <link linkend='var-BB_DISKMON_DIRS'><filename>BB_DISKMON_DIRS</filename></link> variable
+ <link linkend='var-bb-BB_DISKMON_DIRS'><filename>BB_DISKMON_DIRS</filename></link> variable
and define its action as "WARN".
During the build, subsequent warnings are issued each time
disk space or number of free inodes further reduces by
@@ -374,7 +374,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_ENV_WHITELIST'><glossterm>BB_ENV_WHITELIST</glossterm>
+ <glossentry id='var-bb-BB_ENV_WHITELIST'><glossterm>BB_ENV_WHITELIST</glossterm>
<glossdef>
<para>
Specifies the internal whitelist of variables to allow
@@ -382,11 +382,11 @@
datastore.
If the value of this variable is not specified
(which is the default), the following list is used:
- <link linkend='var-BBPATH'><filename>BBPATH</filename></link>,
- <link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>,
- <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>,
+ <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>,
+ <link linkend='var-bb-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>,
+ <link linkend='var-bb-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>,
and
- <link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>.
+ <link linkend='var-bb-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>.
<note>
You must set this variable in the external environment
in order for it to work.
@@ -395,7 +395,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_ENV_EXTRAWHITE'><glossterm>BB_ENV_EXTRAWHITE</glossterm>
+ <glossentry id='var-bb-BB_ENV_EXTRAWHITE'><glossterm>BB_ENV_EXTRAWHITE</glossterm>
<glossdef>
<para>
Specifies an additional set of variables to allow through
@@ -403,7 +403,7 @@
datastore.
This list of variables are on top of the internal list
set in
- <link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>.
+ <link linkend='var-bb-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>.
<note>
You must set this variable in the external
environment in order for it to work.
@@ -412,22 +412,22 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_FETCH_PREMIRRORONLY'><glossterm>BB_FETCH_PREMIRRORONLY</glossterm>
+ <glossentry id='var-bb-BB_FETCH_PREMIRRORONLY'><glossterm>BB_FETCH_PREMIRRORONLY</glossterm>
<glossdef>
<para>
When set to "1", causes BitBake's fetcher module to only
search
- <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
+ <link linkend='var-bb-PREMIRRORS'><filename>PREMIRRORS</filename></link>
for files.
BitBake will not search the main
- <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
+ <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>
or
- <link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>.
+ <link linkend='var-bb-MIRRORS'><filename>MIRRORS</filename></link>.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-BB_FILENAME'><glossterm>BB_FILENAME</glossterm>
+ <glossentry id='var-bb-BB_FILENAME'><glossterm>BB_FILENAME</glossterm>
<glossdef>
<para>
Contains the filename of the recipe that owns the currently
@@ -440,12 +440,12 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_GENERATE_MIRROR_TARBALLS'><glossterm>BB_GENERATE_MIRROR_TARBALLS</glossterm>
+ <glossentry id='var-bb-BB_GENERATE_MIRROR_TARBALLS'><glossterm>BB_GENERATE_MIRROR_TARBALLS</glossterm>
<glossdef>
<para>
Causes tarballs of the Git repositories, including the
Git metadata, to be placed in the
- <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+ <link linkend='var-bb-DL_DIR'><filename>DL_DIR</filename></link>
directory.
Anyone wishing to create a source mirror would want to
enable this variable.
@@ -461,7 +461,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_HASHCONFIG_WHITELIST'><glossterm>BB_HASHCONFIG_WHITELIST</glossterm>
+ <glossentry id='var-bb-BB_HASHCONFIG_WHITELIST'><glossterm>BB_HASHCONFIG_WHITELIST</glossterm>
<glossdef>
<para>
Lists variables that are excluded from base configuration
@@ -485,7 +485,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_HASHBASE_WHITELIST'><glossterm>BB_HASHBASE_WHITELIST</glossterm>
+ <glossentry id='var-bb-BB_HASHBASE_WHITELIST'><glossterm>BB_HASHBASE_WHITELIST</glossterm>
<glossdef>
<para>
Lists variables that are excluded from checksum and
@@ -500,7 +500,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_HASHCHECK_FUNCTION'><glossterm>BB_HASHCHECK_FUNCTION</glossterm>
+ <glossentry id='var-bb-BB_HASHCHECK_FUNCTION'><glossterm>BB_HASHCHECK_FUNCTION</glossterm>
<glossdef>
<para>
Specifies the name of the function to call during the
@@ -524,7 +524,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_INVALIDCONF'><glossterm>BB_INVALIDCONF</glossterm>
+ <glossentry id='var-bb-BB_INVALIDCONF'><glossterm>BB_INVALIDCONF</glossterm>
<glossdef>
<para>
Used in combination with the
@@ -539,11 +539,11 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_LOGFMT'><glossterm>BB_LOGFMT</glossterm>
+ <glossentry id='var-bb-BB_LOGFMT'><glossterm>BB_LOGFMT</glossterm>
<glossdef>
<para>
Specifies the name of the log files saved into
- <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}</filename>.
+ <filename>${</filename><link linkend='var-bb-T'><filename>T</filename></link><filename>}</filename>.
By default, the <filename>BB_LOGFMT</filename> variable
is undefined and the log file names get created using the
following form:
@@ -556,7 +556,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_NICE_LEVEL'><glossterm>BB_NICE_LEVEL</glossterm>
+ <glossentry id='var-bb-BB_NICE_LEVEL'><glossterm>BB_NICE_LEVEL</glossterm>
<glossdef>
<para>
Allows BitBake to run at a specific priority
@@ -564,13 +564,13 @@
System permissions usually mean that BitBake can reduce its
priority but not raise it again.
See
- <link linkend='var-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
+ <link linkend='var-bb-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
for additional information.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-BB_NO_NETWORK'><glossterm>BB_NO_NETWORK</glossterm>
+ <glossentry id='var-bb-BB_NO_NETWORK'><glossterm>BB_NO_NETWORK</glossterm>
<glossdef>
<para>
Disables network access in the BitBake fetcher modules.
@@ -587,7 +587,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_NUMBER_THREADS'><glossterm>BB_NUMBER_THREADS</glossterm>
+ <glossentry id='var-bb-BB_NUMBER_THREADS'><glossterm>BB_NUMBER_THREADS</glossterm>
<glossdef>
<para>
The maximum number of tasks BitBake should run in parallel
@@ -599,7 +599,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_NUMBER_PARSE_THREADS'><glossterm>BB_NUMBER_PARSE_THREADS</glossterm>
+ <glossentry id='var-bb-BB_NUMBER_PARSE_THREADS'><glossterm>BB_NUMBER_PARSE_THREADS</glossterm>
<glossdef>
<para>
Sets the number of threads BitBake uses when parsing.
@@ -609,7 +609,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_ORIGENV'><glossterm>BB_ORIGENV</glossterm>
+ <glossentry id='var-bb-BB_ORIGENV'><glossterm>BB_ORIGENV</glossterm>
<glossdef>
<para>
Contains a copy of the original external environment in
@@ -625,7 +625,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_PRESERVE_ENV'><glossterm>BB_PRESERVE_ENV</glossterm>
+ <glossentry id='var-bb-BB_PRESERVE_ENV'><glossterm>BB_PRESERVE_ENV</glossterm>
<glossdef>
<para>
Disables whitelisting and instead allows all variables
@@ -639,12 +639,12 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_RUNFMT'><glossterm>BB_RUNFMT</glossterm>
+ <glossentry id='var-bb-BB_RUNFMT'><glossterm>BB_RUNFMT</glossterm>
<glossdef>
<para>
Specifies the name of the executable script files
(i.e. run files) saved into
- <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}</filename>.
+ <filename>${</filename><link linkend='var-bb-T'><filename>T</filename></link><filename>}</filename>.
By default, the <filename>BB_RUNFMT</filename> variable
is undefined and the run file names get created using the
following form:
@@ -657,7 +657,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_RUNTASK'><glossterm>BB_RUNTASK</glossterm>
+ <glossentry id='var-bb-BB_RUNTASK'><glossterm>BB_RUNTASK</glossterm>
<glossdef>
<para>
Contains the name of the currently executing task.
@@ -669,7 +669,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_SCHEDULER'><glossterm>BB_SCHEDULER</glossterm>
+ <glossentry id='var-bb-BB_SCHEDULER'><glossterm>BB_SCHEDULER</glossterm>
<glossdef>
<para>
Selects the name of the scheduler to use for the
@@ -695,7 +695,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_SCHEDULERS'><glossterm>BB_SCHEDULERS</glossterm>
+ <glossentry id='var-bb-BB_SCHEDULERS'><glossterm>BB_SCHEDULERS</glossterm>
<glossdef>
<para>
Defines custom schedulers to import.
@@ -705,13 +705,13 @@
<para>
For information how to select a scheduler, see the
- <link linkend='var-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
+ <link linkend='var-bb-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
variable.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-BB_SETSCENE_DEPVALID'><glossterm>BB_SETSCENE_DEPVALID</glossterm>
+ <glossentry id='var-bb-BB_SETSCENE_DEPVALID'><glossterm>BB_SETSCENE_DEPVALID</glossterm>
<glossdef>
<para>
Specifies a function BitBake calls that determines
@@ -731,7 +731,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_SETSCENE_VERIFY_FUNCTION2'><glossterm>BB_SETSCENE_VERIFY_FUNCTION2</glossterm>
+ <glossentry id='var-bb-BB_SETSCENE_VERIFY_FUNCTION2'><glossterm>BB_SETSCENE_VERIFY_FUNCTION2</glossterm>
<glossdef>
<para>
Specifies a function to call that verifies the list of
@@ -752,7 +752,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_SIGNATURE_EXCLUDE_FLAGS'><glossterm>BB_SIGNATURE_EXCLUDE_FLAGS</glossterm>
+ <glossentry id='var-bb-BB_SIGNATURE_EXCLUDE_FLAGS'><glossterm>BB_SIGNATURE_EXCLUDE_FLAGS</glossterm>
<glossdef>
<para>
Lists variable flags (varflags)
@@ -771,7 +771,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_SIGNATURE_HANDLER'><glossterm>BB_SIGNATURE_HANDLER</glossterm>
+ <glossentry id='var-bb-BB_SIGNATURE_HANDLER'><glossterm>BB_SIGNATURE_HANDLER</glossterm>
<glossdef>
<para>
Defines the name of the signature handler BitBake uses.
@@ -790,7 +790,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_SRCREV_POLICY'><glossterm>BB_SRCREV_POLICY</glossterm>
+ <glossentry id='var-bb-BB_SRCREV_POLICY'><glossterm>BB_SRCREV_POLICY</glossterm>
<glossdef>
<para>
Defines the behavior of the fetcher when it interacts with
@@ -817,7 +817,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_STAMP_POLICY'><glossterm>BB_STAMP_POLICY</glossterm>
+ <glossentry id='var-bb-BB_STAMP_POLICY'><glossterm>BB_STAMP_POLICY</glossterm>
<glossdef>
<para>
Defines the mode used for how timestamps of stamp files
@@ -836,7 +836,7 @@
<listitem><para><emphasis>whitelist</emphasis> -
Identical to "full" mode except timestamp
comparisons are made for recipes listed in the
- <link linkend='var-BB_STAMP_WHITELIST'><filename>BB_STAMP_WHITELIST</filename></link>
+ <link linkend='var-bb-BB_STAMP_WHITELIST'><filename>BB_STAMP_WHITELIST</filename></link>
variable.
</para></listitem>
</itemizedlist>
@@ -848,19 +848,19 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_STAMP_WHITELIST'><glossterm>BB_STAMP_WHITELIST</glossterm>
+ <glossentry id='var-bb-BB_STAMP_WHITELIST'><glossterm>BB_STAMP_WHITELIST</glossterm>
<glossdef>
<para>
Lists files whose stamp file timestamps are compared when
the stamp policy mode is set to "whitelist".
For information on stamp policies, see the
- <link linkend='var-BB_STAMP_POLICY'><filename>BB_STAMP_POLICY</filename></link>
+ <link linkend='var-bb-BB_STAMP_POLICY'><filename>BB_STAMP_POLICY</filename></link>
variable.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-BB_STRICT_CHECKSUM'><glossterm>BB_STRICT_CHECKSUM</glossterm>
+ <glossentry id='var-bb-BB_STRICT_CHECKSUM'><glossterm>BB_STRICT_CHECKSUM</glossterm>
<glossdef>
<para>
Sets a more strict checksum mechanism for non-local URLs.
@@ -871,7 +871,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_TASK_IONICE_LEVEL'><glossterm>BB_TASK_IONICE_LEVEL</glossterm>
+ <glossentry id='var-bb-BB_TASK_IONICE_LEVEL'><glossterm>BB_TASK_IONICE_LEVEL</glossterm>
<glossdef>
<para>
Allows adjustment of a task's Input/Output priority.
@@ -882,7 +882,7 @@
variable to adjust the I/O priority of these tasks.
<note>
This variable works similarly to the
- <link linkend='var-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
+ <link linkend='var-bb-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
variable except with a task's I/O priorities.
</note>
</para>
@@ -921,7 +921,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_TASK_NICE_LEVEL'><glossterm>BB_TASK_NICE_LEVEL</glossterm>
+ <glossentry id='var-bb-BB_TASK_NICE_LEVEL'><glossterm>BB_TASK_NICE_LEVEL</glossterm>
<glossdef>
<para>
Allows specific tasks to change their priority
@@ -940,7 +940,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_TASKHASH'><glossterm>BB_TASKHASH</glossterm>
+ <glossentry id='var-bb-BB_TASKHASH'><glossterm>BB_TASKHASH</glossterm>
<glossdef>
<para>
Within an executing task, this variable holds the hash
@@ -950,7 +950,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_VERBOSE_LOGS'><glossterm>BB_VERBOSE_LOGS</glossterm>
+ <glossentry id='var-bb-BB_VERBOSE_LOGS'><glossterm>BB_VERBOSE_LOGS</glossterm>
<glossdef>
<para>
Controls how verbose BitBake is during builds.
@@ -960,7 +960,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_WORKERCONTEXT'><glossterm>BB_WORKERCONTEXT</glossterm>
+ <glossentry id='var-bb-BB_WORKERCONTEXT'><glossterm>BB_WORKERCONTEXT</glossterm>
<glossdef>
<para>
Specifies if the current context is executing a task.
@@ -973,7 +973,7 @@
</glossentry>
- <glossentry id='var-BBCLASSEXTEND'><glossterm>BBCLASSEXTEND</glossterm>
+ <glossentry id='var-bb-BBCLASSEXTEND'><glossterm>BBCLASSEXTEND</glossterm>
<glossdef>
<para>
Allows you to extend a recipe so that it builds variants
@@ -1009,7 +1009,7 @@
<filename>_class-native</filename>.
For example, to generate a native version of a recipe,
a
- <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+ <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
on "foo" is rewritten to a <filename>DEPENDS</filename>
on "foo-native".
</para>
@@ -1028,7 +1028,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBDEBUG'><glossterm>BBDEBUG</glossterm>
+ <glossentry id='var-bb-BBDEBUG'><glossterm>BBDEBUG</glossterm>
<glossdef>
<para>
Sets the BitBake debug output level to a specific value
@@ -1042,7 +1042,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBFILE_COLLECTIONS'><glossterm>BBFILE_COLLECTIONS</glossterm>
+ <glossentry id='var-bb-BBFILE_COLLECTIONS'><glossterm>BBFILE_COLLECTIONS</glossterm>
<glossdef>
<para>Lists the names of configured layers.
These names are used to find the other <filename>BBFILE_*</filename>
@@ -1053,10 +1053,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBFILE_PATTERN'><glossterm>BBFILE_PATTERN</glossterm>
+ <glossentry id='var-bb-BBFILE_PATTERN'><glossterm>BBFILE_PATTERN</glossterm>
<glossdef>
<para>Variable that expands to match files from
- <link linkend='var-BBFILES'><filename>BBFILES</filename></link>
+ <link linkend='var-bb-BBFILES'><filename>BBFILES</filename></link>
in a particular layer.
This variable is used in the <filename>conf/layer.conf</filename> file and must
be suffixed with the name of the specific layer (e.g.
@@ -1064,7 +1064,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBFILE_PRIORITY'><glossterm>BBFILE_PRIORITY</glossterm>
+ <glossentry id='var-bb-BBFILE_PRIORITY'><glossterm>BBFILE_PRIORITY</glossterm>
<glossdef>
<para>Assigns the priority for recipe files in each layer.</para>
<para>This variable is useful in situations where the same recipe appears in
@@ -1074,7 +1074,7 @@
letting you control the precedence for the multiple layers.
The precedence established through this variable stands regardless of a
recipe's version
- (<link linkend='var-PV'><filename>PV</filename></link> variable).
+ (<link linkend='var-bb-PV'><filename>PV</filename></link> variable).
For example, a layer that has a recipe with a higher <filename>PV</filename> value but for
which the <filename>BBFILE_PRIORITY</filename> is set to have a lower precedence still has a
lower precedence.</para>
@@ -1083,7 +1083,7 @@
For example, the value 6 has a higher precedence than the value 5.
If not specified, the <filename>BBFILE_PRIORITY</filename> variable is set based on layer
dependencies (see the
- <filename><link linkend='var-LAYERDEPENDS'>LAYERDEPENDS</link></filename> variable for
+ <filename><link linkend='var-bb-LAYERDEPENDS'>LAYERDEPENDS</link></filename> variable for
 more information).
The default priority, if unspecified
for a layer with no dependencies, is the lowest defined priority + 1
@@ -1095,7 +1095,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBFILES'><glossterm>BBFILES</glossterm>
+ <glossentry id='var-bb-BBFILES'><glossterm>BBFILES</glossterm>
<glossdef>
<para>
A space-separated list of recipe files BitBake uses to
@@ -1113,7 +1113,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBINCLUDED'><glossterm>BBINCLUDED</glossterm>
+ <glossentry id='var-bb-BBINCLUDED'><glossterm>BBINCLUDED</glossterm>
<glossdef>
<para>
 Contains a space-separated list of all files that
@@ -1123,7 +1123,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBINCLUDELOGS'><glossterm>BBINCLUDELOGS</glossterm>
+ <glossentry id='var-bb-BBINCLUDELOGS'><glossterm>BBINCLUDELOGS</glossterm>
<glossdef>
<para>
If set to a value, enables printing the task log when
@@ -1132,11 +1132,11 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBINCLUDELOGS_LINES'><glossterm>BBINCLUDELOGS_LINES</glossterm>
+ <glossentry id='var-bb-BBINCLUDELOGS_LINES'><glossterm>BBINCLUDELOGS_LINES</glossterm>
<glossdef>
<para>
If
- <link linkend='var-BBINCLUDELOGS'><filename>BBINCLUDELOGS</filename></link>
+ <link linkend='var-bb-BBINCLUDELOGS'><filename>BBINCLUDELOGS</filename></link>
is set, specifies the maximum number of lines from the
task log file to print when reporting a failed task.
If you do not set <filename>BBINCLUDELOGS_LINES</filename>,
@@ -1145,7 +1145,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBLAYERS'><glossterm>BBLAYERS</glossterm>
+ <glossentry id='var-bb-BBLAYERS'><glossterm>BBLAYERS</glossterm>
<glossdef>
<para>Lists the layers to enable during the build.
This variable is defined in the <filename>bblayers.conf</filename> configuration
@@ -1166,7 +1166,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBLAYERS_FETCH_DIR'><glossterm>BBLAYERS_FETCH_DIR</glossterm>
+ <glossentry id='var-bb-BBLAYERS_FETCH_DIR'><glossterm>BBLAYERS_FETCH_DIR</glossterm>
<glossdef>
<para>
Sets the base location where layers are stored.
@@ -1178,7 +1178,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBMASK'><glossterm>BBMASK</glossterm>
+ <glossentry id='var-bb-BBMASK'><glossterm>BBMASK</glossterm>
<glossdef>
<para>
Prevents BitBake from processing recipes and recipe
@@ -1236,7 +1236,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBMULTICONFIG'><glossterm>BBMULTICONFIG</glossterm>
+ <glossentry id='var-bb-BBMULTICONFIG'><glossterm>BBMULTICONFIG</glossterm>
<info>
BBMULTICONFIG[doc] = "Enables BitBake to perform multiple configuration builds and lists each separate configuration (multiconfig)."
</info>
@@ -1275,7 +1275,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBPATH'><glossterm>BBPATH</glossterm>
+ <glossentry id='var-bb-BBPATH'><glossterm>BBPATH</glossterm>
<glossdef>
<para>
Used by BitBake to locate class
@@ -1302,7 +1302,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBSERVER'><glossterm>BBSERVER</glossterm>
+ <glossentry id='var-bb-BBSERVER'><glossterm>BBSERVER</glossterm>
<glossdef>
<para>
Points to the server that runs memory-resident BitBake.
@@ -1312,7 +1312,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBTARGETS'><glossterm>BBTARGETS</glossterm>
+ <glossentry id='var-bb-BBTARGETS'><glossterm>BBTARGETS</glossterm>
<glossdef>
<para>
Allows you to use a configuration file to add to the list
@@ -1321,14 +1321,14 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBVERSIONS'><glossterm>BBVERSIONS</glossterm>
+ <glossentry id='var-bb-BBVERSIONS'><glossterm>BBVERSIONS</glossterm>
<glossdef>
<para>
Allows a single recipe to build multiple versions of a
project from a single recipe file.
 You are also able to specify conditional metadata
using the
- <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
+ <link linkend='var-bb-OVERRIDES'><filename>OVERRIDES</filename></link>
mechanism for a single version or for an optionally named
range of versions.
</para>
@@ -1342,7 +1342,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BITBAKE_UI'><glossterm>BITBAKE_UI</glossterm>
+ <glossentry id='var-bb-BITBAKE_UI'><glossterm>BITBAKE_UI</glossterm>
<glossdef>
<para>
Used to specify the UI module to use when running BitBake.
@@ -1356,7 +1356,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDNAME'><glossterm>BUILDNAME</glossterm>
+ <glossentry id='var-bb-BUILDNAME'><glossterm>BUILDNAME</glossterm>
<glossdef>
<para>
A name assigned to the build.
@@ -1366,7 +1366,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-BZRDIR'><glossterm>BZRDIR</glossterm>
+ <glossentry id='var-bb-BZRDIR'><glossterm>BZRDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Bazaar
@@ -1377,9 +1377,9 @@
</glossdiv>
- <glossdiv id='var-glossary-c'><title>C</title>
+ <glossdiv id='var-bb-glossary-c'><title>C</title>
- <glossentry id='var-CACHE'><glossterm>CACHE</glossterm>
+ <glossentry id='var-bb-CACHE'><glossterm>CACHE</glossterm>
<glossdef>
<para>
Specifies the directory BitBake uses to store a cache
@@ -1389,7 +1389,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-CVSDIR'><glossterm>CVSDIR</glossterm>
+ <glossentry id='var-bb-CVSDIR'><glossterm>CVSDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out under the
@@ -1400,9 +1400,9 @@
</glossdiv>
- <glossdiv id='var-glossary-d'><title>D</title>
+ <glossdiv id='var-bb-glossary-d'><title>D</title>
- <glossentry id='var-DEFAULT_PREFERENCE'><glossterm>DEFAULT_PREFERENCE</glossterm>
+ <glossentry id='var-bb-DEFAULT_PREFERENCE'><glossterm>DEFAULT_PREFERENCE</glossterm>
<glossdef>
<para>
Specifies a weak bias for recipe selection priority.
@@ -1413,20 +1413,20 @@
piece of software.
Using the variable in this way causes the stable version
of the recipe to build by default in the absence of
- <filename><link linkend='var-PREFERRED_VERSION'>PREFERRED_VERSION</link></filename>
+ <filename><link linkend='var-bb-PREFERRED_VERSION'>PREFERRED_VERSION</link></filename>
being used to build the development version.
</para>
<note>
The bias provided by <filename>DEFAULT_PREFERENCE</filename>
is weak and is overridden by
- <filename><link linkend='var-BBFILE_PRIORITY'>BBFILE_PRIORITY</link></filename>
+ <filename><link linkend='var-bb-BBFILE_PRIORITY'>BBFILE_PRIORITY</link></filename>
if that variable is different between two layers
that contain different versions of the same recipe.
</note>
</glossdef>
</glossentry>
- <glossentry id='var-DEPENDS'><glossterm>DEPENDS</glossterm>
+ <glossentry id='var-bb-DEPENDS'><glossterm>DEPENDS</glossterm>
<glossdef>
<para>
Lists a recipe's build-time dependencies
@@ -1451,13 +1451,13 @@
<para>
For information on runtime dependencies, see the
- <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+ <link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>
variable.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-DESCRIPTION'><glossterm>DESCRIPTION</glossterm>
+ <glossentry id='var-bb-DESCRIPTION'><glossterm>DESCRIPTION</glossterm>
<glossdef>
<para>
A long description for the recipe.
@@ -1465,7 +1465,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-DL_DIR'><glossterm>DL_DIR</glossterm>
+ <glossentry id='var-bb-DL_DIR'><glossterm>DL_DIR</glossterm>
<glossdef>
<para>
The central download directory used by the build process to
@@ -1474,7 +1474,7 @@
suitable for mirroring for everything except Git
repositories.
If you want tarballs of Git repositories, use the
- <link linkend='var-BB_GENERATE_MIRROR_TARBALLS'><filename>BB_GENERATE_MIRROR_TARBALLS</filename></link>
+ <link linkend='var-bb-BB_GENERATE_MIRROR_TARBALLS'><filename>BB_GENERATE_MIRROR_TARBALLS</filename></link>
variable.
</para>
</glossdef>
@@ -1482,9 +1482,9 @@
</glossentry>
</glossdiv>
- <glossdiv id='var-glossary-e'><title>E</title>
+ <glossdiv id='var-bb-glossary-e'><title>E</title>
- <glossentry id='var-EXCLUDE_FROM_WORLD'><glossterm>EXCLUDE_FROM_WORLD</glossterm>
+ <glossentry id='var-bb-EXCLUDE_FROM_WORLD'><glossterm>EXCLUDE_FROM_WORLD</glossterm>
<glossdef>
<para>
Directs BitBake to exclude a recipe from world builds (i.e.
@@ -1512,9 +1512,9 @@
</glossdiv>
- <glossdiv id='var-glossary-f'><title>F</title>
+ <glossdiv id='var-bb-glossary-f'><title>F</title>
- <glossentry id='var-FAKEROOT'><glossterm>FAKEROOT</glossterm>
+ <glossentry id='var-bb-FAKEROOT'><glossterm>FAKEROOT</glossterm>
<glossdef>
<para>
Contains the command to use when running a shell script
@@ -1527,19 +1527,19 @@
</glossdef>
</glossentry>
- <glossentry id='var-FAKEROOTBASEENV'><glossterm>FAKEROOTBASEENV</glossterm>
+ <glossentry id='var-bb-FAKEROOTBASEENV'><glossterm>FAKEROOTBASEENV</glossterm>
<glossdef>
<para>
Lists environment variables to set when executing
the command defined by
- <link linkend='var-FAKEROOTCMD'><filename>FAKEROOTCMD</filename></link>
+ <link linkend='var-bb-FAKEROOTCMD'><filename>FAKEROOTCMD</filename></link>
that starts the bitbake-worker process
in the fakeroot environment.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-FAKEROOTCMD'><glossterm>FAKEROOTCMD</glossterm>
+ <glossentry id='var-bb-FAKEROOTCMD'><glossterm>FAKEROOTCMD</glossterm>
<glossdef>
<para>
Contains the command that starts the bitbake-worker
@@ -1548,7 +1548,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-FAKEROOTDIRS'><glossterm>FAKEROOTDIRS</glossterm>
+ <glossentry id='var-bb-FAKEROOTDIRS'><glossterm>FAKEROOTDIRS</glossterm>
<glossdef>
<para>
Lists directories to create before running a task in
@@ -1557,33 +1557,33 @@
</glossdef>
</glossentry>
- <glossentry id='var-FAKEROOTENV'><glossterm>FAKEROOTENV</glossterm>
+ <glossentry id='var-bb-FAKEROOTENV'><glossterm>FAKEROOTENV</glossterm>
<glossdef>
<para>
Lists environment variables to set when running a task
in the fakeroot environment.
For additional information on environment variables and
the fakeroot environment, see the
- <link linkend='var-FAKEROOTBASEENV'><filename>FAKEROOTBASEENV</filename></link>
+ <link linkend='var-bb-FAKEROOTBASEENV'><filename>FAKEROOTBASEENV</filename></link>
variable.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-FAKEROOTNOENV'><glossterm>FAKEROOTNOENV</glossterm>
+ <glossentry id='var-bb-FAKEROOTNOENV'><glossterm>FAKEROOTNOENV</glossterm>
<glossdef>
<para>
Lists environment variables to set when running a task
that is not in the fakeroot environment.
For additional information on environment variables and
the fakeroot environment, see the
- <link linkend='var-FAKEROOTENV'><filename>FAKEROOTENV</filename></link>
+ <link linkend='var-bb-FAKEROOTENV'><filename>FAKEROOTENV</filename></link>
variable.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-FETCHCMD'><glossterm>FETCHCMD</glossterm>
+ <glossentry id='var-bb-FETCHCMD'><glossterm>FETCHCMD</glossterm>
<glossdef>
<para>
Defines the command the BitBake fetcher module
@@ -1595,7 +1595,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-FILE'><glossterm>FILE</glossterm>
+ <glossentry id='var-bb-FILE'><glossterm>FILE</glossterm>
<glossdef>
<para>
Points at the current file.
@@ -1607,7 +1607,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-FILESPATH'><glossterm>FILESPATH</glossterm>
+ <glossentry id='var-bb-FILESPATH'><glossterm>FILESPATH</glossterm>
<glossdef>
<para>
Specifies directories BitBake uses when searching for
@@ -1625,9 +1625,9 @@
</glossdiv>
- <glossdiv id='var-glossary-g'><title>G</title>
+ <glossdiv id='var-bb-glossary-g'><title>G</title>
- <glossentry id='var-GITDIR'><glossterm>GITDIR</glossterm>
+ <glossentry id='var-bb-GITDIR'><glossterm>GITDIR</glossterm>
<glossdef>
<para>
The directory in which a local copy of a Git repository
@@ -1639,9 +1639,9 @@
</glossdiv>
- <glossdiv id='var-glossary-h'><title>H</title>
+ <glossdiv id='var-bb-glossary-h'><title>H</title>
- <glossentry id='var-HGDIR'><glossterm>HGDIR</glossterm>
+ <glossentry id='var-bb-HGDIR'><glossterm>HGDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Mercurial
@@ -1650,7 +1650,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-HOMEPAGE'><glossterm>HOMEPAGE</glossterm>
+ <glossentry id='var-bb-HOMEPAGE'><glossterm>HOMEPAGE</glossterm>
<glossdef>
<para>Website where more information about the software the recipe is building
can be found.</para>
@@ -1659,9 +1659,9 @@
</glossdiv>
- <glossdiv id='var-glossary-i'><title>I</title>
+ <glossdiv id='var-bb-glossary-i'><title>I</title>
- <glossentry id='var-INHERIT'><glossterm>INHERIT</glossterm>
+ <glossentry id='var-bb-INHERIT'><glossterm>INHERIT</glossterm>
<glossdef>
<para>
Causes the named class or classes to be inherited globally.
@@ -1691,15 +1691,15 @@
</glossdiv>
-->
- <glossdiv id='var-glossary-l'><title>L</title>
+ <glossdiv id='var-bb-glossary-l'><title>L</title>
- <glossentry id='var-LAYERDEPENDS'><glossterm>LAYERDEPENDS</glossterm>
+ <glossentry id='var-bb-LAYERDEPENDS'><glossterm>LAYERDEPENDS</glossterm>
<glossdef>
<para>Lists the layers, separated by spaces, upon which this recipe depends.
Optionally, you can specify a specific layer version for a dependency
by adding it to the end of the layer name with a colon, (e.g. "anotherlayer:3"
to be compared against
- <link linkend='var-LAYERVERSION'><filename>LAYERVERSION</filename></link><filename>_anotherlayer</filename>
+ <link linkend='var-bb-LAYERVERSION'><filename>LAYERVERSION</filename></link><filename>_anotherlayer</filename>
in this case).
BitBake produces an error if any dependency is missing or
the version numbers do not match exactly (if specified).</para>
@@ -1710,7 +1710,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-LAYERDIR'><glossterm>LAYERDIR</glossterm>
+ <glossentry id='var-bb-LAYERDIR'><glossterm>LAYERDIR</glossterm>
<glossdef>
<para>When used inside the <filename>layer.conf</filename> configuration
file, this variable provides the path of the current layer.
@@ -1719,22 +1719,22 @@
</glossdef>
</glossentry>
- <glossentry id='var-LAYERDIR_RE'><glossterm>LAYERDIR_RE</glossterm>
+ <glossentry id='var-bb-LAYERDIR_RE'><glossterm>LAYERDIR_RE</glossterm>
<glossdef>
<para>When used inside the <filename>layer.conf</filename> configuration
file, this variable provides the path of the current layer,
escaped for use in a regular expression
- (<link linkend='var-BBFILE_PATTERN'><filename>BBFILE_PATTERN</filename></link>).
+ (<link linkend='var-bb-BBFILE_PATTERN'><filename>BBFILE_PATTERN</filename></link>).
This variable is not available outside of <filename>layer.conf</filename>
and references are expanded immediately when parsing of the file completes.</para>
</glossdef>
</glossentry>
- <glossentry id='var-LAYERVERSION'><glossterm>LAYERVERSION</glossterm>
+ <glossentry id='var-bb-LAYERVERSION'><glossterm>LAYERVERSION</glossterm>
<glossdef>
<para>Optionally specifies the version of a layer as a single number.
You can use this variable within
- <link linkend='var-LAYERDEPENDS'><filename>LAYERDEPENDS</filename></link>
+ <link linkend='var-bb-LAYERDEPENDS'><filename>LAYERDEPENDS</filename></link>
for another layer in order to depend on a specific version
of the layer.</para>
<para>
@@ -1744,7 +1744,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-LICENSE'><glossterm>LICENSE</glossterm>
+ <glossentry id='var-bb-LICENSE'><glossterm>LICENSE</glossterm>
<glossdef>
<para>
The list of source licenses for the recipe.
@@ -1754,9 +1754,9 @@
</glossdiv>
- <glossdiv id='var-glossary-m'><title>M</title>
+ <glossdiv id='var-bb-glossary-m'><title>M</title>
- <glossentry id='var-MIRRORS'><glossterm>MIRRORS</glossterm>
+ <glossentry id='var-bb-MIRRORS'><glossterm>MIRRORS</glossterm>
<glossdef>
<para>
Specifies additional paths from which BitBake gets source code.
@@ -1764,14 +1764,14 @@
tries the local download directory.
If that location fails, the build system tries locations
defined by
- <link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>,
+ <link linkend='var-bb-PREMIRRORS'><filename>PREMIRRORS</filename></link>,
the upstream source, and then locations specified by
<filename>MIRRORS</filename> in that order.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-MULTI_PROVIDER_WHITELIST'><glossterm>MULTI_PROVIDER_WHITELIST</glossterm>
+ <glossentry id='var-bb-MULTI_PROVIDER_WHITELIST'><glossterm>MULTI_PROVIDER_WHITELIST</glossterm>
<glossdef>
<para>
Allows you to suppress BitBake warnings caused when
@@ -1804,9 +1804,9 @@
</glossdiv>
-->
- <glossdiv id='var-glossary-o'><title>O</title>
+ <glossdiv id='var-bb-glossary-o'><title>O</title>
- <glossentry id='var-OVERRIDES'><glossterm>OVERRIDES</glossterm>
+ <glossentry id='var-bb-OVERRIDES'><glossterm>OVERRIDES</glossterm>
<glossdef>
<para>
BitBake uses <filename>OVERRIDES</filename> to control
@@ -1829,9 +1829,9 @@
</glossentry>
</glossdiv>
- <glossdiv id='var-glossary-p'><title>P</title>
+ <glossdiv id='var-bb-glossary-p'><title>P</title>
- <glossentry id='var-P4DIR'><glossterm>P4DIR</glossterm>
+ <glossentry id='var-bb-P4DIR'><glossterm>P4DIR</glossterm>
<glossdef>
<para>
The directory in which a local copy of a Perforce depot
@@ -1840,14 +1840,14 @@
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGES'><glossterm>PACKAGES</glossterm>
+ <glossentry id='var-bb-PACKAGES'><glossterm>PACKAGES</glossterm>
<glossdef>
<para>The list of packages the recipe creates.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGES_DYNAMIC'><glossterm>PACKAGES_DYNAMIC</glossterm>
+ <glossentry id='var-bb-PACKAGES_DYNAMIC'><glossterm>PACKAGES_DYNAMIC</glossterm>
<glossdef>
<para>
A promise that your recipe satisfies runtime dependencies
@@ -1856,7 +1856,7 @@
does not actually satisfy the dependencies, it only states that
they should be satisfied.
For example, if a hard, runtime dependency
- (<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>)
+ (<link linkend='var-bb-RDEPENDS'><filename>RDEPENDS</filename></link>)
of another package is satisfied during the build
through the <filename>PACKAGES_DYNAMIC</filename>
variable, but a package with the module name is never actually
@@ -1865,7 +1865,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-PE'><glossterm>PE</glossterm>
+ <glossentry id='var-bb-PE'><glossterm>PE</glossterm>
<glossdef>
<para>
The epoch of the recipe.
@@ -1877,7 +1877,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-PERSISTENT_DIR'><glossterm>PERSISTENT_DIR</glossterm>
+ <glossentry id='var-bb-PERSISTENT_DIR'><glossterm>PERSISTENT_DIR</glossterm>
<glossdef>
<para>
Specifies the directory BitBake uses to store data that
@@ -1889,7 +1889,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-PF'><glossterm>PF</glossterm>
+ <glossentry id='var-bb-PF'><glossterm>PF</glossterm>
<glossdef>
<para>
Specifies the recipe or package name and includes all version and revision
@@ -1899,27 +1899,27 @@
</glossdef>
</glossentry>
- <glossentry id='var-PN'><glossterm>PN</glossterm>
+ <glossentry id='var-bb-PN'><glossterm>PN</glossterm>
<glossdef>
<para>The recipe name.</para>
</glossdef>
</glossentry>
- <glossentry id='var-PR'><glossterm>PR</glossterm>
+ <glossentry id='var-bb-PR'><glossterm>PR</glossterm>
<glossdef>
<para>The revision of the recipe.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-PREFERRED_PROVIDER'><glossterm>PREFERRED_PROVIDER</glossterm>
+ <glossentry id='var-bb-PREFERRED_PROVIDER'><glossterm>PREFERRED_PROVIDER</glossterm>
<glossdef>
<para>
Determines which recipe should be given preference when
multiple recipes provide the same item.
You should always suffix the variable with the name of the
provided item, and you should set it to the
- <link linkend='var-PN'><filename>PN</filename></link>
+ <link linkend='var-bb-PN'><filename>PN</filename></link>
of the recipe to which you want to give precedence.
Some examples:
<literallayout class='monospaced'>
@@ -1931,14 +1931,14 @@
</glossdef>
</glossentry>
- <glossentry id='var-PREFERRED_PROVIDERS'><glossterm>PREFERRED_PROVIDERS</glossterm>
+ <glossentry id='var-bb-PREFERRED_PROVIDERS'><glossterm>PREFERRED_PROVIDERS</glossterm>
<glossdef>
<para>
Determines which recipe should be given preference for
cases where multiple recipes provide the same item.
Functionally,
<filename>PREFERRED_PROVIDERS</filename> is identical to
- <link linkend='var-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>.
+ <link linkend='var-bb-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>.
However, the <filename>PREFERRED_PROVIDERS</filename>
variable lets you define preferences for multiple
situations using the following form:
@@ -1954,15 +1954,15 @@
</glossdef>
</glossentry>
- <glossentry id='var-PREFERRED_VERSION'><glossterm>PREFERRED_VERSION</glossterm>
+ <glossentry id='var-bb-PREFERRED_VERSION'><glossterm>PREFERRED_VERSION</glossterm>
<glossdef>
<para>
If there are multiple versions of recipes available, this
variable determines which recipe should be given preference.
You must always suffix the variable with the
- <link linkend='var-PN'><filename>PN</filename></link>
+ <link linkend='var-bb-PN'><filename>PN</filename></link>
you want to select, and you should set
- <link linkend='var-PV'><filename>PV</filename></link>
+ <link linkend='var-bb-PV'><filename>PV</filename></link>
accordingly for precedence.
</para>
@@ -1989,7 +1989,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-PREMIRRORS'><glossterm>PREMIRRORS</glossterm>
+ <glossentry id='var-bb-PREMIRRORS'><glossterm>PREMIRRORS</glossterm>
<glossdef>
<para>
Specifies additional paths from which BitBake gets source code.
@@ -1998,7 +1998,7 @@
If that location fails, the build system tries locations
defined by <filename>PREMIRRORS</filename>, the upstream
source, and then locations specified by
- <link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
+ <link linkend='var-bb-MIRRORS'><filename>MIRRORS</filename></link>
in that order.
</para>
@@ -2022,20 +2022,20 @@
</glossdef>
</glossentry>
- <glossentry id='var-PROVIDES'><glossterm>PROVIDES</glossterm>
+ <glossentry id='var-bb-PROVIDES'><glossterm>PROVIDES</glossterm>
<glossdef>
<para>
A list of aliases by which a particular recipe can be
known.
By default, a recipe's own
- <filename><link linkend='var-PN'>PN</link></filename>
+ <filename><link linkend='var-bb-PN'>PN</link></filename>
is implicitly already in its <filename>PROVIDES</filename>
list.
If a recipe uses <filename>PROVIDES</filename>, the
additional aliases are synonyms for the recipe and can
be useful satisfying dependencies of other recipes during
the build as specified by
- <filename><link linkend='var-DEPENDS'>DEPENDS</link></filename>.
+ <filename><link linkend='var-bb-DEPENDS'>DEPENDS</link></filename>.
</para>
<para>
@@ -2059,7 +2059,7 @@
virtual target in <filename>PROVIDES</filename>.
Recipes that depend on the functionality in question can
include the virtual target in
- <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+ <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
to leave the choice of provider open.
</para>
@@ -2072,11 +2072,11 @@
</glossdef>
</glossentry>
- <glossentry id='var-PRSERV_HOST'><glossterm>PRSERV_HOST</glossterm>
+ <glossentry id='var-bb-PRSERV_HOST'><glossterm>PRSERV_HOST</glossterm>
<glossdef>
<para>
The network based
- <link linkend='var-PR'><filename>PR</filename></link>
+ <link linkend='var-bb-PR'><filename>PR</filename></link>
service host and port.
</para>
@@ -2094,7 +2094,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-PV'><glossterm>PV</glossterm>
+ <glossentry id='var-bb-PV'><glossterm>PV</glossterm>
<glossdef>
<para>The version of the recipe.
</para>
@@ -2108,9 +2108,9 @@
</glossdiv>
-->
- <glossdiv id='var-glossary-r'><title>R</title>
+ <glossdiv id='var-bb-glossary-r'><title>R</title>
- <glossentry id='var-RDEPENDS'><glossterm>RDEPENDS</glossterm>
+ <glossentry id='var-bb-RDEPENDS'><glossterm>RDEPENDS</glossterm>
<glossdef>
<para>
Lists a package's runtime dependencies (i.e. other packages)
@@ -2165,13 +2165,13 @@
<para>
For information on build-time dependencies, see the
- <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
+ <link linkend='var-bb-DEPENDS'><filename>DEPENDS</filename></link>
variable.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-REPODIR'><glossterm>REPODIR</glossterm>
+ <glossentry id='var-bb-REPODIR'><glossterm>REPODIR</glossterm>
<glossdef>
<para>
The directory in which a local copy of a
@@ -2181,14 +2181,14 @@
</glossdef>
</glossentry>
- <glossentry id='var-RPROVIDES'><glossterm>RPROVIDES</glossterm>
+ <glossentry id='var-bb-RPROVIDES'><glossterm>RPROVIDES</glossterm>
<glossdef>
<para>
A list of package name aliases that a package also provides.
These aliases are useful for satisfying runtime dependencies
of other packages both during the build and on the target
(as specified by
- <filename><link linkend='var-RDEPENDS'>RDEPENDS</link></filename>).
+ <filename><link linkend='var-bb-RDEPENDS'>RDEPENDS</link></filename>).
</para>
<para>
As with all package-controlling variables, you must always
@@ -2201,7 +2201,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-RRECOMMENDS'><glossterm>RRECOMMENDS</glossterm>
+ <glossentry id='var-bb-RRECOMMENDS'><glossterm>RRECOMMENDS</glossterm>
<glossdef>
<para>
A list of packages that extends the usability of a package
@@ -2210,7 +2210,7 @@
packages in order to successfully build, but needs them for
the extended usability.
To specify runtime dependencies for packages, see the
- <filename><link linkend='var-RDEPENDS'>RDEPENDS</link></filename>
+ <filename><link linkend='var-bb-RDEPENDS'>RDEPENDS</link></filename>
variable.
</para>
@@ -2243,15 +2243,15 @@
</glossdiv>
- <glossdiv id='var-glossary-s'><title>S</title>
+ <glossdiv id='var-bb-glossary-s'><title>S</title>
- <glossentry id='var-SECTION'><glossterm>SECTION</glossterm>
+ <glossentry id='var-bb-SECTION'><glossterm>SECTION</glossterm>
<glossdef>
<para>The section in which packages should be categorized.</para>
</glossdef>
</glossentry>
- <glossentry id='var-SRC_URI'><glossterm>SRC_URI</glossterm>
+ <glossentry id='var-bb-SRC_URI'><glossterm>SRC_URI</glossterm>
<glossdef>
<para>
The list of source files - local or remote.
@@ -2272,7 +2272,7 @@
the metadata,
from the local machine.
The path is relative to the
- <link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
+ <link linkend='var-bb-FILESPATH'><filename>FILESPATH</filename></link>
variable.</para></listitem>
<listitem><para><emphasis><filename>bzr://</filename> -</emphasis> Fetches files from a
Bazaar revision control repository.</para></listitem>
@@ -2322,7 +2322,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-SRCDATE'><glossterm>SRCDATE</glossterm>
+ <glossentry id='var-bb-SRCDATE'><glossterm>SRCDATE</glossterm>
<glossdef>
<para>
The date of the source code used to build the package.
@@ -2331,7 +2331,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-SRCREV'><glossterm>SRCREV</glossterm>
+ <glossentry id='var-bb-SRCREV'><glossterm>SRCREV</glossterm>
<glossdef>
<para>
The revision of the source code used to build the package.
@@ -2344,13 +2344,13 @@
</glossdef>
</glossentry>
- <glossentry id='var-SRCREV_FORMAT'><glossterm>SRCREV_FORMAT</glossterm>
+ <glossentry id='var-bb-SRCREV_FORMAT'><glossterm>SRCREV_FORMAT</glossterm>
<glossdef>
<para>
Helps construct valid
- <link linkend='var-SRCREV'><filename>SRCREV</filename></link>
+ <link linkend='var-bb-SRCREV'><filename>SRCREV</filename></link>
values when multiple source controlled URLs are used in
- <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>.
+ <link linkend='var-bb-SRC_URI'><filename>SRC_URI</filename></link>.
</para>
<para>
@@ -2371,7 +2371,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-STAMP'><glossterm>STAMP</glossterm>
+ <glossentry id='var-bb-STAMP'><glossterm>STAMP</glossterm>
<glossdef>
<para>
Specifies the base path used to create recipe stamp files.
@@ -2381,12 +2381,12 @@
</glossdef>
</glossentry>
- <glossentry id='var-STAMPCLEAN'><glossterm>STAMPCLEAN</glossterm>
+ <glossentry id='var-bb-STAMPCLEAN'><glossterm>STAMPCLEAN</glossterm>
<glossdef>
<para>
Specifies the base path used to create recipe stamp files.
Unlike the
- <link linkend='var-STAMP'><filename>STAMP</filename></link>
+ <link linkend='var-bb-STAMP'><filename>STAMP</filename></link>
variable, <filename>STAMPCLEAN</filename> can contain
wildcards to match the range of files a clean operation
should remove.
@@ -2396,7 +2396,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-SUMMARY'><glossterm>SUMMARY</glossterm>
+ <glossentry id='var-bb-SUMMARY'><glossterm>SUMMARY</glossterm>
<glossdef>
<para>
A short summary for the recipe, which is 72 characters or less.
@@ -2404,7 +2404,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-SVNDIR'><glossterm>SVNDIR</glossterm>
+ <glossentry id='var-bb-SVNDIR'><glossterm>SVNDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Subversion
@@ -2415,9 +2415,9 @@
</glossdiv>
- <glossdiv id='var-glossary-t'><title>T</title>
+ <glossdiv id='var-bb-glossary-t'><title>T</title>
- <glossentry id='var-T'><glossterm>T</glossterm>
+ <glossentry id='var-bb-T'><glossterm>T</glossterm>
<glossdef>
 <para>Points to a directory where BitBake places
temporary files, which consist mostly of task logs and
@@ -2426,7 +2426,7 @@
</glossdef>
</glossentry>
- <glossentry id='var-TOPDIR'><glossterm>TOPDIR</glossterm>
+ <glossentry id='var-bb-TOPDIR'><glossterm>TOPDIR</glossterm>
<glossdef>
<para>
Points to the build directory.
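The glossary hunks above are a mechanical rename of every anchor and cross-reference from var-NAME to var-bb-NAME (and var-glossary-X to var-bb-glossary-X), so the BitBake manual's IDs do not collide with other manuals published alongside it. A minimal sketch of how such a prefix rename could be applied with a regular expression; prefix_bitbake_anchors is an illustrative helper, not the script actually used for this commit:

    import re

    def prefix_bitbake_anchors(text):
        # Rewrite id='var-FOO' and linkend='var-FOO' to the 'var-bb-FOO' form,
        # leaving anchors that already carry the bb- prefix untouched.
        return re.sub(r"(\b(?:id|linkend)=')var-(?!bb-)", r"\1var-bb-", text)

    sample = "<link linkend='var-BBPATH'><filename>BBPATH</filename></link>"
    print(prefix_bitbake_anchors(sample))
    # -> <link linkend='var-bb-BBPATH'><filename>BBPATH</filename></link>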
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index 3e2a94edb..7571421d7 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -304,9 +304,10 @@ def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated", pythonexception=pythonexception)
except (bb.parse.SkipRecipe, bb.build.FuncFailed):
raise
- except:
+ except Exception as e:
if pythonexception:
raise
+ logger.error(str(e))
raise FuncFailed(func, None)
finally:
bb.debug(2, "Python function %s finished" % func)
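The build.py hunk keeps the FuncFailed behaviour for non-Python callers but now logs the underlying exception text before wrapping it, so the real cause is visible in the log. A small sketch of that log-then-wrap pattern, assuming nothing about the rest of bb.build; run_python_func and the FuncFailed stub here are illustrative only:

    import logging

    logger = logging.getLogger("BitBake.Build")

    class FuncFailed(Exception):
        """Stand-in for bb.build.FuncFailed, used here only for illustration."""
        def __init__(self, name, logfile=None):
            super().__init__("Function %s failed" % name)

    def run_python_func(func, body, pythonexception=False):
        try:
            exec(compile(body, "<%s>" % func, "exec"), {})
        except Exception as e:
            if pythonexception:
                raise              # caller wants the original traceback
            logger.error(str(e))   # surface the real cause before wrapping it
            raise FuncFailed(func, None)

    # run_python_func("do_fail", "raise ValueError('boom')") logs "boom" and then
    # raises FuncFailed, matching the behaviour added above.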
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 258d679dc..65c514b90 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -97,7 +97,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
self.skipreason = self.getvar('__SKIPPED', metadata)
if self.skipreason:
- self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0]
+ self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
self.skipped = True
self.provides = self.depvar('PROVIDES', metadata)
self.rprovides = self.depvar('RPROVIDES', metadata)
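The cache.py change only swaps the fallback helper: when a skipped recipe has no PN set, the name is derived via bb.parse.vars_from_file() instead of reaching into the BBHandler module for the same function. A rough sketch of what that fallback computes, assuming recipe files follow the usual name_version.bb convention; vars_from_filename is an illustrative stand-in and the real helper may differ in detail:

    import os

    def vars_from_filename(filename):
        # Rough equivalent of the fallback: split 'foo_1.2.bb' into ('foo', '1.2').
        base = os.path.basename(filename)
        base = base.rsplit(".", 1)[0]          # drop the .bb / .bbappend suffix
        if "_" in base:
            pn, pv = base.split("_", 1)
        else:
            pn, pv = base, None
        return pn, pv

    assert vars_from_filename("/layers/meta/recipes/foo_1.2.bb") == ("foo", "1.2")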
diff --git a/poky/bitbake/lib/bb/codeparser.py b/poky/bitbake/lib/bb/codeparser.py
index ddd1b97dc..ac995a6a1 100644
--- a/poky/bitbake/lib/bb/codeparser.py
+++ b/poky/bitbake/lib/bb/codeparser.py
@@ -33,7 +33,7 @@ from bb.cache import MultiProcessCache
logger = logging.getLogger('BitBake.CodeParser')
def bbhash(s):
- return hashlib.md5(s.encode("utf-8")).hexdigest()
+ return hashlib.sha256(s.encode("utf-8")).hexdigest()
def check_indent(codestr):
"""If the code is indented, add a top level piece of code to 'remove' the indentation"""
@@ -140,7 +140,7 @@ class CodeParserCache(MultiProcessCache):
# so that an existing cache gets invalidated. Additionally you'll need
# to increment __cache_version__ in cache.py in order to ensure that old
# recipe caches don't trigger "Taskhash mismatch" errors.
- CACHE_VERSION = 10
+ CACHE_VERSION = 11
def __init__(self):
MultiProcessCache.__init__(self)
@@ -368,8 +368,9 @@ class ShellParser():
def _parse_shell(self, value):
try:
tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
- except pyshlex.NeedMore:
- raise sherrors.ShellSyntaxError("Unexpected EOF")
+ except Exception:
+ bb.error('Error during parse shell code, the last 5 lines are:\n%s' % '\n'.join(value.split('\n')[-5:]))
+ raise
self.process_tokens(tokens)
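Two related changes in codeparser.py: the internal digest moves from MD5 to SHA-256, and CACHE_VERSION is bumped so caches keyed by the old digests are invalidated rather than silently mismatching. The shell parser also now prints the tail of the failing script instead of a bare "Unexpected EOF". A short sketch of both ideas; report_shell_error is an illustrative helper, not BitBake API:

    import hashlib

    def bbhash(s):
        # Matches the new behaviour above: SHA-256 instead of MD5. Old cache
        # entries keyed by MD5 digests no longer match, hence the version bump.
        return hashlib.sha256(s.encode("utf-8")).hexdigest()

    def report_shell_error(value, nlines=5):
        # Same idea as the new except block: show the last few lines of the
        # shell fragment so the offending code is visible in the log.
        return "Shell parse error, the last %d lines are:\n%s" % (
            nlines, "\n".join(value.split("\n")[-nlines:]))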
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index adc41014e..9ccaa79f5 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -1216,8 +1216,8 @@ class BBCooker:
continue
elif regex == "":
parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
+ cre = re.compile('^NULL$')
errors = False
- continue
else:
try:
cre = re.compile(regex)
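In the hunk above, an empty BBFILE_PATTERN no longer causes the collection to be skipped; instead it is given a sentinel expression that can never match a recipe path, so the layer stays registered but claims no files. A minimal sketch of that sentinel, with pattern_for_collection as an illustrative helper:

    import re

    def pattern_for_collection(regex):
        # An explicitly empty pattern now maps to a regex that matches no real
        # recipe path, instead of dropping the collection entirely.
        if regex == "":
            return re.compile('^NULL$')
        return re.compile(regex)

    assert pattern_for_collection("").match("/layers/meta/recipes/foo_1.0.bb") is None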
@@ -1890,35 +1890,6 @@ class ParsingFailure(Exception):
self.recipe = recipe
Exception.__init__(self, realexception, recipe)
-class Feeder(multiprocessing.Process):
- def __init__(self, jobs, to_parsers, quit):
- self.quit = quit
- self.jobs = jobs
- self.to_parsers = to_parsers
- multiprocessing.Process.__init__(self)
-
- def run(self):
- while True:
- try:
- quit = self.quit.get_nowait()
- except queue.Empty:
- pass
- else:
- if quit == 'cancel':
- self.to_parsers.cancel_join_thread()
- break
-
- try:
- job = self.jobs.pop()
- except IndexError:
- break
-
- try:
- self.to_parsers.put(job, timeout=0.5)
- except queue.Full:
- self.jobs.insert(0, job)
- continue
-
class Parser(multiprocessing.Process):
def __init__(self, jobs, results, quit, init, profile):
self.jobs = jobs
@@ -1965,11 +1936,8 @@ class Parser(multiprocessing.Process):
result = pending.pop()
else:
try:
- job = self.jobs.get(timeout=0.25)
- except queue.Empty:
- continue
-
- if job is None:
+ job = self.jobs.pop()
+ except IndexError:
break
result = self.parse(*job)
@@ -2053,14 +2021,15 @@ class CookerParser(object):
multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
- self.feeder_quit = multiprocessing.Queue(maxsize=1)
self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
- self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
self.result_queue = multiprocessing.Queue()
- self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
- self.feeder.start()
+
+ def chunkify(lst,n):
+ return [lst[i::n] for i in range(n)]
+ self.jobs = chunkify(self.willparse, self.num_processes)
+
for i in range(0, self.num_processes):
- parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
+ parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
parser.start()
self.process_names.append(parser.name)
self.processes.append(parser)
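With the Feeder process and the shared job queue gone, the list of files to parse is split up front into one slice per Parser process, and each worker simply pops from its own plain list. The chunkify helper stripes the work so each slice ends up roughly the same size:

    def chunkify(lst, n):
        # Stripe the work list across n workers: worker i gets items i, i+n, i+2n, ...
        return [lst[i::n] for i in range(n)]

    jobs = chunkify(list(range(10)), 3)
    # [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]] - no shared queue, no feeder process needed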
@@ -2081,25 +2050,19 @@ class CookerParser(object):
self.total)
bb.event.fire(event, self.cfgdata)
- self.feeder_quit.put(None)
for process in self.processes:
self.parser_quit.put(None)
else:
- self.feeder_quit.put('cancel')
-
self.parser_quit.cancel_join_thread()
for process in self.processes:
self.parser_quit.put(None)
- self.jobs.cancel_join_thread()
-
for process in self.processes:
if force:
process.join(.1)
process.terminate()
else:
process.join()
- self.feeder.join()
sync = threading.Thread(target=self.bb_cache.sync)
sync.start()
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index 5df66e617..09412e28c 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -391,7 +391,11 @@ class CookerDataBuilder(object):
bb.fatal("BBFILES_DYNAMIC entries must be of the form <collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))
layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
+ collections_tmp = collections[:]
for c in collections:
+ collections_tmp.remove(c)
+ if c in collections_tmp:
+ bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
if compat and not (compat & layerseries):
bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
diff --git a/poky/bitbake/lib/bb/data.py b/poky/bitbake/lib/bb/data.py
index d66d98cc8..29c238803 100644
--- a/poky/bitbake/lib/bb/data.py
+++ b/poky/bitbake/lib/bb/data.py
@@ -322,8 +322,6 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
if varflags.get("python"):
value = d.getVarFlag(key, "_content", False)
parser = bb.codeparser.PythonParser(key, logger)
- if value and "\t" in value:
- logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE")))
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
deps = deps | parser.references
deps = deps | (keys & parser.execs)
@@ -438,7 +436,7 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
if var is not None:
data = data + str(var)
k = fn + "." + task
- basehash[k] = hashlib.md5(data.encode("utf-8")).hexdigest()
+ basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest()
taskdeps[task] = alldeps
return taskdeps, basehash
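As in codeparser.py, the per-task base hash in data.py moves from MD5 to SHA-256. A rough sketch of what that digest covers, assuming only that the hash is taken over the concatenated values of the task's signature dependencies; task_basehash is an illustrative helper and the real generate_dependency_hash folds in more detail:

    import hashlib

    def task_basehash(fn, task, dep_values):
        # The base hash for "fn.task" is a SHA-256 digest over the concatenated
        # values of the variables the task's signature depends on.
        data = "".join(str(v) for v in dep_values if v is not None)
        return hashlib.sha256(data.encode("utf-8")).hexdigest()

    print(task_basehash("foo_1.0.bb", "do_compile", ["gcc", "${WORKDIR}/build"]))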
diff --git a/poky/bitbake/lib/bb/data_smart.py b/poky/bitbake/lib/bb/data_smart.py
index 6b94fc4b4..07db7be97 100644
--- a/poky/bitbake/lib/bb/data_smart.py
+++ b/poky/bitbake/lib/bb/data_smart.py
@@ -39,10 +39,11 @@ from bb.COW import COWDictBase
logger = logging.getLogger("BitBake.Data")
__setvar_keyword__ = ["_append", "_prepend", "_remove"]
-__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
-__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
+__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
+__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
-__whitespace_split__ = re.compile('(\s)')
+__whitespace_split__ = re.compile(r'(\s)')
+__override_regexp__ = re.compile(r'[a-z0-9]+')
def infer_caller_details(loginfo, parent = False, varval = True):
"""Save the caller the trouble of specifying everything."""
@@ -122,7 +123,11 @@ class VariableParse:
connector = self.d["_remote_data"]
return connector.expandPythonRef(self.varname, code, self.d)
- codeobj = compile(code.strip(), self.varname or "<expansion>", "eval")
+ if self.varname:
+ varname = 'Var <%s>' % self.varname
+ else:
+ varname = '<expansion>'
+ codeobj = compile(code.strip(), varname, "eval")
parser = bb.codeparser.PythonParser(self.varname, logger)
parser.parse_python(code)
@@ -427,7 +432,8 @@ class DataSmart(MutableMapping):
except bb.parse.SkipRecipe:
raise
except Exception as exc:
- raise ExpansionError(varname, s, exc) from exc
+ tb = sys.exc_info()[2]
+ raise ExpansionError(varname, s, exc).with_traceback(tb) from exc
varparse.value = s
@@ -592,7 +598,7 @@ class DataSmart(MutableMapping):
# aka pay the cookie monster
override = var[var.rfind('_')+1:]
shortvar = var[:var.rfind('_')]
- while override and override.islower():
+ while override and __override_regexp__.match(override):
if shortvar not in self.overridedata:
self.overridedata[shortvar] = []
if [var, override] not in self.overridedata[shortvar]:
@@ -1068,4 +1074,4 @@ class DataSmart(MutableMapping):
data.update({i:value})
data_str = str([(k, data[k]) for k in sorted(data.keys())])
- return hashlib.md5(data_str.encode("utf-8")).hexdigest()
+ return hashlib.sha256(data_str.encode("utf-8")).hexdigest()
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 709372e16..8fecc809d 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -524,7 +524,7 @@ def fetcher_parse_save():
def fetcher_parse_done():
_checksum_cache.save_merge()
-def fetcher_compare_revisions():
+def fetcher_compare_revisions(d):
"""
Compare the revisions in the persistant cache with current values and
return true/false on whether they've changed.
@@ -777,7 +777,8 @@ def get_srcrev(d, method_name='sortable_revision'):
#
format = d.getVar('SRCREV_FORMAT')
if not format:
- raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+ raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
+ "The SCMs are:\n%s" % '\n'.join(scms))
name_to_rev = {}
seenautoinc = False
@@ -858,7 +859,10 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
# Disable pseudo as it may affect ssh, potentially causing it to hang.
cmd = 'export PSEUDO_DISABLED=1; ' + cmd
- logger.debug(1, "Running %s", cmd)
+ if workdir:
+ logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+ else:
+ logger.debug(1, "Running %s", cmd)
success = False
error_message = ""
@@ -894,7 +898,7 @@ def check_network_access(d, info, url):
log remote network access, and error if BB_NO_NETWORK is set or the given
URI is untrusted
"""
- if d.getVar("BB_NO_NETWORK") == "1":
+ if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
raise NetworkAccess(url, info)
elif not trusted_network(d, url):
raise UntrustedUrl(url, info)
@@ -1027,7 +1031,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
raise
except IOError as e:
- if e.errno in [os.errno.ESTALE]:
+ if e.errno in [errno.ESTALE]:
logger.warning("Stale Error Observed %s." % ud.url)
return False
raise
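The os.errno attribute was an accident of older CPython and was removed in newer Python 3 releases; the fix references the errno module directly (assuming errno is already imported at the top of fetch2/__init__.py, which the hunk does not show). A small sketch of the stale-handle check, with is_stale as an illustrative helper:

    import errno

    def is_stale(exc):
        # errno is the supported home for these constants; os.errno is gone.
        return isinstance(exc, IOError) and exc.errno == errno.ESTALE

    try:
        raise IOError(errno.ESTALE, "Stale file handle")
    except IOError as e:
        assert is_stale(e)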
@@ -1094,7 +1098,7 @@ def trusted_network(d, url):
BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
Note: modifies SRC_URI & mirrors.
"""
- if d.getVar('BB_NO_NETWORK') == "1":
+ if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
return True
pkgname = d.expand(d.getVar('PN', False))
@@ -1403,7 +1407,7 @@ class FetchMethod(object):
Fetch urls
Assumes localpath was called first
"""
- raise NoMethodError(url)
+ raise NoMethodError(urldata.url)
def unpack(self, urldata, rootdir, data):
iterate = False
@@ -1547,7 +1551,7 @@ class FetchMethod(object):
Check the status of a URL
Assumes localpath was called first
"""
- logger.info("URL %s could not be checked for status since no method exists.", url)
+ logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
return True
def latest_revision(self, ud, d, name):
@@ -1555,7 +1559,7 @@ class FetchMethod(object):
Look in the cache for the latest revision, if not present ask the SCM.
"""
if not hasattr(self, "_latest_revision"):
- raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
+ raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
key = self.generate_revision_key(ud, d, name)
@@ -1638,7 +1642,7 @@ class Fetch(object):
urls = self.urls
network = self.d.getVar("BB_NO_NETWORK")
- premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
+ premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
for u in urls:
ud = self.ud[u]
@@ -1716,7 +1720,7 @@ class Fetch(object):
update_stamp(ud, self.d)
except IOError as e:
- if e.errno in [os.errno.ESTALE]:
+ if e.errno in [errno.ESTALE]:
logger.error("Stale Error Observed %s." % u)
raise ChecksumError("Stale Error Detected")
@@ -1786,7 +1790,7 @@ class Fetch(object):
for url in urls:
if url not in self.ud:
- self.ud[url] = FetchData(url, d)
+ self.ud[url] = FetchData(url, self.d)
ud = self.ud[url]
ud.setup_localpath(self.d)
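
The last hunk matters because a Fetch object can later be asked about URLs that were not in its original list; building the late FetchData from the name d, which is not defined in that scope, instead of the instance's self.d broke that path. For reference, a minimal, hypothetical task-style use of the API this file implements:

    # Hypothetical snippet; 'd' is the datastore BitBake passes to tasks.
    import bb.fetch2

    def fetch_sources(d):
        src_uri = (d.getVar('SRC_URI') or "").split()
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()                    # honours BB_NO_NETWORK and premirror settings
        fetcher.unpack(d.getVar('WORKDIR'))   # unpack into the work directory
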
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 59a2ee8f8..8185bf4db 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -199,7 +199,7 @@ class Git(FetchMethod):
depth_default = 1
ud.shallow_depths = collections.defaultdict(lambda: depth_default)
- revs_default = d.getVar("BB_GIT_SHALLOW_REVS", True)
+ revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
ud.shallow_revs = []
ud.branches = {}
for pos, name in enumerate(ud.names):
@@ -318,7 +318,7 @@ class Git(FetchMethod):
def try_premirror(self, ud, d):
# If we don't do this, updating an existing checkout with only premirrors
# is not possible
- if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+ if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
return True
if os.path.exists(ud.clonedir):
return False
@@ -522,9 +522,17 @@ class Git(FetchMethod):
def clean(self, ud, d):
""" clean the git directory """
- bb.utils.remove(ud.localpath, True)
- bb.utils.remove(ud.fullmirror)
- bb.utils.remove(ud.fullmirror + ".done")
+ to_remove = [ud.localpath, ud.fullmirror, ud.fullmirror + ".done"]
+ # The localpath is a symlink to clonedir when it is cloned from a
+ # mirror, so remove both of them.
+ if os.path.islink(ud.localpath):
+ clonedir = os.path.realpath(ud.localpath)
+ to_remove.append(clonedir)
+
+ for r in to_remove:
+ if os.path.exists(r):
+ bb.note('Removing %s' % r)
+ bb.utils.remove(r, True)
def supports_srcrev(self):
return True
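
The rewritten clean() covers the case where the local path under the download directory is a symlink into a (pre)mirror clone: the link target is resolved first so both the symlink and the real clone directory are removed. The same idea in isolation, with placeholder paths:

    import os

    def git_paths_to_clean(localpath, fullmirror):
        """Sketch of the removal list Git.clean() builds; paths are placeholders."""
        to_remove = [localpath, fullmirror, fullmirror + ".done"]
        if os.path.islink(localpath):
            # When the checkout came from a mirror, localpath is a symlink to
            # the real clone directory, so remove that as well.
            to_remove.append(os.path.realpath(localpath))
        return [p for p in to_remove if os.path.exists(p)]
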
@@ -615,7 +623,7 @@ class Git(FetchMethod):
"""
pupver = ('', '')
- tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)")
+ tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
try:
output = self._lsremote(ud, d, "refs/tags/*")
except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
@@ -630,7 +638,7 @@ class Git(FetchMethod):
tag_head = line.split("/")[-1]
# Ignore non-released branches
- m = re.search("(alpha|beta|rc|final)+", tag_head)
+ m = re.search(r"(alpha|beta|rc|final)+", tag_head)
if m:
continue
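
The raw-string (r"...") prefixes silence Python's invalid-escape-sequence warnings without changing what the patterns match. The default UPSTREAM_CHECK_GITTAGREGEX pattern can be exercised on its own, for instance:

    import re

    # Default tag pattern from the hunk above; the tag names are invented.
    tagregex = re.compile(r"(?P<pver>([0-9][\.|_]?)+)")
    for tag in ("v1.2.3", "release_2_0", "rc1"):
        m = tagregex.search(tag)
        print(tag, "->", m.group('pver') if m else None)
    # v1.2.3 -> 1.2.3, release_2_0 -> 2_0, rc1 -> 1 (rc* is then skipped by the
    # alpha/beta/rc/final filter shown above)
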
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index b21fed266..32389130b 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -147,6 +147,23 @@ class GitSM(Git):
return submodules != []
+ def need_update(self, ud, d):
+ if Git.need_update(self, ud, d):
+ return True
+
+ try:
+ # Check for the nugget dropped by the download operation
+ known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
+ (ud.basecmd), d, workdir=ud.clonedir)
+
+ if ud.revisions[ud.names[0]] not in known_srcrevs.split():
+ return True
+ except bb.fetch2.FetchError:
+ # No srcrev nuggets, so this is new and needs to be updated
+ return True
+
+ return False
+
def download(self, ud, d):
def download_submodule(ud, url, module, modpath, d):
url += ";bareclone=1;nobranch=1"
@@ -157,6 +174,9 @@ class GitSM(Git):
try:
newfetch = Fetch([url], d, cache=False)
newfetch.download()
+ # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
+ runfetchcmd("%s config --add bitbake.srcrev %s" % \
+ (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
except Exception as e:
logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
raise
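
The "nugget" mentioned in the comments is simply a multi-valued git config key, bitbake.srcrev, written into the bare clone after each successful fetch; need_update() then asks whether the wanted revision has already been recorded. Equivalent plumbing outside the fetcher, with a placeholder repository path:

    import subprocess

    CLONEDIR = "/path/to/downloads/git2/example.git"   # placeholder bare clone

    def record_srcrev(rev):
        # Counterpart of the runfetchcmd() call above: append the revision.
        subprocess.check_call(
            ["git", "config", "--add", "bitbake.srcrev", rev], cwd=CLONEDIR)

    def srcrev_known(rev):
        proc = subprocess.run(
            ["git", "config", "--get-all", "bitbake.srcrev"],
            cwd=CLONEDIR, capture_output=True, text=True)
        return proc.returncode == 0 and rev in proc.stdout.split()
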
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
index 936d04311..5a2985e16 100644
--- a/poky/bitbake/lib/bb/fetch2/hg.py
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -99,7 +99,7 @@ class Hg(FetchMethod):
def try_premirror(self, ud, d):
# If we don't do this, updating an existing checkout with only premirrors
# is not possible
- if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+ if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
return True
if os.path.exists(ud.moddir):
return False
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
index 408dfc3d0..65bf5a364 100644
--- a/poky/bitbake/lib/bb/fetch2/npm.py
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -226,7 +226,7 @@ class Npm(FetchMethod):
self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
return
outputurl = "invalid"
- if ('resolved' not in data) or (not data['resolved'].startswith('http')):
+ if ('resolved' not in data) or (not data['resolved'].startswith('http://') and not data['resolved'].startswith('https://')):
# will be the case for ${PN}
fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
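
The stricter test above only treats entries with a genuine http:// or https:// scheme as directly downloadable; anything else falls through to the "npm view ... dist.tarball" lookup. A tiny sketch of the decision, with invented shrinkwrap entries:

    def has_resolved_url(entry):
        # Mirrors the (inverted) condition in the hunk above.
        resolved = entry.get('resolved', '')
        return resolved.startswith('http://') or resolved.startswith('https://')

    print(has_resolved_url({'resolved': 'https://registry.example.org/a/-/a-1.0.0.tgz'}))  # True
    print(has_resolved_url({'resolved': 'not-a-url'}))                                     # False
    print(has_resolved_url({}))                                                            # False
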
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 8f505b6de..3bb3e3bb0 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -33,11 +33,14 @@ import logging
import errno
import bb
import bb.progress
+import socket
+import http.client
import urllib.request, urllib.parse, urllib.error
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
+from bb.fetch2 import FetchConnectionCache
from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
@@ -132,10 +135,6 @@ class Wget(FetchMethod):
return True
def checkstatus(self, fetch, ud, d, try_again=True):
- import urllib.request, urllib.error, urllib.parse, socket, http.client
- from urllib.response import addinfourl
- from bb.fetch2 import FetchConnectionCache
-
class HTTPConnectionCache(http.client.HTTPConnection):
if fetch.connection_cache:
def connect(self):
@@ -168,7 +167,7 @@ class Wget(FetchMethod):
"""
host = req.host
if not host:
- raise urlllib2.URLError('no host given')
+ raise urllib.error.URLError('no host given')
h = http_class(host, timeout=req.timeout) # will parse host:port
h.set_debuglevel(self._debuglevel)
@@ -185,7 +184,7 @@ class Wget(FetchMethod):
# request.
# Don't close connection when connection_cache is enabled,
- if fetch.connection_cache is None:
+ if fetch.connection_cache is None:
headers["Connection"] = "close"
else:
headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
@@ -252,7 +251,7 @@ class Wget(FetchMethod):
pass
closed = False
- resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
+ resp = urllib.response.addinfourl(fp_dummy(), r.msg, req.get_full_url())
resp.code = r.status
resp.msg = r.reason
@@ -271,17 +270,16 @@ class Wget(FetchMethod):
fp.read()
fp.close()
- newheaders = dict((k,v) for k,v in list(req.headers.items())
+ newheaders = dict((k, v) for k, v in list(req.headers.items())
if k.lower() not in ("content-length", "content-type"))
return self.parent.open(urllib.request.Request(req.get_full_url(),
headers=newheaders,
origin_req_host=req.origin_req_host,
unverifiable=True))
- """
- Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
- Forbidden when they actually mean 405 Method Not Allowed.
- """
+
+ # Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
+ # Forbidden when they actually mean 405 Method Not Allowed.
http_error_403 = http_error_405
@@ -292,15 +290,15 @@ class Wget(FetchMethod):
"""
def redirect_request(self, req, fp, code, msg, headers, newurl):
newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
- newreq.get_method = lambda: req.get_method()
+ newreq.get_method = req.get_method
return newreq
exported_proxies = export_proxies(d)
handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
- if export_proxies:
+ if exported_proxies:
handlers.append(urllib.request.ProxyHandler())
handlers.append(CacheHTTPHandler())
- # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
+ # Since Python 2.7.9 ssl cert validation is enabled by default
# see PEP-0476, this causes verification errors on some https servers
# so disable by default.
import ssl
@@ -319,19 +317,19 @@ class Wget(FetchMethod):
'''Adds Basic auth to http request, pass in login:password as string'''
import base64
encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
- authheader = "Basic %s" % encodeuser
+ authheader = "Basic %s" % encodeuser
r.add_header("Authorization", authheader)
- if ud.user:
- add_basic_auth(ud.user, r)
+ if ud.user and ud.pswd:
+ add_basic_auth(ud.user + ':' + ud.pswd, r)
try:
- import netrc, urllib.parse
+ import netrc
n = netrc.netrc()
login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
add_basic_auth("%s:%s" % (login, password), r)
except (TypeError, ImportError, IOError, netrc.NetrcParseError):
- pass
+ pass
with opener.open(r) as response:
pass
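
With this hunk an Authorization header is only attached when the URL supplies both a user and a password, and ~/.netrc is still consulted for the host afterwards. The same header construction in isolation, with placeholder URL and credentials:

    import base64
    import netrc
    import urllib.parse
    import urllib.request

    def add_basic_auth(login_str, request):
        # login_str is "user:password", as in the code above.
        token = base64.b64encode(login_str.encode('utf-8')).decode('utf-8')
        request.add_header("Authorization", "Basic %s" % token)

    uri = "https://downloads.example.org/private/src.tar.gz"   # placeholder
    req = urllib.request.Request(uri)
    user, pswd = "builder", "secret"                           # placeholders
    if user and pswd:
        add_basic_auth(user + ':' + pswd, req)
    try:
        creds = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
        if creds:
            add_basic_auth("%s:%s" % (creds[0], creds[2]), req)
    except (IOError, netrc.NetrcParseError):
        pass
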
@@ -396,18 +394,14 @@ class Wget(FetchMethod):
(oldpn, oldpv, oldsuffix) = old
(newpn, newpv, newsuffix) = new
- """
- Check for a new suffix type that we have never heard of before
- """
- if (newsuffix):
+ # Check for a new suffix type that we have never heard of before
+ if newsuffix:
m = self.suffix_regex_comp.search(newsuffix)
if not m:
bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
return False
- """
- Not our package so ignore it
- """
+ # Not our package so ignore it
if oldpn != newpn:
return False
@@ -473,15 +467,14 @@ class Wget(FetchMethod):
return ""
- def _check_latest_version_by_dir(self, dirver, package, package_regex,
- current_version, ud, d):
+ def _check_latest_version_by_dir(self, dirver, package, package_regex, current_version, ud, d):
"""
- Scan every directory in order to get upstream version.
+ Scan every directory in order to get upstream version.
"""
version_dir = ['', '', '']
version = ['', '', '']
- dirver_regex = re.compile("(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
+ dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
s = dirver_regex.search(dirver)
if s:
version_dir[1] = s.group('ver')
@@ -541,26 +534,26 @@ class Wget(FetchMethod):
gst-fluendo-mp3
"""
# match most patterns which uses "-" as separator to version digits
- pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
+ pn_prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
# a loose pattern such as for unzip552.tar.gz
- pn_prefix2 = "[a-zA-Z]+"
+ pn_prefix2 = r"[a-zA-Z]+"
# a loose pattern such as for 80325-quicky-0.4.tar.gz
- pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
+ pn_prefix3 = r"[0-9]+[-]?[a-zA-Z]+"
# Save the Package Name (pn) Regex for use later
- pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
+ pn_regex = r"(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
# match version
- pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
+ pver_regex = r"(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
# match arch
parch_regex = "-source|_all_"
# src.rpm extension was added only for rpm package. Can be removed if the rpm
# packaged will always be considered as having to be manually upgraded
- psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+ psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
# match name, version and archive type of a package
- package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
+ package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
% (pn_regex, pver_regex, parch_regex, psuffix_regex))
self.suffix_regex_comp = re.compile(psuffix_regex)
@@ -572,7 +565,7 @@ class Wget(FetchMethod):
version = self._parse_path(package_regex_comp, package)
if version:
package_custom_regex_comp = re.compile(
- "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
+ r"(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
(re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
else:
package_custom_regex_comp = None
@@ -589,7 +582,7 @@ class Wget(FetchMethod):
current_version = ['', d.getVar('PV'), '']
"""possible to have no version in pkg name, such as spectrum-fw"""
- if not re.search("\d+", package):
+ if not re.search(r"\d+", package):
current_version[1] = re.sub('_', '.', current_version[1])
current_version[1] = re.sub('-', '.', current_version[1])
return (current_version[1], '')
@@ -607,13 +600,13 @@ class Wget(FetchMethod):
# search for version matches on folders inside the path, like:
# "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
- dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
+ dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
m = dirver_regex.search(path)
if m:
pn = d.getVar('PN')
dirver = m.group('dirver')
- dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
+ dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
if not dirver_pn_regex.search(dirver):
return (self._check_latest_version_by_dir(dirver,
package, package_regex, current_version, ud, d), '')
diff --git a/poky/bitbake/lib/bb/main.py b/poky/bitbake/lib/bb/main.py
index 7dc953da6..41dd3b9e0 100755
--- a/poky/bitbake/lib/bb/main.py
+++ b/poky/bitbake/lib/bb/main.py
@@ -475,10 +475,11 @@ def setup_bitbake(configParams, configuration, extrafeatures=None):
if not retries:
raise
retries -= 1
+ tryno = 8 - retries
if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError)):
- logger.info("Retrying server connection...")
+ logger.info("Retrying server connection (#%d)..." % tryno)
else:
- logger.info("Retrying server connection... (%s)" % traceback.format_exc())
+ logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc()))
if not retries:
bb.fatal("Unable to connect to bitbake server, or start one")
if retries < 5:
diff --git a/poky/bitbake/lib/bb/monitordisk.py b/poky/bitbake/lib/bb/monitordisk.py
index 833cd3d34..2ad1e6156 100644
--- a/poky/bitbake/lib/bb/monitordisk.py
+++ b/poky/bitbake/lib/bb/monitordisk.py
@@ -28,16 +28,16 @@ def convertGMK(unit):
""" Convert the space unit G, M, K, the unit is case-insensitive """
- unitG = re.match('([1-9][0-9]*)[gG]\s?$', unit)
+ unitG = re.match(r'([1-9][0-9]*)[gG]\s?$', unit)
if unitG:
return int(unitG.group(1)) * (1024 ** 3)
- unitM = re.match('([1-9][0-9]*)[mM]\s?$', unit)
+ unitM = re.match(r'([1-9][0-9]*)[mM]\s?$', unit)
if unitM:
return int(unitM.group(1)) * (1024 ** 2)
- unitK = re.match('([1-9][0-9]*)[kK]\s?$', unit)
+ unitK = re.match(r'([1-9][0-9]*)[kK]\s?$', unit)
if unitK:
return int(unitK.group(1)) * 1024
- unitN = re.match('([1-9][0-9]*)\s?$', unit)
+ unitN = re.match(r'([1-9][0-9]*)\s?$', unit)
if unitN:
return int(unitN.group(1))
else:
@@ -83,7 +83,7 @@ def getDiskData(BBDirs, configuration):
for pathSpaceInode in BBDirs.split():
# The input format is: "dir,space,inode", dir is a must, space
# and inode are optional
- pathSpaceInodeRe = re.match('([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
+ pathSpaceInodeRe = re.match(r'([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
if not pathSpaceInodeRe:
printErr("Invalid value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
return None
@@ -147,7 +147,7 @@ def getInterval(configuration):
else:
# The disk space or inode interval is optional, but it should
# have a correct value once it is specified
- intervalRe = re.match('([^,]*),?\s*(.*)', interval)
+ intervalRe = re.match(r'([^,]*),?\s*(.*)', interval)
if intervalRe:
intervalSpace = intervalRe.group(1)
if intervalSpace:
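
The regex changes in this file are again raw-string conversions only; convertGMK() still accepts case-insensitive G/M/K suffixes and treats a bare number as bytes. Assuming bitbake's lib/ directory is on sys.path, the expected results look like this:

    from bb.monitordisk import convertGMK

    assert convertGMK("2G")   == 2 * 1024 ** 3
    assert convertGMK("512m") == 512 * 1024 ** 2
    assert convertGMK("100K") == 100 * 1024
    assert convertGMK("4096") == 4096
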
diff --git a/poky/bitbake/lib/bb/parse/ast.py b/poky/bitbake/lib/bb/parse/ast.py
index 9d20c323f..6d7c80b34 100644
--- a/poky/bitbake/lib/bb/parse/ast.py
+++ b/poky/bitbake/lib/bb/parse/ast.py
@@ -178,7 +178,7 @@ class MethodNode(AstNode):
funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
self.python = True
text = "def %s(d):\n" % (funcname) + text
- bb.methodpool.insert_method(funcname, text, self.filename, self.lineno - len(self.body))
+ bb.methodpool.insert_method(funcname, text, self.filename, self.lineno - len(self.body) - 1)
anonfuncs = data.getVar('__BBANONFUNCS', False) or []
anonfuncs.append(funcname)
data.setVar('__BBANONFUNCS', anonfuncs)
diff --git a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index e5039e3bd..9dba5f233 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -38,14 +38,15 @@ from .ConfHandler import include, init
# For compatibility
bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
-__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
-__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
-__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
-__addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
-__deltask_regexp__ = re.compile("deltask\s+(?P<func>\w+)")
-__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
-__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
-__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
+__func_start_regexp__ = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
+__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
+__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
+__deltask_regexp__ = re.compile(r"deltask\s+(?P<func>\w+)")
+__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
+__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
+__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
+__python_tab_regexp__ = re.compile(r" *\t")
__infunc__ = []
__inpython__ = False
@@ -160,6 +161,16 @@ def handle(fn, d, include):
def feeder(lineno, s, fn, root, statements, eof=False):
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
+
+ # Check tabs in python functions:
+ # - def py_funcname(): covered by __inpython__
+ # - python(): covered by '__anonymous' == __infunc__[0]
+ # - python funcname(): covered by __infunc__[3]
+ if __inpython__ or (__infunc__ and ('__anonymous' == __infunc__[0] or __infunc__[3])):
+ tab = __python_tab_regexp__.match(s)
+ if tab:
+ bb.warn('python should use 4 spaces indentation, but found tabs in %s, line %s' % (root, lineno))
+
if __infunc__:
if s == '}':
__body__.append('')
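
The new __python_tab_regexp__ only matches lines whose leading whitespace contains a tab, so Python function bodies indented purely with spaces never trigger the warning. A quick check of that behaviour with invented lines:

    import re

    __python_tab_regexp__ = re.compile(r" *\t")

    samples = [
        "    d.setVar('A', '1')",    # four spaces: no warning
        "\td.setVar('A', '1')",      # leading tab: warning
        "  \t  d.setVar('A', '1')",  # spaces then a tab: warning
    ]
    for line in samples:
        print(bool(__python_tab_regexp__.match(line)), repr(line))
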
diff --git a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 9d3ebe16f..ea49f8ca9 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -147,7 +147,7 @@ def handle(fn, data, include):
continue
s = s.rstrip()
while s[-1] == '\\':
- s2 = f.readline().strip()
+ s2 = f.readline().rstrip()
lineno = lineno + 1
if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
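
Switching the continuation line from strip() to rstrip() keeps whatever leading whitespace the author wrote after the backslash, so folded values retain their spacing. A small illustration of the join the parser performs (the variable content is made up):

    # Folding a backslash continuation, as ConfHandler.handle() does.
    s  = 'VAR = "first \\'.rstrip()      # first physical line, ends with '\'
    s2 = '       second"'                # continuation line from f.readline()

    joined_old = s[:-1] + s2.strip()     # old: 'VAR = "first second"'
    joined_new = s[:-1] + s2.rstrip()    # new: leading spaces preserved
    print(joined_old)
    print(joined_new)
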
diff --git a/poky/bitbake/lib/bb/persist_data.py b/poky/bitbake/lib/bb/persist_data.py
index bef701861..0d44100f1 100644
--- a/poky/bitbake/lib/bb/persist_data.py
+++ b/poky/bitbake/lib/bb/persist_data.py
@@ -29,6 +29,7 @@ import warnings
from bb.compat import total_ordering
from collections import Mapping
import sqlite3
+import contextlib
sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -36,84 +37,181 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
logger = logging.getLogger("BitBake.PersistData")
-if hasattr(sqlite3, 'enable_shared_cache'):
- try:
- sqlite3.enable_shared_cache(True)
- except sqlite3.OperationalError:
- pass
-
@total_ordering
class SQLTable(collections.MutableMapping):
+ class _Decorators(object):
+ @staticmethod
+ def retry(*, reconnect=True):
+ """
+ Decorator that restarts a function if a database locked sqlite
+ exception occurs. If reconnect is True, the database connection
+ will be closed and reopened each time a failure occurs
+ """
+ def retry_wrapper(f):
+ def wrap_func(self, *args, **kwargs):
+ # Reconnect if necessary
+ if self.connection is None and reconnect:
+ self.reconnect()
+
+ count = 0
+ while True:
+ try:
+ return f(self, *args, **kwargs)
+ except sqlite3.OperationalError as exc:
+ if count < 500 and ('is locked' in str(exc) or 'locking protocol' in str(exc)):
+ count = count + 1
+ if reconnect:
+ self.reconnect()
+ continue
+ raise
+ return wrap_func
+ return retry_wrapper
+
+ @staticmethod
+ def transaction(f):
+ """
+ Decorator that starts a database transaction and creates a database
+ cursor for performing queries. If no exception is thrown, the
+            database results are committed. If an exception occurs, the database
+ is rolled back. In all cases, the cursor is closed after the
+ function ends.
+
+ Note that the cursor is passed as an extra argument to the function
+ after `self` and before any of the normal arguments
+ """
+ def wrap_func(self, *args, **kwargs):
+ # Context manager will COMMIT the database on success,
+ # or ROLLBACK on an exception
+ with self.connection:
+ # Automatically close the cursor when done
+ with contextlib.closing(self.connection.cursor()) as cursor:
+ return f(self, cursor, *args, **kwargs)
+ return wrap_func
+
"""Object representing a table/domain in the database"""
def __init__(self, cachefile, table):
self.cachefile = cachefile
self.table = table
- self.cursor = connect(self.cachefile)
-
- self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
- % table)
-
- def _execute(self, *query):
- """Execute a query, waiting to acquire a lock if necessary"""
- count = 0
- while True:
- try:
- return self.cursor.execute(*query)
- except sqlite3.OperationalError as exc:
- if 'database is locked' in str(exc) and count < 500:
- count = count + 1
+
+ self.connection = None
+ self._execute_single("CREATE TABLE IF NOT EXISTS %s(key TEXT PRIMARY KEY NOT NULL, value TEXT);" % table)
+
+ @_Decorators.retry(reconnect=False)
+ @_Decorators.transaction
+ def _setup_database(self, cursor):
+ cursor.execute("pragma synchronous = off;")
+ # Enable WAL and keep the autocheckpoint length small (the default is
+ # usually 1000). Persistent caches are usually read-mostly, so keeping
+ # this short will keep readers running quickly
+ cursor.execute("pragma journal_mode = WAL;")
+ cursor.execute("pragma wal_autocheckpoint = 100;")
+
+ def reconnect(self):
+ if self.connection is not None:
+ self.connection.close()
+ self.connection = sqlite3.connect(self.cachefile, timeout=5)
+ self.connection.text_factory = str
+ self._setup_database()
+
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def _execute_single(self, cursor, *query):
+ """
+ Executes a single query and discards the results. This correctly closes
+ the database cursor when finished
+ """
+ cursor.execute(*query)
+
+ @_Decorators.retry()
+ def _row_iter(self, f, *query):
+ """
+ Helper function that returns a row iterator. Each time __next__ is
+ called on the iterator, the provided function is evaluated to determine
+ the return value
+ """
+ class CursorIter(object):
+ def __init__(self, cursor):
+ self.cursor = cursor
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ row = self.cursor.fetchone()
+ if row is None:
self.cursor.close()
- self.cursor = connect(self.cachefile)
- continue
- raise
+ raise StopIteration
+ return f(row)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, typ, value, traceback):
+ self.cursor.close()
+ return False
+
+ cursor = self.connection.cursor()
+ try:
+ cursor.execute(*query)
+ return CursorIter(cursor)
+ except:
+ cursor.close()
def __enter__(self):
- self.cursor.__enter__()
+ self.connection.__enter__()
return self
def __exit__(self, *excinfo):
- self.cursor.__exit__(*excinfo)
-
- def __getitem__(self, key):
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- for row in data:
+ self.connection.__exit__(*excinfo)
+
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __getitem__(self, cursor, key):
+ cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
+ row = cursor.fetchone()
+ if row is not None:
return row[1]
raise KeyError(key)
- def __delitem__(self, key):
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __delitem__(self, cursor, key):
if key not in self:
raise KeyError(key)
- self._execute("DELETE from %s where key=?;" % self.table, [key])
+ cursor.execute("DELETE from %s where key=?;" % self.table, [key])
- def __setitem__(self, key, value):
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __setitem__(self, cursor, key, value):
if not isinstance(key, str):
raise TypeError('Only string keys are supported')
elif not isinstance(value, str):
raise TypeError('Only string values are supported')
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- exists = len(list(data))
- if exists:
- self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
- [value, key])
+ cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
+ row = cursor.fetchone()
+ if row is not None:
+ cursor.execute("UPDATE %s SET value=? WHERE key=?;" % self.table, [value, key])
else:
- self._execute("INSERT into %s(key, value) values (?, ?);" %
- self.table, [key, value])
-
- def __contains__(self, key):
- return key in set(self)
-
- def __len__(self):
- data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
- for row in data:
+ cursor.execute("INSERT into %s(key, value) values (?, ?);" % self.table, [key, value])
+
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __contains__(self, cursor, key):
+ cursor.execute('SELECT * from %s where key=?;' % self.table, [key])
+ return cursor.fetchone() is not None
+
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def __len__(self, cursor):
+ cursor.execute("SELECT COUNT(key) FROM %s;" % self.table)
+ row = cursor.fetchone()
+ if row is not None:
return row[0]
def __iter__(self):
- data = self._execute("SELECT key FROM %s;" % self.table)
- return (row[0] for row in data)
+ return self._row_iter(lambda row: row[0], "SELECT key from %s;" % self.table)
def __lt__(self, other):
if not isinstance(other, Mapping):
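
Most of the new behaviour in this file comes from the two decorators introduced above: retry() re-runs a method (optionally reconnecting first) while sqlite reports the database as locked, and transaction() wraps it in a commit-or-rollback block whose cursor is always closed; journal_mode=WAL then keeps concurrent readers cheap. Stripped of the SQLTable plumbing, the pattern looks roughly like this (table name and database path are placeholders):

    import contextlib
    import sqlite3

    def retry(func, attempts=500):
        """Re-run func while sqlite reports the database as locked."""
        def wrapper(conn, *args, **kwargs):
            for _ in range(attempts):
                try:
                    return func(conn, *args, **kwargs)
                except sqlite3.OperationalError as exc:
                    if 'is locked' not in str(exc) and 'locking protocol' not in str(exc):
                        raise
            return func(conn, *args, **kwargs)
        return wrapper

    def transaction(func):
        """Commit on success, roll back on error, always close the cursor."""
        def wrapper(conn, *args, **kwargs):
            with conn:                                   # COMMIT or ROLLBACK
                with contextlib.closing(conn.cursor()) as cursor:
                    return func(cursor, *args, **kwargs)
        return wrapper

    @retry
    @transaction
    def set_value(cursor, key, value):
        cursor.execute("INSERT OR REPLACE INTO cache(key, value) VALUES (?, ?)",
                       (key, value))

    conn = sqlite3.connect("example-cache.sqlite3", timeout=5)   # placeholder path
    conn.execute("CREATE TABLE IF NOT EXISTS cache(key TEXT PRIMARY KEY, value TEXT)")
    conn.execute("pragma journal_mode = WAL;")
    set_value(conn, "answer", "42")
    conn.close()
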
@@ -122,25 +220,27 @@ class SQLTable(collections.MutableMapping):
return len(self) < len(other)
def get_by_pattern(self, pattern):
- data = self._execute("SELECT * FROM %s WHERE key LIKE ?;" %
- self.table, [pattern])
- return [row[1] for row in data]
+ return self._row_iter(lambda row: row[1], "SELECT * FROM %s WHERE key LIKE ?;" %
+ self.table, [pattern])
def values(self):
return list(self.itervalues())
def itervalues(self):
- data = self._execute("SELECT value FROM %s;" % self.table)
- return (row[0] for row in data)
+ return self._row_iter(lambda row: row[0], "SELECT value FROM %s;" %
+ self.table)
def items(self):
return list(self.iteritems())
def iteritems(self):
- return self._execute("SELECT * FROM %s;" % self.table)
+ return self._row_iter(lambda row: (row[0], row[1]), "SELECT * FROM %s;" %
+ self.table)
- def clear(self):
- self._execute("DELETE FROM %s;" % self.table)
+ @_Decorators.retry()
+ @_Decorators.transaction
+ def clear(self, cursor):
+ cursor.execute("DELETE FROM %s;" % self.table)
def has_key(self, key):
return key in self
@@ -194,12 +294,6 @@ class PersistData(object):
"""
del self.data[domain][key]
-def connect(database):
- connection = sqlite3.connect(database, timeout=5, isolation_level=None)
- connection.execute("pragma synchronous = off;")
- connection.text_factory = str
- return connection
-
def persist(domain, d):
"""Convenience factory for SQLTable objects based upon metadata"""
import bb.utils
diff --git a/poky/bitbake/lib/bb/providers.py b/poky/bitbake/lib/bb/providers.py
index c2aa98c06..f496d84d1 100644
--- a/poky/bitbake/lib/bb/providers.py
+++ b/poky/bitbake/lib/bb/providers.py
@@ -129,7 +129,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
preferred_v = cfgData.getVar("PREFERRED_VERSION")
if preferred_v:
- m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
+ m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v)
if m:
if m.group(1):
preferred_e = m.group(1)[:-1]
@@ -384,7 +384,7 @@ def getRuntimeProviders(dataCache, rdepend):
# Only search dynamic packages if we can't find anything in other variables
for pattern in dataCache.packages_dynamic:
- pattern = pattern.replace('+', "\+")
+ pattern = pattern.replace(r'+', r"\+")
if pattern in regexp_cache:
regexp = regexp_cache[pattern]
else:
diff --git a/poky/bitbake/lib/bb/pysh/builtin.py b/poky/bitbake/lib/bb/pysh/builtin.py
deleted file mode 100644
index a8814dc33..000000000
--- a/poky/bitbake/lib/bb/pysh/builtin.py
+++ /dev/null
@@ -1,710 +0,0 @@
-# builtin.py - builtins and utilities definitions for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Builtin and internal utilities implementations.
-
-- Beware not to use python interpreter environment as if it were the shell
-environment. For instance, commands working directory must be explicitely handled
-through env['PWD'] instead of relying on python working directory.
-"""
-import errno
-import optparse
-import os
-import re
-import subprocess
-import sys
-import time
-
-def has_subprocess_bug():
- return getattr(subprocess, 'list2cmdline') and \
- ( subprocess.list2cmdline(['']) == '' or \
- subprocess.list2cmdline(['foo|bar']) == 'foo|bar')
-
-# Detect python bug 1634343: "subprocess swallows empty arguments under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1634343&group_id=5470&atid=105470>
-# Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1710802&group_id=5470&atid=105470>
-if has_subprocess_bug():
- import subprocess_fix
- subprocess.list2cmdline = subprocess_fix.list2cmdline
-
-from sherrors import *
-
-class NonExitingParser(optparse.OptionParser):
- """OptionParser default behaviour upon error is to print the error message and
- exit. Raise a utility error instead.
- """
- def error(self, msg):
- raise UtilityError(msg)
-
-#-------------------------------------------------------------------------------
-# set special builtin
-#-------------------------------------------------------------------------------
-OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False,
- help='The shell shall disable pathname expansion.')
-OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False,
- help="""When this option is on, if a simple command fails for any of the \
- reasons listed in Consequences of Shell Errors or returns an exit status \
- value >0, and is not part of the compound list following a while, until, \
- or if keyword, and is not a part of an AND or OR list, and is not a \
- pipeline preceded by the ! reserved word, then the shell shall immediately \
- exit.""")
-OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False,
- help="""The shell shall write to standard error a trace for each command \
- after it expands the command and before it executes it. It is unspecified \
- whether the command that turns tracing off is traced.""")
-
-def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SET.parse_args(args)
- env = interp.get_env()
-
- if option.has_f:
- env.set_opt('-f')
- if option.has_e:
- env.set_opt('-e')
- if option.has_x:
- env.set_opt('-x')
- return 0
-
-#-------------------------------------------------------------------------------
-# shift special builtin
-#-------------------------------------------------------------------------------
-def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- params = interp.get_env().get_positional_args()
- if args:
- try:
- n = int(args[0])
- if n > len(params):
- raise ValueError()
- except ValueError:
- return 1
- else:
- n = 1
-
- params[:n] = []
- interp.get_env().set_positional_args(params)
- return 0
-
-#-------------------------------------------------------------------------------
-# export special builtin
-#-------------------------------------------------------------------------------
-OPT_EXPORT = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_EXPORT.parse_args(args)
- if option.has_p:
- raise NotImplementedError()
-
- for arg in args:
- try:
- name, value = arg.split('=', 1)
- except ValueError:
- name, value = arg, None
- env = interp.get_env().export(name, value)
-
- return 0
-
-#-------------------------------------------------------------------------------
-# return special builtin
-#-------------------------------------------------------------------------------
-def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- res = 0
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = 0
- if not 0<=res<=255:
- res = 0
-
- # BUG: should be last executed command exit code
- raise ReturnSignal(res)
-
-#-------------------------------------------------------------------------------
-# trap special builtin
-#-------------------------------------------------------------------------------
-def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- if len(args) < 2:
- stderr.write('trap: usage: trap [[arg] signal_spec ...]\n')
- return 2
-
- action = args[0]
- for sig in args[1:]:
- try:
- env.traps[sig] = action
- except Exception as e:
- stderr.write('trap: %s\n' % str(e))
- return 0
-
-#-------------------------------------------------------------------------------
-# unset special builtin
-#-------------------------------------------------------------------------------
-OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions")
-OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False)
-OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False)
-
-def builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_UNSET.parse_args(args)
-
- status = 0
- env = interp.get_env()
- for arg in args:
- try:
- if option.has_f:
- env.remove_function(arg)
- else:
- del env[arg]
- except KeyError:
- pass
- except VarAssignmentError:
- status = 1
-
- return status
-
-#-------------------------------------------------------------------------------
-# wait special builtin
-#-------------------------------------------------------------------------------
-def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return interp.wait([int(arg) for arg in args])
-
-#-------------------------------------------------------------------------------
-# cat utility
-#-------------------------------------------------------------------------------
-def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if not args:
- args = ['-']
-
- status = 0
- for arg in args:
- if arg == '-':
- data = stdin.read()
- else:
- path = os.path.join(env['PWD'], arg)
- try:
- f = file(path, 'rb')
- try:
- data = f.read()
- finally:
- f.close()
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
- status = 1
- continue
- stdout.write(data)
- stdout.flush()
- return status
-
-#-------------------------------------------------------------------------------
-# cd utility
-#-------------------------------------------------------------------------------
-OPT_CD = NonExitingParser("cd - change the working directory")
-
-def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_CD.parse_args(args)
- env = interp.get_env()
-
- directory = None
- printdir = False
- if not args:
- home = env.get('HOME')
- if home:
- # Unspecified, do nothing
- return 0
- else:
- directory = home
- elif len(args)==1:
- directory = args[0]
- if directory=='-':
- if 'OLDPWD' not in env:
- raise UtilityError("OLDPWD not set")
- printdir = True
- directory = env['OLDPWD']
- else:
- raise UtilityError("too many arguments")
-
- curpath = None
- # Absolute directories will be handled correctly by the os.path.join call.
- if not directory.startswith('.') and not directory.startswith('..'):
- cdpaths = env.get('CDPATH', '.').split(';')
- for cdpath in cdpaths:
- p = os.path.join(cdpath, directory)
- if os.path.isdir(p):
- curpath = p
- break
-
- if curpath is None:
- curpath = directory
- curpath = os.path.join(env['PWD'], directory)
-
- env['OLDPWD'] = env['PWD']
- env['PWD'] = curpath
- if printdir:
- stdout.write('%s\n' % curpath)
- return 0
-
-#-------------------------------------------------------------------------------
-# colon utility
-#-------------------------------------------------------------------------------
-def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# echo utility
-#-------------------------------------------------------------------------------
-def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Echo only takes arguments, no options. Use printf if you need fancy stuff.
- output = ' '.join(args) + '\n'
- stdout.write(output)
- stdout.flush()
- return 0
-
-#-------------------------------------------------------------------------------
-# egrep utility
-#-------------------------------------------------------------------------------
-# egrep is usually a shell script.
-# Unfortunately, pysh does not support shell scripts *with arguments* right now,
-# so the redirection is implemented here, assuming grep is available.
-def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-E'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# env utility
-#-------------------------------------------------------------------------------
-def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if args and args[0]=='-i':
- raise NotImplementedError('env: -i option is not implemented')
-
- i = 0
- for arg in args:
- if '=' not in arg:
- break
- # Update the current environment
- name, value = arg.split('=', 1)
- env[name] = value
- i += 1
-
- if args[i:]:
- # Find then execute the specified interpreter
- utility = env.find_in_path(args[i])
- if not utility:
- return 127
- args[i:i+1] = utility
- name = args[i]
- args = args[i+1:]
- try:
- return run_command(name, args, interp, env, stdin, stdout, stderr,
- debugflags)
- except UtilityError:
- stderr.write('env: failed to execute %s' % ' '.join([name]+args))
- return 126
- else:
- for pair in env.get_variables().iteritems():
- stdout.write('%s=%s\n' % pair)
- return 0
-
-#-------------------------------------------------------------------------------
-# exit utility
-#-------------------------------------------------------------------------------
-def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- res = None
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = None
- if not 0<=res<=255:
- res = None
-
- if res is None:
- # BUG: should be last executed command exit code
- res = 0
-
- raise ExitSignal(res)
-
-#-------------------------------------------------------------------------------
-# fgrep utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-F'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# gunzip utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# kill utility
-#-------------------------------------------------------------------------------
-def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- for arg in args:
- pid = int(arg)
- status = subprocess.call(['pskill', '/T', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- # pskill is asynchronous, hence the stupid polling loop
- while 1:
- p = subprocess.Popen(['pslist', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- output = p.communicate()[0]
- if ('process %d was not' % pid) in output:
- break
- time.sleep(1)
- return status
-
-#-------------------------------------------------------------------------------
-# mkdir utility
-#-------------------------------------------------------------------------------
-OPT_MKDIR = NonExitingParser("mkdir - make directories.")
-OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # TODO: implement umask
- # TODO: implement proper utility error report
- option, args = OPT_MKDIR.parse_args(args)
- for arg in args:
- path = os.path.join(env['PWD'], arg)
- if option.has_p:
- try:
- os.makedirs(path)
- except IOError as e:
- if e.errno != errno.EEXIST:
- raise
- else:
- os.mkdir(path)
- return 0
-
-#-------------------------------------------------------------------------------
-# netstat utility
-#-------------------------------------------------------------------------------
-def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags):
- # Do you really expect me to implement netstat ?
- # This empty form is enough for Mercurial tests since it's
- # supposed to generate nothing upon success. Faking this test
- # is not a big deal either.
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# pwd utility
-#-------------------------------------------------------------------------------
-OPT_PWD = NonExitingParser("pwd - return working directory name")
-OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True,
- help="""If the PWD environment variable contains an absolute pathname of \
- the current directory that does not contain the filenames dot or dot-dot, \
- pwd shall write this pathname to standard output. Otherwise, the -L option \
- shall behave as the -P option.""")
-OPT_PWD.add_option('-P', action='store_true', dest='has_L', default=False,
- help="""The absolute pathname written shall not contain filenames that, in \
- the context of the pathname, refer to files of type symbolic link.""")
-
-def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_PWD.parse_args(args)
- stdout.write('%s\n' % env['PWD'])
- return 0
-
-#-------------------------------------------------------------------------------
-# printf utility
-#-------------------------------------------------------------------------------
-RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)')
-
-def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- def replace(m):
- assert m.group()
- g = m.group()[1:]
- if g.startswith('x'):
- return chr(int(g[1:], 16))
- if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g):
- # Yay, an octal number
- return chr(int(g, 8))
- return {
- 'a': '\a',
- 'b': '\b',
- 'f': '\f',
- 'n': '\n',
- 'r': '\r',
- 't': '\t',
- 'v': '\v',
- '\\': '\\',
- }.get(g)
-
- # Convert escape sequences
- format = re.sub(RE_UNESCAPE, replace, args[0])
- stdout.write(format % tuple(args[1:]))
- return 0
-
-#-------------------------------------------------------------------------------
-# true utility
-#-------------------------------------------------------------------------------
-def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# sed utility
-#-------------------------------------------------------------------------------
-RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$')
-
-# cygwin sed fails with some expressions when they do not end with a single space.
-# see unit tests for details. Interestingly, the same expressions works perfectly
-# in cygwin shell.
-def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Scan pattern arguments and append a space if necessary
- for i in range(len(args)):
- if not RE_SED.search(args[i]):
- continue
- args[i] = args[i] + ' '
-
- return run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# sleep utility
-#-------------------------------------------------------------------------------
-def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- time.sleep(int(args[0]))
- return 0
-
-#-------------------------------------------------------------------------------
-# sort utility
-#-------------------------------------------------------------------------------
-OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files")
-
-def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags):
-
- def sort(path):
- if path == '-':
- lines = stdin.readlines()
- else:
- try:
- f = file(path)
- try:
- lines = f.readlines()
- finally:
- f.close()
- except IOError as e:
- stderr.write(str(e) + '\n')
- return 1
-
- if lines and lines[-1][-1]!='\n':
- lines[-1] = lines[-1] + '\n'
- return lines
-
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SORT.parse_args(args)
- alllines = []
-
- if len(args)<=0:
- args += ['-']
-
- # Load all files lines
- curdir = os.getcwd()
- try:
- os.chdir(env['PWD'])
- for path in args:
- alllines += sort(path)
- finally:
- os.chdir(curdir)
-
- alllines.sort()
- for line in alllines:
- stdout.write(line)
- return 0
-
-#-------------------------------------------------------------------------------
-# hg utility
-#-------------------------------------------------------------------------------
-
-hgcommands = [
- 'add',
- 'addremove',
- 'commit', 'ci',
- 'debugrename',
- 'debugwalk',
- 'falabala', # Dummy command used in a mercurial test
- 'incoming',
- 'locate',
- 'pull',
- 'push',
- 'qinit',
- 'remove', 'rm',
- 'rename', 'mv',
- 'revert',
- 'showconfig',
- 'status', 'st',
- 'strip',
- ]
-
-def rewriteslashes(name, args):
- # Several hg commands output file paths, rewrite the separators
- if len(args) > 1 and name.lower().endswith('python') \
- and args[0].endswith('hg'):
- for cmd in hgcommands:
- if cmd in args[1:]:
- return True
-
- # svn output contains many paths with OS specific separators.
- # Normalize these to unix paths.
- base = os.path.basename(name)
- if base.startswith('svn'):
- return True
-
- return False
-
-def rewritehg(output):
- if not output:
- return output
- # Rewrite os specific messages
- output = output.replace(': The system cannot find the file specified',
- ': No such file or directory')
- output = re.sub(': Access is denied.*$', ': Permission denied', output)
- output = output.replace(': No connection could be made because the target machine actively refused it',
- ': Connection refused')
- return output
-
-
-def run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags):
- # Execute the command
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- hgbin = interp.options().hgbinary
- ishg = hgbin and ('hg' in name or args and 'hg' in args[0])
- unixoutput = 'cygwin' in name or ishg
-
- exec_env = env.get_variables()
- try:
- # BUG: comparing file descriptor is clearly not a reliable way to tell
- # whether they point on the same underlying object. But in pysh limited
- # scope this is usually right, we do not expect complicated redirections
- # besides usual 2>&1.
- # Still there is one case we have but cannot deal with is when stdout
- # and stderr are redirected *by pysh caller*. This the reason for the
- # --redirect pysh() option.
- # Now, we want to know they are the same because we sometimes need to
- # transform the command output, mostly remove CR-LF to ensure that
- # command output is unix-like. Cygwin utilies are a special case because
- # they explicitely set their output streams to binary mode, so we have
- # nothing to do. For all others commands, we have to guess whether they
- # are sending text data, in which case the transformation must be done.
- # Again, the NUL character test is unreliable but should be enough for
- # hg tests.
- redirected = stdout.fileno()==stderr.fileno()
- if not redirected:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- else:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, err = p.communicate()
- except WindowsError as e:
- raise UtilityError(str(e))
-
- if not unixoutput:
- def encode(s):
- if '\0' in s:
- return s
- return s.replace('\r\n', '\n')
- else:
- encode = lambda s: s
-
- if rewriteslashes(name, args):
- encode1_ = encode
- def encode(s):
- s = encode1_(s)
- s = s.replace('\\\\', '\\')
- s = s.replace('\\', '/')
- return s
-
- if ishg:
- encode2_ = encode
- def encode(s):
- return rewritehg(encode2_(s))
-
- stdout.write(encode(out))
- if not redirected:
- stderr.write(encode(err))
- return p.returncode
-
diff --git a/poky/bitbake/lib/bb/pysh/interp.py b/poky/bitbake/lib/bb/pysh/interp.py
deleted file mode 100644
index d14ecf3c6..000000000
--- a/poky/bitbake/lib/bb/pysh/interp.py
+++ /dev/null
@@ -1,1367 +0,0 @@
-# interp.py - shell interpreter for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Implement the shell interpreter.
-
-Most references are made to "The Open Group Base Specifications Issue 6".
-<http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html>
-"""
-# TODO: document the fact input streams must implement fileno() so Popen will work correctly.
-# it requires non-stdin stream to be implemented as files. Still to be tested...
-# DOC: pathsep is used in PATH instead of ':'. Clearly, there are path syntax issues here.
-# TODO: stop command execution upon error.
-# TODO: sort out the filename/io_number mess. It should be possible to use filenames only.
-# TODO: review subshell implementation
-# TODO: test environment cloning for non-special builtins
-# TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost
-# TODO: unit test for variable assignment
-# TODO: test error management wrt error type/utility type
-# TODO: test for binary output everywhere
-# BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary.
-import base64
-import cPickle as pickle
-import errno
-import glob
-import os
-import re
-import subprocess
-import sys
-import tempfile
-
-try:
- s = set()
- del s
-except NameError:
- from Set import Set as set
-
-import builtin
-from sherrors import *
-import pyshlex
-import pyshyacc
-
-def mappend(func, *args, **kargs):
- """Like map but assume func returns a list. Returned lists are merged into
- a single one.
- """
- return reduce(lambda a,b: a+b, map(func, *args, **kargs), [])
-
-class FileWrapper:
- """File object wrapper to ease debugging.
-
- Allow mode checking and implement file duplication through a simple
- reference counting scheme. Not sure the latter is really useful since
- only real file descriptors can be used.
- """
- def __init__(self, mode, file, close=True):
- if mode not in ('r', 'w', 'a'):
- raise IOError('invalid mode: %s' % mode)
- self._mode = mode
- self._close = close
- if isinstance(file, FileWrapper):
- if file._refcount[0] <= 0:
- raise IOError(0, 'Error')
- self._refcount = file._refcount
- self._refcount[0] += 1
- self._file = file._file
- else:
- self._refcount = [1]
- self._file = file
-
- def dup(self):
- return FileWrapper(self._mode, self, self._close)
-
- def fileno(self):
- """fileno() should be only necessary for input streams."""
- return self._file.fileno()
-
- def read(self, size=-1):
- if self._mode!='r':
- raise IOError(0, 'Error')
- return self._file.read(size)
-
- def readlines(self, *args, **kwargs):
- return self._file.readlines(*args, **kwargs)
-
- def write(self, s):
- if self._mode not in ('w', 'a'):
- raise IOError(0, 'Error')
- return self._file.write(s)
-
- def flush(self):
- self._file.flush()
-
- def close(self):
- if not self._refcount:
- return
- assert self._refcount[0] > 0
-
- self._refcount[0] -= 1
- if self._refcount[0] == 0:
- self._mode = 'c'
- if self._close:
- self._file.close()
- self._refcount = None
-
- def mode(self):
- return self._mode
-
- def __getattr__(self, name):
- if name == 'name':
- self.name = getattr(self._file, name)
- return self.name
- else:
- raise AttributeError(name)
-
- def __del__(self):
- self.close()
-
-
-def win32_open_devnull(mode):
- return open('NUL', mode)
-
-
-class Redirections:
- """Stores open files and their mapping to pseudo-sh file descriptor.
- """
- # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does
-    # not make 1 redirect to 4
- def __init__(self, stdin=None, stdout=None, stderr=None):
- self._descriptors = {}
- if stdin is not None:
- self._add_descriptor(0, stdin)
- if stdout is not None:
- self._add_descriptor(1, stdout)
- if stderr is not None:
- self._add_descriptor(2, stderr)
-
- def add_here_document(self, interp, name, content, io_number=None):
- if io_number is None:
- io_number = 0
-
- if name==pyshlex.unquote_wordtree(name):
- content = interp.expand_here_document(('TOKEN', content))
-
- # Write document content in a temporary file
- tmp = tempfile.TemporaryFile()
- try:
- tmp.write(content)
- tmp.flush()
- tmp.seek(0)
- self._add_descriptor(io_number, FileWrapper('r', tmp))
- except:
- tmp.close()
- raise
-
- def add(self, interp, op, filename, io_number=None):
- if op not in ('<', '>', '>|', '>>', '>&'):
- # TODO: add descriptor duplication and here_documents
- raise RedirectionError('Unsupported redirection operator "%s"' % op)
-
- if io_number is not None:
- io_number = int(io_number)
-
- if (op == '>&' and filename.isdigit()) or filename=='-':
- # No expansion for file descriptors, quote them if you want a filename
- fullname = filename
- else:
- if filename.startswith('/'):
- # TODO: win32 kludge
- if filename=='/dev/null':
- fullname = 'NUL'
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError()
- else:
- fullname = interp.expand_redirection(('TOKEN', filename))
- if not fullname:
- raise RedirectionError('%s: ambiguous redirect' % filename)
- # Build absolute path based on PWD
- fullname = os.path.join(interp.get_env()['PWD'], fullname)
-
- if op=='<':
- return self._add_input_redirection(interp, fullname, io_number)
- elif op in ('>', '>|'):
- clobber = ('>|'==op)
- return self._add_output_redirection(interp, fullname, io_number, clobber)
- elif op=='>>':
- return self._add_output_appending(interp, fullname, io_number)
- elif op=='>&':
- return self._dup_output_descriptor(fullname, io_number)
-
- def close(self):
- if self._descriptors is not None:
- for desc in self._descriptors.itervalues():
- desc.flush()
- desc.close()
- self._descriptors = None
-
- def stdin(self):
- return self._descriptors[0]
-
- def stdout(self):
- return self._descriptors[1]
-
- def stderr(self):
- return self._descriptors[2]
-
- def clone(self):
- clone = Redirections()
- for desc, fileobj in self._descriptors.iteritems():
- clone._descriptors[desc] = fileobj.dup()
- return clone
-
- def _add_output_redirection(self, interp, filename, io_number, clobber):
- if io_number is None:
-            # io_number defaults to standard output
- io_number = 1
-
- if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename):
-            # File already exists in no-clobber mode, bail out
- raise RedirectionError('File "%s" already exists' % filename)
-
- # Open and register
- self._add_file_descriptor(io_number, filename, 'w')
-
- def _add_output_appending(self, interp, filename, io_number):
- if io_number is None:
- io_number = 1
- self._add_file_descriptor(io_number, filename, 'a')
-
- def _add_input_redirection(self, interp, filename, io_number):
- if io_number is None:
- io_number = 0
- self._add_file_descriptor(io_number, filename, 'r')
-
- def _add_file_descriptor(self, io_number, filename, mode):
- try:
- if filename.startswith('/'):
- if filename=='/dev/null':
- f = win32_open_devnull(mode+'b')
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError('cannot open absolute path %s' % repr(filename))
- else:
- f = file(filename, mode+'b')
- except IOError as e:
- raise RedirectionError(str(e))
-
- wrapper = None
- try:
- wrapper = FileWrapper(mode, f)
- f = None
- self._add_descriptor(io_number, wrapper)
- except:
- if f: f.close()
- if wrapper: wrapper.close()
- raise
-
- def _dup_output_descriptor(self, source_fd, dest_fd):
- if source_fd is None:
- source_fd = 1
- self._dup_file_descriptor(source_fd, dest_fd, 'w')
-
- def _dup_file_descriptor(self, source_fd, dest_fd, mode):
- source_fd = int(source_fd)
- if source_fd not in self._descriptors:
- raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd))
- source = self._descriptors[source_fd]
-
- if source.mode()!=mode:
- raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode))
-
- if dest_fd=='-':
- # Close the source descriptor
- del self._descriptors[source_fd]
- source.close()
- else:
- dest_fd = int(dest_fd)
- if dest_fd not in self._descriptors:
- raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd))
-
- dest = self._descriptors[dest_fd]
- if dest.mode()!=mode:
-                raise RedirectionError('Descriptor %s cannot be redirected in mode "%s"' % (str(dest), mode))
-
- self._descriptors[dest_fd] = source.dup()
- dest.close()
-
- def _add_descriptor(self, io_number, file):
- io_number = int(io_number)
-
- if io_number in self._descriptors:
- # Close the current descriptor
- d = self._descriptors[io_number]
- del self._descriptors[io_number]
- d.close()
-
- self._descriptors[io_number] = file
-
- def __str__(self):
- names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v
- in self._descriptors.iteritems()]
- names = ','.join(names)
- return 'Redirections(%s)' % names
-
- def __del__(self):
- self.close()
-
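At its core the class above keeps a mapping from pseudo file descriptor numbers to open files; a rough sketch of how '> log' and '2>&1' manipulate that table (the real class wraps entries in FileWrapper and closes whatever a new registration replaces):

    import sys
    import tempfile

    descriptors = {0: sys.stdin, 1: sys.stdout, 2: sys.stderr}

    log = tempfile.TemporaryFile(mode='w+')
    descriptors[1] = log               # "> log": fd 1 now targets the temp file
    descriptors[2] = descriptors[1]    # "2>&1": fd 2 shares the same object
    descriptors[1].write('captured\n')

    descriptors[1].seek(0)
    assert descriptors[1].read() == 'captured\n'
    log.close()
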
-def cygwin_to_windows_path(path):
- """Turn /cygdrive/c/foo into c:/foo, or return path if it
- is not a cygwin path.
- """
- if not path.startswith('/cygdrive/'):
- return path
- path = path[len('/cygdrive/'):]
- path = path[:1] + ':' + path[1:]
- return path
-
-def win32_to_unix_path(path):
- if path is not None:
- path = path.replace('\\', '/')
- return path
-
-_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
-_SHEBANG_CMDS = {
- '/usr/bin/env': 'env',
- '/bin/sh': 'pysh',
- 'python': 'python',
-}
-
-def resolve_shebang(path, ignoreshell=False):
- """Return a list of arguments as shebang interpreter call or an empty list
- if path does not refer to an executable script.
- See <http://www.opengroup.org/austin/docs/austin_51r2.txt>.
-
- ignoreshell - set to True to ignore sh shebangs. Return an empty list instead.
- """
- try:
- f = file(path)
- try:
- # At most 80 characters in the first line
- header = f.read(80).splitlines()[0]
- finally:
- f.close()
-
- m = _RE_SHEBANG.search(header)
- if not m:
- return []
- cmd, arg = m.group(1,2)
- if os.path.isfile(cmd):
- # Keep this one, the hg script for instance contains a weird windows
- # shebang referencing the current python install.
- cmdfile = os.path.basename(cmd).lower()
- if cmdfile == 'python.exe':
- cmd = 'python'
- pass
- elif cmd not in _SHEBANG_CMDS:
- raise CommandNotFound('Unknown interpreter "%s" referenced in '\
- 'shebang' % header)
- cmd = _SHEBANG_CMDS.get(cmd)
- if cmd is None or (ignoreshell and cmd == 'pysh'):
- return []
- if arg is None:
- return [cmd, win32_to_unix_path(path)]
- return [cmd, arg, win32_to_unix_path(path)]
- except IOError as e:
- if e.errno!=errno.ENOENT and \
- (e.errno!=errno.EPERM and not os.path.isdir(path)): # Opening a directory raises EPERM
- raise
- return []
-
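For reference, the shebang regexp above captures the interpreter and one optional argument; the resolver then maps the interpreter through _SHEBANG_CMDS ('/bin/sh' becomes 'pysh', '/usr/bin/env' becomes 'env'). A quick check of the pattern itself:

    import re

    _RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')

    m = _RE_SHEBANG.search('#!/usr/bin/env python')
    assert m.group(1, 2) == ('/usr/bin/env', 'python')

    m = _RE_SHEBANG.search('#!/bin/sh')
    assert m.group(1, 2) == ('/bin/sh', None)
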
-def win32_find_in_path(name, path):
- if isinstance(path, str):
- path = path.split(os.pathsep)
-
- exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep)
- for p in path:
- p_name = os.path.join(p, name)
-
- prefix = resolve_shebang(p_name)
- if prefix:
- return prefix
-
- for ext in exts:
- p_name_ext = p_name + ext
- if os.path.exists(p_name_ext):
- return [win32_to_unix_path(p_name_ext)]
- return []
-
-class Traps(dict):
- def __setitem__(self, key, value):
- if key not in ('EXIT',):
- raise NotImplementedError()
- super(Traps, self).__setitem__(key, value)
-
-# IFS white spaces character class
-_IFS_WHITESPACES = (' ', '\t', '\n')
-
-class Environment:
- """Environment holds environment variables, export table, function
- definitions and whatever is defined in 2.12 "Shell Execution Environment",
- redirection excepted.
- """
- def __init__(self, pwd):
- self._opt = set() #Shell options
-
- self._functions = {}
- self._env = {'?': '0', '#': '0'}
- self._exported = set([
- 'HOME', 'IFS', 'PATH'
- ])
-
- # Set environment vars with side-effects
- self._ifs_ws = None # Set of IFS whitespace characters
- self._ifs_re = None # Regular expression used to split between words using IFS classes
- self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values
- self['PWD'] = pwd
- self.traps = Traps()
-
- def clone(self, subshell=False):
- env = Environment(self['PWD'])
- env._opt = set(self._opt)
- for k,v in self.get_variables().iteritems():
- if k in self._exported:
- env.export(k,v)
- elif subshell:
- env[k] = v
-
- if subshell:
- env._functions = dict(self._functions)
-
- return env
-
- def __getitem__(self, key):
- if key in ('@', '*', '-', '$'):
- raise NotImplementedError('%s is not implemented' % repr(key))
- return self._env[key]
-
- def get(self, key, defval=None):
- try:
- return self[key]
- except KeyError:
- return defval
-
- def __setitem__(self, key, value):
- if key=='IFS':
- # Update the whitespace/non-whitespace classes
- self._update_ifs(value)
- elif key=='PWD':
- pwd = os.path.abspath(value)
- if not os.path.isdir(pwd):
- raise VarAssignmentError('Invalid directory %s' % value)
- value = pwd
- elif key in ('?', '!'):
- value = str(int(value))
- self._env[key] = value
-
- def __delitem__(self, key):
- if key in ('IFS', 'PWD', '?'):
- raise VarAssignmentError('%s cannot be unset' % key)
- del self._env[key]
-
- def __contains__(self, item):
- return item in self._env
-
- def set_positional_args(self, args):
- """Set the content of 'args' as positional argument from 1 to len(args).
- Return previous argument as a list of strings.
- """
- # Save and remove previous arguments
- prevargs = []
- for i in range(int(self._env['#'])):
- i = str(i+1)
- prevargs.append(self._env[i])
- del self._env[i]
- self._env['#'] = '0'
-
- #Set new ones
- for i,arg in enumerate(args):
- self._env[str(i+1)] = str(arg)
- self._env['#'] = str(len(args))
-
- return prevargs
-
- def get_positional_args(self):
- return [self._env[str(i+1)] for i in range(int(self._env['#']))]
-
- def get_variables(self):
- return dict(self._env)
-
- def export(self, key, value=None):
- if value is not None:
- self[key] = value
- self._exported.add(key)
-
- def get_exported(self):
- return [(k,self._env.get(k)) for k in self._exported]
-
- def split_fields(self, word):
- if not self._ifs_ws or not word:
- return [word]
- return re.split(self._ifs_re, word)
-
- def _update_ifs(self, value):
- """Update the split_fields related variables when IFS character set is
- changed.
- """
- # TODO: handle NULL IFS
-
- # Separate characters in whitespace and non-whitespace
- chars = set(value)
- ws = [c for c in chars if c in _IFS_WHITESPACES]
- nws = [c for c in chars if c not in _IFS_WHITESPACES]
-
- # Keep whitespaces in a string for left and right stripping
- self._ifs_ws = ''.join(ws)
-
- # Build a regexp to split fields
- trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']'
- if nws:
-            # First, the single non-whitespace occurrence.
- nws = '[' + ''.join([re.escape(c) for c in nws]) + ']'
- nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)'
- else:
- # Then mix all parts with quantifiers
- nws = trailing + '+'
- self._ifs_re = re.compile(nws)
-
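To make the regexp construction above concrete, here is a standalone sketch that mirrors the same logic for an IFS mixing whitespace and a ':' separator (not a drop-in replacement for the method):

    import re

    _IFS_WHITESPACES = (' ', '\t', '\n')

    def build_ifs_re(ifs):
        # Whitespace runs collapse into one separator; a non-whitespace IFS
        # character splits even when surrounded by whitespace.
        chars = set(ifs)
        ws = ''.join(re.escape(c) for c in chars if c in _IFS_WHITESPACES)
        nws = ''.join(re.escape(c) for c in chars if c not in _IFS_WHITESPACES)
        trailing = '[' + ws + ']'
        if nws:
            sep = '[' + nws + ']'
            return re.compile('(?:%s*%s%s*|%s+)' % (trailing, sep, trailing, trailing))
        return re.compile(trailing + '+')

    splitter = build_ifs_re(' :')
    assert splitter.split('a : b  c') == ['a', 'b', 'c']
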
- def has_opt(self, opt, val=None):
- return (opt, val) in self._opt
-
- def set_opt(self, opt, val=None):
- self._opt.add((opt, val))
-
- def find_in_path(self, name, pwd=False):
- path = self._env.get('PATH', '').split(os.pathsep)
- if pwd:
- path[:0] = [self['PWD']]
- if os.name == 'nt':
- return win32_find_in_path(name, self._env.get('PATH', ''))
- else:
- raise NotImplementedError()
-
- def define_function(self, name, body):
- if not is_name(name):
- raise ShellSyntaxError('%s is not a valid function name' % repr(name))
- self._functions[name] = body
-
- def remove_function(self, name):
- del self._functions[name]
-
- def is_function(self, name):
- return name in self._functions
-
- def get_function(self, name):
- return self._functions.get(name)
-
-
-name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
-name_charset = dict(zip(name_charset,name_charset))
-
-def match_name(s):
- """Return the length in characters of the longest prefix made of name
- allowed characters in s.
- """
- for i,c in enumerate(s):
- if c not in name_charset:
- return s[:i]
- return s
-
-def is_name(s):
- return len([c for c in s if c not in name_charset])<=0
-
-def is_special_param(c):
- return len(c)==1 and c in ('@','*','#','?','-','$','!','0')
-
-def utility_not_implemented(name, *args, **kwargs):
- raise NotImplementedError('%s utility is not implemented' % name)
-
-
-class Utility:
- """Define utilities properties:
- func -- utility callable. See builtin module for utility samples.
- is_special -- see XCU 2.8.
- """
- def __init__(self, func, is_special=0):
- self.func = func
- self.is_special = bool(is_special)
-
-
-def encodeargs(args):
- def encodearg(s):
- lines = base64.encodestring(s)
- lines = [l.splitlines()[0] for l in lines]
- return ''.join(lines)
-
- s = pickle.dumps(args)
- return encodearg(s)
-
-def decodeargs(s):
- s = base64.decodestring(s)
- return pickle.loads(s)
-
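The pair above simply pickles the arguments and base64-encodes the result so it can travel as a single command-line token; a Python 3 equivalent sketch:

    import base64
    import pickle

    def encodeargs(args):
        # Pickle, then base64, so the payload is one shell-safe ASCII token.
        return base64.b64encode(pickle.dumps(args)).decode('ascii')

    def decodeargs(s):
        return pickle.loads(base64.b64decode(s))

    env = {'PWD': '/tmp', '?': '0'}
    payload = encodeargs((env, ['echo', 'hello']))
    assert decodeargs(payload) == (env, ['echo', 'hello'])
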
-
-class GlobError(Exception):
- pass
-
-class Options:
- def __init__(self):
- # True if Mercurial operates with binary streams
- self.hgbinary = True
-
-class Interpreter:
-    # Implementation is very basic: the execute() method just makes a DFS on the
-    # AST and executes nodes one by one. Nodes are tuples (name, obj) where name
-    # is a string identifier and obj is the AST element returned by the parser.
-    #
-    # Handlers are named after the node identifiers.
- # TODO: check node names and remove the switch in execute with some
- # dynamic getattr() call to find node handlers.
- """Shell interpreter.
-
- The following debugging flags can be passed:
- debug-parsing - enable PLY debugging.
- debug-tree - print the generated AST.
- debug-cmd - trace command execution before word expansion, plus exit status.
- debug-utility - trace utility execution.
- """
-
- # List supported commands.
- COMMANDS = {
- 'cat': Utility(builtin.utility_cat,),
- 'cd': Utility(builtin.utility_cd,),
- ':': Utility(builtin.utility_colon,),
- 'echo': Utility(builtin.utility_echo),
- 'env': Utility(builtin.utility_env),
- 'exit': Utility(builtin.utility_exit),
- 'export': Utility(builtin.builtin_export, is_special=1),
- 'egrep': Utility(builtin.utility_egrep),
- 'fgrep': Utility(builtin.utility_fgrep),
- 'gunzip': Utility(builtin.utility_gunzip),
- 'kill': Utility(builtin.utility_kill),
- 'mkdir': Utility(builtin.utility_mkdir),
- 'netstat': Utility(builtin.utility_netstat),
- 'printf': Utility(builtin.utility_printf),
- 'pwd': Utility(builtin.utility_pwd),
- 'return': Utility(builtin.builtin_return, is_special=1),
- 'sed': Utility(builtin.utility_sed,),
- 'set': Utility(builtin.builtin_set,),
- 'shift': Utility(builtin.builtin_shift,),
- 'sleep': Utility(builtin.utility_sleep,),
- 'sort': Utility(builtin.utility_sort,),
- 'trap': Utility(builtin.builtin_trap, is_special=1),
- 'true': Utility(builtin.utility_true),
- 'unset': Utility(builtin.builtin_unset, is_special=1),
- 'wait': Utility(builtin.builtin_wait, is_special=1),
- }
-
- def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None,
- stdout=None, stderr=None, opts=Options()):
- self._env = env
- if self._env is None:
- self._env = Environment(pwd)
- self._children = {}
-
- self._redirs = redirs
- self._close_redirs = False
-
- if self._redirs is None:
- if stdin is None:
- stdin = sys.stdin
- if stdout is None:
- stdout = sys.stdout
- if stderr is None:
- stderr = sys.stderr
- stdin = FileWrapper('r', stdin, False)
- stdout = FileWrapper('w', stdout, False)
- stderr = FileWrapper('w', stderr, False)
- self._redirs = Redirections(stdin, stdout, stderr)
- self._close_redirs = True
-
- self._debugflags = list(debugflags)
- self._logfile = sys.stderr
- self._options = opts
-
- def close(self):
- """Must be called when the interpreter is no longer used."""
- script = self._env.traps.get('EXIT')
- if script:
- try:
- self.execute_script(script=script)
- except:
- pass
-
- if self._redirs is not None and self._close_redirs:
- self._redirs.close()
- self._redirs = None
-
- def log(self, s):
- self._logfile.write(s)
- self._logfile.flush()
-
- def __getitem__(self, key):
- return self._env[key]
-
- def __setitem__(self, key, value):
- self._env[key] = value
-
- def options(self):
- return self._options
-
- def redirect(self, redirs, ios):
- def add_redir(io):
- if isinstance(io, pyshyacc.IORedirect):
- redirs.add(self, io.op, io.filename, io.io_number)
- else:
- redirs.add_here_document(self, io.name, io.content, io.io_number)
-
- map(add_redir, ios)
- return redirs
-
- def execute_script(self, script=None, ast=None, sourced=False,
- scriptpath=None):
- """If script is not None, parse the input. Otherwise takes the supplied
- AST. Then execute the AST.
- Return the script exit status.
- """
- try:
- if scriptpath is not None:
- self._env['0'] = os.path.abspath(scriptpath)
-
- if script is not None:
- debug_parsing = ('debug-parsing' in self._debugflags)
- cmds, script = pyshyacc.parse(script, True, debug_parsing)
- if 'debug-tree' in self._debugflags:
- pyshyacc.print_commands(cmds, self._logfile)
- self._logfile.flush()
- else:
- cmds, script = ast, ''
-
- status = 0
- for cmd in cmds:
- try:
- status = self.execute(cmd)
- except ExitSignal as e:
- if sourced:
- raise
- status = int(e.args[0])
- return status
- except ShellError:
- self._env['?'] = 1
- raise
- if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags:
- self.log('returncode ' + str(status)+ '\n')
- return status
- except CommandNotFound as e:
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Command not found by non-interactive shell
- # return 127
- raise
- except RedirectionError as e:
- # TODO: should be handled depending on the utility status
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Command not found by non-interactive shell
- # return 127
- raise
-
- def dotcommand(self, env, args):
- if len(args) < 1:
- raise ShellError('. expects at least one argument')
- path = args[0]
- if '/' not in path:
- found = env.find_in_path(args[0], True)
- if found:
- path = found[0]
- script = file(path).read()
- return self.execute_script(script=script, sourced=True)
-
- def execute(self, token, redirs=None):
- """Execute and AST subtree with supplied redirections overriding default
- interpreter ones.
- Return the exit status.
- """
- if not token:
- return 0
-
- if redirs is None:
- redirs = self._redirs
-
- if isinstance(token, list):
- # Commands sequence
- res = 0
- for t in token:
- res = self.execute(t, redirs)
- return res
-
- type, value = token
- status = 0
- if type=='simple_command':
- redirs_copy = redirs.clone()
- try:
- # TODO: define and handle command return values
- # TODO: implement set -e
- status = self._execute_simple_command(value, redirs_copy)
- finally:
- redirs_copy.close()
- elif type=='pipeline':
- status = self._execute_pipeline(value, redirs)
- elif type=='and_or':
- status = self._execute_and_or(value, redirs)
- elif type=='for_clause':
- status = self._execute_for_clause(value, redirs)
- elif type=='while_clause':
- status = self._execute_while_clause(value, redirs)
- elif type=='function_definition':
- status = self._execute_function_definition(value, redirs)
- elif type=='brace_group':
- status = self._execute_brace_group(value, redirs)
- elif type=='if_clause':
- status = self._execute_if_clause(value, redirs)
- elif type=='subshell':
- status = self.subshell(ast=value.cmds, redirs=redirs)
- elif type=='async':
- status = self._asynclist(value)
- elif type=='redirect_list':
- redirs_copy = self.redirect(redirs.clone(), value.redirs)
- try:
- status = self.execute(value.cmd, redirs_copy)
- finally:
- redirs_copy.close()
- else:
- raise NotImplementedError('Unsupported token type ' + type)
-
- if status < 0:
- status = 255
- return status
-
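The TODO in the class comment suggests replacing the if/elif chain above with a dynamic lookup; a hypothetical sketch of that dispatch (handler names mirror the existing _execute_* methods):

    class DispatchingInterpreter:
        # ('if_clause', obj) would be routed to _execute_if_clause(obj, redirs).
        def execute(self, token, redirs=None):
            node_type, value = token
            handler = getattr(self, '_execute_' + node_type, None)
            if handler is None:
                raise NotImplementedError('Unsupported token type ' + node_type)
            return handler(value, redirs)

        def _execute_if_clause(self, value, redirs):
            return 0

    assert DispatchingInterpreter().execute(('if_clause', None)) == 0
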
- def _execute_if_clause(self, if_clause, redirs):
- cond_status = self.execute(if_clause.cond, redirs)
- if cond_status==0:
- return self.execute(if_clause.if_cmds, redirs)
- else:
- return self.execute(if_clause.else_cmds, redirs)
-
- def _execute_brace_group(self, group, redirs):
- status = 0
- for cmd in group.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_function_definition(self, fundef, redirs):
- self._env.define_function(fundef.name, fundef.body)
- return 0
-
- def _execute_while_clause(self, while_clause, redirs):
- status = 0
- while 1:
- cond_status = 0
- for cond in while_clause.condition:
- cond_status = self.execute(cond, redirs)
-
- if cond_status:
- break
-
- for cmd in while_clause.cmds:
- status = self.execute(cmd, redirs)
-
- return status
-
- def _execute_for_clause(self, for_clause, redirs):
- if not is_name(for_clause.name):
- raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name))
- items = mappend(self.expand_token, for_clause.items)
-
- status = 0
- for item in items:
- self._env[for_clause.name] = item
- for cmd in for_clause.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_and_or(self, or_and, redirs):
- res = self.execute(or_and.left, redirs)
- if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0):
- res = self.execute(or_and.right, redirs)
- return res
-
- def _execute_pipeline(self, pipeline, redirs):
- if len(pipeline.commands)==1:
- status = self.execute(pipeline.commands[0], redirs)
- else:
- # Execute all commands one after the other
- status = 0
- inpath, outpath = None, None
- try:
-                # Command inputs and outputs cannot really be plugged together as
-                # they would be in a real shell. Run commands sequentially and chain
-                # their input/output through temporary files.
- tmpfd, inpath = tempfile.mkstemp()
- os.close(tmpfd)
- tmpfd, outpath = tempfile.mkstemp()
- os.close(tmpfd)
-
- inpath = win32_to_unix_path(inpath)
- outpath = win32_to_unix_path(outpath)
-
- for i, cmd in enumerate(pipeline.commands):
- call_redirs = redirs.clone()
- try:
- if i!=0:
- call_redirs.add(self, '<', inpath)
- if i!=len(pipeline.commands)-1:
- call_redirs.add(self, '>', outpath)
-
- status = self.execute(cmd, call_redirs)
-
- # Chain inputs/outputs
- inpath, outpath = outpath, inpath
- finally:
- call_redirs.close()
- finally:
- if inpath: os.remove(inpath)
- if outpath: os.remove(outpath)
-
- if pipeline.reverse_status:
- status = int(not status)
- self._env['?'] = status
- return status
-
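Reduced to its essence, the chaining trick above gives each stage the previous stage's output file as input and a fresh file for output, swapping the two paths between iterations (self-contained sketch; the stage functions stand in for commands):

    import os
    import tempfile

    def run_pipeline(stages, text):
        fd, inpath = tempfile.mkstemp()
        os.close(fd)
        fd, outpath = tempfile.mkstemp()
        os.close(fd)
        with open(inpath, 'w') as f:
            f.write(text)
        try:
            for stage in stages:
                with open(inpath) as fin, open(outpath, 'w') as fout:
                    fout.write(stage(fin.read()))
                # This stage's output becomes the next stage's input.
                inpath, outpath = outpath, inpath
            with open(inpath) as fin:
                return fin.read()
        finally:
            os.remove(inpath)
            os.remove(outpath)

    assert run_pipeline([str.upper, lambda s: s[::-1]], 'abc') == 'CBA'
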
- def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others):
- assert interp is self
-
- func = env.get_function(name)
- #Set positional parameters
- prevargs = None
- try:
- prevargs = env.set_positional_args(args)
- try:
- redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup())
- try:
- status = self.execute(func, redirs)
- finally:
- redirs.close()
- except ReturnSignal as e:
- status = int(e.args[0])
- env['?'] = status
- return status
- finally:
- #Reset positional parameters
- if prevargs is not None:
- env.set_positional_args(prevargs)
-
- def _execute_simple_command(self, token, redirs):
- """Can raise ReturnSignal when return builtin is called, ExitSignal when
- exit is called, and other shell exceptions upon builtin failures.
- """
- debug_command = 'debug-cmd' in self._debugflags
- if debug_command:
- self.log('word' + repr(token.words) + '\n')
- self.log('assigns' + repr(token.assigns) + '\n')
- self.log('redirs' + repr(token.redirs) + '\n')
-
- is_special = None
- env = self._env
-
- try:
- # Word expansion
- args = []
- for word in token.words:
- args += self.expand_token(word)
- if is_special is None and args:
- is_special = env.is_function(args[0]) or \
- (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special)
-
- if debug_command:
- self.log('_execute_simple_command' + str(args) + '\n')
-
- if not args:
-                # Redirections happen in a subshell
- redirs = redirs.clone()
- elif not is_special:
- env = self._env.clone()
-
- # Redirections
- self.redirect(redirs, token.redirs)
-
- # Variables assignments
- res = 0
- for type,(k,v) in token.assigns:
- status, expanded = self.expand_variable((k,v))
- if status is not None:
- res = status
- if args:
- env.export(k, expanded)
- else:
- env[k] = expanded
-
- if args and args[0] in ('.', 'source'):
- res = self.dotcommand(env, args[1:])
- elif args:
- if args[0] in self.COMMANDS:
- command = self.COMMANDS[args[0]]
- elif env.is_function(args[0]):
- command = Utility(self._execute_function, is_special=True)
- else:
- if not '/' in args[0].replace('\\', '/'):
- cmd = env.find_in_path(args[0])
- if not cmd:
- # TODO: test error code on unknown command => 127
- raise CommandNotFound('Unknown command: "%s"' % args[0])
- else:
- # Handle commands like '/cygdrive/c/foo.bat'
- cmd = cygwin_to_windows_path(args[0])
- if not os.path.exists(cmd):
- raise CommandNotFound('%s: No such file or directory' % args[0])
- shebang = resolve_shebang(cmd)
- if shebang:
- cmd = shebang
- else:
- cmd = [cmd]
- args[0:1] = cmd
- command = Utility(builtin.run_command)
-
- # Command execution
- if 'debug-cmd' in self._debugflags:
- self.log('redirections ' + str(redirs) + '\n')
-
- res = command.func(args[0], args[1:], self, env,
- redirs.stdin(), redirs.stdout(),
- redirs.stderr(), self._debugflags)
-
- if self._env.has_opt('-x'):
- # Trace command execution in shell environment
- # BUG: would be hard to reproduce a real shell behaviour since
- # the AST is not annotated with source lines/tokens.
- self._redirs.stdout().write(' '.join(args))
-
- except ReturnSignal:
- raise
- except ShellError as e:
- if is_special or isinstance(e, (ExitSignal,
- ShellSyntaxError, ExpansionError)):
- raise e
- self._redirs.stderr().write(str(e)+'\n')
- return 1
-
- return res
-
- def expand_token(self, word):
- """Expand a word as specified in [2.6 Word Expansions]. Return the list
- of expanded words.
- """
- status, wtrees = self._expand_word(word)
- return map(pyshlex.wordtree_as_string, wtrees)
-
- def expand_variable(self, word):
- """Return a status code (or None if no command expansion occurred)
- and a single word.
- """
- status, wtrees = self._expand_word(word, pathname=False, split=False)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return status, words[0]
-
- def expand_here_document(self, word):
- """Return the expanded document as a single word. The here document is
- assumed to be unquoted.
- """
- status, wtrees = self._expand_word(word, pathname=False,
- split=False, here_document=True)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return words[0]
-
- def expand_redirection(self, word):
- """Return a single word."""
- return self.expand_variable(word)[1]
-
- def get_env(self):
- return self._env
-
- def _expand_word(self, token, pathname=True, split=True, here_document=False):
- wtree = pyshlex.make_wordtree(token[1], here_document=here_document)
-
- # TODO: implement tilde expansion
- def expand(wtree):
- """Return a pseudo wordtree: the tree or its subelements can be empty
- lists when no value result from the expansion.
- """
- status = None
- for part in wtree:
- if not isinstance(part, list):
- continue
- if part[0]in ("'", '\\'):
- continue
- elif part[0] in ('`', '$('):
- status, result = self._expand_command(part)
- part[:] = result
- elif part[0] in ('$', '${'):
- part[:] = self._expand_parameter(part, wtree[0]=='"', split)
- elif part[0] in ('', '"'):
- status, result = expand(part)
- part[:] = result
- else:
- raise NotImplementedError('%s expansion is not implemented'
- % part[0])
-            # [] is returned when an expansion results in no field,
-            # like an empty $@
- wtree = [p for p in wtree if p != []]
- if len(wtree) < 3:
- return status, []
- return status, wtree
-
- status, wtree = expand(wtree)
- if len(wtree) == 0:
- return status, wtree
- wtree = pyshlex.normalize_wordtree(wtree)
-
- if split:
- wtrees = self._split_fields(wtree)
- else:
- wtrees = [wtree]
-
- if pathname:
- wtrees = mappend(self._expand_pathname, wtrees)
-
- wtrees = map(self._remove_quotes, wtrees)
- return status, wtrees
-
- def _expand_command(self, wtree):
- # BUG: there is something to do with backslashes and quoted
- # characters here
- command = pyshlex.wordtree_as_string(wtree[1:-1])
- status, output = self.subshell_output(command)
- return status, ['', output, '']
-
- def _expand_parameter(self, wtree, quoted=False, split=False):
- """Return a valid wtree or an empty list when no parameter results."""
- # Get the parameter name
- # TODO: implement weird expansion rules with ':'
- name = pyshlex.wordtree_as_string(wtree[1:-1])
- if not is_name(name) and not is_special_param(name):
- raise ExpansionError('Bad substitution "%s"' % name)
- # TODO: implement special parameters
- if name in ('@', '*'):
- args = self._env.get_positional_args()
- if len(args) == 0:
- return []
- if len(args)<2:
- return ['', ''.join(args), '']
-
- sep = self._env.get('IFS', '')[:1]
- if split and quoted and name=='@':
- # Introduce a new token to tell the caller that these parameters
- # cause a split as specified in 2.5.2
- return ['@'] + args + ['']
- else:
- return ['', sep.join(args), '']
-
- return ['', self._env.get(name, ''), '']
-
- def _split_fields(self, wtree):
- def is_empty(split):
- return split==['', '', '']
-
- def split_positional(quoted):
-            # Return a list of wtrees split according to positional parameter rules.
- # All remaining '@' groups are removed.
- assert quoted[0]=='"'
-
- splits = [[]]
- for part in quoted:
- if not isinstance(part, list) or part[0]!='@':
- splits[-1].append(part)
- else:
- # Empty or single argument list were dealt with already
- assert len(part)>3
- # First argument must join with the beginning part of the original word
- splits[-1].append(part[1])
- # Create double-quotes expressions for every argument after the first
- for arg in part[2:-1]:
- splits[-1].append('"')
- splits.append(['"', arg])
- return splits
-
-        # At this point, all expansions but pathnames have occurred. Only quoted
- # and positional sequences remain. Thus, all candidates for field splitting
- # are in the tree root, or are positional splits ('@') and lie in root
- # children.
- if not wtree or wtree[0] not in ('', '"'):
- # The whole token is quoted or empty, nothing to split
- return [wtree]
-
- if wtree[0]=='"':
- wtree = ['', wtree, '']
-
- result = [['', '']]
- for part in wtree[1:-1]:
- if isinstance(part, list):
- if part[0]=='"':
- splits = split_positional(part)
- if len(splits)<=1:
- result[-1] += [part, '']
- else:
- # Terminate the current split
- result[-1] += [splits[0], '']
- result += splits[1:-1]
- # Create a new split
- result += [['', splits[-1], '']]
- else:
- result[-1] += [part, '']
- else:
- splits = self._env.split_fields(part)
- if len(splits)<=1:
- # No split
- result[-1][-1] += part
- else:
- # Terminate the current resulting part and create a new one
- result[-1][-1] += splits[0]
- result[-1].append('')
- result += [['', r, ''] for r in splits[1:-1]]
- result += [['', splits[-1]]]
- result[-1].append('')
-
- # Leading and trailing empty groups come from leading/trailing blanks
- if result and is_empty(result[-1]):
- result[-1:] = []
- if result and is_empty(result[0]):
- result[:1] = []
- return result
-
- def _expand_pathname(self, wtree):
- """See [2.6.6 Pathname Expansion]."""
- if self._env.has_opt('-f'):
- return [wtree]
-
- # All expansions have been performed, only quoted sequences should remain
- # in the tree. Generate the pattern by folding the tree, escaping special
-        # characters when they appear quoted.
- special_chars = '*?[]'
-
- def make_pattern(wtree):
- subpattern = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = make_pattern(part)
- elif wtree[0]!='':
- for c in part:
- # Meta-characters cannot be quoted
- if c in special_chars:
- raise GlobError()
- subpattern.append(part)
- return ''.join(subpattern)
-
- def pwd_glob(pattern):
- cwd = os.getcwd()
- os.chdir(self._env['PWD'])
- try:
- return glob.glob(pattern)
- finally:
- os.chdir(cwd)
-
- #TODO: check working directory issues here wrt relative patterns
- try:
- pattern = make_pattern(wtree)
- paths = pwd_glob(pattern)
- except GlobError:
-            # BUG: Meta-characters were found in quoted sequences. They should
-            # have been used literally, but this is unsupported by the current glob module.
- # Instead we consider the whole tree must be used literally and
- # therefore there is no point in globbing. This is wrong when meta
- # characters are mixed with quoted meta in the same pattern like:
- # < foo*"py*" >
- paths = []
-
- if not paths:
- return [wtree]
- return [['', path, ''] for path in paths]
-
- def _remove_quotes(self, wtree):
- """See [2.6.7 Quote Removal]."""
-
- def unquote(wtree):
- unquoted = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = unquote(part)
- unquoted.append(part)
- return ''.join(unquoted)
-
- return ['', unquote(wtree), '']
-
- def subshell(self, script=None, ast=None, redirs=None):
- """Execute the script or AST in a subshell, with inherited redirections
- if redirs is not None.
- """
- if redirs:
- sub_redirs = redirs
- else:
-            sub_redirs = self._redirs.clone()
-
- subshell = None
- try:
- subshell = Interpreter(None, self._debugflags, self._env.clone(True),
- sub_redirs, opts=self._options)
- return subshell.execute_script(script, ast)
- finally:
- if not redirs: sub_redirs.close()
- if subshell: subshell.close()
-
- def subshell_output(self, script):
- """Execute the script in a subshell and return the captured output."""
- # Create temporary file to capture subshell output
- tmpfd, tmppath = tempfile.mkstemp()
- try:
- tmpfile = os.fdopen(tmpfd, 'wb')
- stdout = FileWrapper('w', tmpfile)
-
- redirs = Redirections(self._redirs.stdin().dup(),
- stdout,
- self._redirs.stderr().dup())
- try:
- status = self.subshell(script=script, redirs=redirs)
- finally:
- redirs.close()
- redirs = None
-
- # Extract subshell standard output
- tmpfile = open(tmppath, 'rb')
- try:
- output = tmpfile.read()
- return status, output.rstrip('\n')
- finally:
- tmpfile.close()
- finally:
- os.remove(tmppath)
-
- def _asynclist(self, cmd):
- args = (self._env.get_variables(), cmd)
- arg = encodeargs(args)
- assert len(args) < 30*1024
- cmd = ['pysh.bat', '--ast', '-c', arg]
- p = subprocess.Popen(cmd, cwd=self._env['PWD'])
- self._children[p.pid] = p
- self._env['!'] = p.pid
- return 0
-
- def wait(self, pids=None):
- if not pids:
- pids = self._children.keys()
-
- status = 127
- for pid in pids:
- if pid not in self._children:
- continue
- p = self._children.pop(pid)
- status = p.wait()
-
- return status
-
diff --git a/poky/bitbake/lib/bb/pysh/lsprof.py b/poky/bitbake/lib/bb/pysh/lsprof.py
deleted file mode 100644
index b1831c22a..000000000
--- a/poky/bitbake/lib/bb/pysh/lsprof.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#! /usr/bin/env python
-
-import sys
-from _lsprof import Profiler, profiler_entry
-
-__all__ = ['profile', 'Stats']
-
-def profile(f, *args, **kwds):
- """XXX docstring"""
- p = Profiler()
- p.enable(subcalls=True, builtins=True)
- try:
- f(*args, **kwds)
- finally:
- p.disable()
- return Stats(p.getstats())
-
-
-class Stats(object):
- """XXX docstring"""
-
- def __init__(self, data):
- self.data = data
-
- def sort(self, crit="inlinetime"):
- """XXX docstring"""
- if crit not in profiler_entry.__dict__:
- raise ValueError("Can't sort by %s" % crit)
- self.data.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
- for e in self.data:
- if e.calls:
- e.calls.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
-
- def pprint(self, top=None, file=None, limit=None, climit=None):
- """XXX docstring"""
- if file is None:
- file = sys.stdout
- d = self.data
- if top is not None:
- d = d[:top]
- cols = "% 12s %12s %11.4f %11.4f %s\n"
- hcols = "% 12s %12s %12s %12s %s\n"
- cols2 = "+%12s %12s %11.4f %11.4f + %s\n"
- file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
- "Inline(ms)", "module:lineno(function)"))
- count = 0
- for e in d:
- file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
- e.inlinetime, label(e.code)))
- count += 1
- if limit is not None and count == limit:
- return
- ccount = 0
- if e.calls:
- for se in e.calls:
- file.write(cols % ("+%s" % se.callcount, se.reccallcount,
- se.totaltime, se.inlinetime,
- "+%s" % label(se.code)))
- count += 1
- ccount += 1
- if limit is not None and count == limit:
- return
- if climit is not None and ccount == climit:
- break
-
- def freeze(self):
- """Replace all references to code objects with string
- descriptions; this makes it possible to pickle the instance."""
-
- # this code is probably rather ickier than it needs to be!
- for i in range(len(self.data)):
- e = self.data[i]
- if not isinstance(e.code, str):
- self.data[i] = type(e)((label(e.code),) + e[1:])
- if e.calls:
- for j in range(len(e.calls)):
- se = e.calls[j]
- if not isinstance(se.code, str):
- e.calls[j] = type(se)((label(se.code),) + se[1:])
-
-_fn2mod = {}
-
-def label(code):
- if isinstance(code, str):
- return code
- try:
- mname = _fn2mod[code.co_filename]
- except KeyError:
- for k, v in sys.modules.items():
- if v is None:
- continue
- if not hasattr(v, '__file__'):
- continue
- if not isinstance(v.__file__, str):
- continue
- if v.__file__.startswith(code.co_filename):
- mname = _fn2mod[code.co_filename] = k
- break
- else:
- mname = _fn2mod[code.co_filename] = '<%s>'%code.co_filename
-
- return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
-
-
-if __name__ == '__main__':
- import os
- sys.argv = sys.argv[1:]
- if not sys.argv:
- print >> sys.stderr, "usage: lsprof.py <script> <arguments...>"
- sys.exit(2)
- sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
- stats = profile(execfile, sys.argv[0], globals(), locals())
- stats.sort()
- stats.pprint()
diff --git a/poky/bitbake/lib/bb/pysh/pysh.py b/poky/bitbake/lib/bb/pysh/pysh.py
deleted file mode 100644
index b4e6145b5..000000000
--- a/poky/bitbake/lib/bb/pysh/pysh.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# pysh.py - command processing for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-import optparse
-import os
-import sys
-
-import interp
-
-SH_OPT = optparse.OptionParser(prog='pysh', usage="%prog [OPTIONS]", version='0.1')
-SH_OPT.add_option('-c', action='store_true', dest='command_string', default=None,
- help='A string that shall be interpreted by the shell as one or more commands')
-SH_OPT.add_option('--redirect-to', dest='redirect_to', default=None,
- help='Redirect script commands stdout and stderr to the specified file')
-# See utility_command in builtin.py about the reason for this flag.
-SH_OPT.add_option('--redirected', dest='redirected', action='store_true', default=False,
- help='Tell the interpreter that stdout and stderr are actually the same objects, which is really stdout')
-SH_OPT.add_option('--debug-parsing', action='store_true', dest='debug_parsing', default=False,
- help='Trace PLY execution')
-SH_OPT.add_option('--debug-tree', action='store_true', dest='debug_tree', default=False,
- help='Display the generated syntax tree.')
-SH_OPT.add_option('--debug-cmd', action='store_true', dest='debug_cmd', default=False,
- help='Trace command execution before parameters expansion and exit status.')
-SH_OPT.add_option('--debug-utility', action='store_true', dest='debug_utility', default=False,
- help='Trace utility calls, after parameters expansions')
-SH_OPT.add_option('--ast', action='store_true', dest='ast', default=False,
- help='Encoded commands to execute in a subprocess')
-SH_OPT.add_option('--profile', action='store_true', default=False,
- help='Profile pysh run')
-
-
-def split_args(args):
- # Separate shell arguments from command ones
- # Just stop at the first argument not starting with a dash. I know, this is completely broken,
-    # it ignores files starting with a dash and may mistake option values for the command file. This is not
- # supposed to happen for now
- command_index = len(args)
- for i,arg in enumerate(args):
- if not arg.startswith('-'):
- command_index = i
- break
-
- return args[:command_index], args[command_index:]
-
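Given the deliberately naive rule described above, the split behaves like this (self-contained copy for illustration):

    def split_args(args):
        # Everything before the first non-dash argument goes to the shell;
        # the rest (command file plus its arguments) is passed through.
        command_index = len(args)
        for i, arg in enumerate(args):
            if not arg.startswith('-'):
                command_index = i
                break
        return args[:command_index], args[command_index:]

    assert split_args(['--debug-cmd', 'build.sh', '-x']) == \
           (['--debug-cmd'], ['build.sh', '-x'])
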
-
-def fixenv(env):
- path = env.get('PATH')
- if path is not None:
- parts = path.split(os.pathsep)
- # Remove Windows utilities from PATH, they are useless at best and
- # some of them (find) may be confused with other utilities.
- parts = [p for p in parts if 'system32' not in p.lower()]
- env['PATH'] = os.pathsep.join(parts)
- if env.get('HOME') is None:
- # Several utilities, including cvsps, cannot work without
- # a defined HOME directory.
- env['HOME'] = os.path.expanduser('~')
- return env
-
-def _sh(cwd, shargs, cmdargs, options, debugflags=None, env=None):
- if os.environ.get('PYSH_TEXT') != '1':
- import msvcrt
- for fp in (sys.stdin, sys.stdout, sys.stderr):
- msvcrt.setmode(fp.fileno(), os.O_BINARY)
-
- hgbin = os.environ.get('PYSH_HGTEXT') != '1'
-
- if debugflags is None:
- debugflags = []
- if options.debug_parsing: debugflags.append('debug-parsing')
- if options.debug_utility: debugflags.append('debug-utility')
- if options.debug_cmd: debugflags.append('debug-cmd')
- if options.debug_tree: debugflags.append('debug-tree')
-
- if env is None:
- env = fixenv(dict(os.environ))
- if cwd is None:
- cwd = os.getcwd()
-
- if not cmdargs:
- # Nothing to do
- return 0
-
- ast = None
- command_file = None
- if options.command_string:
- input = cmdargs[0]
- if not options.ast:
- input += '\n'
- else:
- args, input = interp.decodeargs(input), None
- env, ast = args
- cwd = env.get('PWD', cwd)
- else:
- command_file = cmdargs[0]
- arguments = cmdargs[1:]
-
- prefix = interp.resolve_shebang(command_file, ignoreshell=True)
- if prefix:
- input = ' '.join(prefix + [command_file] + arguments)
- else:
- # Read commands from file
- f = file(command_file)
- try:
- # Trailing newline to help the parser
- input = f.read() + '\n'
- finally:
- f.close()
-
- redirect = None
- try:
- if options.redirected:
- stdout = sys.stdout
- stderr = stdout
- elif options.redirect_to:
- redirect = open(options.redirect_to, 'wb')
- stdout = redirect
- stderr = redirect
- else:
- stdout = sys.stdout
- stderr = sys.stderr
-
- # TODO: set arguments to environment variables
- opts = interp.Options()
- opts.hgbinary = hgbin
- ip = interp.Interpreter(cwd, debugflags, stdout=stdout, stderr=stderr,
- opts=opts)
- try:
- # Export given environment in shell object
- for k,v in env.iteritems():
- ip.get_env().export(k,v)
- return ip.execute_script(input, ast, scriptpath=command_file)
- finally:
- ip.close()
- finally:
- if redirect is not None:
- redirect.close()
-
-def sh(cwd=None, args=None, debugflags=None, env=None):
- if args is None:
- args = sys.argv[1:]
- shargs, cmdargs = split_args(args)
- options, shargs = SH_OPT.parse_args(shargs)
-
- if options.profile:
- import lsprof
- p = lsprof.Profiler()
- p.enable(subcalls=True)
- try:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
- finally:
- p.disable()
- stats = lsprof.Stats(p.getstats())
- stats.sort()
- stats.pprint(top=10, file=sys.stderr, climit=5)
- else:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
-
-def main():
- sys.exit(sh())
-
-if __name__=='__main__':
- main()
diff --git a/poky/bitbake/lib/bb/pysh/pyshlex.py b/poky/bitbake/lib/bb/pysh/pyshlex.py
index fbf094b7a..a42c29446 100644
--- a/poky/bitbake/lib/bb/pysh/pyshlex.py
+++ b/poky/bitbake/lib/bb/pysh/pyshlex.py
@@ -13,11 +13,6 @@
# PLY in pull mode. It was designed to work incrementally and it would not be
# that hard to enable pull mode.
import re
-try:
- s = set()
- del s
-except NameError:
- from Set import Set as set
from ply import lex
from bb.pysh.sherrors import *
diff --git a/poky/bitbake/lib/bb/pysh/pyshyacc.py b/poky/bitbake/lib/bb/pysh/pyshyacc.py
index ba4cefdcb..de565dc9a 100644
--- a/poky/bitbake/lib/bb/pysh/pyshyacc.py
+++ b/poky/bitbake/lib/bb/pysh/pyshyacc.py
@@ -636,13 +636,16 @@ def p_empty(p):
def p_error(p):
msg = []
w = msg.append
- w('%r\n' % p)
- w('followed by:\n')
- for i in range(5):
- n = yacc.token()
- if not n:
- break
- w(' %r\n' % n)
+ if p:
+ w('%r\n' % p)
+ w('followed by:\n')
+ for i in range(5):
+ n = yacc.token()
+ if not n:
+ break
+ w(' %r\n' % n)
+ else:
+ w('Unexpected EOF')
raise sherrors.ShellSyntaxError(''.join(msg))
# Build the parser
diff --git a/poky/bitbake/lib/bb/pysh/sherrors.py b/poky/bitbake/lib/bb/pysh/sherrors.py
index 49d0533de..3fe8e47b2 100644
--- a/poky/bitbake/lib/bb/pysh/sherrors.py
+++ b/poky/bitbake/lib/bb/pysh/sherrors.py
@@ -13,29 +13,3 @@ class ShellError(Exception):
class ShellSyntaxError(ShellError):
pass
-
-class UtilityError(ShellError):
- """Raised upon utility syntax error (option or operand error)."""
- pass
-
-class ExpansionError(ShellError):
- pass
-
-class CommandNotFound(ShellError):
- """Specified command was not found."""
- pass
-
-class RedirectionError(ShellError):
- pass
-
-class VarAssignmentError(ShellError):
- """Variable assignment error."""
- pass
-
-class ExitSignal(ShellError):
- """Exit signal."""
- pass
-
-class ReturnSignal(ShellError):
- """Exit signal."""
- pass
diff --git a/poky/bitbake/lib/bb/pysh/subprocess_fix.py b/poky/bitbake/lib/bb/pysh/subprocess_fix.py
deleted file mode 100644
index 46eca2280..000000000
--- a/poky/bitbake/lib/bb/pysh/subprocess_fix.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# subprocess - Subprocesses with accessible I/O streams
-#
-# For more information about this module, see PEP 324.
-#
-# This module should remain compatible with Python 2.2, see PEP 291.
-#
-# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
-#
-# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/2.4/license for licensing details.
-
-def list2cmdline(seq):
- """
- Translate a sequence of arguments into a command line
- string, using the same rules as the MS C runtime:
-
- 1) Arguments are delimited by white space, which is either a
- space or a tab.
-
- 2) A string surrounded by double quotation marks is
- interpreted as a single argument, regardless of white space
- contained within. A quoted string can be embedded in an
- argument.
-
- 3) A double quotation mark preceded by a backslash is
- interpreted as a literal double quotation mark.
-
- 4) Backslashes are interpreted literally, unless they
- immediately precede a double quotation mark.
-
- 5) If backslashes immediately precede a double quotation mark,
- every pair of backslashes is interpreted as a literal
- backslash. If the number of backslashes is odd, the last
- backslash escapes the next double quotation mark as
- described in rule 3.
- """
-
- # See
- # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
- result = []
- needquote = False
- for arg in seq:
- bs_buf = []
-
- # Add a space to separate this argument from the others
- if result:
- result.append(' ')
-
- needquote = (" " in arg) or ("\t" in arg) or ("|" in arg) or arg == ""
- if needquote:
- result.append('"')
-
- for c in arg:
- if c == '\\':
- # Don't know if we need to double yet.
- bs_buf.append(c)
- elif c == '"':
-            # Double backslashes.
- result.append('\\' * len(bs_buf)*2)
- bs_buf = []
- result.append('\\"')
- else:
- # Normal char
- if bs_buf:
- result.extend(bs_buf)
- bs_buf = []
- result.append(c)
-
-        # Add remaining backslashes, if any.
- if bs_buf:
- result.extend(bs_buf)
-
- if needquote:
- result.extend(bs_buf)
- result.append('"')
-
- return ''.join(result)
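A worked example of the quoting rules listed in the docstring above; the check uses the standard library's list2cmdline, which implements essentially the same rules for these inputs:

    import subprocess

    # Spaces force surrounding quotes (rules 1-2); embedded quotes get a
    # backslash (rule 3).
    cmdline = subprocess.list2cmdline(['copy', 'my file.txt', 'say "hi"'])
    assert cmdline == 'copy "my file.txt" "say \\"hi\\""'
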
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 383c18323..329cda33a 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -37,11 +37,12 @@ from bb import monitordisk
import subprocess
import pickle
from multiprocessing import Process
+import shlex
bblogger = logging.getLogger("BitBake")
logger = logging.getLogger("BitBake.RunQueue")
-__find_md5__ = re.compile( r'(?i)(?<![a-z0-9])[a-f0-9]{32}(?![a-z0-9])' )
+__find_sha256__ = re.compile( r'(?i)(?<![a-z0-9])[a-f0-9]{64}(?![a-z0-9])' )
def fn_from_tid(tid):
return tid.rsplit(":", 1)[0]
@@ -351,6 +352,7 @@ class RunTaskEntry(object):
self.depends = set()
self.revdeps = set()
self.hash = None
+ self.unihash = None
self.task = None
self.weight = 1
@@ -390,6 +392,9 @@ class RunQueueData:
def get_task_hash(self, tid):
return self.runtaskentries[tid].hash
+ def get_task_unihash(self, tid):
+ return self.runtaskentries[tid].unihash
+
def get_user_idstring(self, tid, task_name_suffix = ""):
return tid + task_name_suffix
@@ -1161,18 +1166,21 @@ class RunQueueData:
if len(self.runtaskentries[tid].depends - dealtwith) == 0:
dealtwith.add(tid)
todeal.remove(tid)
- procdep = []
- for dep in self.runtaskentries[tid].depends:
- procdep.append(fn_from_tid(dep) + "." + taskname_from_tid(dep))
- (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
- self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(taskfn, taskname, procdep, self.dataCaches[mc])
- task = self.runtaskentries[tid].task
+ self.prepare_task_hash(tid)
bb.parse.siggen.writeout_file_checksum_cache()
#self.dump_data()
return len(self.runtaskentries)
+ def prepare_task_hash(self, tid):
+ procdep = []
+ for dep in self.runtaskentries[tid].depends:
+ procdep.append(fn_from_tid(dep) + "." + taskname_from_tid(dep))
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(taskfn, taskname, procdep, self.dataCaches[mc])
+ self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(taskfn + "." + taskname)
+
def dump_data(self):
"""
Dump some debug information on the internal data structures
@@ -1224,28 +1232,23 @@ class RunQueue:
if fakeroot:
magic = magic + "beef"
mcdata = self.cooker.databuilder.mcdata[mc]
- fakerootcmd = mcdata.getVar("FAKEROOTCMD")
+ fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD"))
fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split()
env = os.environ.copy()
for key, value in (var.split('=') for var in fakerootenv):
env[key] = value
- worker = subprocess.Popen([fakerootcmd, "bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+ worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
else:
worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
bb.utils.nonblockingfd(worker.stdout)
workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
- runqhash = {}
- for tid in self.rqdata.runtaskentries:
- runqhash[tid] = self.rqdata.runtaskentries[tid].hash
-
workerdata = {
"taskdeps" : self.rqdata.dataCaches[mc].task_deps,
"fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
"fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
"fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
- "runq_hash" : runqhash,
"logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
"logdefaultverbose" : bb.msg.loggerDefaultVerbose,
"logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
@@ -1387,6 +1390,26 @@ class RunQueue:
cache[tid] = iscurrent
return iscurrent
+ def validate_hash(self, *, sq_fn, sq_task, sq_hash, sq_hashfn, siginfo, sq_unihash, d):
+ locs = {"sq_fn" : sq_fn, "sq_task" : sq_task, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn,
+ "sq_unihash" : sq_unihash, "siginfo" : siginfo, "d" : d}
+
+ hashvalidate_args = ("(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=siginfo, sq_unihash=sq_unihash)",
+ "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=siginfo)",
+ "(sq_fn, sq_task, sq_hash, sq_hashfn, d)")
+
+ for args in hashvalidate_args[:-1]:
+ try:
+ call = self.hashvalidate + args
+ return bb.utils.better_eval(call, locs)
+ except TypeError:
+ continue
+
+        # Call the last entry without a try/except so that any TypeError it
+        # raises propagates to the caller
+ call = self.hashvalidate + hashvalidate_args[-1]
+ return bb.utils.better_eval(call, locs)
+
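The new helper probes the richest hook signature first and falls back to older ones on TypeError, letting the final attempt raise; a standalone sketch of the same pattern (the hook and its arguments are made up):

    def call_with_fallback(hook, candidate_kwargs):
        # Note: a TypeError raised inside the hook itself would also trigger
        # the fallback, which is why only the last candidate may propagate it.
        for kwargs in candidate_kwargs[:-1]:
            try:
                return hook(**kwargs)
            except TypeError:
                continue
        return hook(**candidate_kwargs[-1])

    def legacy_hook(sq_fn, sq_task, sq_hash, sq_hashfn, d):
        return ['valid']

    result = call_with_fallback(legacy_hook, [
        dict(sq_fn=[], sq_task=[], sq_hash=[], sq_hashfn=[], d=None,
             siginfo=True, sq_unihash=[]),                     # newest, rejected
        dict(sq_fn=[], sq_task=[], sq_hash=[], sq_hashfn=[], d=None),  # oldest
    ])
    assert result == ['valid']
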
def _execute_runqueue(self):
"""
Run the tasks in a queue prepared by rqdata.prepare()
@@ -1558,6 +1581,7 @@ class RunQueue:
valid = []
sq_hash = []
sq_hashfn = []
+ sq_unihash = []
sq_fn = []
sq_taskname = []
sq_task = []
@@ -1576,16 +1600,13 @@ class RunQueue:
sq_fn.append(fn)
sq_hashfn.append(self.rqdata.dataCaches[mc].hashfn[taskfn])
sq_hash.append(self.rqdata.runtaskentries[tid].hash)
+ sq_unihash.append(self.rqdata.runtaskentries[tid].unihash)
sq_taskname.append(taskname)
sq_task.append(tid)
- locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
- try:
- call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)"
- valid = bb.utils.better_eval(call, locs)
- # Handle version with no siginfo parameter
- except TypeError:
- call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
- valid = bb.utils.better_eval(call, locs)
+
+ valid = self.validate_hash(sq_fn=sq_fn, sq_task=sq_taskname, sq_hash=sq_hash, sq_hashfn=sq_hashfn,
+ siginfo=True, sq_unihash=sq_unihash, d=self.cooker.data)
+
for v in valid:
valid_new.add(sq_task[v])
@@ -1667,7 +1688,7 @@ class RunQueue:
matches = {k : v for k, v in iter(matches.items()) if h not in k}
if matches:
latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
- prevh = __find_md5__.search(latestmatch).group(0)
+ prevh = __find_sha256__.search(latestmatch).group(0)
output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
@@ -2042,6 +2063,8 @@ class RunQueueExecuteTasks(RunQueueExecute):
taskdepdata = self.build_taskdepdata(task)
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
+ taskhash = self.rqdata.get_task_hash(task)
+ unihash = self.rqdata.get_task_unihash(task)
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
if not mc in self.rq.fakeworker:
try:
@@ -2051,10 +2074,10 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.rq.state = runQueueFailed
self.stats.taskFailed()
return True
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
self.rq.worker[mc].process.stdin.flush()
self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2109,8 +2132,9 @@ class RunQueueExecuteTasks(RunQueueExecute):
deps = self.rqdata.runtaskentries[revdep].depends
provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
taskhash = self.rqdata.runtaskentries[revdep].hash
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash]
+ unihash = self.rqdata.runtaskentries[revdep].unihash
deps = self.filtermcdeps(task, deps)
+ taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
for revdep2 in deps:
if revdep2 not in taskdepdata:
additional.append(revdep2)
@@ -2313,6 +2337,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
if self.rq.hashvalidate:
sq_hash = []
sq_hashfn = []
+ sq_unihash = []
sq_fn = []
sq_taskname = []
sq_task = []
@@ -2344,14 +2369,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_fn.append(fn)
sq_hashfn.append(self.rqdata.dataCaches[mc].hashfn[taskfn])
sq_hash.append(self.rqdata.runtaskentries[tid].hash)
+ sq_unihash.append(self.rqdata.runtaskentries[tid].unihash)
sq_taskname.append(taskname)
sq_task.append(tid)
self.cooker.data.setVar("BB_SETSCENE_STAMPCURRENT_COUNT", len(stamppresent))
- call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
- locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
- valid = bb.utils.better_eval(call, locs)
+ valid = self.rq.validate_hash(sq_fn=sq_fn, sq_task=sq_taskname, sq_hash=sq_hash, sq_hashfn=sq_hashfn,
+ siginfo=False, sq_unihash=sq_unihash, d=self.cooker.data)
self.cooker.data.delVar("BB_SETSCENE_STAMPCURRENT_COUNT")
@@ -2482,13 +2507,15 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
taskdepdata = self.build_taskdepdata(task)
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
+ taskhash = self.rqdata.get_task_hash(task)
+ unihash = self.rqdata.get_task_unihash(task)
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not mc in self.rq.fakeworker:
self.rq.start_fakeworker(self, mc)
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
self.rq.worker[mc].process.stdin.flush()
self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2552,7 +2579,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
deps = getsetscenedeps(revdep)
provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
taskhash = self.rqdata.runtaskentries[revdep].hash
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash]
+ unihash = self.rqdata.runtaskentries[revdep].unihash
+ taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
for revdep2 in deps:
if revdep2 not in taskdepdata:
additional.append(revdep2)
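
For context, the hook configured via BB_HASHCHECK_FUNCTION, which the new validate_hash() calls above ultimately invoke, now has to accept the extra siginfo and sq_unihash keyword arguments while still returning the indices of the setscene tasks whose artefacts are available. A minimal sketch of a compatible hook; artefact_exists() is a hypothetical stand-in for a real sstate or mirror lookup:

    def artefact_exists(fn, taskname, taskhash):
        # Hypothetical stand-in for a real sstate/mirror lookup.
        return False

    def sample_hashcheck(sq_fn, sq_task, sq_hash, sq_hashfn, siginfo=False,
                         sq_unihash=None, d=None):
        valid = []
        for i, taskname in enumerate(sq_task):
            # Prefer the unified hash when one was provided, otherwise fall
            # back to the per-task signature.
            h = sq_unihash[i] if sq_unihash else sq_hash[i]
            if artefact_exists(sq_fn[i], taskname, h):
                valid.append(i)
        return valid
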
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 352dcab85..09c9c8a25 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -41,6 +41,9 @@ class SignatureGenerator(object):
def finalise(self, fn, d, varient):
return
+ def get_unihash(self, task):
+ return self.taskhash[task]
+
def get_taskhash(self, fn, task, deps, dataCache):
return "0"
@@ -87,7 +90,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.taints = {}
self.gendeps = {}
self.lookupcache = {}
- self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
+ self.pkgnameextract = re.compile(r"(?P<fn>.*)\..*")
self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
self.taskwhitelist = None
self.init_rundepcheck(data)
@@ -188,7 +191,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
continue
if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
- data = data + self.taskhash[dep]
+ data = data + self.get_unihash(dep)
self.runtaskdeps[k].append(dep)
if task in dataCache.file_checksums[fn]:
@@ -215,7 +218,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.taints[k] = taint
logger.warning("%s is tainted from a forced run" % k)
- h = hashlib.md5(data.encode("utf-8")).hexdigest()
+ h = hashlib.sha256(data.encode("utf-8")).hexdigest()
self.taskhash[k] = h
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
return h
@@ -263,7 +266,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k]]
data['runtaskhashes'] = {}
for dep in data['runtaskdeps']:
- data['runtaskhashes'][dep] = self.taskhash[dep]
+ data['runtaskhashes'][dep] = self.get_unihash(dep)
data['taskhash'] = self.taskhash[k]
taint = self.read_taint(fn, task, referencestamp)
@@ -313,6 +316,13 @@ class SignatureGeneratorBasic(SignatureGenerator):
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
name = "basichash"
+ def get_stampfile_hash(self, task):
+ if task in self.taskhash:
+ return self.taskhash[task]
+
+ # If task is not in basehash, then error
+ return self.basehash[task]
+
def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
if taskname != "do_setscene" and taskname.endswith("_setscene"):
k = fn + "." + taskname[:-9]
@@ -320,11 +330,9 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
k = fn + "." + taskname
if clean:
h = "*"
- elif k in self.taskhash:
- h = self.taskhash[k]
else:
- # If k is not in basehash, then error
- h = self.basehash[k]
+ h = self.get_stampfile_hash(k)
+
return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
@@ -644,7 +652,7 @@ def calc_basehash(sigdata):
if val is not None:
basedata = basedata + str(val)
- return hashlib.md5(basedata.encode("utf-8")).hexdigest()
+ return hashlib.sha256(basedata.encode("utf-8")).hexdigest()
def calc_taskhash(sigdata):
data = sigdata['basehash']
@@ -662,7 +670,7 @@ def calc_taskhash(sigdata):
else:
data = data + sigdata['taint']
- return hashlib.md5(data.encode("utf-8")).hexdigest()
+ return hashlib.sha256(data.encode("utf-8")).hexdigest()
def dump_sigfile(a):
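
In short, signature hashing moves from md5 to sha256, and a task's dependencies now contribute their unified hashes (get_unihash()) rather than their raw task hashes, so tasks proven equivalent collapse to the same value. A simplified sketch of the computation, not the full SignatureGeneratorBasic.get_taskhash():

    import hashlib

    def sketch_taskhash(basehash, dep_unihashes, file_checksums=(), taint=None):
        # basehash covers the task's own code and variables; each dependency
        # contributes its unified hash; file checksums and any forced-run
        # taint are mixed in last.
        data = basehash + "".join(dep_unihashes)
        for _, checksum in file_checksums:
            if checksum:
                data += checksum
        if taint:
            data += taint
        return hashlib.sha256(data.encode("utf-8")).hexdigest()

    print(sketch_taskhash("0" * 64, ["a" * 64, "b" * 64]))
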
diff --git a/poky/bitbake/lib/bb/tests/codeparser.py b/poky/bitbake/lib/bb/tests/codeparser.py
index e30e78c15..3fd76a8f9 100644
--- a/poky/bitbake/lib/bb/tests/codeparser.py
+++ b/poky/bitbake/lib/bb/tests/codeparser.py
@@ -123,6 +123,13 @@ ${D}${libdir}/pkgconfig/*.pc
self.parseExpression("sed -i -e 's:IP{:I${:g' $pc")
self.assertExecs(set(["sed"]))
+ def test_parameter_expansion_modifiers(self):
+ # - and + are also valid modifiers for parameter expansion, but are
+ # valid characters in bitbake variable names, so are not included here
+ for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'):
+ name = "foo%sbar" % i
+ self.parseExpression("${%s}" % name)
+ self.assertNotIn(name, self.references)
def test_until(self):
self.parseExpression("until false; do echo true; done")
diff --git a/poky/bitbake/lib/bb/tests/data.py b/poky/bitbake/lib/bb/tests/data.py
index db3e2010a..3c511f214 100644
--- a/poky/bitbake/lib/bb/tests/data.py
+++ b/poky/bitbake/lib/bb/tests/data.py
@@ -394,6 +394,15 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST"), " testvalue5")
+ # Test an override with _<numeric> in it based on a real world OE issue
+ def test_underscore_override(self):
+ self.d.setVar("TARGET_ARCH", "x86_64")
+ self.d.setVar("PN", "test-${TARGET_ARCH}")
+ self.d.setVar("VERSION", "1")
+ self.d.setVar("VERSION_pn-test-${TARGET_ARCH}", "2")
+ self.d.setVar("OVERRIDES", "pn-${PN}")
+ bb.data.expandKeys(self.d)
+ self.assertEqual(self.d.getVar("VERSION"), "2")
class TestKeyExpansion(unittest.TestCase):
def setUp(self):
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 522d2024f..429998b34 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -942,6 +942,25 @@ class FetcherNetworkTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/json/config')), msg='Missing submodule config "extern/json"')
self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/sanitizers/config')), msg='Missing submodule config "extern/sanitizers"')
+ def test_git_submodule_update_CLI11(self):
+ """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714"
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+
+ # CLI11 that pulls in a newer nlohmann-json
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca"
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ # Previous cwd has been deleted
+ os.chdir(os.path.dirname(self.unpackdir))
+ fetcher.unpack(self.unpackdir)
+
+ repo_path = os.path.join(self.tempdir, 'unpacked', 'git')
+ self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/googletest/config')), msg='Missing submodule config "extern/googletest"')
+ self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/json/config')), msg='Missing submodule config "extern/json"')
+ self.assertTrue(os.path.exists(os.path.join(repo_path, '.git/modules/extern/sanitizers/config')), msg='Missing submodule config "extern/sanitizers"')
+
def test_git_submodule_aktualizr(self):
url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
fetcher = bb.fetch.Fetch([url], self.d)
@@ -1338,7 +1357,7 @@ class GitShallowTest(FetcherTest):
def fetch(self, uri=None):
if uri is None:
- uris = self.d.getVar('SRC_URI', True).split()
+ uris = self.d.getVar('SRC_URI').split()
uri = uris[0]
d = self.d
else:
@@ -1397,7 +1416,7 @@ class GitShallowTest(FetcherTest):
srcrev = self.git('rev-parse HEAD', cwd=self.srcdir).strip()
self.d.setVar('SRCREV', srcrev)
- uri = self.d.getVar('SRC_URI', True).split()[0]
+ uri = self.d.getVar('SRC_URI').split()[0]
uri = '%s;nobranch=1;bare=1' % uri
self.fetch_shallow(uri)
@@ -1576,7 +1595,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('f')
self.assertRevCount(7, cwd=self.srcdir)
- uri = self.d.getVar('SRC_URI', True).split()[0]
+ uri = self.d.getVar('SRC_URI').split()[0]
uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
@@ -1602,7 +1621,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('f')
self.assertRevCount(7, cwd=self.srcdir)
- uri = self.d.getVar('SRC_URI', True).split()[0]
+ uri = self.d.getVar('SRC_URI').split()[0]
uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
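
The True argument dropped from these getVar() calls was the old explicit expand flag; getVar() expands by default, so it is redundant. A minimal sketch of the difference, assuming bitbake's lib/ directory is importable (the variable names are illustrative):

    import bb.data

    d = bb.data.init()
    d.setVar('BASE', 'git://example.invalid/repo.git')
    d.setVar('SRC_URI', '${BASE};branch=master')

    assert d.getVar('SRC_URI') == 'git://example.invalid/repo.git;branch=master'
    assert d.getVar('SRC_URI', expand=False) == '${BASE};branch=master'
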
diff --git a/poky/bitbake/lib/bb/tests/persist_data.py b/poky/bitbake/lib/bb/tests/persist_data.py
new file mode 100644
index 000000000..812bcbd7b
--- /dev/null
+++ b/poky/bitbake/lib/bb/tests/persist_data.py
@@ -0,0 +1,142 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# BitBake Test for lib/bb/persist_data/
+#
+# Copyright (C) 2018 Garmin Ltd.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+
+import unittest
+import bb.data
+import bb.persist_data
+import tempfile
+import threading
+
+class PersistDataTest(unittest.TestCase):
+ def _create_data(self):
+ return bb.persist_data.persist('TEST_PERSIST_DATA', self.d)
+
+ def setUp(self):
+ self.d = bb.data.init()
+ self.tempdir = tempfile.TemporaryDirectory()
+ self.d['PERSISTENT_DIR'] = self.tempdir.name
+ self.data = self._create_data()
+ self.items = {
+ 'A1': '1',
+ 'B1': '2',
+ 'C2': '3'
+ }
+ self.stress_count = 10000
+ self.thread_count = 5
+
+ for k,v in self.items.items():
+ self.data[k] = v
+
+ def tearDown(self):
+ self.tempdir.cleanup()
+
+ def _iter_helper(self, seen, iterator):
+ with iter(iterator):
+ for v in iterator:
+ self.assertTrue(v in seen)
+ seen.remove(v)
+ self.assertEqual(len(seen), 0, '%s not seen' % seen)
+
+ def test_get(self):
+ for k, v in self.items.items():
+ self.assertEqual(self.data[k], v)
+
+ self.assertIsNone(self.data.get('D'))
+ with self.assertRaises(KeyError):
+ self.data['D']
+
+ def test_set(self):
+ for k, v in self.items.items():
+ self.data[k] += '-foo'
+
+ for k, v in self.items.items():
+ self.assertEqual(self.data[k], v + '-foo')
+
+ def test_delete(self):
+ self.data['D'] = '4'
+ self.assertEqual(self.data['D'], '4')
+ del self.data['D']
+ self.assertIsNone(self.data.get('D'))
+ with self.assertRaises(KeyError):
+ self.data['D']
+
+ def test_contains(self):
+ for k in self.items:
+ self.assertTrue(k in self.data)
+ self.assertTrue(self.data.has_key(k))
+ self.assertFalse('NotFound' in self.data)
+ self.assertFalse(self.data.has_key('NotFound'))
+
+ def test_len(self):
+ self.assertEqual(len(self.data), len(self.items))
+
+ def test_iter(self):
+ self._iter_helper(set(self.items.keys()), self.data)
+
+ def test_itervalues(self):
+ self._iter_helper(set(self.items.values()), self.data.itervalues())
+
+ def test_iteritems(self):
+ self._iter_helper(set(self.items.items()), self.data.iteritems())
+
+ def test_get_by_pattern(self):
+ self._iter_helper({'1', '2'}, self.data.get_by_pattern('_1'))
+
+ def _stress_read(self, data):
+ for i in range(self.stress_count):
+ for k in self.items:
+ data[k]
+
+ def _stress_write(self, data):
+ for i in range(self.stress_count):
+ for k, v in self.items.items():
+ data[k] = v + str(i)
+
+ def _validate_stress(self):
+ for k, v in self.items.items():
+ self.assertEqual(self.data[k], v + str(self.stress_count - 1))
+
+ def test_stress(self):
+ self._stress_read(self.data)
+ self._stress_write(self.data)
+ self._validate_stress()
+
+ def test_stress_threads(self):
+ def read_thread():
+ data = self._create_data()
+ self._stress_read(data)
+
+ def write_thread():
+ data = self._create_data()
+ self._stress_write(data)
+
+ threads = []
+ for i in range(self.thread_count):
+ threads.append(threading.Thread(target=read_thread))
+ threads.append(threading.Thread(target=write_thread))
+
+ for t in threads:
+ t.start()
+ self._stress_read(self.data)
+ for t in threads:
+ t.join()
+ self._validate_stress()
+
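
The new test exercises the dict-style interface of the reworked persist_data module. Typical use looks roughly like the following sketch; the domain name and keys are illustrative:

    import tempfile
    import bb.data
    import bb.persist_data

    d = bb.data.init()
    tmp = tempfile.TemporaryDirectory()
    d.setVar('PERSISTENT_DIR', tmp.name)

    data = bb.persist_data.persist('EXAMPLE_DOMAIN', d)
    data['A1'] = 'one'
    data['B1'] = 'two'
    print('A1' in data, len(data))          # True 2
    print(list(data.get_by_pattern('_1')))  # values whose key matches the SQL LIKE pattern
    del data['A1']
    tmp.cleanup()
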
diff --git a/poky/bitbake/lib/bb/tests/utils.py b/poky/bitbake/lib/bb/tests/utils.py
index 2f4ccf3c6..f1cd83a41 100644
--- a/poky/bitbake/lib/bb/tests/utils.py
+++ b/poky/bitbake/lib/bb/tests/utils.py
@@ -42,6 +42,10 @@ class VerCmpString(unittest.TestCase):
self.assertTrue(result < 0)
result = bb.utils.vercmp_string('1.1', '1.0+1.1-beta1')
self.assertTrue(result > 0)
+ result = bb.utils.vercmp_string('1.', '1.1')
+ self.assertTrue(result < 0)
+ result = bb.utils.vercmp_string('1.1', '1.')
+ self.assertTrue(result > 0)
def test_explode_dep_versions(self):
correctresult = {"foo" : ["= 1.10"]}
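
The added assertions pin down the ordering of a version string with a trailing separator ('1.' against '1.1'); the matching None checks added to vercmp_part() further down in utils.py appear to be what makes the shorter version simply sort as older under Python 3. A quick worked example, assuming bitbake's lib/ directory is importable:

    import bb.utils

    assert bb.utils.vercmp_string('1.', '1.1') < 0   # shorter version sorts first
    assert bb.utils.vercmp_string('1.1', '1.') > 0
    assert bb.utils.vercmp_string('1.1', '1.1') == 0
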
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index 73b6cb423..b652a6838 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -27,7 +27,8 @@ import bb
import bb.msg
import multiprocessing
import fcntl
-import imp
+import importlib
+from importlib import machinery
import itertools
import subprocess
import glob
@@ -43,7 +44,7 @@ from contextlib import contextmanager
from ctypes import cdll
logger = logging.getLogger("BitBake.Util")
-python_extensions = [e for e, _, _ in imp.get_suffixes()]
+python_extensions = importlib.machinery.all_suffixes()
def clean_context():
@@ -68,8 +69,8 @@ class VersionStringException(Exception):
def explode_version(s):
r = []
- alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
- numeric_regexp = re.compile('^(\d+)(.*)$')
+ alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
+ numeric_regexp = re.compile(r'^(\d+)(.*)$')
while (s != ''):
if s[0] in string.digits:
m = numeric_regexp.match(s)
@@ -120,6 +121,10 @@ def vercmp_part(a, b):
return -1
elif oa > ob:
return 1
+ elif ca is None:
+ return -1
+ elif cb is None:
+ return 1
elif ca < cb:
return -1
elif ca > cb:
@@ -317,10 +322,13 @@ def better_compile(text, file, realfile, mode = "exec", lineno = 0):
error = []
# split the text into lines again
body = text.split('\n')
- error.append("Error in compiling python function in %s, line %s:\n" % (realfile, lineno))
+ error.append("Error in compiling python function in %s, line %s:\n" % (realfile, e.lineno))
if hasattr(e, "lineno"):
error.append("The code lines resulting in this error were:")
- error.extend(_print_trace(body, e.lineno))
+ # e.lineno: the line's position in realfile
+ # lineno: the function name's position in realfile, minus one
+ # e.lineno - lineno: the line's position relative to the start of the function
+ error.extend(_print_trace(body, e.lineno - lineno))
else:
error.append("The function causing this error was:")
for line in body:
@@ -704,15 +712,7 @@ def prunedir(topdir):
# CAUTION: This is dangerous!
if _check_unsafe_delete_path(topdir):
raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
- for root, dirs, files in os.walk(topdir, topdown = False):
- for name in files:
- os.remove(os.path.join(root, name))
- for name in dirs:
- if os.path.islink(os.path.join(root, name)):
- os.remove(os.path.join(root, name))
- else:
- os.rmdir(os.path.join(root, name))
- os.rmdir(topdir)
+ remove(topdir, recurse=True)
#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
@@ -1157,14 +1157,14 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
var_res = {}
if match_overrides:
- override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
+ override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
else:
override_re = ''
for var in variables:
if var.endswith('()'):
- var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
+ var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
else:
- var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
+ var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
updated = False
varset_start = ''
@@ -1501,6 +1501,8 @@ def ioprio_set(who, cls, value):
NR_ioprio_set = 251
elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
NR_ioprio_set = 289
+ elif _unamearch == "aarch64":
+ NR_ioprio_set = 30
if NR_ioprio_set:
ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
@@ -1544,12 +1546,9 @@ def export_proxies(d):
def load_plugins(logger, plugins, pluginpath):
def load_plugin(name):
logger.debug(1, 'Loading plugin %s' % name)
- fp, pathname, description = imp.find_module(name, [pluginpath])
- try:
- return imp.load_module(name, fp, pathname, description)
- finally:
- if fp:
- fp.close()
+ spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
+ if spec:
+ return spec.loader.load_module()
logger.debug(1, 'Loading plugins from %s...' % pluginpath)
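
The deprecated imp module is swapped for importlib.machinery, both for the extension-suffix list and for plugin loading. The standalone equivalent of the new loader looks like this; the plugin name and directory are illustrative:

    import importlib.machinery

    def load_plugin(name, pluginpath):
        # Search only the given directory; find_spec() returns None when no
        # matching source or extension module is found there.
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            return spec.loader.load_module()   # mirrors the patch above
        return None

    # plugin = load_plugin('myplugin', '/path/to/plugins')
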
diff --git a/poky/bitbake/lib/bs4/dammit.py b/poky/bitbake/lib/bs4/dammit.py
index 68d419feb..805aa908a 100644
--- a/poky/bitbake/lib/bs4/dammit.py
+++ b/poky/bitbake/lib/bs4/dammit.py
@@ -45,9 +45,9 @@ except ImportError:
pass
xml_encoding_re = re.compile(
- '^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
+ r'^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
html_meta_re = re.compile(
- '<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)
+ r'<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)
class EntitySubstitution(object):
@@ -80,11 +80,11 @@ class EntitySubstitution(object):
">": "gt",
}
- BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
- "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
- ")")
+ BARE_AMPERSAND_OR_BRACKET = re.compile(r"([<>]|"
+ r"&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
+ r")")
- AMPERSAND_OR_BRACKET = re.compile("([<>&])")
+ AMPERSAND_OR_BRACKET = re.compile(r"([<>&])")
@classmethod
def _substitute_html_entity(cls, matchobj):
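
The added r prefixes (here and in the other regex changes in this series) only change how the pattern literals are written: sequences such as \s are not valid Python string escapes and newer Python 3 releases flag them with a DeprecationWarning, while the compiled patterns stay identical. A small illustration:

    import re

    escaped = re.compile("\\s+")   # doubled backslash: same pattern
    raw = re.compile(r"\s+")       # raw string: preferred spelling
    assert escaped.pattern == raw.pattern
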
diff --git a/poky/bitbake/lib/bs4/element.py b/poky/bitbake/lib/bs4/element.py
index 0e62c2e10..3775a6045 100644
--- a/poky/bitbake/lib/bs4/element.py
+++ b/poky/bitbake/lib/bs4/element.py
@@ -1,7 +1,7 @@
__license__ = "MIT"
from pdb import set_trace
-import collections
+import collections.abc
import re
import sys
import warnings
@@ -10,7 +10,7 @@ from bs4.dammit import EntitySubstitution
DEFAULT_OUTPUT_ENCODING = "utf-8"
PY3K = (sys.version_info[0] > 2)
-whitespace_re = re.compile("\s+")
+whitespace_re = re.compile(r"\s+")
def _alias(attr):
"""Alias one attribute name to another for backward compatibility"""
@@ -67,7 +67,7 @@ class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution):
The value of the 'content' attribute will be one of these objects.
"""
- CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M)
+ CHARSET_RE = re.compile(r"((^|;)\s*charset=)([^;]*)", re.M)
def __new__(cls, original_value):
match = cls.CHARSET_RE.search(original_value)
@@ -155,7 +155,7 @@ class PageElement(object):
def format_string(self, s, formatter='minimal'):
"""Format the given string using the given formatter."""
- if not isinstance(formatter, collections.Callable):
+ if not isinstance(formatter, collections.abc.Callable):
formatter = self._formatter_for_name(formatter)
if formatter is None:
output = s
@@ -580,7 +580,7 @@ class PageElement(object):
# Methods for supporting CSS selectors.
- tag_name_re = re.compile('^[a-zA-Z0-9][-.a-zA-Z0-9:_]*$')
+ tag_name_re = re.compile(r'^[a-zA-Z0-9][-.a-zA-Z0-9:_]*$')
# /^([a-zA-Z0-9][-.a-zA-Z0-9:_]*)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/
# \---------------------------/ \---/\-------------/ \-------/
@@ -1077,7 +1077,7 @@ class Tag(PageElement):
# First off, turn a string formatter into a function. This
# will stop the lookup from happening over and over again.
- if not isinstance(formatter, collections.Callable):
+ if not isinstance(formatter, collections.abc.Callable):
formatter = self._formatter_for_name(formatter)
attrs = []
@@ -1181,7 +1181,7 @@ class Tag(PageElement):
"""
# First off, turn a string formatter into a function. This
# will stop the lookup from happening over and over again.
- if not isinstance(formatter, collections.Callable):
+ if not isinstance(formatter, collections.abc.Callable):
formatter = self._formatter_for_name(formatter)
pretty_print = (indent_level is not None)
@@ -1364,7 +1364,7 @@ class Tag(PageElement):
if tag_name == '':
raise ValueError(
"A pseudo-class must be prefixed with a tag name.")
- pseudo_attributes = re.match('([a-zA-Z\d-]+)\(([a-zA-Z\d]+)\)', pseudo)
+ pseudo_attributes = re.match(r'([a-zA-Z\d-]+)\(([a-zA-Z\d]+)\)', pseudo)
found = []
if pseudo_attributes is None:
pseudo_type = pseudo
@@ -1562,7 +1562,7 @@ class SoupStrainer(object):
def _normalize_search_value(self, value):
# Leave it alone if it's a Unicode string, a callable, a
# regular expression, a boolean, or None.
- if (isinstance(value, str) or isinstance(value, collections.Callable) or hasattr(value, 'match')
+ if (isinstance(value, str) or isinstance(value, collections.abc.Callable) or hasattr(value, 'match')
or isinstance(value, bool) or value is None):
return value
@@ -1602,7 +1602,7 @@ class SoupStrainer(object):
markup = markup_name
markup_attrs = markup
call_function_with_tag_data = (
- isinstance(self.name, collections.Callable)
+ isinstance(self.name, collections.abc.Callable)
and not isinstance(markup_name, Tag))
if ((not self.name)
@@ -1688,7 +1688,7 @@ class SoupStrainer(object):
# True matches any non-None value.
return markup is not None
- if isinstance(match_against, collections.Callable):
+ if isinstance(match_against, collections.abc.Callable):
return match_against(markup)
# Custom callables take the tag as an argument, but all
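
collections.abc has been the canonical home of Callable since Python 3.3, and the top-level collections aliases were removed in Python 3.10, hence the updated isinstance() checks. A brief illustration; the helper name is made up:

    import collections.abc

    def is_formatter_function(formatter):
        # The same check the patch uses; the builtin callable() is equivalent.
        return isinstance(formatter, collections.abc.Callable)

    assert is_formatter_function(len)
    assert not is_formatter_function("minimal")
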
diff --git a/poky/bitbake/lib/hashserv/__init__.py b/poky/bitbake/lib/hashserv/__init__.py
new file mode 100644
index 000000000..46bca7cab
--- /dev/null
+++ b/poky/bitbake/lib/hashserv/__init__.py
@@ -0,0 +1,152 @@
+# Copyright (C) 2018 Garmin Ltd.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+from http.server import BaseHTTPRequestHandler, HTTPServer
+import contextlib
+import urllib.parse
+import sqlite3
+import json
+import traceback
+import logging
+from datetime import datetime
+
+logger = logging.getLogger('hashserv')
+
+class HashEquivalenceServer(BaseHTTPRequestHandler):
+ def log_message(self, f, *args):
+ logger.debug(f, *args)
+
+ def do_GET(self):
+ try:
+ p = urllib.parse.urlparse(self.path)
+
+ if p.path != self.prefix + '/v1/equivalent':
+ self.send_error(404)
+ return
+
+ query = urllib.parse.parse_qs(p.query, strict_parsing=True)
+ method = query['method'][0]
+ taskhash = query['taskhash'][0]
+
+ d = None
+ with contextlib.closing(self.db.cursor()) as cursor:
+ cursor.execute('SELECT taskhash, method, unihash FROM tasks_v1 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1',
+ {'method': method, 'taskhash': taskhash})
+
+ row = cursor.fetchone()
+
+ if row is not None:
+ logger.debug('Found equivalent task %s', row['taskhash'])
+ d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
+
+ self.send_response(200)
+ self.send_header('Content-Type', 'application/json; charset=utf-8')
+ self.end_headers()
+ self.wfile.write(json.dumps(d).encode('utf-8'))
+ except:
+ logger.exception('Error in GET')
+ self.send_error(400, explain=traceback.format_exc())
+ return
+
+ def do_POST(self):
+ try:
+ p = urllib.parse.urlparse(self.path)
+
+ if p.path != self.prefix + '/v1/equivalent':
+ self.send_error(404)
+ return
+
+ length = int(self.headers['content-length'])
+ data = json.loads(self.rfile.read(length).decode('utf-8'))
+
+ with contextlib.closing(self.db.cursor()) as cursor:
+ cursor.execute('''
+ SELECT taskhash, method, unihash FROM tasks_v1 WHERE method=:method AND outhash=:outhash
+ ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END,
+ created ASC
+ LIMIT 1
+ ''', {k: data[k] for k in ('method', 'outhash', 'taskhash')})
+
+ row = cursor.fetchone()
+
+ if row is None or row['taskhash'] != data['taskhash']:
+ unihash = data['unihash']
+ if row is not None:
+ unihash = row['unihash']
+
+ insert_data = {
+ 'method': data['method'],
+ 'outhash': data['outhash'],
+ 'taskhash': data['taskhash'],
+ 'unihash': unihash,
+ 'created': datetime.now()
+ }
+
+ for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'):
+ if k in data:
+ insert_data[k] = data[k]
+
+ cursor.execute('''INSERT INTO tasks_v1 (%s) VALUES (%s)''' % (
+ ', '.join(sorted(insert_data.keys())),
+ ', '.join(':' + k for k in sorted(insert_data.keys()))),
+ insert_data)
+
+ logger.info('Adding taskhash %s with unihash %s', data['taskhash'], unihash)
+ cursor.execute('SELECT taskhash, method, unihash FROM tasks_v1 WHERE id=:id', {'id': cursor.lastrowid})
+ row = cursor.fetchone()
+
+ self.db.commit()
+
+ d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
+
+ self.send_response(200)
+ self.send_header('Content-Type', 'application/json; charset=utf-8')
+ self.end_headers()
+ self.wfile.write(json.dumps(d).encode('utf-8'))
+ except:
+ logger.exception('Error in POST')
+ self.send_error(400, explain=traceback.format_exc())
+ return
+
+def create_server(addr, db, prefix=''):
+ class Handler(HashEquivalenceServer):
+ pass
+
+ Handler.prefix = prefix
+ Handler.db = db
+ db.row_factory = sqlite3.Row
+
+ with contextlib.closing(db.cursor()) as cursor:
+ cursor.execute('''
+ CREATE TABLE IF NOT EXISTS tasks_v1 (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ method TEXT NOT NULL,
+ outhash TEXT NOT NULL,
+ taskhash TEXT NOT NULL,
+ unihash TEXT NOT NULL,
+ created DATETIME,
+
+ -- Optional fields
+ owner TEXT,
+ PN TEXT,
+ PV TEXT,
+ PR TEXT,
+ task TEXT,
+ outhash_siginfo TEXT
+ )
+ ''')
+
+ logger.info('Starting server on %s', addr)
+ return HTTPServer(addr, Handler)
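
The new server exposes a single /v1/equivalent endpoint: a GET looks up the stored unihash for a (method, taskhash) pair, and a POST reports an outhash and receives back whichever unihash the server considers canonical for it. A minimal client sketch; the address is a placeholder and the hash values are the ones used in the tests below:

    import json
    import urllib.parse
    import urllib.request

    base = 'http://localhost:8686'   # placeholder address

    # Report a task; the reply may carry a different unihash if an equivalent
    # outhash was already known.
    report = {
        'method': 'TestMethod',
        'taskhash': '35788efcb8dfb0a02659d81cf2bfd695fb30faf9',
        'outhash': '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f',
        'unihash': 'f46d3fbb439bd9b921095da657a4de906510d2cd',
    }
    req = urllib.request.Request(base + '/v1/equivalent',
                                 json.dumps(report).encode('utf-8'),
                                 {'content-type': 'application/json'})
    print(json.loads(urllib.request.urlopen(req).read().decode('utf-8')))

    # Query it back; the server answers with null (None) for unknown hashes.
    query = urllib.parse.urlencode({'method': 'TestMethod',
                                    'taskhash': report['taskhash']})
    with urllib.request.urlopen(base + '/v1/equivalent?' + query) as rsp:
        print(json.loads(rsp.read().decode('utf-8')))
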
diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py
new file mode 100644
index 000000000..806b54c5e
--- /dev/null
+++ b/poky/bitbake/lib/hashserv/tests.py
@@ -0,0 +1,141 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2018 Garmin Ltd.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import unittest
+import threading
+import sqlite3
+import hashlib
+import urllib.request
+import json
+from . import create_server
+
+class TestHashEquivalenceServer(unittest.TestCase):
+ def setUp(self):
+ # Start an in memory hash equivalence server in the background bound to
+ # an ephemeral port
+ db = sqlite3.connect(':memory:', check_same_thread=False)
+ self.server = create_server(('localhost', 0), db)
+ self.server_addr = 'http://localhost:%d' % self.server.socket.getsockname()[1]
+ self.server_thread = threading.Thread(target=self.server.serve_forever)
+ self.server_thread.start()
+
+ def tearDown(self):
+ # Shutdown server
+ s = getattr(self, 'server', None)
+ if s is not None:
+ self.server.shutdown()
+ self.server_thread.join()
+ self.server.server_close()
+
+ def send_get(self, path):
+ url = '%s/%s' % (self.server_addr, path)
+ request = urllib.request.Request(url)
+ response = urllib.request.urlopen(request)
+ return json.loads(response.read().decode('utf-8'))
+
+ def send_post(self, path, data):
+ headers = {'content-type': 'application/json'}
+ url = '%s/%s' % (self.server_addr, path)
+ request = urllib.request.Request(url, json.dumps(data).encode('utf-8'), headers)
+ response = urllib.request.urlopen(request)
+ return json.loads(response.read().decode('utf-8'))
+
+ def test_create_hash(self):
+ # Simple test that hashes can be created
+ taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
+ outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
+ unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
+
+ d = self.send_get('v1/equivalent?method=TestMethod&taskhash=%s' % taskhash)
+ self.assertIsNone(d, msg='Found unexpected task, %r' % d)
+
+ d = self.send_post('v1/equivalent', {
+ 'taskhash': taskhash,
+ 'method': 'TestMethod',
+ 'outhash': outhash,
+ 'unihash': unihash,
+ })
+ self.assertEqual(d['unihash'], unihash, 'Server returned bad unihash')
+
+ def test_create_equivalent(self):
+ # Tests that a second reported task with the same outhash will be
+ # assigned the same unihash
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+ d = self.send_post('v1/equivalent', {
+ 'taskhash': taskhash,
+ 'method': 'TestMethod',
+ 'outhash': outhash,
+ 'unihash': unihash,
+ })
+ self.assertEqual(d['unihash'], unihash, 'Server returned bad unihash')
+
+ # Report a different task with the same outhash. The returned unihash
+ # should match the first task
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+ d = self.send_post('v1/equivalent', {
+ 'taskhash': taskhash2,
+ 'method': 'TestMethod',
+ 'outhash': outhash,
+ 'unihash': unihash2,
+ })
+ self.assertEqual(d['unihash'], unihash, 'Server returned bad unihash')
+
+ def test_duplicate_taskhash(self):
+ # Tests that duplicate reports of the same taskhash with different
+ # outhash & unihash always return the unihash from the first reported
+ # taskhash
+ taskhash = '8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a'
+ outhash = 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e'
+ unihash = '218e57509998197d570e2c98512d0105985dffc9'
+ d = self.send_post('v1/equivalent', {
+ 'taskhash': taskhash,
+ 'method': 'TestMethod',
+ 'outhash': outhash,
+ 'unihash': unihash,
+ })
+
+ d = self.send_get('v1/equivalent?method=TestMethod&taskhash=%s' % taskhash)
+ self.assertEqual(d['unihash'], unihash)
+
+ outhash2 = '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d'
+ unihash2 = 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'
+ d = self.send_post('v1/equivalent', {
+ 'taskhash': taskhash,
+ 'method': 'TestMethod',
+ 'outhash': outhash2,
+ 'unihash': unihash2
+ })
+
+ d = self.send_get('v1/equivalent?method=TestMethod&taskhash=%s' % taskhash)
+ self.assertEqual(d['unihash'], unihash)
+
+ outhash3 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash3 = '9217a7d6398518e5dc002ed58f2cbbbc78696603'
+ d = self.send_post('v1/equivalent', {
+ 'taskhash': taskhash,
+ 'method': 'TestMethod',
+ 'outhash': outhash3,
+ 'unihash': unihash3
+ })
+
+ d = self.send_get('v1/equivalent?method=TestMethod&taskhash=%s' % taskhash)
+ self.assertEqual(d['unihash'], unihash)
+
+