Diffstat (limited to 'poky/bitbake')
-rwxr-xr-xpoky/bitbake/bin/bitbake-diffsigs4
-rwxr-xr-xpoky/bitbake/bin/bitbake-worker8
-rw-r--r--poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst2
-rw-r--r--poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst42
-rw-r--r--poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst2
-rw-r--r--poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst22
-rw-r--r--poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst138
-rw-r--r--poky/bitbake/lib/bb/build.py14
-rw-r--r--poky/bitbake/lib/bb/cache.py29
-rw-r--r--poky/bitbake/lib/bb/checksum.py12
-rw-r--r--poky/bitbake/lib/bb/cooker.py36
-rw-r--r--poky/bitbake/lib/bb/cookerdata.py14
-rw-r--r--poky/bitbake/lib/bb/daemonize.py42
-rw-r--r--poky/bitbake/lib/bb/data.py24
-rw-r--r--poky/bitbake/lib/bb/data_smart.py8
-rw-r--r--poky/bitbake/lib/bb/fetch2/README57
-rw-r--r--poky/bitbake/lib/bb/fetch2/__init__.py55
-rw-r--r--poky/bitbake/lib/bb/fetch2/crate.py137
-rw-r--r--poky/bitbake/lib/bb/fetch2/git.py37
-rw-r--r--poky/bitbake/lib/bb/fetch2/gitsm.py2
-rw-r--r--poky/bitbake/lib/bb/fetch2/npm.py2
-rw-r--r--poky/bitbake/lib/bb/fetch2/npmsw.py8
-rw-r--r--poky/bitbake/lib/bb/fetch2/wget.py16
-rw-r--r--poky/bitbake/lib/bb/parse/ast.py4
-rw-r--r--poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py5
-rw-r--r--poky/bitbake/lib/bb/process.py2
-rw-r--r--poky/bitbake/lib/bb/runqueue.py160
-rw-r--r--poky/bitbake/lib/bb/server/process.py6
-rw-r--r--poky/bitbake/lib/bb/siggen.py27
-rw-r--r--poky/bitbake/lib/bb/taskdata.py4
-rw-r--r--poky/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html59
-rw-r--r--poky/bitbake/lib/bb/tests/fetch.py113
-rw-r--r--poky/bitbake/lib/bb/tests/runqueue.py43
-rw-r--r--poky/bitbake/lib/bb/ui/buildinfohelper.py36
-rw-r--r--poky/bitbake/lib/bb/ui/uievent.py4
-rw-r--r--poky/bitbake/lib/bb/utils.py44
-rw-r--r--poky/bitbake/lib/pyinotify.py30
37 files changed, 863 insertions, 385 deletions
diff --git a/poky/bitbake/bin/bitbake-diffsigs b/poky/bitbake/bin/bitbake-diffsigs
index 6646dccdfa..cf4cc706a2 100755
--- a/poky/bitbake/bin/bitbake-diffsigs
+++ b/poky/bitbake/bin/bitbake-diffsigs
@@ -60,7 +60,7 @@ def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
if sig1 and sig2:
sigfiles = find_siginfo(bbhandler, pn, taskname, [sig1, sig2])
- if len(sigfiles) == 0:
+ if not sigfiles:
logger.error('No sigdata files found matching %s %s matching either %s or %s' % (pn, taskname, sig1, sig2))
sys.exit(1)
elif not sig1 in sigfiles:
@@ -86,7 +86,7 @@ def recursecb(key, hash1, hash2):
hashfiles = find_siginfo(tinfoil, key, None, hashes)
recout = []
- if len(hashfiles) == 0:
+ if not hashfiles:
recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
elif not hash1 in hashfiles:
recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1))
diff --git a/poky/bitbake/bin/bitbake-worker b/poky/bitbake/bin/bitbake-worker
index bf96207edc..3aaf3c2444 100755
--- a/poky/bitbake/bin/bitbake-worker
+++ b/poky/bitbake/bin/bitbake-worker
@@ -152,6 +152,10 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
fakeenv = {}
umask = None
+ uid = os.getuid()
+ gid = os.getgid()
+
+
taskdep = workerdata["taskdeps"][fn]
if 'umask' in taskdep and taskname in taskdep['umask']:
umask = taskdep['umask'][taskname]
@@ -257,6 +261,10 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
+ if not the_data.getVarFlag(taskname, 'network', False):
+ logger.debug("Attempting to disable network")
+ bb.utils.disable_network(uid, gid)
+
# exported_vars() returns a generator which *cannot* be passed to os.environ.update()
# successfully. We also need to unset anything from the environment which shouldn't be there
exports = bb.data.exported_vars(the_data)
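The two hunks above capture the parent's uid/gid before forking and later drop network access for any task that does not set the 'network' varflag. A minimal sketch of how such a helper can work on Linux, using unshare() with fresh user and network namespaces (this mirrors the idea behind bb.utils.disable_network; the real implementation may differ):

    import ctypes
    import os

    CLONE_NEWUSER = 0x10000000  # from <sched.h>
    CLONE_NEWNET = 0x40000000

    def disable_network_sketch(uid, gid):
        libc = ctypes.CDLL("libc.so.6", use_errno=True)
        # A fresh network namespace contains only a downed loopback
        # device, so the process loses outside network access.
        if libc.unshare(CLONE_NEWUSER | CLONE_NEWNET) != 0:
            return  # kernel/config forbids it; keep running with network
        # Map the original uid/gid back into the new user namespace so
        # file ownership inside the task still looks unchanged. Note that
        # setgroups must be set to "deny" before gid_map can be written.
        with open("/proc/self/uid_map", "w") as f:
            f.write("%s %s 1" % (uid, uid))
        with open("/proc/self/setgroups", "w") as f:
            f.write("deny")
        with open("/proc/self/gid_map", "w") as f:
            f.write("%s %s 1" % (gid, gid))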
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
index a4b1efbe8b..7b37f6615a 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
@@ -435,7 +435,7 @@ BitBake writes a shell script to
executes the script. The generated shell script contains all the
exported variables, and the shell functions with all variables expanded.
Output from the shell script goes to the file
-``${T}/log.do_taskname.pid``. Looking at the expanded shell functions in
+``${``\ :term:`T`\ ``}/log.do_taskname.pid``. Looking at the expanded shell functions in
the run file and the output in the log files is a useful debugging
technique.
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
index 4396830a2f..77384cfdc7 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
@@ -84,18 +84,18 @@ fetcher does know how to use HTTP as a transport.
Here are some examples that show commonly used mirror definitions::
PREMIRRORS ?= "\
- bzr://.*/.\* http://somemirror.org/sources/ \\n \
- cvs://.*/.\* http://somemirror.org/sources/ \\n \
- git://.*/.\* http://somemirror.org/sources/ \\n \
- hg://.*/.\* http://somemirror.org/sources/ \\n \
- osc://.*/.\* http://somemirror.org/sources/ \\n \
- p4://.*/.\* http://somemirror.org/sources/ \\n \
- svn://.*/.\* http://somemirror.org/sources/ \\n"
+ bzr://.*/.\* http://somemirror.org/sources/ \
+ cvs://.*/.\* http://somemirror.org/sources/ \
+ git://.*/.\* http://somemirror.org/sources/ \
+ hg://.*/.\* http://somemirror.org/sources/ \
+ osc://.*/.\* http://somemirror.org/sources/ \
+ p4://.*/.\* http://somemirror.org/sources/ \
+ svn://.*/.\* http://somemirror.org/sources/"
MIRRORS =+ "\
- ftp://.*/.\* http://somemirror.org/sources/ \\n \
- http://.*/.\* http://somemirror.org/sources/ \\n \
- https://.*/.\* http://somemirror.org/sources/ \\n"
+ ftp://.*/.\* http://somemirror.org/sources/ \
+ http://.*/.\* http://somemirror.org/sources/ \
+ https://.*/.\* http://somemirror.org/sources/"
It is useful to note that BitBake
supports cross-URLs. It is possible to mirror a Git repository on an
@@ -167,6 +167,9 @@ govern the behavior of the unpack stage:
- *dos:* Applies to ``.zip`` and ``.jar`` files and specifies whether
to use DOS line ending conversion on text files.
+- *striplevel:* Strips the specified number of leading components (levels)
+  from file names on extraction (see the example after this list).
+
- *subdir:* Unpacks the specific URL to the specified subdirectory
within the root directory.
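As a hypothetical use of the new striplevel option (URL and tarball layout are made up), a tarball whose contents sit one directory deeper than expected could be flattened on unpack:

    SRC_URI = "https://example.com/downloads/foo-1.0.tar.gz;striplevel=1"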
@@ -226,6 +229,11 @@ downloaded file is useful for avoiding collisions in
:term:`DL_DIR` when dealing with multiple files that
have the same name.
+If a username and password are specified in the ``SRC_URI``, a Basic
+Authorization header will be added to each request, including across redirects.
+To instead limit the Authorization header to the first request, add
+"redirectauth=0" to the list of parameters.
+
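A hypothetical entry that opts out of the new default (host and credentials are placeholders):

    SRC_URI = "https://user:pass@example.com/file.tar.gz;redirectauth=0"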
Some example URLs are as follows::
SRC_URI = "http://oe.handhelds.org/not_there.aac"
@@ -388,6 +396,19 @@ This fetcher supports the following parameters:
protocol is "file". You can also use "http", "https", "ssh" and
"rsync".
+ .. note::
+
+ When ``protocol`` is "ssh", the URL expected in :term:`SRC_URI` differs
+ from the one that is typically passed to ``git clone`` command and provided
+ by the Git server to fetch from. For example, the URL returned by GitLab
+ server for ``mesa`` when cloning over SSH is
+ ``git@gitlab.freedesktop.org:mesa/mesa.git``, however the expected URL in
+ :term:`SRC_URI` is the following::
+
+ SRC_URI = "git://git@gitlab.freedesktop.org/mesa/mesa.git;protocol=ssh;..."
+
+ Note that the ``:`` character is changed to a ``/`` before the path to the project.
+
- *"nocheckout":* Tells the fetcher to not checkout source code when
unpacking when set to "1". Set this option for the URL where there is
a custom routine to checkout code. The default is "0".
@@ -438,6 +459,7 @@ Here are some example URLs::
SRC_URI = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
SRC_URI = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
+ SRC_URI = "git://git@gitlab.freedesktop.org/mesa/mesa.git;protocol=ssh;..."
.. note::
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
index 42263cef3a..1c31c1f9e8 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
@@ -537,7 +537,7 @@ current working directory:
- ``pn-buildlist``: Shows a simple list of targets that are to be
built.
-To stop depending on common depends, use the "-I" depend option and
+To stop depending on common depends, use the ``-I`` depend option and
BitBake omits them from the graph. Leaving this information out can
produce more readable graphs. This way, you can remove from the graph
:term:`DEPENDS` from inherited classes such as ``base.bbclass``.
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
index 119720d527..8496e1da53 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
@@ -104,15 +104,15 @@ Line Joining
Outside of :ref:`functions <bitbake-user-manual/bitbake-user-manual-metadata:functions>`,
BitBake joins any line ending in
-a backslash character ("\") with the following line before parsing
-statements. The most common use for the "\" character is to split
+a backslash character ("\\") with the following line before parsing
+statements. The most common use for the "\\" character is to split
variable assignments over multiple lines, as in the following example::
FOO = "bar \
baz \
qaz"
-Both the "\" character and the newline
+Both the "\\" character and the newline
character that follow it are removed when joining lines. Thus, no
newline characters end up in the value of ``FOO``.
@@ -125,7 +125,7 @@ Consider this additional example where the two assignments both assign
.. note::
- BitBake does not interpret escape sequences like "\n" in variable
+ BitBake does not interpret escape sequences like "\\n" in variable
values. For these to have an effect, the value must be passed to some
utility that interprets escape sequences, such as
``printf`` or ``echo -n``.
@@ -159,7 +159,7 @@ behavior::
C = "qux"
*At this point, ${A} equals "qux bar baz"*
B = "norf"
- *At this point, ${A} equals "norf baz"\*
+ *At this point, ${A} equals "norf baz"*
Contrast this behavior with the
:ref:`bitbake-user-manual/bitbake-user-manual-metadata:immediate variable
@@ -894,7 +894,7 @@ Regardless of the type of function, you can only define them in class
Shell Functions
---------------
-Functions written in shell script and executed either directly as
+Functions written in shell script are executed either directly as
functions, tasks, or both. They can also be called by other shell
functions. Here is an example shell function definition::
@@ -944,7 +944,7 @@ Running ``do_foo`` prints the following::
Overrides and override-style operators can be applied to any shell
function, not just :ref:`tasks <bitbake-user-manual/bitbake-user-manual-metadata:tasks>`.
-You can use the ``bitbake -e`` recipename command to view the final
+You can use the ``bitbake -e recipename`` command to view the final
assembled function after all overrides have been applied.
BitBake-Style Python Functions
@@ -996,7 +996,7 @@ Running ``do_foo`` prints the following::
recipename do_foo: second
recipename do_foo: third
-You can use the ``bitbake -e`` recipename command to view
+You can use the ``bitbake -e recipename`` command to view
the final assembled function after all overrides have been applied.
Python Functions
@@ -1921,12 +1921,6 @@ The following list describes related variables:
Specifies a function BitBake calls that determines whether BitBake
requires a setscene dependency to be met.
-- :term:`BB_STAMP_POLICY`: Defines the mode
- for comparing timestamps of stamp files.
-
-- :term:`BB_STAMP_WHITELIST`: Lists stamp
- files that are looked at when the stamp policy is "whitelist".
-
- :term:`BB_TASKHASH`: Within an executing task,
this variable holds the hash of the task as returned by the currently
enabled signature generator.
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
index e955beb130..1bb55fc501 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
@@ -374,12 +374,35 @@ overview of their function and contents.
Specifies the Hash Equivalence server to use.
If set to ``auto``, BitBake automatically starts its own server
- over a UNIX domain socket.
+ over a UNIX domain socket. This server can optionally be connected
+ to an upstream one by setting :term:`BB_HASHSERVE_UPSTREAM`.
- If set to ``host:port``, BitBake will use a remote server on the
+ If set to ``unix://path``, BitBake will connect to an existing
+ hash server available over a UNIX domain socket.
+
+ If set to ``host:port``, BitBake will connect to a remote server on the
specified host. This allows multiple clients to share the same
hash equivalence data.
+ The remote server can be started manually through
+ the ``bin/bitbake-hashserv`` script provided by BitBake,
+ which supports UNIX domain sockets too. This script can also
+ start the server in read-only mode, to avoid accepting
+ equivalences that correspond to Shared State caches that are
+ only available on specific clients.
+
+ :term:`BB_HASHSERVE_UPSTREAM`
+ Specifies an upstream Hash Equivalence server.
+
+ This optional setting is only useful when a local Hash Equivalence
+ server is started (setting :term:`BB_HASHSERVE` to ``auto``),
+ and you wish the local server to query an upstream server for
+ Hash Equivalence data.
+
+ Example usage::
+
+ BB_HASHSERVE_UPSTREAM = "typhoon.yocto.io:8687"
+
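As an illustration, a standalone read-only server matching the description above might be started like this (flag names are assumptions; check ``bitbake-hashserv --help`` for your release):

    bitbake-hashserv --bind unix://$BUILDDIR/hashserve.sock --read-only
    BB_HASHSERVE = "unix://$BUILDDIR/hashserve.sock"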
:term:`BB_INVALIDCONF`
Used in combination with the ``ConfigParsed`` event to trigger
re-parsing the base metadata (i.e. all the recipes). The
@@ -526,29 +549,6 @@ overview of their function and contents.
- *clear* - Queries the source controls system every time. With this
policy, there is no cache. The "clear" policy is the default.
- :term:`BB_STAMP_POLICY`
- Defines the mode used for how timestamps of stamp files are compared.
- You can set the variable to one of the following modes:
-
- - *perfile* - Timestamp comparisons are only made between timestamps
- of a specific recipe. This is the default mode.
-
- - *full* - Timestamp comparisons are made for all dependencies.
-
- - *whitelist* - Identical to "full" mode except timestamp
- comparisons are made for recipes listed in the
- :term:`BB_STAMP_WHITELIST` variable.
-
- .. note::
-
- Stamp policies are largely obsolete with the introduction of
- setscene tasks.
-
- :term:`BB_STAMP_WHITELIST`
- Lists files whose stamp file timestamps are compared when the stamp
- policy mode is set to "whitelist". For information on stamp policies,
- see the :term:`BB_STAMP_POLICY` variable.
-
:term:`BB_STRICT_CHECKSUM`
Sets a more strict checksum mechanism for non-local URLs. Setting
this variable to a value causes BitBake to report an error if it
@@ -1333,67 +1333,103 @@ overview of their function and contents.
The list of source files - local or remote. This variable tells
BitBake which bits to pull for the build and how to pull them. For
example, if the recipe or append file needs to fetch a single tarball
- from the Internet, the recipe or append file uses a :term:`SRC_URI` entry
- that specifies that tarball. On the other hand, if the recipe or
- append file needs to fetch a tarball and include a custom file, the
- recipe or append file needs an :term:`SRC_URI` variable that specifies
- all those sources.
-
- The following list explains the available URI protocols:
+ from the Internet, the recipe or append file uses a :term:`SRC_URI`
+ entry that specifies that tarball. On the other hand, if the recipe or
+ append file needs to fetch a tarball, apply two patches, and include
+ a custom file, the recipe or append file needs an :term:`SRC_URI`
+ variable that specifies all those sources.
+
+ The following list explains the available URI protocols. URI
+ protocols are highly dependent on particular BitBake Fetcher
+ submodules. Depending on the fetcher BitBake uses, various URL
+ parameters are employed. For specifics on the supported Fetchers, see
+ the :ref:`bitbake-user-manual/bitbake-user-manual-fetching:fetchers`
+ section.
- - ``file://`` : Fetches files, which are usually files shipped
- with the metadata, from the local machine. The path is relative to
- the :term:`FILESPATH` variable.
+ - ``az://`` : Fetches files from an Azure Storage account using HTTPS.
- ``bzr://`` : Fetches files from a Bazaar revision control
repository.
- - ``git://`` : Fetches files from a Git revision control
+ - ``ccrc://`` - Fetches files from a ClearCase repository.
+
+ - ``cvs://`` : Fetches files from a CVS revision control
repository.
- - ``osc://`` : Fetches files from an OSC (OpenSUSE Build service)
- revision control repository.
+ - ``file://`` - Fetches files, which are usually files shipped
+ with the Metadata, from the local machine.
+ The path is relative to the :term:`FILESPATH`
+ variable. Thus, the build system searches, in order, from the
+ following directories, which are assumed to be subdirectories of
+ the directory in which the recipe file (``.bb``) or append file
+ (``.bbappend``) resides:
- - ``repo://`` : Fetches files from a repo (Git) repository.
+ - ``${BPN}`` - The base recipe name without any special suffix
+ or version numbers.
- - ``http://`` : Fetches files from the Internet using HTTP.
+ - ``${BP}`` - ``${BPN}-${PV}``. The base recipe name and
+ version but without any special package name suffix.
- - ``https://`` : Fetches files from the Internet using HTTPS.
+ - *files -* Files within a directory, which is named ``files``
+ and is also alongside the recipe or append file.
- ``ftp://`` : Fetches files from the Internet using FTP.
- - ``cvs://`` : Fetches files from a CVS revision control
+ - ``git://`` : Fetches files from a Git revision control
+ repository.
+
+ - ``gitsm://`` : Fetches submodules from a Git revision control
repository.
- ``hg://`` : Fetches files from a Mercurial (``hg``) revision
control repository.
+ - ``http://`` : Fetches files from the Internet using HTTP.
+
+ - ``https://`` : Fetches files from the Internet using HTTPS.
+
+ - ``npm://`` - Fetches JavaScript modules from a registry.
+
+ - ``osc://`` : Fetches files from an OSC (OpenSUSE Build service)
+ revision control repository.
+
- ``p4://`` : Fetches files from a Perforce (``p4``) revision
control repository.
+ - ``repo://`` : Fetches files from a repo (Git) repository.
+
- ``ssh://`` : Fetches files from a secure shell.
- ``svn://`` : Fetches files from a Subversion (``svn``) revision
control repository.
- - ``az://`` : Fetches files from an Azure Storage account using HTTPS.
-
Here are some additional options worth mentioning:
- - ``unpack`` : Controls whether or not to unpack the file if it is
- an archive. The default action is to unpack the file.
+ - ``downloadfilename`` : Specifies the filename used when storing
+ the downloaded file.
+
+ - ``name`` - Specifies a name to be used for association with
+ :term:`SRC_URI` checksums or :term:`SRCREV` when you have more than one
+ file or git repository specified in :term:`SRC_URI`. For example::
+
+ SRC_URI = "git://example.com/foo.git;name=first \
+ git://example.com/bar.git;name=second \
+ http://example.com/file.tar.gz;name=third"
+
+ SRCREV_first = "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15"
+ SRCREV_second = "e242ed3bffccdf271b7fbaf34ed72d089537b42f"
+ SRC_URI[third.sha256sum] = "13550350a8681c84c861aac2e5b440161c2b33a3e4f302ac680ca5b686de48de"
- ``subdir`` : Places the file (or extracts its contents) into the
specified subdirectory. This option is useful for unusual tarballs
or other archives that do not have their files already in a
subdirectory within the archive.
- - ``name`` : Specifies a name to be used for association with
- :term:`SRC_URI` checksums when you have more than one file specified
- in :term:`SRC_URI`.
+ - ``subpath`` - Limits the checkout to a specific subpath of the
+ tree when the Git fetcher is used.
- - ``downloadfilename`` : Specifies the filename used when storing
- the downloaded file.
+ - ``unpack`` : Controls whether or not to unpack the file if it is
+ an archive. The default action is to unpack the file.
:term:`SRCDATE`
The date of the source code used to build the package. This variable
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index d6418e40b3..65b7fc000d 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -714,19 +714,23 @@ def _exec_task(fn, task, d, quieterr):
logger.debug2("Zero size logfn %s, removing", logfn)
bb.utils.remove(logfn)
bb.utils.remove(loglink)
- except bb.BBHandledException:
- event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata)
- return 1
except (Exception, SystemExit) as exc:
+ handled = False
+ if isinstance(exc, bb.BBHandledException):
+ handled = True
+
if quieterr:
+ if not handled:
+ logger.warning(repr(exc))
event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
else:
errprinted = errchk.triggered
# If the output is already on stdout, we've printed the information in the
# logs once already so don't duplicate
- if verboseStdoutLogging:
+ if verboseStdoutLogging or handled:
errprinted = True
- logger.error(repr(exc))
+ if not handled:
+ logger.error(repr(exc))
event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
return 1
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 4e08c100ab..fcb15796cc 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -284,36 +284,15 @@ def parse_recipe(bb_data, bbfile, appends, mc=''):
Parse a recipe
"""
- chdir_back = False
-
bb_data.setVar("__BBMULTICONFIG", mc)
- # expand tmpdir to include this topdir
- bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
- oldpath = os.path.abspath(os.getcwd())
bb.parse.cached_mtime_noerror(bbfile_loc)
- # The ConfHandler first looks if there is a TOPDIR and if not
- # then it would call getcwd().
- # Previously, we chdir()ed to bbfile_loc, called the handler
- # and finally chdir()ed back, a couple of thousand times. We now
- # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
- if not bb_data.getVar('TOPDIR', False):
- chdir_back = True
- bb_data.setVar('TOPDIR', bbfile_loc)
- try:
- if appends:
- bb_data.setVar('__BBAPPEND', " ".join(appends))
- bb_data = bb.parse.handle(bbfile, bb_data)
- if chdir_back:
- os.chdir(oldpath)
- return bb_data
- except:
- if chdir_back:
- os.chdir(oldpath)
- raise
-
+ if appends:
+ bb_data.setVar('__BBAPPEND', " ".join(appends))
+ bb_data = bb.parse.handle(bbfile, bb_data)
+ return bb_data
class NoCache(object):
diff --git a/poky/bitbake/lib/bb/checksum.py b/poky/bitbake/lib/bb/checksum.py
index 1d50a26426..fb8a77f6ab 100644
--- a/poky/bitbake/lib/bb/checksum.py
+++ b/poky/bitbake/lib/bb/checksum.py
@@ -50,6 +50,7 @@ class FileChecksumCache(MultiProcessCache):
MultiProcessCache.__init__(self)
def get_checksum(self, f):
+ f = os.path.normpath(f)
entry = self.cachedata[0].get(f)
cmtime = self.mtime_cache.cached_mtime(f)
if entry:
@@ -84,15 +85,24 @@ class FileChecksumCache(MultiProcessCache):
return None
return checksum
+ #
+ # Changing the format of file-checksums is problematic as both OE and Bitbake have
+ # knowledge of them. We need to encode a new piece of data, the portion of the path
+ # we care about from a checksum perspective. This means that files that change subdirectory
+ # are tracked by the task hashes. To do this, we do something horrible and put a "/./" into
+ # the path. The filesystem handles it but it gives us a marker to know which subsection
+ # of the path to cache.
+ #
def checksum_dir(pth):
# Handle directories recursively
if pth == "/":
bb.fatal("Refusing to checksum /")
+ pth = pth.rstrip("/")
dirchecksums = []
for root, dirs, files in os.walk(pth, topdown=True):
[dirs.remove(d) for d in list(dirs) if d in localdirsexclude]
for name in files:
- fullpth = os.path.join(root, name)
+ fullpth = os.path.join(root, name).replace(pth, os.path.join(pth, "."))
checksum = checksum_file(fullpth)
if checksum:
dirchecksums.append((fullpth, checksum))
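A small illustration of the "/./" marker described in the comment above; the replace() turns the walked path into one where everything after the marker is the sub-path the cache cares about (directory and file names are made up):

    import os

    pth = "/srv/files".rstrip("/")   # hypothetical argument to checksum_dir()
    fullpth = os.path.join(pth, "sub", "a.txt").replace(pth, os.path.join(pth, "."))
    print(fullpth)                   # -> /srv/files/./sub/a.txt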
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index af794b4c42..08e45e79d0 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -388,12 +388,22 @@ class BBCooker:
# Create a new hash server bound to a unix domain socket
if not self.hashserv:
dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
+ upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
+ if upstream:
+ import socket
+ try:
+ sock = socket.create_connection(upstream.split(":"), 5)
+ sock.close()
+ except socket.error as e:
+ bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
+ % (upstream, repr(e)))
+
self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
self.hashserv = hashserv.create_server(
self.hashservaddr,
dbfile,
sync=False,
- upstream=self.data.getVar("BB_HASHSERVE_UPSTREAM") or None,
+ upstream=upstream,
)
self.hashserv.serve_as_process()
self.data.setVar("BB_HASHSERVE", self.hashservaddr)
@@ -804,7 +814,9 @@ class BBCooker:
for dep in rq.rqdata.runtaskentries[tid].depends:
(depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
- depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
+ if depmc:
+ depmc = "mc:" + depmc + ":"
+ depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
if taskfn not in seen_fns:
seen_fns.append(taskfn)
packages = []
@@ -1656,7 +1668,7 @@ class BBCooker:
# Return a copy, don't modify the original
pkgs_to_build = pkgs_to_build[:]
- if len(pkgs_to_build) == 0:
+ if not pkgs_to_build:
raise NothingToBuild
ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
@@ -1795,10 +1807,10 @@ class CookerCollectFiles(object):
files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
config.setVar("BBFILES_PRIORITIZED", " ".join(files))
- if not len(files):
+ if not files:
files = self.get_bbfiles()
- if not len(files):
+ if not files:
collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
bb.event.fire(CookerExit(), eventdata)
@@ -2208,21 +2220,33 @@ class CookerParser(object):
yield not cached, mc, infos
def parse_generator(self):
- while True:
+ empty = False
+ while self.processes or not empty:
+ for process in self.processes.copy():
+ if not process.is_alive():
+ process.join()
+ self.processes.remove(process)
+
if self.parsed >= self.toparse:
break
try:
result = self.result_queue.get(timeout=0.25)
except queue.Empty:
+ empty = True
pass
else:
+ empty = False
value = result[1]
if isinstance(value, BaseException):
raise value
else:
yield result
+ if not (self.parsed >= self.toparse):
+ raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
+
+
def parse_next(self):
result = []
parsed = None
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index ba657c03b6..397b43dfa7 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -86,7 +86,7 @@ class ConfigParameters(object):
action['msg'] = "Only one target can be used with the --environment option."
elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
action['msg'] = "No target should be used with the --environment and --buildfile options."
- elif len(self.options.pkgs_to_build) > 0:
+ elif self.options.pkgs_to_build:
action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
else:
action['action'] = ["showEnvironment", self.options.buildfile]
@@ -210,7 +210,7 @@ def findConfigFile(configfile, data):
#
# We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working
-# up to /. If that fails, we search for a conf/bitbake.conf in BBPATH.
+# up to /. If that fails, bitbake falls back to the current working directory.
#
def findTopdir():
@@ -223,11 +223,8 @@ def findTopdir():
layerconf = findConfigFile("bblayers.conf", d)
if layerconf:
return os.path.dirname(os.path.dirname(layerconf))
- if bbpath:
- bitbakeconf = bb.utils.which(bbpath, "conf/bitbake.conf")
- if bitbakeconf:
- return os.path.dirname(os.path.dirname(bitbakeconf))
- return None
+
+ return os.path.abspath(os.getcwd())
class CookerDataBuilder(object):
@@ -417,6 +414,9 @@ class CookerDataBuilder(object):
" invoked bitbake from the wrong directory?")
raise SystemExit(msg)
+ if not data.getVar("TOPDIR"):
+ data.setVar("TOPDIR", os.path.abspath(os.getcwd()))
+
data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
# Parse files for loading *after* bitbake.conf and any includes
diff --git a/poky/bitbake/lib/bb/daemonize.py b/poky/bitbake/lib/bb/daemonize.py
index c187fcfc6c..40fabd0c0a 100644
--- a/poky/bitbake/lib/bb/daemonize.py
+++ b/poky/bitbake/lib/bb/daemonize.py
@@ -74,26 +74,26 @@ def createDaemon(function, logfile):
with open('/dev/null', 'r') as si:
os.dup2(si.fileno(), sys.stdin.fileno())
- try:
- so = open(logfile, 'a+')
- os.dup2(so.fileno(), sys.stdout.fileno())
- os.dup2(so.fileno(), sys.stderr.fileno())
- except io.UnsupportedOperation:
- sys.stdout = open(logfile, 'a+')
+ with open(logfile, 'a+') as so:
+ try:
+ os.dup2(so.fileno(), sys.stdout.fileno())
+ os.dup2(so.fileno(), sys.stderr.fileno())
+ except io.UnsupportedOperation:
+ sys.stdout = so
- # Have stdout and stderr be the same so log output matches chronologically
- # and there aren't two seperate buffers
- sys.stderr = sys.stdout
+ # Have stdout and stderr be the same so log output matches chronologically
+ # and there aren't two separate buffers
+ sys.stderr = sys.stdout
- try:
- function()
- except Exception as e:
- traceback.print_exc()
- finally:
- bb.event.print_ui_queue()
- # os._exit() doesn't flush open files like os.exit() does. Manually flush
- # stdout and stderr so that any logging output will be seen, particularly
- # exception tracebacks.
- sys.stdout.flush()
- sys.stderr.flush()
- os._exit(0)
+ try:
+ function()
+ except Exception as e:
+ traceback.print_exc()
+ finally:
+ bb.event.print_ui_queue()
+ # os._exit() doesn't flush open files like os.exit() does. Manually flush
+ # stdout and stderr so that any logging output will be seen, particularly
+ # exception tracebacks.
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(0)
diff --git a/poky/bitbake/lib/bb/data.py b/poky/bitbake/lib/bb/data.py
index 9d18b1e2bf..ee5557abfa 100644
--- a/poky/bitbake/lib/bb/data.py
+++ b/poky/bitbake/lib/bb/data.py
@@ -285,21 +285,19 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
vardeps = varflags.get("vardeps")
def handle_contains(value, contains, d):
- newvalue = ""
+ newvalue = []
+ if value:
+ newvalue.append(str(value))
for k in sorted(contains):
l = (d.getVar(k) or "").split()
for item in sorted(contains[k]):
for word in item.split():
if not word in l:
- newvalue += "\n%s{%s} = Unset" % (k, item)
+ newvalue.append("\n%s{%s} = Unset" % (k, item))
break
else:
- newvalue += "\n%s{%s} = Set" % (k, item)
- if not newvalue:
- return value
- if not value:
- return newvalue
- return value + newvalue
+ newvalue.append("\n%s{%s} = Set" % (k, item))
+ return "".join(newvalue)
def handle_remove(value, deps, removes, d):
for r in sorted(removes):
@@ -406,7 +404,9 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
if data is None:
bb.error("Task %s from %s seems to be empty?!" % (task, fn))
- data = ''
+ data = []
+ else:
+ data = [data]
gendeps[task] -= whitelist
newdeps = gendeps[task]
@@ -424,12 +424,12 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
alldeps = sorted(seen)
for dep in alldeps:
- data = data + dep
+ data.append(dep)
var = lookupcache[dep]
if var is not None:
- data = data + str(var)
+ data.append(str(var))
k = fn + ":" + task
- basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest()
+ basehash[k] = hashlib.sha256("".join(data).encode("utf-8")).hexdigest()
taskdeps[task] = alldeps
return taskdeps, basehash
diff --git a/poky/bitbake/lib/bb/data_smart.py b/poky/bitbake/lib/bb/data_smart.py
index 8d235da121..543372d153 100644
--- a/poky/bitbake/lib/bb/data_smart.py
+++ b/poky/bitbake/lib/bb/data_smart.py
@@ -492,7 +492,7 @@ class DataSmart(MutableMapping):
def setVar(self, var, value, **loginfo):
#print("var=" + str(var) + " val=" + str(value))
- if "_append" in var or "_prepend" in var or "_remove" in var:
+ if not var.startswith("__anon_") and ("_append" in var or "_prepend" in var or "_remove" in var):
info = "%s" % var
if "filename" in loginfo:
info += " file: %s" % loginfo[filename]
@@ -810,7 +810,7 @@ class DataSmart(MutableMapping):
expanded_removes[r] = self.expand(r).split()
parser.removes = set()
- val = ""
+ val = []
for v in __whitespace_split__.split(parser.value):
skip = False
for r in removes:
@@ -819,8 +819,8 @@ class DataSmart(MutableMapping):
skip = True
if skip:
continue
- val = val + v
- parser.value = val
+ val.append(v)
+ parser.value = "".join(val)
if expand:
value = parser.value
diff --git a/poky/bitbake/lib/bb/fetch2/README b/poky/bitbake/lib/bb/fetch2/README
new file mode 100644
index 0000000000..67b787ef47
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/README
@@ -0,0 +1,57 @@
+There are expectations of users of the fetcher code. This file attempts to document
+some of the constraints that are present. Some are obvious, some are less so. It is
+documented in the context of how OE uses it but the API calls are generic.
+
+a) network access for sources is only expected to happen in the do_fetch task.
+ This is not enforced or tested but is required so that we can:
+
+ i) audit the sources used (i.e. for license/manifest reasons)
+ ii) support offline builds with a suitable cache
+ iii) allow work to continue even with downtime upstream
+ iv) allow for changes upstream in incompatible ways
+ v) allow rebuilding of the software in X years time
+
+b) network access is not expected in do_unpack task.
+
+c) you can take DL_DIR and use it as a mirror for offline builds.
+
+d) access to the network is only made when explicitly configured in recipes
+ (e.g. use of AUTOREV, or use of git tags which change revision).
+
+e) fetcher output is deterministic (i.e. if you fetch configuration XXX now it
+ will match in future exactly in a clean build with a new DL_DIR).
+ One specific pain point example is git tags. They can be replaced and change
+ so the git fetcher has to resolve them with the network. We use git revisions
+ where possible to avoid this and ensure determinism.
+
+f) network access is expected to work with the standard linux proxy variables
+ so that access behind firewalls works (the fetcher sets these in the
+ environment but only in the do_fetch tasks).
+
+g) access during parsing has to be minimal, a "git ls-remote" for an AUTOREV
+ git recipe might be ok but you can't expect to checkout a git tree.
+
+h) we need to provide revision information during parsing such that a version
+ for the recipe can be constructed.
+
+i) versions are expected to be able to increase in a way which sorts allowing
+ package feeds to operate (see PR server required for git revisions to sort).
+
+j) an API to query for possible version upgrades of a url is highly desirable to
+ allow our automated upgrade code to function (it is implied this always
+ has network access).
+
+k) Where fixes or changes to behaviour in the fetcher are made, we ask that
+ test cases are added (run with "bitbake-selftest bb.tests.fetch"). We do
+ have fairly extensive test coverage of the fetcher as it is the only way
+ to track all of its corner cases, though sadly it still doesn't give
+ complete coverage.
+
+l) If using tools during parse time, they will have to be in ASSUME_PROVIDED
+ in OE's context as we can't build git-native, then parse a recipe and use
+ git ls-remote.
+
+Not all fetchers support all features; autorev is optional and doesn't make
+sense for some. Upgrade detection means different things in different contexts
+too.
+
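A minimal sketch of point (c) in OE terms, assuming the own-mirrors class and SOURCE_MIRROR_URL from openembedded-core (these names are OE's, not BitBake's):

    SOURCE_MIRROR_URL = "file:///path/to/shared/downloads"
    INHERIT += "own-mirrors"
    BB_NO_NETWORK = "1"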
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 666cc1306a..d37174185a 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -402,24 +402,24 @@ def encodeurl(decoded):
if not type:
raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
- url = '%s://' % type
+ url = ['%s://' % type]
if user and type != "file":
- url += "%s" % user
+ url.append("%s" % user)
if pswd:
- url += ":%s" % pswd
- url += "@"
+ url.append(":%s" % pswd)
+ url.append("@")
if host and type != "file":
- url += "%s" % host
+ url.append("%s" % host)
if path:
# Standardise path to ensure comparisons work
while '//' in path:
path = path.replace("//", "/")
- url += "%s" % urllib.parse.quote(path)
+ url.append("%s" % urllib.parse.quote(path))
if p:
for parm in p:
- url += ";%s=%s" % (parm, p[parm])
+ url.append(";%s=%s" % (parm, p[parm]))
- return url
+ return "".join(url)
def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
if not ud.url or not uri_find or not uri_replace:
@@ -430,6 +430,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
uri_replace_decoded = list(decodeurl(uri_replace))
logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
result_decoded = ['', '', '', '', '', {}]
+ # 0 - type, 1 - host, 2 - path, 3 - user, 4 - pswd, 5 - params
for loc, i in enumerate(uri_find_decoded):
result_decoded[loc] = uri_decoded[loc]
regexp = i
@@ -449,6 +450,9 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
for l in replacements:
uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
result_decoded[loc][k] = uri_replace_decoded[loc][k]
+ elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
+ # User/password in the replacement is just a straight replacement
+ result_decoded[loc] = uri_replace_decoded[loc]
elif (re.match(regexp, uri_decoded[loc])):
if not uri_replace_decoded[loc]:
result_decoded[loc] = ""
@@ -466,9 +470,13 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
# Kill parameters, they make no sense for mirror tarballs
uri_decoded[5] = {}
elif ud.localpath and ud.method.supports_checksum(ud):
- basename = os.path.basename(uri_decoded[loc])
- if basename and not result_decoded[loc].endswith(basename):
- result_decoded[loc] = os.path.join(result_decoded[loc], basename)
+ basename = os.path.basename(ud.localpath)
+ if basename:
+ uri_basename = os.path.basename(uri_decoded[loc])
+ if uri_basename and basename != uri_basename and result_decoded[loc].endswith(uri_basename):
+ result_decoded[loc] = result_decoded[loc].replace(uri_basename, basename)
+ elif not result_decoded[loc].endswith(basename):
+ result_decoded[loc] = os.path.join(result_decoded[loc], basename)
else:
return None
result = encodeurl(result_decoded)
@@ -766,7 +774,7 @@ def get_srcrev(d, method_name='sortable_revision'):
if urldata[u].method.supports_srcrev():
scms.append(u)
- if len(scms) == 0:
+ if not scms:
raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
@@ -1450,30 +1458,33 @@ class FetchMethod(object):
cmd = None
if unpack:
+ tar_cmd = 'tar --extract --no-same-owner'
+ if 'striplevel' in urldata.parm:
+ tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel']
if file.endswith('.tar'):
- cmd = 'tar x --no-same-owner -f %s' % file
+ cmd = '%s -f %s' % (tar_cmd, file)
elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
- cmd = 'tar xz --no-same-owner -f %s' % file
+ cmd = '%s -z -f %s' % (tar_cmd, file)
elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
- cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
+ cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
cmd = 'gzip -dc %s > %s' % (file, efile)
elif file.endswith('.bz2'):
cmd = 'bzip2 -dc %s > %s' % (file, efile)
elif file.endswith('.txz') or file.endswith('.tar.xz'):
- cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
+ cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.xz'):
cmd = 'xz -dc %s > %s' % (file, efile)
elif file.endswith('.tar.lz'):
- cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+ cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.lz'):
cmd = 'lzip -dc %s > %s' % (file, efile)
elif file.endswith('.tar.7z'):
- cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
+ cmd = '7z x -so %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.7z'):
cmd = '7za x -y %s 1>/dev/null' % file
elif file.endswith('.tzst') or file.endswith('.tar.zst'):
- cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file
+ cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.zst'):
cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
elif file.endswith('.zip') or file.endswith('.jar'):
@@ -1506,7 +1517,7 @@ class FetchMethod(object):
raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
else:
raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
- cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
+ cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
# If 'subdir' param exists, create a dir and use it as destination for unpack cmd
if 'subdir' in urldata.parm:
@@ -1632,7 +1643,7 @@ class Fetch(object):
if localonly and cache:
raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
- if len(urls) == 0:
+ if not urls:
urls = d.getVar("SRC_URI").split()
self.urls = urls
self.d = d
@@ -1931,6 +1942,7 @@ from . import clearcase
from . import npm
from . import npmsw
from . import az
+from . import crate
methods.append(local.Local())
methods.append(wget.Wget())
@@ -1951,3 +1963,4 @@ methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())
+methods.append(crate.Crate())
diff --git a/poky/bitbake/lib/bb/fetch2/crate.py b/poky/bitbake/lib/bb/fetch2/crate.py
new file mode 100644
index 0000000000..f7e2354afb
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/crate.py
@@ -0,0 +1,137 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for crates.io
+"""
+
+# Copyright (C) 2016 Doug Goldstein
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import hashlib
+import json
+import os
+import shutil
+import subprocess
+import bb
+from bb.fetch2 import logger, subprocess_setup, UnpackError
+from bb.fetch2.wget import Wget
+
+
+class Crate(Wget):
+
+ """Class to fetch crates via wget"""
+
+ def _cargo_bitbake_path(self, rootdir):
+ return os.path.join(rootdir, "cargo_home", "bitbake")
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url is for this fetcher
+ """
+ return ud.type in ['crate']
+
+ def recommends_checksum(self, urldata):
+ return False
+
+ def urldata_init(self, ud, d):
+ """
+ Sets up to download the respective crate from crates.io
+ """
+
+ if ud.type == 'crate':
+ self._crate_urldata_init(ud, d)
+
+ super(Crate, self).urldata_init(ud, d)
+
+ def _crate_urldata_init(self, ud, d):
+ """
+ Sets up the download for a crate
+ """
+
+ # URL syntax is: crate://HOST/NAME/VERSION
+ # break the URL apart by /
+ parts = ud.url.split('/')
+ if len(parts) < 5:
+ raise bb.fetch2.ParameterError("Invalid URL: Must be crate://HOST/NAME/VERSION", ud.url)
+
+ # last field is version
+ version = parts[len(parts) - 1]
+ # second to last field is name
+ name = parts[len(parts) - 2]
+ # host (this is to allow custom crate registries to be specified)
+ host = '/'.join(parts[2:len(parts) - 2])
+
+ # if using upstream just fix it up nicely
+ if host == 'crates.io':
+ host = 'crates.io/api/v1/crates'
+
+ ud.url = "https://%s/%s/%s/download" % (host, name, version)
+ ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
+ ud.parm['name'] = name
+
+ logger.debug(2, "Fetching %s to %s" % (ud.url, ud.parm['downloadfilename']))
+
+ def unpack(self, ud, rootdir, d):
+ """
+ Uses the crate to build the necessary paths for cargo to utilize it
+ """
+ if ud.type == 'crate':
+ return self._crate_unpack(ud, rootdir, d)
+ else:
+ super(Crate, self).unpack(ud, rootdir, d)
+
+ def _crate_unpack(self, ud, rootdir, d):
+ """
+ Unpacks a crate
+ """
+ thefile = ud.localpath
+
+ # possible metadata we need to write out
+ metadata = {}
+
+ # change to the rootdir to unpack but save the old working dir
+ save_cwd = os.getcwd()
+ os.chdir(rootdir)
+
+ pn = d.getVar('BPN')
+ if pn == ud.parm.get('name'):
+ cmd = "tar -xz --no-same-owner -f %s" % thefile
+ else:
+ cargo_bitbake = self._cargo_bitbake_path(rootdir)
+
+ cmd = "tar -xz --no-same-owner -f %s -C %s" % (thefile, cargo_bitbake)
+
+ # ensure we've got these paths made
+ bb.utils.mkdirhier(cargo_bitbake)
+
+ # generate metadata necessary
+ with open(thefile, 'rb') as f:
+ # get the SHA256 of the original tarball
+ tarhash = hashlib.sha256(f.read()).hexdigest()
+
+ metadata['files'] = {}
+ metadata['package'] = tarhash
+
+ path = d.getVar('PATH')
+ if path:
+ cmd = "PATH=\"%s\" %s" % (path, cmd)
+ bb.note("Unpacking %s to %s/" % (thefile, os.getcwd()))
+
+ ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
+
+ os.chdir(save_cwd)
+
+ if ret != 0:
+ raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
+
+ # if we have metadata to write out..
+ if len(metadata) > 0:
+ cratepath = os.path.splitext(os.path.basename(thefile))[0]
+ bbpath = self._cargo_bitbake_path(rootdir)
+ mdfile = '.cargo-checksum.json'
+ mdpath = os.path.join(bbpath, cratepath, mdfile)
+ with open(mdpath, "w") as f:
+ json.dump(metadata, f)
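A hypothetical recipe fragment using the new fetcher (crate name and version are placeholders); the URL form matches the crate://HOST/NAME/VERSION syntax parsed above:

    SRC_URI += "crate://crates.io/glob/0.2.11"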
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index e8ddf2c761..30da8e95b7 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -142,6 +142,11 @@ class Git(FetchMethod):
ud.proto = 'file'
else:
ud.proto = "git"
+ if ud.host == "github.com" and ud.proto == "git":
+ # github stopped supporting git protocol
+ # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
+ ud.proto = "https"
+ bb.warn("URL: %s uses git protocol which is no longer supported by github. Please change to ;protocol=https in the url." % ud.url)
if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
@@ -165,7 +170,10 @@ class Git(FetchMethod):
ud.nocheckout = 1
ud.unresolvedrev = {}
- branches = ud.parm.get("branch", "master").split(',')
+ branches = ud.parm.get("branch", "").split(',')
+ if branches == [""] and not ud.nobranch:
+ bb.warn("URL: %s does not set any branch parameter. The future default branch used by tools and repositories is uncertain and we will therefore soon require this is set in all git urls." % ud.url)
+ branches = ["master"]
if len(branches) != len(ud.names):
raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
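Taken together, the two warnings above push urls toward a fully explicit form such as the following (the repository is hypothetical):

    SRC_URI = "git://github.com/example/project.git;protocol=https;branch=main"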
@@ -516,13 +524,24 @@ class Git(FetchMethod):
def unpack(self, ud, destdir, d):
""" unpack the downloaded src to destdir"""
- subdir = ud.parm.get("subpath", "")
- if subdir != "":
- readpathspec = ":%s" % subdir
- def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
- else:
- readpathspec = ""
- def_destsuffix = "git/"
+ subdir = ud.parm.get("subdir")
+ subpath = ud.parm.get("subpath")
+ readpathspec = ""
+ def_destsuffix = "git/"
+
+ if subpath:
+ readpathspec = ":%s" % subpath
+ def_destsuffix = "%s/" % os.path.basename(subpath.rstrip('/'))
+
+ if subdir:
+ # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
+ if os.path.isabs(subdir):
+ if not os.path.realpath(subdir).startswith(os.path.realpath(destdir)):
+ raise bb.fetch2.UnpackError("subdir argument isn't a subdirectory of unpack root %s" % destdir, ud.url)
+ destdir = subdir
+ else:
+ destdir = os.path.join(destdir, subdir)
+ def_destsuffix = ""
destsuffix = ud.parm.get("destsuffix", def_destsuffix)
destdir = ud.destdir = os.path.join(destdir, destsuffix)
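A hedged illustration of the two parameters the rewritten unpack path now distinguishes (repository and paths are made up): subpath limits what is checked out of the tree, while subdir controls where the checkout lands under the unpack root:

    SRC_URI = "git://example.com/big.git;branch=main;subpath=tools"
    SRC_URI = "git://example.com/big.git;branch=main;subdir=sources/big"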
@@ -569,7 +588,7 @@ class Git(FetchMethod):
bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
if not ud.nocheckout:
- if subdir != "":
+ if subpath:
runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
workdir=destdir)
runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index a7110a988d..c5c23d5260 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -163,7 +163,7 @@ class GitSM(Git):
else:
self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
- if len(need_update_list) > 0:
+ if need_update_list:
logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
return True
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
index 3c41cb295c..b3a3a444ee 100644
--- a/poky/bitbake/lib/bb/fetch2/npm.py
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -72,7 +72,7 @@ def npm_unpack(tarball, destdir, d):
cmd += " --delay-directory-restore"
cmd += " --strip-components=1"
runfetchcmd(cmd, d, workdir=destdir)
- runfetchcmd("chmod -R +X %s" % (destdir), d, quiet=True, workdir=destdir)
+ runfetchcmd("chmod -R +X '%s'" % (destdir), d, quiet=True, workdir=destdir)
class NpmEnvironment(object):
"""
diff --git a/poky/bitbake/lib/bb/fetch2/npmsw.py b/poky/bitbake/lib/bb/fetch2/npmsw.py
index 426a139653..879ba5de0f 100644
--- a/poky/bitbake/lib/bb/fetch2/npmsw.py
+++ b/poky/bitbake/lib/bb/fetch2/npmsw.py
@@ -88,7 +88,11 @@ class NpmShrinkWrap(FetchMethod):
version = params.get("version", None)
# Handle registry sources
- if is_semver(version) and resolved and integrity:
+ if is_semver(version) and integrity:
+ # Handle duplicate dependencies without url
+ if not resolved:
+ return
+
localfile = npm_localfile(name, version)
uri = URI(resolved)
@@ -127,6 +131,8 @@ class NpmShrinkWrap(FetchMethod):
# Handle git sources
elif version.startswith("git"):
+ if version.startswith("github:"):
+ version = "git+https://github.com/" + version[len("github:"):]
regex = re.compile(r"""
^
git\+
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 349891e852..253cabce75 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -112,7 +112,17 @@ class Wget(FetchMethod):
fetchcmd += " -O %s" % shlex.quote(localpath)
if ud.user and ud.pswd:
- fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
+ fetchcmd += " --auth-no-challenge"
+ if ud.parm.get("redirectauth", "1") == "1":
+ # An undocumented feature of wget is that if the
+ # username/password are specified on the URI, wget will only
+ # send the Authorization header to the first host and not to
+ # any hosts that it is redirected to. With the increasing
+ # usage of temporary AWS URLs, this difference now matters as
+ # AWS will reject any request that has authentication both in
+ # the query parameters (from the redirect) and in the
+ # Authorization header.
+ fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd)
uri = ud.url.split(";")[0]
if os.path.exists(ud.localpath):
@@ -356,7 +366,7 @@ class Wget(FetchMethod):
except (TypeError, ImportError, IOError, netrc.NetrcParseError):
pass
- with opener.open(r) as response:
+ with opener.open(r, timeout=30) as response:
pass
except urllib.error.URLError as e:
if try_again:
@@ -583,7 +593,7 @@ class Wget(FetchMethod):
# src.rpm extension was added only for rpm package. Can be removed if the rpm
# packaged will always be considered as having to be manually upgraded
- psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+ psuffix_regex = r"(tar\.\w+|tgz|zip|xz|rpm|bz2|orig\.tar\.\w+|src\.tar\.\w+|src\.tgz|svnr\d+\.tar\.\w+|stable\.tar\.\w+|src\.rpm)"
# match name, version and archive type of a package
package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
diff --git a/poky/bitbake/lib/bb/parse/ast.py b/poky/bitbake/lib/bb/parse/ast.py
index 743ea0dfc0..31bcc8e7ac 100644
--- a/poky/bitbake/lib/bb/parse/ast.py
+++ b/poky/bitbake/lib/bb/parse/ast.py
@@ -130,6 +130,10 @@ class DataNode(AstNode):
else:
val = groupd["value"]
+ if ":append" in key or ":remove" in key or ":prepend" in key:
+ if op in ["append", "prepend", "postdot", "predot", "ques"]:
+ bb.warn(key + " " + groupd[op] + " is not a recommended operator combination, please replace it.")
+
flag = None
if 'flag' in groupd and groupd['flag'] is not None:
flag = groupd['flag']
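For instance, an assignment like the following would now be flagged, since it combines an override-style append with an operator; the usual replacement is a plain FOO:append = " bar":

    FOO:append += "bar"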
diff --git a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 0834fe3f9b..b895d5b5ef 100644
--- a/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -48,10 +48,7 @@ __unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$" )
def init(data):
- topdir = data.getVar('TOPDIR', False)
- if not topdir:
- data.setVar('TOPDIR', os.getcwd())
-
+ return
def supports(fn, d):
return fn[-5:] == ".conf"
diff --git a/poky/bitbake/lib/bb/process.py b/poky/bitbake/lib/bb/process.py
index d5a1775fce..af5d804a1d 100644
--- a/poky/bitbake/lib/bb/process.py
+++ b/poky/bitbake/lib/bb/process.py
@@ -60,7 +60,7 @@ class Popen(subprocess.Popen):
"close_fds": True,
"preexec_fn": subprocess_setup,
"stdout": subprocess.PIPE,
- "stderr": subprocess.STDOUT,
+ "stderr": subprocess.PIPE,
"stdin": subprocess.PIPE,
"shell": False,
}
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 10511a09dc..8ae3fe85f1 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -385,7 +385,6 @@ class RunQueueData:
self.rq = rq
self.warn_multi_bb = False
- self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or ""
self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData, targets)
self.setscenewhitelist_checked = False
@@ -547,7 +546,7 @@ class RunQueueData:
next_points.append(revdep)
task_done[revdep] = True
endpoints = next_points
- if len(next_points) == 0:
+ if not next_points:
break
# Circular dependency sanity check
@@ -589,7 +588,7 @@ class RunQueueData:
found = False
for mc in self.taskData:
- if len(taskData[mc].taskentries) > 0:
+ if taskData[mc].taskentries:
found = True
break
if not found:
@@ -773,7 +772,7 @@ class RunQueueData:
# Find the dependency chain endpoints
endpoints = set()
for tid in self.runtaskentries:
- if len(deps[tid]) == 0:
+ if not deps[tid]:
endpoints.add(tid)
# Iterate the chains collating dependencies
while endpoints:
@@ -784,11 +783,11 @@ class RunQueueData:
cumulativedeps[dep].update(cumulativedeps[tid])
if tid in deps[dep]:
deps[dep].remove(tid)
- if len(deps[dep]) == 0:
+ if not deps[dep]:
next.add(dep)
endpoints = next
#for tid in deps:
- # if len(deps[tid]) != 0:
+ # if deps[tid]:
# bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid]))
# Loop here since recrdeptasks can depend upon other recrdeptasks and we have to
@@ -926,39 +925,37 @@ class RunQueueData:
#
# Once all active tasks are marked, prune the ones we don't need.
- delcount = {}
- for tid in list(self.runtaskentries.keys()):
- if tid not in runq_build:
- delcount[tid] = self.runtaskentries[tid]
- del self.runtaskentries[tid]
-
# Handle --runall
if self.cooker.configuration.runall:
# re-run the mark_active and then drop unused tasks from new list
+ reduced_tasklist = set(self.runtaskentries.keys())
+ for tid in list(self.runtaskentries.keys()):
+ if tid not in runq_build:
+ reduced_tasklist.remove(tid)
runq_build = {}
for task in self.cooker.configuration.runall:
if not task.startswith("do_"):
task = "do_{0}".format(task)
runall_tids = set()
- for tid in list(self.runtaskentries):
+ for tid in reduced_tasklist:
wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
- if wanttid in delcount:
- self.runtaskentries[wanttid] = delcount[wanttid]
if wanttid in self.runtaskentries:
runall_tids.add(wanttid)
for tid in list(runall_tids):
- mark_active(tid,1)
+ mark_active(tid, 1)
if self.cooker.configuration.force:
invalidate_task(tid, False)
- for tid in list(self.runtaskentries.keys()):
- if tid not in runq_build:
- delcount[tid] = self.runtaskentries[tid]
- del self.runtaskentries[tid]
+ delcount = set()
+ for tid in list(self.runtaskentries.keys()):
+ if tid not in runq_build:
+ delcount.add(tid)
+ del self.runtaskentries[tid]
- if len(self.runtaskentries) == 0:
+ if self.cooker.configuration.runall:
+ if not self.runtaskentries:
bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
self.init_progress_reporter.next_stage()
@@ -971,19 +968,19 @@ class RunQueueData:
for task in self.cooker.configuration.runonly:
if not task.startswith("do_"):
task = "do_{0}".format(task)
- runonly_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == task }
+ runonly_tids = [k for k in self.runtaskentries.keys() if taskname_from_tid(k) == task]
- for tid in list(runonly_tids):
- mark_active(tid,1)
+ for tid in runonly_tids:
+ mark_active(tid, 1)
if self.cooker.configuration.force:
invalidate_task(tid, False)
for tid in list(self.runtaskentries.keys()):
if tid not in runq_build:
- delcount[tid] = self.runtaskentries[tid]
+ delcount.add(tid)
del self.runtaskentries[tid]
- if len(self.runtaskentries) == 0:
+ if not self.runtaskentries:
bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets)))
#
@@ -991,7 +988,7 @@ class RunQueueData:
#
# Check to make sure we still have tasks to run
- if len(self.runtaskentries) == 0:
+ if not self.runtaskentries:
if not taskData[''].abort:
bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
else:
@@ -1015,7 +1012,7 @@ class RunQueueData:
endpoints = []
for tid in self.runtaskentries:
revdeps = self.runtaskentries[tid].revdeps
- if len(revdeps) == 0:
+ if not revdeps:
endpoints.append(tid)
for dep in revdeps:
if dep in self.runtaskentries[tid].depends:
@@ -1061,12 +1058,12 @@ class RunQueueData:
seen_pn.append(pn)
else:
bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn))
- msg = "Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))
+ msgs = ["Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))]
#
# Construct a list of things which uniquely depend on each provider
# since this may help the user figure out which dependency is triggering this warning
#
- msg += "\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from."
+ msgs.append("\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from.")
deplist = {}
commondeps = None
for provfn in prov_list[prov]:
@@ -1086,12 +1083,12 @@ class RunQueueData:
commondeps &= deps
deplist[provfn] = deps
for provfn in deplist:
- msg += "\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps))
+ msgs.append("\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps)))
#
# Construct a list of provides and runtime providers for each recipe
# (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC)
#
- msg += "\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful."
+ msgs.append("\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful.")
provide_results = {}
rprovide_results = {}
commonprovs = None
@@ -1118,29 +1115,18 @@ class RunQueueData:
else:
commonrprovs &= rprovides
rprovide_results[provfn] = rprovides
- #msg += "\nCommon provides:\n %s" % ("\n ".join(commonprovs))
- #msg += "\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs))
+ #msgs.append("\nCommon provides:\n %s" % ("\n ".join(commonprovs)))
+ #msgs.append("\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs)))
for provfn in prov_list[prov]:
- msg += "\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs))
- msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs))
+ msgs.append("\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs)))
+ msgs.append("\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs)))
if self.warn_multi_bb:
- logger.verbnote(msg)
+ logger.verbnote("".join(msgs))
else:
- logger.error(msg)
+ logger.error("".join(msgs))
self.init_progress_reporter.next_stage()
-
- # Create a whitelist usable by the stamp checks
- self.stampfnwhitelist = {}
- for mc in self.taskData:
- self.stampfnwhitelist[mc] = []
- for entry in self.stampwhitelist.split():
- if entry not in self.taskData[mc].build_targets:
- continue
- fn = self.taskData.build_targets[entry][0]
- self.stampfnwhitelist[mc].append(fn)
-
self.init_progress_reporter.next_stage()
# Iterate over the task list looking for tasks with a 'setscene' function
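
The multi-provider report above now accumulates message fragments in a list and joins once, avoiding repeated string reallocation while producing byte-identical output:

    msgs = []
    for provfn in ("recipe-a.bb", "recipe-b.bb"):   # stand-in provider list
        msgs.append("\n%s has unique dependees:\n  ..." % provfn)
    print("".join(msgs))
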
@@ -1188,9 +1174,9 @@ class RunQueueData:
# Iterate over the task list and call into the siggen code
dealtwith = set()
todeal = set(self.runtaskentries)
- while len(todeal) > 0:
+ while todeal:
for tid in todeal.copy():
- if len(self.runtaskentries[tid].depends - dealtwith) == 0:
+ if not (self.runtaskentries[tid].depends - dealtwith):
dealtwith.add(tid)
todeal.remove(tid)
self.prepare_task_hash(tid)
@@ -1229,7 +1215,6 @@ class RunQueue:
self.cfgData = cfgData
self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets)
- self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile"
self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None
self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None
@@ -1358,14 +1343,6 @@ class RunQueue:
if taskname is None:
taskname = tn
- if self.stamppolicy == "perfile":
- fulldeptree = False
- else:
- fulldeptree = True
- stampwhitelist = []
- if self.stamppolicy == "whitelist":
- stampwhitelist = self.rqdata.stampfnwhitelist[mc]
-
stampfile = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn)
# If the stamp is missing, it's not current
@@ -1397,7 +1374,7 @@ class RunQueue:
continue
if t3 and t3 > t2:
continue
- if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
+ if fn == fn2:
if not t2:
logger.debug2('Stampfile %s does not exist', stampfile2)
iscurrent = False
@@ -1500,10 +1477,10 @@ class RunQueue:
self.rqexe = RunQueueExecute(self)
# If we don't have any setscene functions, skip execution
- if len(self.rqdata.runq_setscene_tids) == 0:
+ if not self.rqdata.runq_setscene_tids:
logger.info('No setscene tasks')
for tid in self.rqdata.runtaskentries:
- if len(self.rqdata.runtaskentries[tid].depends) == 0:
+ if not self.rqdata.runtaskentries[tid].depends:
self.rqexe.setbuildable(tid)
self.rqexe.tasks_notcovered.add(tid)
self.rqexe.sqdone = True
@@ -1780,7 +1757,7 @@ class RunQueueExecute:
bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
(self.scheduler, ", ".join(obj.name for obj in schedulers)))
- #if len(self.rqdata.runq_setscene_tids) > 0:
+ #if self.rqdata.runq_setscene_tids:
self.sqdata = SQData()
build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self)
@@ -1821,7 +1798,7 @@ class RunQueueExecute:
# worker must have died?
pass
- if len(self.failed_tids) != 0:
+ if self.failed_tids:
self.rq.state = runQueueFailed
return
@@ -1837,7 +1814,7 @@ class RunQueueExecute:
self.rq.read_workers()
return self.rq.active_fds()
- if len(self.failed_tids) != 0:
+ if self.failed_tids:
self.rq.state = runQueueFailed
return True
@@ -1935,7 +1912,7 @@ class RunQueueExecute:
self.stats.taskFailed()
self.failed_tids.append(task)
- fakeroot_log = ""
+ fakeroot_log = []
if fakerootlog and os.path.exists(fakerootlog):
with open(fakerootlog) as fakeroot_log_file:
fakeroot_failed = False
@@ -1945,12 +1922,12 @@ class RunQueueExecute:
fakeroot_failed = True
if 'doing new pid setup and server start' in line:
break
- fakeroot_log = line + fakeroot_log
+ fakeroot_log.append(line)
if not fakeroot_failed:
- fakeroot_log = None
+ fakeroot_log = []
- bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=fakeroot_log), self.cfgData)
+ bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=("".join(fakeroot_log) or None)), self.cfgData)
if self.rqdata.taskData[''].abort:
self.rq.state = runQueueCleanUp
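
This one is not a pure refactor: the old code prepended each processed line where the new code appends, so the joined log comes out in the opposite relative order (the loop header, outside the hunk, determines which order the user finally sees):

    lines = ["first\n", "second\n", "third\n"]   # stand-in log lines

    old = ""
    for line in lines:
        old = line + old        # prepend: "third\nsecond\nfirst\n"

    new = []
    for line in lines:
        new.append(line)        # append:  "first\nsecond\nthird\n"

    print(repr(old))
    print(repr("".join(new)))
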
@@ -2001,7 +1978,7 @@ class RunQueueExecute:
if x not in self.tasks_scenequeue_done:
logger.error("Task %s was never processed by the setscene code" % x)
err = True
- if len(self.rqdata.runtaskentries[x].depends) == 0 and x not in self.runq_buildable:
+ if not self.rqdata.runtaskentries[x].depends and x not in self.runq_buildable:
logger.error("Task %s was never marked as buildable by the setscene code" % x)
err = True
return err
@@ -2025,7 +2002,7 @@ class RunQueueExecute:
# Find the next setscene to run
for nexttask in self.sorted_setscene_tids:
if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values():
- if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
+ if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
if nexttask not in self.rqdata.target_tids:
logger.debug2("Skipping setscene for task %s" % nexttask)
self.sq_task_skip(nexttask)
@@ -2189,7 +2166,7 @@ class RunQueueExecute:
if self.can_start_task():
return True
- if self.stats.active > 0 or len(self.sq_live) > 0:
+ if self.stats.active > 0 or self.sq_live:
self.rq.read_workers()
return self.rq.active_fds()
@@ -2201,7 +2178,7 @@ class RunQueueExecute:
self.sq_task_failoutright(tid)
return True
- if len(self.failed_tids) != 0:
+ if self.failed_tids:
self.rq.state = runQueueFailed
return True
@@ -2280,7 +2257,7 @@ class RunQueueExecute:
covered.intersection_update(self.tasks_scenequeue_done)
for tid in notcovered | covered:
- if len(self.rqdata.runtaskentries[tid].depends) == 0:
+ if not self.rqdata.runtaskentries[tid].depends:
self.setbuildable(tid)
elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete):
self.setbuildable(tid)
@@ -2339,7 +2316,7 @@ class RunQueueExecute:
# Now iterate those tasks in dependency order to regenerate their taskhash/unihash
next = set()
for p in total:
- if len(self.rqdata.runtaskentries[p].depends) == 0:
+ if not self.rqdata.runtaskentries[p].depends:
next.add(p)
elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
next.add(p)
@@ -2349,7 +2326,7 @@ class RunQueueExecute:
current = next.copy()
next = set()
for tid in current:
- if len(self.rqdata.runtaskentries[p].depends) and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
+ if self.rqdata.runtaskentries[tid].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
continue
orighash = self.rqdata.runtaskentries[tid].hash
dc = bb.parse.siggen.get_data_caches(self.rqdata.dataCaches, mc_from_tid(tid))
@@ -2436,7 +2413,7 @@ class RunQueueExecute:
if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
if tid not in self.sq_buildable:
self.sq_buildable.add(tid)
- if len(self.sqdata.sq_revdeps[tid]) == 0:
+ if not self.sqdata.sq_revdeps[tid]:
self.sq_buildable.add(tid)
if tid in self.sqdata.outrightfail:
@@ -2608,12 +2585,13 @@ class RunQueueExecute:
pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist):
if tid in self.rqdata.runq_setscene_tids:
- msg = 'Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname)
+ msg = ['Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname)]
else:
- msg = 'Task %s.%s attempted to execute unexpectedly' % (pn, taskname)
+ msg = ['Task %s.%s attempted to execute unexpectedly' % (pn, taskname)]
for t in self.scenequeue_notcovered:
- msg = msg + "\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash)
- logger.error(msg + '\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered))
+ msg.append("\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash))
+ msg.append('\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered))
+ logger.error("".join(msg))
return True
return False
@@ -2652,7 +2630,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
for tid in rqdata.runtaskentries:
sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps)
sq_revdeps_squash[tid] = set()
- if (len(sq_revdeps[tid]) == 0) and tid not in rqdata.runq_setscene_tids:
+ if not sq_revdeps[tid] and tid not in rqdata.runq_setscene_tids:
#bb.warn("Added endpoint %s" % (tid))
endpoints[tid] = set()
@@ -2693,9 +2671,9 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
sq_revdeps[dep].remove(point)
if tasks:
sq_revdeps_squash[dep] |= tasks
- if len(sq_revdeps[dep]) == 0 and dep not in rqdata.runq_setscene_tids:
+ if not sq_revdeps[dep] and dep not in rqdata.runq_setscene_tids:
newendpoints[dep] = task
- if len(newendpoints) != 0:
+ if newendpoints:
process_endpoints(newendpoints)
process_endpoints(endpoints)
@@ -2707,7 +2685,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
# Take the build endpoints (no revdeps) and find the sstate tasks they depend upon
new = True
for tid in rqdata.runtaskentries:
- if len(rqdata.runtaskentries[tid].revdeps) == 0:
+ if not rqdata.runtaskentries[tid].revdeps:
sqdata.unskippable.add(tid)
sqdata.unskippable |= sqrq.cantskip
while new:
@@ -2716,7 +2694,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
for tid in sorted(orig, reverse=True):
if tid in rqdata.runq_setscene_tids:
continue
- if len(rqdata.runtaskentries[tid].depends) == 0:
+ if not rqdata.runtaskentries[tid].depends:
# These are tasks which have no setscene tasks in their chain, need to mark as directly buildable
sqrq.setbuildable(tid)
sqdata.unskippable |= rqdata.runtaskentries[tid].depends
@@ -2731,7 +2709,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
for taskcounter, tid in enumerate(rqdata.runtaskentries):
if tid in rqdata.runq_setscene_tids:
pass
- elif len(sq_revdeps_squash[tid]) != 0:
+ elif sq_revdeps_squash[tid]:
bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.")
else:
del sq_revdeps_squash[tid]
@@ -2796,7 +2774,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
sqdata.multiconfigs = set()
for tid in sqdata.sq_revdeps:
sqdata.multiconfigs.add(mc_from_tid(tid))
- if len(sqdata.sq_revdeps[tid]) == 0:
+ if not sqdata.sq_revdeps[tid]:
sqrq.sq_buildable.add(tid)
rqdata.init_progress_reporter.finish()
@@ -3050,7 +3028,7 @@ class runQueuePipe():
raise
end = len(self.queue)
found = True
- while found and len(self.queue):
+ while found and self.queue:
found = False
index = self.queue.find(b"</event>")
while index != -1 and self.queue.startswith(b"<event>"):
@@ -3088,7 +3066,7 @@ class runQueuePipe():
def close(self):
while self.read():
continue
- if len(self.queue) > 0:
+ if self.queue:
print("Warning, worker left partial message: %s" % self.queue)
self.input.close()
diff --git a/poky/bitbake/lib/bb/server/process.py b/poky/bitbake/lib/bb/server/process.py
index 8fdcc66dc7..1636616660 100644
--- a/poky/bitbake/lib/bb/server/process.py
+++ b/poky/bitbake/lib/bb/server/process.py
@@ -326,10 +326,10 @@ class ProcessServer():
if e.errno != errno.ENOENT:
raise
- msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
+ msg = ["Delaying shutdown due to active processes which appear to be holding bitbake.lock"]
if procs:
- msg += ":\n%s" % str(procs.decode("utf-8"))
- serverlog(msg)
+ msg.append(":\n%s" % str(procs.decode("utf-8")))
+ serverlog("".join(msg))
def idle_commands(self, delay, fds=None):
nextsleep = delay
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 578ba5d661..e0ec736dff 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -328,6 +328,8 @@ class SignatureGeneratorBasic(SignatureGenerator):
for (f, cs) in self.file_checksum_values[tid]:
if cs:
+ if "/./" in f:
+ data = data + "./" + f.split("/./")[1]
data = data + cs
if tid in self.taints:
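
The /./ marker divides a checksummed path into a build-specific prefix and a relocatable remainder; hashing only the remainder keeps task signatures stable across different build roots. Illustrated with a hypothetical path:

    # Hypothetical checksum entry; only the part after "/./" is hashed.
    f = "/home/user/build/tmp/work/./usr/lib/libfoo.so"
    if "/./" in f:
        print("./" + f.split("/./")[1])   # -> ./usr/lib/libfoo.so
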
@@ -385,7 +387,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
if runtime and tid in self.taskhash:
data['runtaskdeps'] = self.runtaskdeps[tid]
- data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[tid]]
+ data['file_checksum_values'] = []
+ for f,cs in self.file_checksum_values[tid]:
+ if "/./" in f:
+ data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
+ else:
+ data['file_checksum_values'].append((os.path.basename(f), cs))
data['runtaskhashes'] = {}
for dep in data['runtaskdeps']:
data['runtaskhashes'][dep] = self.get_unihash(dep)
@@ -1028,6 +1035,8 @@ def calc_taskhash(sigdata):
for c in sigdata['file_checksum_values']:
if c[1]:
+ if "./" in c[0]:
+ data = data + c[0]
data = data + c[1]
if 'taint' in sigdata:
@@ -1045,28 +1054,28 @@ def dump_sigfile(a):
with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
a_data = json.load(f, object_hook=SetDecoder)
- output.append("basewhitelist: %s" % (a_data['basewhitelist']))
+ output.append("basewhitelist: %s" % (sorted(a_data['basewhitelist'])))
- output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))
+ output.append("taskwhitelist: %s" % (sorted(a_data['taskwhitelist'] or [])))
output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
output.append("basehash: %s" % (a_data['basehash']))
- for dep in a_data['gendeps']:
- output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))
+ for dep in sorted(a_data['gendeps']):
+ output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))
- for dep in a_data['varvals']:
+ for dep in sorted(a_data['varvals']):
output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
if 'runtaskdeps' in a_data:
- output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))
+ output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))
if 'file_checksum_values' in a_data:
- output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))
+ output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))
if 'runtaskhashes' in a_data:
- for dep in a_data['runtaskhashes']:
+ for dep in sorted(a_data['runtaskhashes']):
output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
if 'taint' in a_data:
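
Sorting keys before printing makes two sigdata dumps directly comparable with diff; set and dict iteration order is otherwise not stable across runs:

    gendeps = {"B": {"y", "x"}, "A": {"z"}}
    for dep in sorted(gendeps):
        print("List of dependencies for variable %s is %s" % (dep, sorted(gendeps[dep])))
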
diff --git a/poky/bitbake/lib/bb/taskdata.py b/poky/bitbake/lib/bb/taskdata.py
index 47bad6d1fa..7bfcdb8414 100644
--- a/poky/bitbake/lib/bb/taskdata.py
+++ b/poky/bitbake/lib/bb/taskdata.py
@@ -451,12 +451,12 @@ class TaskData:
for target in self.build_targets:
if fn in self.build_targets[target]:
self.build_targets[target].remove(fn)
- if len(self.build_targets[target]) == 0:
+ if not self.build_targets[target]:
self.remove_buildtarget(target, missing_list)
for target in self.run_targets:
if fn in self.run_targets[target]:
self.run_targets[target].remove(fn)
- if len(self.run_targets[target]) == 0:
+ if not self.run_targets[target]:
self.remove_runtarget(target, missing_list)
def remove_buildtarget(self, target, missing_list=None):
diff --git a/poky/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html b/poky/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
new file mode 100644
index 0000000000..4a1eb4de13
--- /dev/null
+++ b/poky/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
@@ -0,0 +1,59 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<html>
+ <head>
+ <title>Index of /debian/pool/main/m/minicom</title>
+ </head>
+ <body>
+<h1>Index of /debian/pool/main/m/minicom</h1>
+ <table>
+ <tr><th valign="top"><img src="/icons/blank.gif" alt="[ICO]"></th><th><a href="?C=N;O=D">Name</a></th><th><a href="?C=M;O=A">Last modified</a></th><th><a href="?C=S;O=A">Size</a></th></tr>
+ <tr><th colspan="4"><hr></th></tr>
+<tr><td valign="top"><img src="/icons/back.gif" alt="[PARENTDIR]"></td><td><a href="/debian/pool/main/m/">Parent Directory</a></td><td>&nbsp;</td><td align="right"> - </td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.debian.tar.xz">minicom_2.7-1+deb8u1.debian.tar.xz</a></td><td align="right">2017-04-24 08:22 </td><td align="right"> 14K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.dsc">minicom_2.7-1+deb8u1.dsc</a></td><td align="right">2017-04-24 08:22 </td><td align="right">1.9K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_amd64.deb">minicom_2.7-1+deb8u1_amd64.deb</a></td><td align="right">2017-04-25 21:10 </td><td align="right">257K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armel.deb">minicom_2.7-1+deb8u1_armel.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">246K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armhf.deb">minicom_2.7-1+deb8u1_armhf.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">245K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_i386.deb">minicom_2.7-1+deb8u1_i386.deb</a></td><td align="right">2017-04-25 21:41 </td><td align="right">258K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.debian.tar.xz">minicom_2.7-1.1.debian.tar.xz</a></td><td align="right">2017-04-22 09:34 </td><td align="right"> 14K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.dsc">minicom_2.7-1.1.dsc</a></td><td align="right">2017-04-22 09:34 </td><td align="right">1.9K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_amd64.deb">minicom_2.7-1.1_amd64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_arm64.deb">minicom_2.7-1.1_arm64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">250K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armel.deb">minicom_2.7-1.1_armel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">255K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armhf.deb">minicom_2.7-1.1_armhf.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">254K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_i386.deb">minicom_2.7-1.1_i386.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">266K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips.deb">minicom_2.7-1.1_mips.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">258K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips64el.deb">minicom_2.7-1.1_mips64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mipsel.deb">minicom_2.7-1.1_mipsel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_ppc64el.deb">minicom_2.7-1.1_ppc64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">253K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_s390x.deb">minicom_2.7-1.1_s390x.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_amd64.deb">minicom_2.7.1-1+b1_amd64.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">262K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_arm64.deb">minicom_2.7.1-1+b1_arm64.deb</a></td><td align="right">2018-05-06 07:58 </td><td align="right">250K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armel.deb">minicom_2.7.1-1+b1_armel.deb</a></td><td align="right">2018-05-06 08:45 </td><td align="right">253K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armhf.deb">minicom_2.7.1-1+b1_armhf.deb</a></td><td align="right">2018-05-06 10:42 </td><td align="right">253K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_i386.deb">minicom_2.7.1-1+b1_i386.deb</a></td><td align="right">2018-05-06 08:55 </td><td align="right">266K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mips.deb">minicom_2.7.1-1+b1_mips.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">258K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mipsel.deb">minicom_2.7.1-1+b1_mipsel.deb</a></td><td align="right">2018-05-06 12:13 </td><td align="right">259K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_ppc64el.deb">minicom_2.7.1-1+b1_ppc64el.deb</a></td><td align="right">2018-05-06 09:10 </td><td align="right">260K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_s390x.deb">minicom_2.7.1-1+b1_s390x.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">257K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b2_mips64el.deb">minicom_2.7.1-1+b2_mips64el.deb</a></td><td align="right">2018-05-06 09:41 </td><td align="right">260K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.debian.tar.xz">minicom_2.7.1-1.debian.tar.xz</a></td><td align="right">2017-08-13 15:40 </td><td align="right"> 14K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.dsc">minicom_2.7.1-1.dsc</a></td><td align="right">2017-08-13 15:40 </td><td align="right">1.8K</td></tr>
+<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.1.orig.tar.gz">minicom_2.7.1.orig.tar.gz</a></td><td align="right">2017-08-13 15:40 </td><td align="right">855K</td></tr>
+<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.orig.tar.gz">minicom_2.7.orig.tar.gz</a></td><td align="right">2014-01-01 09:36 </td><td align="right">843K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.debian.tar.xz">minicom_2.8-2.debian.tar.xz</a></td><td align="right">2021-06-15 03:47 </td><td align="right"> 14K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.dsc">minicom_2.8-2.dsc</a></td><td align="right">2021-06-15 03:47 </td><td align="right">1.8K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_amd64.deb">minicom_2.8-2_amd64.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">280K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_arm64.deb">minicom_2.8-2_arm64.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">275K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armel.deb">minicom_2.8-2_armel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">271K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armhf.deb">minicom_2.8-2_armhf.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">272K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_i386.deb">minicom_2.8-2_i386.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">285K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mips64el.deb">minicom_2.8-2_mips64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">277K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mipsel.deb">minicom_2.8-2_mipsel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">278K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_ppc64el.deb">minicom_2.8-2_ppc64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">286K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_s390x.deb">minicom_2.8-2_s390x.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">275K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8.orig.tar.bz2">minicom_2.8.orig.tar.bz2</a></td><td align="right">2021-01-03 12:44 </td><td align="right">598K</td></tr>
+ <tr><th colspan="4"><hr></th></tr>
+</table>
+<address>Apache Server at ftp.debian.org Port 80</address>
+</body></html>
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index 8ad1c85990..ec7d83c959 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -430,6 +430,12 @@ class MirrorUriTest(FetcherTest):
: "http://somewhere2.org/somefile_1.2.3.tar.gz",
("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http")
: "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+ ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://user2@git.openembedded.org/bitbake;protocol=http")
+ : "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+ ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=git;branch=master", "git://someserver.org/bitbake", "git://someotherserver.org/bitbake;protocol=https")
+ : "git://someotherserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=https;branch=master",
+
+ ("gitsm://git.qemu.org/git/seabios.git/;protocol=https;name=roms/seabios;subpath=roms/seabios;bareclone=1;nobranch=1;rev=1234567890123456789012345678901234567890", "gitsm://.*/.*", "http://petalinux.xilinx.com/sswreleases/rel-v${XILINX_VER_MAIN}/downloads") : "http://petalinux.xilinx.com/sswreleases/rel-v%24%7BXILINX_VER_MAIN%7D/downloads/git2_git.qemu.org.git.seabios.git..tar.gz",
#Renaming files doesn't work
#("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
@@ -491,7 +497,7 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
super(GitDownloadDirectoryNamingTest, self).setUp()
self.recipe_url = "git://git.openembedded.org/bitbake"
self.recipe_dir = "git.openembedded.org.bitbake"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
self.mirror_dir = "github.com.openembedded.bitbake.git"
self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
@@ -539,7 +545,7 @@ class TarballNamingTest(FetcherTest):
super(TarballNamingTest, self).setUp()
self.recipe_url = "git://git.openembedded.org/bitbake"
self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
@@ -573,7 +579,7 @@ class GitShallowTarballNamingTest(FetcherTest):
super(GitShallowTarballNamingTest, self).setUp()
self.recipe_url = "git://git.openembedded.org/bitbake"
self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
self.d.setVar('BB_GIT_SHALLOW', '1')
@@ -620,6 +626,9 @@ class FetcherLocalTest(FetcherTest):
os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir'))
touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e'))
touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device'))
+ bb.process.run('tar cf archive.tar -C dir .', cwd=self.localsrcdir)
+ bb.process.run('tar czf archive.tar.gz -C dir .', cwd=self.localsrcdir)
+ bb.process.run('tar cjf archive.tar.bz2 -C dir .', cwd=self.localsrcdir)
self.d.setVar("FILESPATH", self.localsrcdir)
def fetchUnpack(self, uris):
@@ -674,6 +683,18 @@ class FetcherLocalTest(FetcherTest):
with self.assertRaises(bb.fetch2.UnpackError):
self.fetchUnpack(['file://a;subdir=/bin/sh'])
+ def test_local_striplevel(self):
+ tree = self.fetchUnpack(['file://archive.tar;subdir=bar;striplevel=1'])
+ self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
+
+ def test_local_striplevel_gzip(self):
+ tree = self.fetchUnpack(['file://archive.tar.gz;subdir=bar;striplevel=1'])
+ self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
+
+ def test_local_striplevel_bzip2(self):
+ tree = self.fetchUnpack(['file://archive.tar.bz2;subdir=bar;striplevel=1'])
+ self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
+
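
The archives are built with tar cf ... -C dir ., so every member carries a leading ./ component; striplevel=1 drops that one level on unpack, the same effect as tar's --strip-components. An equivalent shell-level unpack (assumes the archive.tar fixture created above):

    import os
    import subprocess

    os.makedirs("bar", exist_ok=True)
    # Members are ./c, ./d, ./subdir/e; stripping one level yields
    # bar/c, bar/d, bar/subdir/e, exactly what the tests assert.
    subprocess.run(["tar", "xf", "archive.tar", "--strip-components=1", "-C", "bar"],
                   check=True)
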
def dummyGitTest(self, suffix):
# Create dummy local Git repo
src_dir = tempfile.mkdtemp(dir=self.tempdir,
@@ -875,17 +896,25 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_fetch_premirror_specify_downloadfilename_regex_uri(self):
self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake/")
- fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
+ fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
fetcher.download()
- self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
@skipIfNoNetwork()
# BZ13039
def test_fetch_premirror_specify_downloadfilename_specific_uri(self):
self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake https://downloads.yoctoproject.org/releases/bitbake")
- fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
+ fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
fetcher.download()
- self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
+
+ @skipIfNoNetwork()
+ def test_fetch_premirror_use_downloadfilename_to_fetch(self):
+ # Ensure downloadfilename is used when fetching from premirror.
+ self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
+ fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
+ fetcher.download()
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
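
The new test pins down that the premirror lookup honours downloadfilename: the upstream path names a release that does not exist, yet the mirror URL built from the requested download name still resolves. A rough sketch of the substitution, not the fetcher's actual code:

    import re

    premirror_src = r"http://.*/.*"
    premirror_dst = "https://downloads.yoctoproject.org/releases/bitbake"
    url = "http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"
    downloadfilename = "bitbake-1.0.tar.gz"

    if re.match(premirror_src, url):
        # The mirror URL uses downloadfilename, not the upstream basename.
        print(premirror_dst + "/" + downloadfilename)
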
@skipIfNoNetwork()
def gitfetcher(self, url1, url2):
@@ -996,7 +1025,7 @@ class FetcherNetworkTest(FetcherTest):
def test_git_submodule_dbus_broker(self):
        # The following external repositories have shown failures in fetch and unpack operations
# We want to avoid regressions!
- url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
+ url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1012,7 +1041,7 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_CLI11(self):
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1027,12 +1056,12 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_update_CLI11(self):
""" Prevent regression on update detection not finding missing submodule, or modules without needed commits """
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# CLI11 that pulls in a newer nlohmann-json
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1046,7 +1075,7 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_aktualizr(self):
- url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
+ url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1066,7 +1095,7 @@ class FetcherNetworkTest(FetcherTest):
""" Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
# This repository also has submodules where the module (name), path and url do not align
- url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699"
+ url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1124,7 +1153,7 @@ class SVNTest(FetcherTest):
bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
        # GitHub will emulate SVN. Use this to check if we're downloading...
- bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .",
+ bb.process.run("svn propset svn:externals 'bitbake https://github.com/PhilipHazel/pcre2.git' .",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
bb.process.run("svn commit --non-interactive -m 'Add external'",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1242,7 +1271,7 @@ class FetchLatestVersionTest(FetcherTest):
test_git_uris = {
# version pattern "X.Y.Z"
- ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
+ ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
: "1.99.4",
# version pattern "vX.Y"
# mirror of git.infradead.org since network issues interfered with testing
@@ -1269,9 +1298,9 @@ class FetchLatestVersionTest(FetcherTest):
: "0.4.3",
("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))")
: "11.0.0",
- ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+ ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
: "1.3.59",
- ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+ ("remake", "git://github.com/rocky/remake.git;protocol=https", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
: "3.82+dbg0.9",
}
@@ -1317,6 +1346,12 @@ class FetchLatestVersionTest(FetcherTest):
# http://ftp.debian.org/debian/pool/main/d/db5.3/
("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
: "5.3.10",
+ #
+ # packages where the tarball compression changed in the new version
+ #
+ # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz
+ ("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz", "", "")
+ : "2.8",
}
@skipIfNoNetwork()
@@ -1365,9 +1400,6 @@ class FetchCheckStatusTest(FetcherTest):
"https://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
"https://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
"ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
- "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
- "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
- "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
# GitHub releases are hosted on Amazon S3, which doesn't support HEAD
"https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
]
@@ -2058,7 +2090,7 @@ class GitShallowTest(FetcherTest):
@skipIfNoNetwork()
def test_bitbake(self):
- self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir)
+ self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
self.git('config core.bare true', cwd=self.srcdir)
self.git('fetch', cwd=self.srcdir)
@@ -2149,7 +2181,7 @@ class GitLfsTest(FetcherTest):
def test_lfs_enabled(self):
import shutil
- uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir
+ uri = 'git://%s;protocol=file;lfs=1' % self.srcdir
self.d.setVar('SRC_URI', uri)
# Careful: suppress initial attempt at downloading until
@@ -2174,7 +2206,7 @@ class GitLfsTest(FetcherTest):
def test_lfs_disabled(self):
import shutil
- uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir
+ uri = 'git://%s;protocol=file;lfs=0' % self.srcdir
self.d.setVar('SRC_URI', uri)
# In contrast to test_lfs_enabled(), allow the implicit download
@@ -2228,6 +2260,41 @@ class GitURLWithSpacesTest(FetcherTest):
self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname']))
self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz'))
+class CrateTest(FetcherTest):
+ def test_crate_url(self):
+
+ uri = "crate://crates.io/glob/0.2.11"
+ self.d.setVar('SRC_URI', uri)
+
+ uris = self.d.getVar('SRC_URI').split()
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ fetcher.download()
+ fetcher.unpack(self.tempdir)
+ self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
+ self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
+
+ def test_crate_url_multi(self):
+
+ uri = "crate://crates.io/glob/0.2.11 crate://crates.io/time/0.1.35"
+ self.d.setVar('SRC_URI', uri)
+
+ uris = self.d.getVar('SRC_URI').split()
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ fetcher.download()
+ fetcher.unpack(self.tempdir)
+ self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
+ self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done', 'time-0.1.35.crate', 'time-0.1.35.crate.done'])
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/.cargo-checksum.json"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/src/lib.rs"))
+
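
From the artefacts these tests assert, a crate URI appears to decompose as crate://<registry>/<name>/<version>, with the download stored as <name>-<version>.crate. A back-of-envelope decomposition inferred from the expected paths, not the fetcher's code:

    uri = "crate://crates.io/glob/0.2.11"
    registry, name, version = uri[len("crate://"):].split("/")
    print("%s-%s.crate" % (name, version))                 # glob-0.2.11.crate
    print("cargo_home/bitbake/%s-%s/" % (name, version))   # unpack location
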
class NPMTest(FetcherTest):
def skipIfNoNpm():
import shutil
diff --git a/poky/bitbake/lib/bb/tests/runqueue.py b/poky/bitbake/lib/bb/tests/runqueue.py
index 5b6ada886a..35d5a843ff 100644
--- a/poky/bitbake/lib/bb/tests/runqueue.py
+++ b/poky/bitbake/lib/bb/tests/runqueue.py
@@ -29,9 +29,10 @@ class RunQueueTests(unittest.TestCase):
def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False):
env = os.environ.copy()
env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests"))
- env["BB_ENV_EXTRAWHITE"] = "SSTATEVALID SLOWTASKS"
+ env["BB_ENV_EXTRAWHITE"] = "SSTATEVALID SLOWTASKS TOPDIR"
env["SSTATEVALID"] = sstatevalid
env["SLOWTASKS"] = slowtasks
+ env["TOPDIR"] = builddir
if extraenv:
for k in extraenv:
env[k] = extraenv[k]
@@ -58,6 +59,8 @@ class RunQueueTests(unittest.TestCase):
expected = ['a1:' + x for x in self.alltasks]
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_single_setscenevalid(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
@@ -68,6 +71,8 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot', 'a1:build']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_intermediate_setscenevalid(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
@@ -77,6 +82,8 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot_setscene', 'a1:build']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_intermediate_notcovered(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
@@ -86,6 +93,8 @@ class RunQueueTests(unittest.TestCase):
'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_all_setscenevalid(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
@@ -95,6 +104,8 @@ class RunQueueTests(unittest.TestCase):
'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_no_settasks(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1", "-c", "patch"]
@@ -103,6 +114,8 @@ class RunQueueTests(unittest.TestCase):
expected = ['a1:fetch', 'a1:unpack', 'a1:patch']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_mix_covered_notcovered(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1:do_patch", "a1:do_populate_sysroot"]
@@ -111,6 +124,7 @@ class RunQueueTests(unittest.TestCase):
expected = ['a1:fetch', 'a1:unpack', 'a1:patch', 'a1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
# Test targets with intermediate setscene tasks alongside a target with no intermediate setscene tasks
def test_mixed_direct_tasks_setscene_tasks(self):
@@ -122,6 +136,8 @@ class RunQueueTests(unittest.TestCase):
'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
# This test slows down the execution of do_package_setscene until after other real tasks have
    # started running, which tests for a bug where tasks were being lost from the buildable list of real
# tasks if they weren't in tasks_covered or tasks_notcovered
@@ -136,6 +152,8 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot', 'a1:build']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_setscenewhitelist(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
@@ -149,6 +167,8 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot_setscene', 'a1:package_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
# Tests for problems with dependencies between setscene tasks
def test_no_setscenevalid_harddeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -162,6 +182,8 @@ class RunQueueTests(unittest.TestCase):
'd1:populate_sysroot', 'd1:build']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_no_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -172,6 +194,8 @@ class RunQueueTests(unittest.TestCase):
expected.remove('a1:package_qa')
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_single_a1_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -182,6 +206,8 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot'] + ['b1:' + x for x in self.alltasks]
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_single_b1_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -193,6 +219,8 @@ class RunQueueTests(unittest.TestCase):
expected.remove('b1:package')
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_intermediate_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -203,6 +231,8 @@ class RunQueueTests(unittest.TestCase):
expected.remove('b1:package')
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_all_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -213,6 +243,8 @@ class RunQueueTests(unittest.TestCase):
'b1:packagedata_setscene', 'b1:package_qa_setscene', 'b1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_multiconfig_setscene_optimise(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
@@ -232,6 +264,8 @@ class RunQueueTests(unittest.TestCase):
expected.remove(x)
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_multiconfig_bbmask(self):
# This test validates that multiconfigs can independently mask off
# recipes they do not want with BBMASK. It works by having recipes
@@ -248,6 +282,8 @@ class RunQueueTests(unittest.TestCase):
cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"]
self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv)
+ self.shutdown(tempdir)
+
def test_multiconfig_mcdepends(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
@@ -278,6 +314,8 @@ class RunQueueTests(unittest.TestCase):
["mc_2:a1:%s" % t for t in rerun_tasks]
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_hashserv_single(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
@@ -358,7 +396,6 @@ class RunQueueTests(unittest.TestCase):
def shutdown(self, tempdir):
        # Wait for the hashserve socket to disappear, otherwise we'll see races with the tempdir cleanup
- while (os.path.exists(tempdir + "/hashserve.sock") or os.path.exists(tempdir + "cache/hashserv.db-wal")):
+ while (os.path.exists(tempdir + "/hashserve.sock") or os.path.exists(tempdir + "/cache/hashserv.db-wal") or os.path.exists(tempdir + "/bitbake.lock")):
time.sleep(0.5)
-
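
shutdown() now also waits for bitbake.lock so the tempdir cleanup cannot race a server that still holds the lock. The poll amounts to:

    import os
    import time

    def wait_for_shutdown(tempdir):
        artefacts = ("hashserve.sock", "cache/hashserv.db-wal", "bitbake.lock")
        while any(os.path.exists(os.path.join(tempdir, name)) for name in artefacts):
            time.sleep(0.5)
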
diff --git a/poky/bitbake/lib/bb/ui/buildinfohelper.py b/poky/bitbake/lib/bb/ui/buildinfohelper.py
index 43aa592842..835e92c299 100644
--- a/poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/poky/bitbake/lib/bb/ui/buildinfohelper.py
@@ -483,11 +483,11 @@ class ORMWrapper(object):
# we already created the root directory, so ignore any
# entry for it
- if len(path) == 0:
+ if not path:
continue
parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
- if len(parent_path) == 0:
+ if not parent_path:
parent_path = "/"
parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj = Target_File.objects.create(
@@ -571,7 +571,7 @@ class ORMWrapper(object):
assert isinstance(build_obj, Build)
assert isinstance(target_obj, Target)
- errormsg = ""
+ errormsg = []
for p in packagedict:
        # Search name switches round the installed name vs package name
# by default installed name == package name
@@ -633,10 +633,10 @@ class ORMWrapper(object):
packagefile_objects.append(Package_File( package = packagedict[p]['object'],
path = targetpath,
size = targetfilesize))
- if len(packagefile_objects):
+ if packagefile_objects:
Package_File.objects.bulk_create(packagefile_objects)
except KeyError as e:
- errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
+ errormsg.append(" stpi: Key error, package %s key %s \n" % (p, e))
# save disk installed size
packagedict[p]['object'].installed_size = packagedict[p]['size']
@@ -673,13 +673,13 @@ class ORMWrapper(object):
logger.warning("Could not add dependency to the package %s "
"because %s is an unknown package", p, px)
- if len(packagedeps_objs) > 0:
+ if packagedeps_objs:
Package_Dependency.objects.bulk_create(packagedeps_objs)
else:
logger.info("No package dependencies created")
- if len(errormsg) > 0:
- logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
+ if errormsg:
+ logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg))
def save_target_image_file_information(self, target_obj, file_name, file_size):
Target_Image_File.objects.create(target=target_obj,
@@ -767,7 +767,7 @@ class ORMWrapper(object):
packagefile_objects.append(Package_File( package = bp_object,
path = path,
size = package_info['FILES_INFO'][path] ))
- if len(packagefile_objects):
+ if packagefile_objects:
Package_File.objects.bulk_create(packagefile_objects)
def _po_byname(p):
@@ -809,7 +809,7 @@ class ORMWrapper(object):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
- if len(packagedeps_objs) > 0:
+ if packagedeps_objs:
Package_Dependency.objects.bulk_create(packagedeps_objs)
return bp_object
@@ -826,7 +826,7 @@ class ORMWrapper(object):
desc = vardump[root_var]['doc']
if desc is None:
desc = ''
- if len(desc):
+ if desc:
HelpText.objects.get_or_create(build=build_obj,
area=HelpText.VARIABLE,
key=k, text=desc)
@@ -846,7 +846,7 @@ class ORMWrapper(object):
file_name = vh['file'],
line_number = vh['line'],
operation = vh['op']))
- if len(varhist_objects):
+ if varhist_objects:
VariableHistory.objects.bulk_create(varhist_objects)
@@ -1069,7 +1069,7 @@ class BuildInfoHelper(object):
for t in self.internal_state['targets']:
buildname = self.internal_state['build'].build_name
pe, pv = task_object.recipe.version.split(":",1)
- if len(pe) > 0:
+ if pe:
package = task_object.recipe.name + "-" + pe + "_" + pv
else:
package = task_object.recipe.name + "-" + pv
@@ -1404,7 +1404,7 @@ class BuildInfoHelper(object):
assert 'pn' in event._depgraph
assert 'tdepends' in event._depgraph
- errormsg = ""
+ errormsg = []
# save layer version priorities
if 'layer-priorities' in event._depgraph.keys():
@@ -1496,7 +1496,7 @@ class BuildInfoHelper(object):
elif dep in self.internal_state['recipes']:
dependency = self.internal_state['recipes'][dep]
else:
- errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep)
+ errormsg.append(" stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep))
continue
recipe_dep = Recipe_Dependency(recipe=target,
depends_on=dependency,
@@ -1537,8 +1537,8 @@ class BuildInfoHelper(object):
taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
Task_Dependency.objects.bulk_create(taskdeps_objects)
- if len(errormsg) > 0:
- logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
+ if errormsg:
+ logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", "".join(errormsg))
def store_build_package_information(self, event):
@@ -1618,7 +1618,7 @@ class BuildInfoHelper(object):
if 'backlog' in self.internal_state:
# if we have a backlog of events, do our best to save them here
- if len(self.internal_state['backlog']):
+ if self.internal_state['backlog']:
tempevent = self.internal_state['backlog'].pop()
logger.debug("buildinfohelper: Saving stored event %s "
% tempevent)
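
Two idioms recur throughout this file's changes: empty checks via truthiness (if x / if not x) instead of len(x) == 0, and error messages accumulated in a list that is joined once at the end, rather than repeated string concatenation, which re-copies the ever-growing string on every +=. A hedged sketch of the accumulate-then-join pattern with illustrative package names:

    errors = []
    for p in ("pkg-a", "pkg-b"):          # illustrative package names
        errors.append(" stpi: Key error, package %s\n" % p)
    if errors:                            # truthiness replaces len(errors) > 0
        print("could not identify recipes:\n%s" % "".join(errors))
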
diff --git a/poky/bitbake/lib/bb/ui/uievent.py b/poky/bitbake/lib/bb/ui/uievent.py
index 8607d0523b..e19c770bc9 100644
--- a/poky/bitbake/lib/bb/ui/uievent.py
+++ b/poky/bitbake/lib/bb/ui/uievent.py
@@ -73,13 +73,13 @@ class BBUIEventQueue:
self.eventQueueLock.acquire()
- if len(self.eventQueue) == 0:
+ if not self.eventQueue:
self.eventQueueLock.release()
return None
item = self.eventQueue.pop(0)
- if len(self.eventQueue) == 0:
+ if not self.eventQueue:
self.eventQueueNotify.clear()
self.eventQueueLock.release()
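
The same truthiness idiom applies here, but note the locking discipline it sits inside: the queue is only inspected and popped while eventQueueLock is held, and the notify flag is cleared when the last item is removed. A self-contained sketch of that discipline (the class and attribute names are illustrative, not the real BBUIEventQueue API):

    import threading

    class EventQueue:
        def __init__(self):
            self.items = []
            self.lock = threading.Lock()
            self.notify = threading.Event()   # set while items are pending

        def get(self):
            with self.lock:
                if not self.items:            # truthiness, not len() == 0
                    return None
                item = self.items.pop(0)
                if not self.items:
                    self.notify.clear()       # queue drained
                return item
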
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index 70634910f7..0312231933 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -16,7 +16,8 @@ import bb.msg
import multiprocessing
import fcntl
import importlib
-from importlib import machinery
+import importlib.machinery
+import importlib.util
import itertools
import subprocess
import glob
@@ -26,6 +27,7 @@ import errno
import signal
import collections
import copy
+import ctypes
from subprocess import getstatusoutput
from contextlib import contextmanager
from ctypes import cdll
@@ -451,6 +453,10 @@ def lockfile(name, shared=False, retry=True, block=False):
consider the possibility of sending a signal to the process to break
out - at which point you want block=True rather than retry=True.
"""
+ if len(name) > 255:
+ root, ext = os.path.splitext(name)
+ name = root[:255 - len(ext)] + ext
+
dirname = os.path.dirname(name)
mkdirhier(dirname)
@@ -487,7 +493,7 @@ def lockfile(name, shared=False, retry=True, block=False):
return lf
lf.close()
except OSError as e:
- if e.errno == errno.EACCES:
+ if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
logger.error("Unable to acquire lock '%s', %s",
e.strerror, name)
sys.exit(1)
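
The lockfile() changes guard against over-long lock file names: names beyond 255 characters are clamped while preserving the extension, so a ".lock" suffix survives, and ENAMETOOLONG is now treated like EACCES as a fatal error. The truncation, restated standalone:

    import os

    def clamp_name(name, limit=255):
        # Keep the extension so suffix conventions like ".lock" survive.
        if len(name) > limit:
            root, ext = os.path.splitext(name)
            name = root[:limit - len(ext)] + ext
        return name

    assert clamp_name("x" * 300 + ".lock").endswith(".lock")
    assert len(clamp_name("x" * 300 + ".lock")) == 255
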
@@ -1590,6 +1596,36 @@ def set_process_name(name):
except:
pass
+def disable_network(uid=None, gid=None):
+ """
+ Disable networking in the current process if the kernel supports it,
+ otherwise log a debug message and return. To do this we need to create a
+ new user namespace, then map back to the original uid/gid.
+ """
+ libc = ctypes.CDLL('libc.so.6')
+
+ # From sched.h
+ # New user namespace
+ CLONE_NEWUSER = 0x10000000
+ # New network namespace
+ CLONE_NEWNET = 0x40000000
+
+ if uid is None:
+ uid = os.getuid()
+ if gid is None:
+ gid = os.getgid()
+
+ ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
+ if ret != 0:
+ logger.debug("System doesn't suport disabling network without admin privs")
+ return
+ with open("/proc/self/uid_map", "w") as f:
+ f.write("%s %s 1" % (uid, uid))
+ with open("/proc/self/setgroups", "w") as f:
+ f.write("deny")
+ with open("/proc/self/gid_map", "w") as f:
+ f.write("%s %s 1" % (gid, gid))
+
def export_proxies(d):
""" export common proxies variables from datastore to environment """
import os
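
disable_network() relies on unprivileged user namespaces: unshare(CLONE_NEWNET | CLONE_NEWUSER) detaches the process from the host network, and writing the uid/gid maps (with setgroups set to "deny" first, as user_namespaces(7) requires) restores the original identity inside the new namespace. A hypothetical caller sketch, mirroring how a worker might capture its ids before forking; the fork and task body here are assumptions for illustration, not this patch's code:

    import os
    import bb.utils

    uid, gid = os.getuid(), os.getgid()      # capture before unsharing
    pid = os.fork()
    if pid == 0:
        bb.utils.disable_network(uid, gid)   # child loses networking only
        # ... run the network-isolated work here ...
        os._exit(0)
    os.waitpid(pid, 0)
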
@@ -1616,7 +1652,9 @@ def load_plugins(logger, plugins, pluginpath):
logger.debug('Loading plugin %s' % name)
spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
if spec:
- return spec.loader.load_module()
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
+ return mod
logger.debug('Loading plugins from %s...' % pluginpath)
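
The load_plugins() change replaces the long-deprecated spec.loader.load_module(), which has been removed in recent Python releases, with the supported find_spec / module_from_spec / exec_module sequence. The same sequence in isolation; the plugin name and directory arguments are illustrative:

    import importlib.machinery
    import importlib.util

    def load_plugin(name, plugindir):
        spec = importlib.machinery.PathFinder.find_spec(name, path=[plugindir])
        if spec is None:
            return None
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)          # executes the module body
        return mod
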
diff --git a/poky/bitbake/lib/pyinotify.py b/poky/bitbake/lib/pyinotify.py
index 6ae40a2d76..8c94b3e334 100644
--- a/poky/bitbake/lib/pyinotify.py
+++ b/poky/bitbake/lib/pyinotify.py
@@ -52,7 +52,6 @@ from collections import deque
from datetime import datetime, timedelta
import time
import re
-import asyncore
import glob
import locale
import subprocess
@@ -1475,35 +1474,6 @@ class ThreadedNotifier(threading.Thread, Notifier):
self.loop()
-class AsyncNotifier(asyncore.file_dispatcher, Notifier):
- """
- This notifier inherits from asyncore.file_dispatcher in order to be able to
- use pyinotify along with the asyncore framework.
-
- """
- def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
- threshold=0, timeout=None, channel_map=None):
- """
- Initializes the async notifier. The only additional parameter is
- 'channel_map' which is the optional asyncore private map. See
- Notifier class for the meaning of the others parameters.
-
- """
- Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
- threshold, timeout)
- asyncore.file_dispatcher.__init__(self, self._fd, channel_map)
-
- def handle_read(self):
- """
- When asyncore tells us we can read from the fd, we proceed processing
- events. This method can be overridden for handling a notification
- differently.
-
- """
- self.read_events()
- self.process_events()
-
-
class TornadoAsyncNotifier(Notifier):
"""
Tornado ioloop adapter.