Diffstat (limited to 'poky/bitbake')
-rw-r--r--  poky/bitbake/README | 17
-rwxr-xr-x  poky/bitbake/bin/bitbake | 2
-rw-r--r--  poky/bitbake/doc/README | 4
-rw-r--r--  poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst | 45
-rw-r--r--  poky/bitbake/lib/bb/__init__.py | 2
-rwxr-xr-x  poky/bitbake/lib/bb/acl.py | 215
-rw-r--r--  poky/bitbake/lib/bb/command.py | 7
-rw-r--r--  poky/bitbake/lib/bb/cooker.py | 2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py | 33
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gcp.py | 98
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py | 30
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py | 11
-rw-r--r--  poky/bitbake/lib/bb/monitordisk.py | 7
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py | 21
-rw-r--r--  poky/bitbake/lib/bb/server/process.py | 13
-rw-r--r--  poky/bitbake/lib/bb/siggen.py | 21
-rw-r--r--  poky/bitbake/lib/bb/ui/buildinfohelper.py | 13
-rwxr-xr-x  poky/bitbake/lib/bb/xattr.py | 126
-rw-r--r--  poky/bitbake/lib/bblayers/query.py | 14
-rw-r--r--  poky/bitbake/lib/toaster/bldcollector/urls.py | 2
-rw-r--r--  poky/bitbake/lib/toaster/bldcontrol/models.py | 4
-rw-r--r--  poky/bitbake/lib/toaster/orm/models.py | 28
-rw-r--r--  poky/bitbake/lib/toaster/toastergui/urls.py | 2
-rw-r--r--  poky/bitbake/lib/toaster/toastergui/views.py | 6
-rw-r--r--  poky/bitbake/lib/toaster/toastergui/widgets.py | 5
-rw-r--r--  poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py | 2
-rw-r--r--  poky/bitbake/lib/toaster/toastermain/management/commands/checksocket.py | 4
-rw-r--r--  poky/bitbake/lib/toaster/toastermain/urls.py | 2
-rw-r--r--  poky/bitbake/toaster-requirements.txt | 2
29 files changed, 657 insertions, 81 deletions
diff --git a/poky/bitbake/README b/poky/bitbake/README
index 78610e65f1..38a461f5c9 100644
--- a/poky/bitbake/README
+++ b/poky/bitbake/README
@@ -18,16 +18,19 @@ Bitbake requires Python version 3.8 or newer.
Contributing
------------
-Please refer to
-https://www.openembedded.org/wiki/How_to_submit_a_patch_to_OpenEmbedded
-for guidelines on how to submit patches, just note that the latter documentation is intended
-for OpenEmbedded (and its core) not bitbake patches (bitbake-devel@lists.openembedded.org)
-but in general main guidelines apply. Once the commit(s) have been created, the way to send
-the patch is through git-send-email. For example, to send the last commit (HEAD) on current
-branch, type:
+Please refer to our contributor guide here: https://docs.yoctoproject.org/dev/contributor-guide/
+for full details on how to submit changes.
+
+As a quick guide, patches should be sent to bitbake-devel@lists.openembedded.org
+The git command to do that would be:
git send-email -M -1 --to bitbake-devel@lists.openembedded.org
+If you're sending a patch related to the BitBake manual, make sure you copy
+the Yocto Project documentation mailing list:
+
+ git send-email -M -1 --to bitbake-devel@lists.openembedded.org --cc docs@lists.yoctoproject.org
+
Mailing list:
https://lists.openembedded.org/g/bitbake-devel
diff --git a/poky/bitbake/bin/bitbake b/poky/bitbake/bin/bitbake
index 42ab14876e..8cfa165f9a 100755
--- a/poky/bitbake/bin/bitbake
+++ b/poky/bitbake/bin/bitbake
@@ -27,7 +27,7 @@ from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
bb.utils.check_system_locale()
-__version__ = "2.4.0"
+__version__ = "2.6.0"
if __name__ == "__main__":
if __version__ != bb.__version__:
diff --git a/poky/bitbake/doc/README b/poky/bitbake/doc/README
index cdbb23776e..d4f56afa37 100644
--- a/poky/bitbake/doc/README
+++ b/poky/bitbake/doc/README
@@ -47,8 +47,8 @@ To install all required packages run:
To build the documentation locally, run:
- $ cd documentation
- $ make -f Makefile.sphinx html
+ $ cd doc
+ $ make html
The resulting HTML index page will be _build/html/index.html, and you
can browse your own copy of the locally generated documentation with
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
index c061bd70ea..fb4f0a23d7 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
@@ -476,6 +476,14 @@ Here are some example URLs::
easy to share metadata without removing passwords. SSH keys, ``~/.netrc``
and ``~/.ssh/config`` files can be used as alternatives.
+Using tags with the git fetcher may cause surprising behaviour. Bitbake needs to
+resolve the tag to a specific revision and to do that, it has to connect to and use
+the upstream repository. This is because the revision the tags point at can change and
+we've seen cases of this happening in well known public repositories. This can mean
+many more network connections than expected and recipes may be reparsed at every build.
+Source mirrors will also be bypassed as the upstream repository is the only source
+of truth to resolve the revision accurately. For these reasons, whilst the fetcher
+can support tags, we recommend being specific about revisions in recipes.
.. _gitsm-fetcher:
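As a minimal sketch of the recommendation above (repository URL and revision are
placeholders), a recipe can drop the ``tag=`` parameter and pin the commit the tag
currently points at, so no upstream lookup is needed at parse time::

    # Instead of:
    #   SRC_URI = "git://example.com/foo.git;protocol=https;branch=main;tag=v1.2.3"
    # pin the revision the tag resolves to:
    SRC_URI = "git://example.com/foo.git;protocol=https;branch=main"
    SRCREV = "0123456789abcdef0123456789abcdef01234567"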
@@ -688,6 +696,41 @@ Here is an example URL::
It can also be used when setting mirrors definitions using the :term:`PREMIRRORS` variable.
+.. _gcp-fetcher:
+
+GCP Fetcher (``gs://``)
+--------------------------
+
+This submodule fetches data from a
+`Google Cloud Storage Bucket <https://cloud.google.com/storage/docs/buckets>`__.
+It uses the `Google Cloud Storage Python Client <https://cloud.google.com/python/docs/reference/storage/latest>`__
+to check the status of objects in the bucket and download them.
+The use of the Python client makes it substantially faster than using command
+line tools such as gsutil.
+
+The fetcher requires the Google Cloud Storage Python Client to be installed, along
+with the gsutil tool.
+
+The fetcher requires that the machine has valid credentials for accessing the
+chosen bucket. Instructions for authentication can be found in the
+`Google Cloud documentation <https://cloud.google.com/docs/authentication/provide-credentials-adc#local-dev>`__.
+
+If it is used from the OpenEmbedded build system, the fetcher can be used for
+fetching sstate artifacts from a GCS bucket by specifying the
+``SSTATE_MIRRORS`` variable as shown below::
+
+ SSTATE_MIRRORS ?= "\
+ file://.* gs://<bucket name>/PATH \
+ "
+
+The fetcher can also be used in recipes::
+
+ SRC_URI = "gs://<bucket name>/<foo_container>/<bar_file>"
+
+However, the checksum of the file should also be provided::
+
+ SRC_URI[sha256sum] = "<sha256 string>"
+
.. _crate-fetcher:
Crate Fetcher (``crate://``)
@@ -791,6 +834,8 @@ Fetch submodules also exist for the following:
- OSC (``osc://``)
+- S3 (``s3://``)
+
- Secure FTP (``sftp://``)
- Secure Shell (``ssh://``)
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index 0e2d8677b8..3163481e56 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "2.4.0"
+__version__ = "2.6.0"
import sys
if sys.version_info < (3, 8, 0):
diff --git a/poky/bitbake/lib/bb/acl.py b/poky/bitbake/lib/bb/acl.py
new file mode 100755
index 0000000000..0f41b275cf
--- /dev/null
+++ b/poky/bitbake/lib/bb/acl.py
@@ -0,0 +1,215 @@
+#! /usr/bin/env python3
+#
+# Copyright 2023 by Garmin Ltd. or its subsidiaries
+#
+# SPDX-License-Identifier: MIT
+
+
+import sys
+import ctypes
+import os
+import errno
+import pwd
+import grp
+
+libacl = ctypes.CDLL("libacl.so.1", use_errno=True)
+
+
+ACL_TYPE_ACCESS = 0x8000
+ACL_TYPE_DEFAULT = 0x4000
+
+ACL_FIRST_ENTRY = 0
+ACL_NEXT_ENTRY = 1
+
+ACL_UNDEFINED_TAG = 0x00
+ACL_USER_OBJ = 0x01
+ACL_USER = 0x02
+ACL_GROUP_OBJ = 0x04
+ACL_GROUP = 0x08
+ACL_MASK = 0x10
+ACL_OTHER = 0x20
+
+ACL_READ = 0x04
+ACL_WRITE = 0x02
+ACL_EXECUTE = 0x01
+
+acl_t = ctypes.c_void_p
+acl_entry_t = ctypes.c_void_p
+acl_permset_t = ctypes.c_void_p
+acl_perm_t = ctypes.c_uint
+
+acl_tag_t = ctypes.c_int
+
+libacl.acl_free.argtypes = [acl_t]
+
+
+def acl_free(acl):
+ libacl.acl_free(acl)
+
+
+libacl.acl_get_file.restype = acl_t
+libacl.acl_get_file.argtypes = [ctypes.c_char_p, ctypes.c_uint]
+
+
+def acl_get_file(path, typ):
+ acl = libacl.acl_get_file(os.fsencode(path), typ)
+ if acl is None:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err), str(path))
+
+ return acl
+
+
+libacl.acl_get_entry.argtypes = [acl_t, ctypes.c_int, ctypes.c_void_p]
+
+
+def acl_get_entry(acl, entry_id):
+ entry = acl_entry_t()
+ ret = libacl.acl_get_entry(acl, entry_id, ctypes.byref(entry))
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+
+ if ret == 0:
+ return None
+
+ return entry
+
+
+libacl.acl_get_tag_type.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_tag_type(entry_d):
+ tag = acl_tag_t()
+ ret = libacl.acl_get_tag_type(entry_d, ctypes.byref(tag))
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+ return tag.value
+
+
+libacl.acl_get_qualifier.restype = ctypes.c_void_p
+libacl.acl_get_qualifier.argtypes = [acl_entry_t]
+
+
+def acl_get_qualifier(entry_d):
+ ret = libacl.acl_get_qualifier(entry_d)
+ if ret is None:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+ return ctypes.c_void_p(ret)
+
+
+libacl.acl_get_permset.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_permset(entry_d):
+ permset = acl_permset_t()
+ ret = libacl.acl_get_permset(entry_d, ctypes.byref(permset))
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+
+ return permset
+
+
+libacl.acl_get_perm.argtypes = [acl_permset_t, acl_perm_t]
+
+
+def acl_get_perm(permset_d, perm):
+ ret = libacl.acl_get_perm(permset_d, perm)
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+ return bool(ret)
+
+
+class Entry(object):
+ def __init__(self, tag, qualifier, mode):
+ self.tag = tag
+ self.qualifier = qualifier
+ self.mode = mode
+
+ def __str__(self):
+ typ = ""
+ qual = ""
+ if self.tag == ACL_USER:
+ typ = "user"
+ qual = pwd.getpwuid(self.qualifier).pw_name
+ elif self.tag == ACL_GROUP:
+ typ = "group"
+ qual = grp.getgrgid(self.qualifier).gr_name
+ elif self.tag == ACL_USER_OBJ:
+ typ = "user"
+ elif self.tag == ACL_GROUP_OBJ:
+ typ = "group"
+ elif self.tag == ACL_MASK:
+ typ = "mask"
+ elif self.tag == ACL_OTHER:
+ typ = "other"
+
+ r = "r" if self.mode & ACL_READ else "-"
+ w = "w" if self.mode & ACL_WRITE else "-"
+ x = "x" if self.mode & ACL_EXECUTE else "-"
+
+ return f"{typ}:{qual}:{r}{w}{x}"
+
+
+class ACL(object):
+ def __init__(self, acl):
+ self.acl = acl
+
+ def __del__(self):
+ acl_free(self.acl)
+
+ def entries(self):
+ entry_id = ACL_FIRST_ENTRY
+ while True:
+ entry = acl_get_entry(self.acl, entry_id)
+ if entry is None:
+ break
+
+ permset = acl_get_permset(entry)
+
+ mode = 0
+ for m in (ACL_READ, ACL_WRITE, ACL_EXECUTE):
+ if acl_get_perm(permset, m):
+ mode |= m
+
+ qualifier = None
+ tag = acl_get_tag_type(entry)
+
+ if tag == ACL_USER or tag == ACL_GROUP:
+ qual = acl_get_qualifier(entry)
+ qualifier = ctypes.cast(qual, ctypes.POINTER(ctypes.c_int))[0]
+
+ yield Entry(tag, qualifier, mode)
+
+ entry_id = ACL_NEXT_ENTRY
+
+ @classmethod
+ def from_path(cls, path, typ):
+ acl = acl_get_file(path, typ)
+ return cls(acl)
+
+
+def main():
+ import argparse
+ import pwd
+ import grp
+ from pathlib import Path
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("path", help="File Path", type=Path)
+
+ args = parser.parse_args()
+
+ acl = ACL.from_path(args.path, ACL_TYPE_ACCESS)
+ for entry in acl.entries():
+ print(str(entry))
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
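A minimal usage sketch for the new ``bb.acl`` module above; the path is a
placeholder and ``bb`` is assumed to be importable (i.e. bitbake's ``lib/`` is on
``sys.path``)::

    from bb import acl

    # Iterate the access ACL of a file; entries render like "user::rwx".
    access = acl.ACL.from_path("/tmp", acl.ACL_TYPE_ACCESS)
    for entry in access.entries():
        print(entry)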
diff --git a/poky/bitbake/lib/bb/command.py b/poky/bitbake/lib/bb/command.py
index a355f56c60..b494f84a0a 100644
--- a/poky/bitbake/lib/bb/command.py
+++ b/poky/bitbake/lib/bb/command.py
@@ -65,7 +65,7 @@ class Command:
command = commandline.pop(0)
# Ensure cooker is ready for commands
- if command != "updateConfig" and command != "setFeatures":
+ if command not in ["updateConfig", "setFeatures", "ping"]:
try:
self.cooker.init_configdata()
if not self.remotedatastores:
@@ -85,7 +85,8 @@ class Command:
if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'):
return None, "Not able to execute not readonly commands in readonly mode"
try:
- self.cooker.process_inotify_updates_apply()
+ if command != "ping":
+ self.cooker.process_inotify_updates_apply()
if getattr(command_method, 'needconfig', True):
self.cooker.updateCacheSync()
result = command_method(self, commandline)
@@ -169,6 +170,8 @@ class CommandsSync:
Allow a UI to check the server is still alive
"""
return "Still alive!"
+ ping.needconfig = False
+ ping.readonly = True
def stateShutdown(self, command, params):
"""
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index 11c9fa2c40..064e3cae6e 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -650,7 +650,7 @@ class BBCooker:
raise
else:
if not mc in self.databuilder.mcdata:
- bb.fatal('Not multiconfig named "%s" found' % mc)
+ bb.fatal('No multiconfig named "%s" found' % mc)
envdata = self.databuilder.mcdata[mc]
data.expandKeys(envdata)
parse.ast.runAnonFuncs(envdata)
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 2428a26fa6..765aedd51d 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -753,7 +753,7 @@ def get_autorev(d):
d.setVar("__BBAUTOREV_SEEN", True)
return "AUTOINC"
-def get_srcrev(d, method_name='sortable_revision'):
+def _get_srcrev(d, method_name='sortable_revision'):
"""
Return the revision string, usually for use in the version string (PV) of the current package
Most packages usually only have one SCM so we just pass on the call.
@@ -774,6 +774,7 @@ def get_srcrev(d, method_name='sortable_revision'):
d.setVar("__BBINSRCREV", True)
scms = []
+ revs = []
fetcher = Fetch(d.getVar('SRC_URI').split(), d)
urldata = fetcher.ud
for u in urldata:
@@ -781,16 +782,19 @@ def get_srcrev(d, method_name='sortable_revision'):
scms.append(u)
if not scms:
- raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+ d.delVar("__BBINSRCREV")
+ return "", revs
+
if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
+ revs.append(rev)
if len(rev) > 10:
rev = rev[:10]
d.delVar("__BBINSRCREV")
if autoinc:
- return "AUTOINC+" + rev
- return rev
+ return "AUTOINC+" + rev, revs
+ return rev, revs
#
# Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
@@ -806,6 +810,7 @@ def get_srcrev(d, method_name='sortable_revision'):
ud = urldata[scm]
for name in ud.names:
autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
+ revs.append(rev)
seenautoinc = seenautoinc or autoinc
if len(rev) > 10:
rev = rev[:10]
@@ -823,7 +828,21 @@ def get_srcrev(d, method_name='sortable_revision'):
format = "AUTOINC+" + format
d.delVar("__BBINSRCREV")
- return format
+ return format, revs
+
+def get_hashvalue(d, method_name='sortable_revision'):
+ pkgv, revs = _get_srcrev(d, method_name=method_name)
+ return " ".join(revs)
+
+def get_pkgv_string(d, method_name='sortable_revision'):
+ pkgv, revs = _get_srcrev(d, method_name=method_name)
+ return pkgv
+
+def get_srcrev(d, method_name='sortable_revision'):
+ pkgv, revs = _get_srcrev(d, method_name=method_name)
+ if not pkgv:
+ raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+ return pkgv
def localpath(url, d):
fetcher = bb.fetch2.Fetch([url], d)
@@ -1290,7 +1309,7 @@ class FetchData(object):
if checksum_name in self.parm:
checksum_expected = self.parm[checksum_name]
- elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate"]:
+ elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]:
checksum_expected = None
else:
checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
@@ -1976,6 +1995,7 @@ from . import npm
from . import npmsw
from . import az
from . import crate
+from . import gcp
methods.append(local.Local())
methods.append(wget.Wget())
@@ -1997,3 +2017,4 @@ methods.append(npm.Npm())
methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())
methods.append(crate.Crate())
+methods.append(gcp.GCP())
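The refactoring above splits the old ``get_srcrev()`` into an internal
``_get_srcrev()`` returning ``(pkgv, revs)`` plus three wrappers. A hedged sketch of
how they differ, assuming ``d`` is a datastore with ``SRC_URI`` already set::

    import bb.fetch2

    srcrev = bb.fetch2.get_srcrev(d)        # raises FetchError if SRC_URI has no SCM
    pkgv = bb.fetch2.get_pkgv_string(d)     # returns "" instead of raising in that case
    hashinput = bb.fetch2.get_hashvalue(d)  # full, untruncated revisions joined by spaces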
diff --git a/poky/bitbake/lib/bb/fetch2/gcp.py b/poky/bitbake/lib/bb/fetch2/gcp.py
new file mode 100644
index 0000000000..f42c81fda8
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/gcp.py
@@ -0,0 +1,98 @@
+"""
+BitBake 'Fetch' implementation for Google Cloud Platform Storage.
+
+Class for fetching files from Google Cloud Storage using the
+Google Cloud Storage Python Client. The GCS Python Client must
+be correctly installed, configured and authenticated prior to use.
+Additionally, gsutil must also be installed.
+
+"""
+
+# Copyright (C) 2023, Snap Inc.
+#
+# Based in part on bb.fetch2.s3:
+# Copyright (C) 2017 Andre McCurdy
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib.parse, urllib.error
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+
+class GCP(FetchMethod):
+ """
+ Class to fetch urls via GCP's Python API.
+ """
+ def __init__(self):
+ self.gcp_client = None
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with GCP.
+ """
+ return ud.type in ['gs']
+
+ def recommends_checksum(self, urldata):
+ return True
+
+ def urldata_init(self, ud, d):
+ if 'downloadfilename' in ud.parm:
+ ud.basename = ud.parm['downloadfilename']
+ else:
+ ud.basename = os.path.basename(ud.path)
+
+ ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+
+ def get_gcp_client(self):
+ from google.cloud import storage
+ self.gcp_client = storage.Client(project=None)
+
+ def download(self, ud, d):
+ """
+ Fetch urls using the GCP API.
+ Assumes localpath was called first.
+ """
+ logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
+ if self.gcp_client is None:
+ self.get_gcp_client()
+
+ bb.fetch2.check_network_access(d, "gsutil stat", ud.url)
+
+ # Path sometimes has leading slash, so strip it
+ path = ud.path.lstrip("/")
+ blob = self.gcp_client.bucket(ud.host).blob(path)
+ blob.download_to_filename(ud.localpath)
+
+ # Additional sanity checks copied from the wget class (although there
+ # are no known issues which mean these are required, treat the GCP API
+ # tool with a little healthy suspicion).
+ if not os.path.exists(ud.localpath):
+ raise FetchError(f"The GCP API returned success for gs://{ud.host}{ud.path} but {ud.localpath} doesn't exist?!")
+
+ if os.path.getsize(ud.localpath) == 0:
+ os.remove(ud.localpath)
+ raise FetchError(f"The downloaded file for gs://{ud.host}{ud.path} resulted in a zero size file?! Deleting and failing since this isn't right.")
+
+ return True
+
+ def checkstatus(self, fetch, ud, d):
+ """
+ Check the status of a URL.
+ """
+ logger.debug2(f"Checking status of gs://{ud.host}{ud.path}")
+ if self.gcp_client is None:
+ self.get_gcp_client()
+
+ bb.fetch2.check_network_access(d, "gsutil stat", ud.url)
+
+ # Path sometimes has leading slash, so strip it
+ path = ud.path.lstrip("/")
+ if self.gcp_client.bucket(ud.host).blob(path).exists() == False:
+ raise FetchError(f"The GCP API reported that gs://{ud.host}{ud.path} does not exist")
+ else:
+ return True
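A standalone sketch mirroring the fetcher's client calls above, which can be useful
for checking credentials outside of BitBake; bucket and object names are placeholders
and Application Default Credentials are assumed::

    from google.cloud import storage

    client = storage.Client(project=None)
    blob = client.bucket("my-bucket").blob("path/to/object.tar.gz")
    if blob.exists():
        blob.download_to_filename("/tmp/object.tar.gz")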
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 2a3c06fe4e..e11271b757 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -65,6 +65,7 @@ import fnmatch
import os
import re
import shlex
+import shutil
import subprocess
import tempfile
import bb
@@ -365,8 +366,35 @@ class Git(FetchMethod):
runfetchcmd(fetch_cmd, d, workdir=ud.clonedir)
repourl = self._get_repo_url(ud)
+ needs_clone = False
+ if os.path.exists(ud.clonedir):
+ # The directory may exist, but not be the top level of a bare git
+ # repository in which case it needs to be deleted and re-cloned.
+ try:
+ # Since clones can be bare, use --absolute-git-dir instead of --show-toplevel
+ output = runfetchcmd("LANG=C %s rev-parse --absolute-git-dir" % ud.basecmd, d, workdir=ud.clonedir)
+
+ toplevel = os.path.abspath(output.rstrip())
+ abs_clonedir = os.path.abspath(ud.clonedir).rstrip('/')
+ # The top level Git directory must either be the clone directory
+ # or a child of the clone directory. Any ancestor directory of
+ # the clone directory is not valid as the Git directory (and
+ # probably belongs to some other unrelated repository), so a
+ # clone is required
+ if os.path.commonprefix([abs_clonedir, toplevel]) != abs_clonedir:
+ logger.warning("Top level directory '%s' doesn't match expected '%s'. Re-cloning", toplevel, ud.clonedir)
+ needs_clone = True
+ except bb.fetch2.FetchError as e:
+ logger.warning("Unable to get top level for %s (not a git directory?): %s", ud.clonedir, e)
+ needs_clone = True
+
+ if needs_clone:
+ shutil.rmtree(ud.clonedir)
+ else:
+ needs_clone = True
+
# If the repo still doesn't exist, fallback to cloning it
- if not os.path.exists(ud.clonedir):
+ if needs_clone:
# We do this since git will use a "-l" option automatically for local urls where possible,
# but it doesn't work when git/objects is a symlink, only works when it is a directory.
if repourl.startswith("file://"):
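The clone-directory sanity check added above can be reproduced outside BitBake; a
hedged sketch with ``subprocess`` standing in for ``runfetchcmd`` and a placeholder
clone path::

    import os
    import subprocess

    def is_valid_clonedir(clonedir):
        try:
            out = subprocess.check_output(
                ["git", "rev-parse", "--absolute-git-dir"],
                cwd=clonedir, env={**os.environ, "LANG": "C"}, text=True)
        except (subprocess.CalledProcessError, OSError):
            return False  # not a git directory at all; a fresh clone is needed
        toplevel = os.path.abspath(out.strip())
        abs_clonedir = os.path.abspath(clonedir).rstrip("/")
        # Valid only if the git dir is the clone dir itself or a child of it.
        return os.path.commonprefix([abs_clonedir, toplevel]) == abs_clonedir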
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index 47225b9721..a87361ccf3 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -123,6 +123,7 @@ class GitSM(Git):
url += ";name=%s" % module
url += ";subpath=%s" % module
url += ";nobranch=1"
+ url += ";lfs=%s" % self._need_lfs(ud)
# Note that adding "user=" here to give credentials to the
# submodule is not supported. Since using SRC_URI to give git://
# URL a password is not supported, one have to use one of the
@@ -242,10 +243,12 @@ class GitSM(Git):
ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)
if not ud.bareclone and ret:
- # All submodules should already be downloaded and configured in the tree. This simply sets
- # up the configuration and checks out the files. The main project config should remain
- # unmodified, and no download from the internet should occur.
- runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+ # All submodules should already be downloaded and configured in the tree. This simply
+ # sets up the configuration and checks out the files. The main project config should
+ # remain unmodified, and no download from the internet should occur. As such, lfs smudge
+ # should also be skipped as these files were already smudged in the fetch stage if lfs
+ # was enabled.
+ runfetchcmd("GIT_LFS_SKIP_SMUDGE=1 %s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
def implicit_urldata(self, ud, d):
import shutil, subprocess, tempfile
diff --git a/poky/bitbake/lib/bb/monitordisk.py b/poky/bitbake/lib/bb/monitordisk.py
index a1b910007d..f928210351 100644
--- a/poky/bitbake/lib/bb/monitordisk.py
+++ b/poky/bitbake/lib/bb/monitordisk.py
@@ -234,9 +234,10 @@ class diskMonitor:
freeInode = st.f_favail
if minInode and freeInode < minInode:
- # Some filesystems use dynamic inodes so can't run out
- # (e.g. btrfs). This is reported by the inode count being 0.
- if st.f_files == 0:
+ # Some filesystems use dynamic inodes so can't run out.
+ # This is reported by the inode count being 0 (btrfs) or the free
+ # inode count being -1 (cephfs).
+ if st.f_files == 0 or st.f_favail == -1:
self.devDict[k][2] = None
continue
# Always show warning, the self.checked would always be False if the action is WARN
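The special cases above can be observed directly with ``os.statvfs``; a hedged
sketch with a placeholder path, echoing the behaviour described in the comment
(btrfs reports ``f_files == 0``, cephfs reports ``f_favail == -1``)::

    import os

    st = os.statvfs("/path/to/build")
    if st.f_files == 0 or st.f_favail == -1:
        print("dynamic inodes; inode monitoring is skipped")
    else:
        print("%d of %d inodes free" % (st.f_favail, st.f_files))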
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 48788f4aa6..c88d7129ca 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -200,21 +200,24 @@ class RunQueueScheduler(object):
curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
now = time.time()
tdiff = now - self.prev_pressure_time
- if tdiff > 1.0:
- exceeds_cpu_pressure = self.rq.max_cpu_pressure and (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) / tdiff > self.rq.max_cpu_pressure
- exceeds_io_pressure = self.rq.max_io_pressure and (float(curr_io_pressure) - float(self.prev_io_pressure)) / tdiff > self.rq.max_io_pressure
- exceeds_memory_pressure = self.rq.max_memory_pressure and (float(curr_memory_pressure) - float(self.prev_memory_pressure)) / tdiff > self.rq.max_memory_pressure
+ psi_accumulation_interval = 1.0
+ cpu_pressure = (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) / tdiff
+ io_pressure = (float(curr_io_pressure) - float(self.prev_io_pressure)) / tdiff
+ memory_pressure = (float(curr_memory_pressure) - float(self.prev_memory_pressure)) / tdiff
+ exceeds_cpu_pressure = self.rq.max_cpu_pressure and cpu_pressure > self.rq.max_cpu_pressure
+ exceeds_io_pressure = self.rq.max_io_pressure and io_pressure > self.rq.max_io_pressure
+ exceeds_memory_pressure = self.rq.max_memory_pressure and memory_pressure > self.rq.max_memory_pressure
+
+ if tdiff > psi_accumulation_interval:
self.prev_cpu_pressure = curr_cpu_pressure
self.prev_io_pressure = curr_io_pressure
self.prev_memory_pressure = curr_memory_pressure
self.prev_pressure_time = now
- else:
- exceeds_cpu_pressure = self.rq.max_cpu_pressure and (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) > self.rq.max_cpu_pressure
- exceeds_io_pressure = self.rq.max_io_pressure and (float(curr_io_pressure) - float(self.prev_io_pressure)) > self.rq.max_io_pressure
- exceeds_memory_pressure = self.rq.max_memory_pressure and (float(curr_memory_pressure) - float(self.prev_memory_pressure)) > self.rq.max_memory_pressure
+
pressure_state = (exceeds_cpu_pressure, exceeds_io_pressure, exceeds_memory_pressure)
+ pressure_values = (round(cpu_pressure,1), self.rq.max_cpu_pressure, round(io_pressure,1), self.rq.max_io_pressure, round(memory_pressure,1), self.rq.max_memory_pressure)
if hasattr(self, "pressure_state") and pressure_state != self.pressure_state:
- bb.note("Pressure status changed to CPU: %s, IO: %s, Mem: %s" % pressure_state)
+ bb.note("Pressure status changed to CPU: %s, IO: %s, Mem: %s (CPU: %s/%s, IO: %s/%s, Mem: %s/%s) - using %s/%s bitbake threads" % (pressure_state + pressure_values + (len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks)))
self.pressure_state = pressure_state
return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure)
return False
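The pressure values above come from the cumulative ``total=`` counters in
``/proc/pressure/*``, whose "some" line looks like
``some avg10=0.12 avg60=0.08 avg300=0.05 total=123456789`` (microseconds stalled).
A hedged sketch of the same rate calculation in isolation::

    import time

    def read_cpu_stall_total():
        with open("/proc/pressure/cpu") as f:
            # field 4 is "total=<microseconds>"
            return float(f.readline().split()[4].split("=")[1])

    prev, prev_time = read_cpu_stall_total(), time.time()
    time.sleep(1.0)  # comparable to psi_accumulation_interval
    curr, now = read_cpu_stall_total(), time.time()
    cpu_pressure = (curr - prev) / (now - prev_time)  # stall microseconds per second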
diff --git a/poky/bitbake/lib/bb/server/process.py b/poky/bitbake/lib/bb/server/process.py
index f62faed000..40cb99bc97 100644
--- a/poky/bitbake/lib/bb/server/process.py
+++ b/poky/bitbake/lib/bb/server/process.py
@@ -38,8 +38,11 @@ logger = logging.getLogger('BitBake')
class ProcessTimeout(SystemExit):
pass
+def currenttime():
+ return datetime.datetime.now().strftime('%H:%M:%S.%f')
+
def serverlog(msg):
- print(str(os.getpid()) + " " + datetime.datetime.now().strftime('%H:%M:%S.%f') + " " + msg)
+ print(str(os.getpid()) + " " + currenttime() + " " + msg)
sys.stdout.flush()
#
@@ -289,7 +292,9 @@ class ProcessServer():
continue
try:
serverlog("Running command %s" % command)
- self.command_channel_reply.send(self.cooker.command.runCommand(command, self))
+ reply = self.cooker.command.runCommand(command, self)
+ serverlog("Sending reply %s" % repr(reply))
+ self.command_channel_reply.send(reply)
serverlog("Command Completed (socket: %s)" % os.path.exists(self.sockname))
except Exception as e:
stack = traceback.format_exc()
@@ -502,9 +507,9 @@ class ServerCommunicator():
def runCommand(self, command):
self.connection.send(command)
if not self.recv.poll(30):
- logger.info("No reply from server in 30s (for command %s)" % command[0])
+ logger.info("No reply from server in 30s (for command %s at %s)" % (command[0], currenttime()))
if not self.recv.poll(30):
- raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
+ raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s at %s)" % currenttime())
ret, exc = self.recv.get()
# Should probably turn all exceptions in exc back into exceptions?
# For now, at least handle BBHandledException
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index d0a555654b..b023b79eca 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -261,10 +261,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
bb.warn("Error during finalise of %s" % mcfn)
raise
- #Slow but can be useful for debugging mismatched basehashes
- #for task in self.taskdeps[mcfn]:
- # self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)
-
basehashes = {}
for task in taskdeps:
basehashes[task] = self.basehash[mcfn + ":" + task]
@@ -274,6 +270,11 @@ class SignatureGeneratorBasic(SignatureGenerator):
d.setVar("__siggen_varvals", lookupcache)
d.setVar("__siggen_taskdeps", taskdeps)
+ #Slow but can be useful for debugging mismatched basehashes
+ #self.setup_datacache_from_datastore(mcfn, d)
+ #for task in taskdeps:
+ # self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)
+
def setup_datacache_from_datastore(self, mcfn, d):
super().setup_datacache_from_datastore(mcfn, d)
@@ -360,7 +361,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
for dep in sorted(self.runtaskdeps[tid]):
data += self.get_unihash(dep[1])
- for (f, cs) in self.file_checksum_values[tid]:
+ for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
if cs:
if "/./" in f:
data += "./" + f.split("/./")[1]
@@ -418,14 +419,14 @@ class SignatureGeneratorBasic(SignatureGenerator):
data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
if dep in self.basehash_ignore_vars:
- continue
+ continue
data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]
if runtime and tid in self.taskhash:
data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])]
data['file_checksum_values'] = []
- for f,cs in self.file_checksum_values[tid]:
+ for f,cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
if "/./" in f:
data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
else:
@@ -744,6 +745,12 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
self.server = data.getVar('BB_HASHSERVE')
self.method = "sstate_output_hash"
+def clean_checksum_file_path(file_checksum_tuple):
+ f, cs = file_checksum_tuple
+ if "/./" in f:
+ return "./" + f.split("/./")[1]
+ return f
+
def dump_this_task(outfile, d):
import bb.parse
mcfn = d.getVar("BB_FILENAME")
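``clean_checksum_file_path()`` above strips everything before ``/./`` so that
checksum ordering no longer depends on the absolute build location; an illustrative
sketch with made-up paths, assuming ``bb.siggen`` is importable::

    from bb.siggen import clean_checksum_file_path

    entries = [
        ("/home/user/build-a/tmp/work/./src/b.c", "aa11"),
        ("/srv/build-b/tmp/work/./src/a.c", "bb22"),
    ]
    # Sorts as "./src/a.c" then "./src/b.c", regardless of the prefixes.
    print(sorted(entries, key=clean_checksum_file_path))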
diff --git a/poky/bitbake/lib/bb/ui/buildinfohelper.py b/poky/bitbake/lib/bb/ui/buildinfohelper.py
index 129bb329c3..8b212b7803 100644
--- a/poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/poky/bitbake/lib/bb/ui/buildinfohelper.py
@@ -1746,7 +1746,6 @@ class BuildInfoHelper(object):
buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
- image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
# location of the manifest files for this build;
# note that this file is only produced if an image is produced
@@ -1767,6 +1766,18 @@ class BuildInfoHelper(object):
# filter out anything which isn't an image target
image_targets = [target for target in targets if target.is_image]
+ if len(image_targets) > 0:
+ #if there are image targets retrieve image_name
+ image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
+ if not image_name:
+                # The target is an image but IMAGE_NAME is not set, so derive it from the bitbake command line
+ logger.info("IMAGE_NAME not found, extracting from bitbake command")
+ cmd = self.server.runCommand(['getVariable','BB_CMDLINE'])[0]
+ #filter out tokens that are command line options
+ cmd = [token for token in cmd if not token.startswith('-')]
+ image_name = cmd[1].split(':', 1)[0] # remove everything after : in image name
+ logger.info("IMAGE_NAME found as : %s " % image_name)
+
for image_target in image_targets:
# this is set to True if we find at least one file relating to
# this target; if this remains False after the scan, we copy the
diff --git a/poky/bitbake/lib/bb/xattr.py b/poky/bitbake/lib/bb/xattr.py
new file mode 100755
index 0000000000..7b634944a4
--- /dev/null
+++ b/poky/bitbake/lib/bb/xattr.py
@@ -0,0 +1,126 @@
+#! /usr/bin/env python3
+#
+# Copyright 2023 by Garmin Ltd. or its subsidiaries
+#
+# SPDX-License-Identifier: MIT
+
+import sys
+import ctypes
+import os
+import errno
+
+libc = ctypes.CDLL("libc.so.6", use_errno=True)
+fsencoding = sys.getfilesystemencoding()
+
+
+libc.listxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
+libc.llistxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
+
+
+def listxattr(path, follow=True):
+ func = libc.listxattr if follow else libc.llistxattr
+
+ os_path = os.fsencode(path)
+
+ while True:
+ length = func(os_path, None, 0)
+
+ if length < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err), str(path))
+
+ if length == 0:
+ return []
+
+ arr = ctypes.create_string_buffer(length)
+
+ read_length = func(os_path, arr, length)
+ if read_length != length:
+ # Race!
+ continue
+
+ return [a.decode(fsencoding) for a in arr.raw.split(b"\x00") if a]
+
+
+libc.getxattr.argtypes = [
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_size_t,
+]
+libc.lgetxattr.argtypes = [
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_size_t,
+]
+
+
+def getxattr(path, name, follow=True):
+ func = libc.getxattr if follow else libc.lgetxattr
+
+ os_path = os.fsencode(path)
+ os_name = os.fsencode(name)
+
+ while True:
+ length = func(os_path, os_name, None, 0)
+
+ if length < 0:
+ err = ctypes.get_errno()
+ if err == errno.ENODATA:
+ return None
+ raise OSError(err, os.strerror(err), str(path))
+
+ if length == 0:
+ return ""
+
+ arr = ctypes.create_string_buffer(length)
+
+ read_length = func(os_path, os_name, arr, length)
+ if read_length != length:
+ # Race!
+ continue
+
+ return arr.raw
+
+
+def get_all_xattr(path, follow=True):
+ attrs = {}
+
+ names = listxattr(path, follow)
+
+ for name in names:
+ value = getxattr(path, name, follow)
+ if value is None:
+ # This can happen if a value is erased after listxattr is called,
+ # so ignore it
+ continue
+ attrs[name] = value
+
+ return attrs
+
+
+def main():
+ import argparse
+ from pathlib import Path
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("path", help="File Path", type=Path)
+
+ args = parser.parse_args()
+
+ attrs = get_all_xattr(args.path)
+
+ for name, value in attrs.items():
+ try:
+ value = value.decode(fsencoding)
+ except UnicodeDecodeError:
+ pass
+
+ print(f"{name} = {value}")
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/poky/bitbake/lib/bblayers/query.py b/poky/bitbake/lib/bblayers/query.py
index bec76db98a..bfc18a7593 100644
--- a/poky/bitbake/lib/bblayers/query.py
+++ b/poky/bitbake/lib/bblayers/query.py
@@ -282,7 +282,10 @@ Lists recipes with the bbappends that apply to them as subitems.
else:
logger.plain('=== Appended recipes ===')
- pnlist = list(self.tinfoil.cooker_data.pkg_pn.keys())
+
+ cooker_data = self.tinfoil.cooker.recipecaches[args.mc]
+
+ pnlist = list(cooker_data.pkg_pn.keys())
pnlist.sort()
appends = False
for pn in pnlist:
@@ -295,7 +298,7 @@ Lists recipes with the bbappends that apply to them as subitems.
if not found:
continue
- if self.show_appends_for_pn(pn):
+ if self.show_appends_for_pn(pn, cooker_data, args.mc):
appends = True
if not args.pnspec and self.show_appends_for_skipped():
@@ -304,8 +307,10 @@ Lists recipes with the bbappends that apply to them as subitems.
if not appends:
logger.plain('No append files found')
- def show_appends_for_pn(self, pn):
- filenames = self.tinfoil.cooker_data.pkg_pn[pn]
+ def show_appends_for_pn(self, pn, cooker_data, mc):
+ filenames = cooker_data.pkg_pn[pn]
+ if mc:
+ pn = "mc:%s:%s" % (mc, pn)
best = self.tinfoil.find_best_provider(pn)
best_filename = os.path.basename(best[3])
@@ -530,6 +535,7 @@ NOTE: .bbappend files can impact the dependencies.
parser_show_appends = self.add_command(sp, 'show-appends', self.do_show_appends)
parser_show_appends.add_argument('pnspec', nargs='*', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)')
+ parser_show_appends.add_argument('--mc', help='use specified multiconfig', default='')
parser_show_cross_depends = self.add_command(sp, 'show-cross-depends', self.do_show_cross_depends)
parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true')
diff --git a/poky/bitbake/lib/toaster/bldcollector/urls.py b/poky/bitbake/lib/toaster/bldcollector/urls.py
index efd67a81a5..3c34070351 100644
--- a/poky/bitbake/lib/toaster/bldcollector/urls.py
+++ b/poky/bitbake/lib/toaster/bldcollector/urls.py
@@ -6,7 +6,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from django.conf.urls import url
+from django.urls import re_path as url
import bldcollector.views
diff --git a/poky/bitbake/lib/toaster/bldcontrol/models.py b/poky/bitbake/lib/toaster/bldcontrol/models.py
index c2f302da24..42750e7180 100644
--- a/poky/bitbake/lib/toaster/bldcontrol/models.py
+++ b/poky/bitbake/lib/toaster/bldcontrol/models.py
@@ -4,7 +4,7 @@
from __future__ import unicode_literals
from django.db import models
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
from orm.models import Project, Build, Layer_Version
import logging
@@ -124,7 +124,7 @@ class BuildRequest(models.Model):
return self.brvariable_set.get(name="MACHINE").value
def __str__(self):
- return force_text('%s %s' % (self.project, self.get_state_display()))
+ return force_str('%s %s' % (self.project, self.get_state_display()))
# These tables specify the settings for running an actual build.
# They MUST be kept in sync with the tables in orm.models.Project*
diff --git a/poky/bitbake/lib/toaster/orm/models.py b/poky/bitbake/lib/toaster/orm/models.py
index 2cb7d7e049..f9fcf9e4fb 100644
--- a/poky/bitbake/lib/toaster/orm/models.py
+++ b/poky/bitbake/lib/toaster/orm/models.py
@@ -107,7 +107,7 @@ class ToasterSetting(models.Model):
class ProjectManager(models.Manager):
- def create_project(self, name, release, existing_project=None):
+ def create_project(self, name, release, existing_project=None, imported=False):
if existing_project and (release is not None):
prj = existing_project
prj.bitbake_version = release.bitbake_version
@@ -134,19 +134,19 @@ class ProjectManager(models.Manager):
if release is None:
return prj
-
- for rdl in release.releasedefaultlayer_set.all():
- lv = Layer_Version.objects.filter(
- layer__name=rdl.layer_name,
- release=release).first()
-
- if lv:
- ProjectLayer.objects.create(project=prj,
- layercommit=lv,
- optional=False)
- else:
- logger.warning("Default project layer %s not found" %
- rdl.layer_name)
+ if not imported:
+ for rdl in release.releasedefaultlayer_set.all():
+ lv = Layer_Version.objects.filter(
+ layer__name=rdl.layer_name,
+ release=release).first()
+
+ if lv:
+ ProjectLayer.objects.create(project=prj,
+ layercommit=lv,
+ optional=False)
+ else:
+ logger.warning("Default project layer %s not found" %
+ rdl.layer_name)
return prj
diff --git a/poky/bitbake/lib/toaster/toastergui/urls.py b/poky/bitbake/lib/toaster/toastergui/urls.py
index d2df4e6048..bc3b0c79d8 100644
--- a/poky/bitbake/lib/toaster/toastergui/urls.py
+++ b/poky/bitbake/lib/toaster/toastergui/urls.py
@@ -6,7 +6,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from django.conf.urls import url
+from django.urls import re_path as url
from django.views.generic import RedirectView
from toastergui import tables
diff --git a/poky/bitbake/lib/toaster/toastergui/views.py b/poky/bitbake/lib/toaster/toastergui/views.py
index a571b8cc18..552ff1649b 100644
--- a/poky/bitbake/lib/toaster/toastergui/views.py
+++ b/poky/bitbake/lib/toaster/toastergui/views.py
@@ -670,11 +670,11 @@ def xhr_dirinfo(request, build_id, target_id):
return HttpResponse(_get_dir_entries(build_id, target_id, top), content_type = "application/json")
from django.utils.functional import Promise
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
class LazyEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Promise):
- return force_text(obj)
+ return force_str(obj)
return super(LazyEncoder, self).default(obj)
from toastergui.templatetags.projecttags import filtered_filesizeformat
@@ -1404,7 +1404,7 @@ if True:
if not os.path.isdir('%s/conf' % request.POST['importdir']):
raise BadParameterException("Bad path or missing 'conf' directory (%s)" % request.POST['importdir'])
from django.core import management
- management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir'], interactive=False)
+ management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir'])
prj = Project.objects.get(name = request.POST['projectname'])
prj.merged_attr = True
prj.save()
diff --git a/poky/bitbake/lib/toaster/toastergui/widgets.py b/poky/bitbake/lib/toaster/toastergui/widgets.py
index ceff52942e..53696912a4 100644
--- a/poky/bitbake/lib/toaster/toastergui/widgets.py
+++ b/poky/bitbake/lib/toaster/toastergui/widgets.py
@@ -7,6 +7,7 @@
#
from django.views.generic import View, TemplateView
+from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.shortcuts import HttpResponse
from django.core.cache import cache
@@ -63,8 +64,8 @@ class ToasterTable(TemplateView):
self.default_orderby = ""
# prevent HTTP caching of table data
- @cache_control(must_revalidate=True,
- max_age=0, no_store=True, no_cache=True)
+ @method_decorator(cache_control(must_revalidate=True,
+ max_age=0, no_store=True, no_cache=True))
def dispatch(self, *args, **kwargs):
return super(ToasterTable, self).dispatch(*args, **kwargs)
diff --git a/poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py b/poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
index e25b55e5ab..f7139aa041 100644
--- a/poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
+++ b/poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
@@ -545,7 +545,7 @@ class Command(BaseCommand):
# Find the directory's release, and promote to default_release if local paths
release = self.find_import_release(layers_list,lv_dict,default_release)
# create project, SANITY: reuse any project of same name
- project = Project.objects.create_project(project_name,release,project)
+ project = Project.objects.create_project(project_name,release,project, imported=True)
# Apply any new layers or variables
self.apply_conf_variables(project,layers_list,lv_dict,release)
# WORKAROUND: since we now derive the release, redirect 'newproject_specific' to 'project_specific'
diff --git a/poky/bitbake/lib/toaster/toastermain/management/commands/checksocket.py b/poky/bitbake/lib/toaster/toastermain/management/commands/checksocket.py
index 811fd5d516..b2c002da7a 100644
--- a/poky/bitbake/lib/toaster/toastermain/management/commands/checksocket.py
+++ b/poky/bitbake/lib/toaster/toastermain/management/commands/checksocket.py
@@ -13,7 +13,7 @@ import errno
import socket
from django.core.management.base import BaseCommand, CommandError
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
DEFAULT_ADDRPORT = "0.0.0.0:8000"
@@ -51,7 +51,7 @@ class Command(BaseCommand):
if hasattr(err, 'errno') and err.errno in errors:
errtext = errors[err.errno]
else:
- errtext = force_text(err)
+ errtext = force_str(err)
raise CommandError(errtext)
self.stdout.write("OK")
diff --git a/poky/bitbake/lib/toaster/toastermain/urls.py b/poky/bitbake/lib/toaster/toastermain/urls.py
index 5fb520b384..0360302668 100644
--- a/poky/bitbake/lib/toaster/toastermain/urls.py
+++ b/poky/bitbake/lib/toaster/toastermain/urls.py
@@ -6,7 +6,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from django.conf.urls import include, url
+from django.urls import re_path as url, include
from django.views.generic import RedirectView, TemplateView
from django.views.decorators.cache import never_cache
import bldcollector.views
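The URL import changes above follow the usual Django 4.x migration pattern:
``django.conf.urls.url()`` is gone, so ``re_path`` is imported under the old name and
existing regex-based patterns keep working. A hedged, self-contained sketch with an
illustrative view::

    from django.http import HttpResponse
    from django.urls import re_path as url

    def health(request):
        return HttpResponse("ok")

    urlpatterns = [
        url(r'^health/$', health, name='health'),
    ]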
diff --git a/poky/bitbake/toaster-requirements.txt b/poky/bitbake/toaster-requirements.txt
index dedd423556..c1f433f9ec 100644
--- a/poky/bitbake/toaster-requirements.txt
+++ b/poky/bitbake/toaster-requirements.txt
@@ -1,3 +1,3 @@
-Django>3.2,<3.3
+Django>4.2,<4.3
beautifulsoup4>=4.4.0
pytz