Diffstat (limited to 'poky/meta/lib/oe')
-rw-r--r--  poky/meta/lib/oe/npm_registry.py                  169
-rw-r--r--  poky/meta/lib/oe/package_manager/__init__.py        4
-rw-r--r--  poky/meta/lib/oe/package_manager/deb/__init__.py   10
-rw-r--r--  poky/meta/lib/oe/package_manager/ipk/__init__.py    4
-rw-r--r--  poky/meta/lib/oe/package_manager/rpm/__init__.py    4
-rw-r--r--  poky/meta/lib/oe/patch.py                           8
-rw-r--r--  poky/meta/lib/oe/recipeutils.py                     9
-rw-r--r--  poky/meta/lib/oe/sbom.py                            4
-rw-r--r--  poky/meta/lib/oe/utils.py                          64
9 files changed, 196 insertions(+), 80 deletions(-)
diff --git a/poky/meta/lib/oe/npm_registry.py b/poky/meta/lib/oe/npm_registry.py
new file mode 100644
index 0000000000..96c0affb45
--- /dev/null
+++ b/poky/meta/lib/oe/npm_registry.py
@@ -0,0 +1,169 @@
+import bb
+import json
+import os
+import subprocess
+
+_ALWAYS_SAFE = frozenset('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+                         'abcdefghijklmnopqrstuvwxyz'
+                         '0123456789'
+                         '_.-~')
+
+MISSING_OK = object()
+
+REGISTRY = "https://registry.npmjs.org"
+
+# we cannot use urllib.parse here because npm expects lowercase
+# hex chars but urllib generates uppercase ones
+def uri_quote(s, safe = '/'):
+    res = ""
+    safe_set = set(safe)
+    for c in s:
+        if c in _ALWAYS_SAFE or c in safe_set:
+            res += c
+        else:
+            res += '%%%02x' % ord(c)
+    return res
+
+class PackageJson:
+    def __init__(self, spec):
+        self.__spec = spec
+
+    @property
+    def name(self):
+        return self.__spec['name']
+
+    @property
+    def version(self):
+        return self.__spec['version']
+
+    @property
+    def empty_manifest(self):
+        return {
+            'name': self.name,
+            'description': self.__spec.get('description', ''),
+            'versions': {},
+        }
+
+    def base_filename(self):
+        return uri_quote(self.name, safe = '@')
+
+    def as_manifest_entry(self, tarball_uri):
+        res = {}
+
+        ## NOTE: 'npm install' requires more than basic meta information;
+        ## e.g. it takes 'bin' from this manifest entry but not from the
+        ## actual 'package.json'
+        for (idx,dflt) in [('name', None),
+                           ('description', ""),
+                           ('version', None),
+                           ('bin', MISSING_OK),
+                           ('man', MISSING_OK),
+                           ('scripts', MISSING_OK),
+                           ('directories', MISSING_OK),
+                           ('dependencies', MISSING_OK),
+                           ('devDependencies', MISSING_OK),
+                           ('optionalDependencies', MISSING_OK),
+                           ('license', "unknown")]:
+            if idx in self.__spec:
+                res[idx] = self.__spec[idx]
+            elif dflt is MISSING_OK:
+                pass
+            elif dflt is not None:
+                res[idx] = dflt
+            else:
+                raise Exception("%s-%s: missing key %s" % (self.name,
+                                                           self.version,
+                                                           idx))
+
+        res['dist'] = {
+            'tarball': tarball_uri,
+        }
+
+        return res
+
+class ManifestImpl:
+    def __init__(self, base_fname, spec):
+        self.__base = base_fname
+        self.__spec = spec
+
+    def load(self):
+        try:
+            with open(self.filename, "r") as f:
+                res = json.load(f)
+        except IOError:
+            res = self.__spec.empty_manifest
+
+        return res
+
+    def save(self, meta):
+        with open(self.filename, "w") as f:
+            json.dump(meta, f, indent = 2)
+
+    @property
+    def filename(self):
+        return self.__base + ".meta"
+
+class Manifest:
+    def __init__(self, base_fname, spec):
+        self.__base = base_fname
+        self.__spec = spec
+        self.__lockf = None
+        self.__impl = None
+
+    def __enter__(self):
+        self.__lockf = bb.utils.lockfile(self.__base + ".lock")
+        self.__impl = ManifestImpl(self.__base, self.__spec)
+        return self.__impl
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        bb.utils.unlockfile(self.__lockf)
+
+class NpmCache:
+    def __init__(self, cache):
+        self.__cache = cache
+
+    @property
+    def path(self):
+        return self.__cache
+
+    def run(self, type, key, fname):
+        subprocess.run(['oe-npm-cache', self.__cache, type, key, fname],
+                       check = True)
+
+class NpmRegistry:
+    def __init__(self, path, cache):
+        self.__path = path
+        self.__cache = NpmCache(cache + '/_cacache')
+        bb.utils.mkdirhier(self.__path)
+        bb.utils.mkdirhier(self.__cache.path)
+
+    @staticmethod
+    ## This function is critical and must match nodejs expectations
+    def _meta_uri(spec):
+        return REGISTRY + '/' + uri_quote(spec.name, safe = '@')
+
+    @staticmethod
+    ## Exact return value does not matter; just make it look like a
+    ## usual registry url
+    def _tarball_uri(spec):
+        return '%s/%s/-/%s-%s.tgz' % (REGISTRY,
+                                      uri_quote(spec.name, safe = '@'),
+                                      uri_quote(spec.name, safe = '@/'),
+                                      spec.version)
+
+    def add_pkg(self, tarball, pkg_json):
+        pkg_json = PackageJson(pkg_json)
+        base = os.path.join(self.__path, pkg_json.base_filename())
+
+        with Manifest(base, pkg_json) as manifest:
+            meta = manifest.load()
+            tarball_uri = self._tarball_uri(pkg_json)
+
+            meta['versions'][pkg_json.version] = pkg_json.as_manifest_entry(tarball_uri)
+
+            manifest.save(meta)
+
+            ## Cache entries depend somewhat on the nodejs version;
+            ## the version-specific cache implementation must
+            ## mitigate the differences
+            self.__cache.run('meta', self._meta_uri(pkg_json), manifest.filename)
+            self.__cache.run('tgz', tarball_uri, tarball)
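
The module above is only importable inside a BitBake task, where the bb module is available. A minimal usage sketch under that assumption (the package name, tarball and all paths are hypothetical); it also shows the point of uri_quote(): npm expects lowercase percent-escapes, while urllib.parse.quote() produces uppercase ones.

    import json
    import urllib.parse
    import oe.npm_registry as npm_registry

    # npm wants lowercase hex escapes in registry paths
    print(npm_registry.uri_quote("@scope/pkg", safe="@"))   # @scope%2fpkg
    print(urllib.parse.quote("@scope/pkg", safe="@"))       # @scope%2Fpkg

    # publish a fetched tarball into the local registry mirror
    with open("/tmp/npm-work/package/package.json") as f:   # hypothetical path
        spec = json.load(f)

    registry = npm_registry.NpmRegistry("/tmp/npm-registry", "/tmp/npm-cache")
    registry.add_pkg("/tmp/downloads/pkg-1.0.0.tgz", spec)   # runs oe-npm-cache twice
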
diff --git a/poky/meta/lib/oe/package_manager/__init__.py b/poky/meta/lib/oe/package_manager/__init__.py
index 80bc1a6bc6..d3b45705ec 100644
--- a/poky/meta/lib/oe/package_manager/__init__.py
+++ b/poky/meta/lib/oe/package_manager/__init__.py
@@ -266,7 +266,7 @@ class PackageManager(object, metaclass=ABCMeta):
         pass
 
     @abstractmethod
-    def install(self, pkgs, attempt_only=False):
+    def install(self, pkgs, attempt_only=False, hard_depends_only=False):
         """
         Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
         True, installation failures are ignored.
@@ -396,7 +396,7 @@ class PackageManager(object, metaclass=ABCMeta):
bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
' '.join(install_pkgs),
' '.join(skip_pkgs)))
- self.install(install_pkgs)
+ self.install(install_pkgs, hard_depends_only=True)
except subprocess.CalledProcessError as e:
bb.fatal("Could not compute complementary packages list. Command "
"'%s' returned %d:\n%s" %
diff --git a/poky/meta/lib/oe/package_manager/deb/__init__.py b/poky/meta/lib/oe/package_manager/deb/__init__.py
index 86ddb130ad..b96ea0bad4 100644
--- a/poky/meta/lib/oe/package_manager/deb/__init__.py
+++ b/poky/meta/lib/oe/package_manager/deb/__init__.py
@@ -289,14 +289,18 @@ class DpkgPM(OpkgDpkgPM):
         self.deploy_dir_unlock()
 
-    def install(self, pkgs, attempt_only=False):
+    def install(self, pkgs, attempt_only=False, hard_depends_only=False):
         if attempt_only and len(pkgs) == 0:
             return
 
         os.environ['APT_CONFIG'] = self.apt_conf_file
 
-        cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s" % \
-              (self.apt_get_cmd, self.apt_args, ' '.join(pkgs))
+        extra_args = ""
+        if hard_depends_only:
+            extra_args = "--no-install-recommends"
+
+        cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s %s" % \
+              (self.apt_get_cmd, self.apt_args, extra_args, ' '.join(pkgs))
 
         try:
             bb.note("Installing the following packages: %s" % ' '.join(pkgs))
diff --git a/poky/meta/lib/oe/package_manager/ipk/__init__.py b/poky/meta/lib/oe/package_manager/ipk/__init__.py
index 4cd3963111..6fd2f021b6 100644
--- a/poky/meta/lib/oe/package_manager/ipk/__init__.py
+++ b/poky/meta/lib/oe/package_manager/ipk/__init__.py
@@ -337,7 +337,7 @@ class OpkgPM(OpkgDpkgPM):
         self.deploy_dir_unlock()
 
-    def install(self, pkgs, attempt_only=False):
+    def install(self, pkgs, attempt_only=False, hard_depends_only=False):
         if not pkgs:
             return
@@ -346,6 +346,8 @@ class OpkgPM(OpkgDpkgPM):
cmd += " --add-exclude %s" % exclude
for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split():
cmd += " --add-ignore-recommends %s" % bad_recommendation
+ if hard_depends_only:
+ cmd += " --no-install-recommends"
cmd += " install "
cmd += " ".join(pkgs)
diff --git a/poky/meta/lib/oe/package_manager/rpm/__init__.py b/poky/meta/lib/oe/package_manager/rpm/__init__.py
index b392581069..d97dab3293 100644
--- a/poky/meta/lib/oe/package_manager/rpm/__init__.py
+++ b/poky/meta/lib/oe/package_manager/rpm/__init__.py
@@ -181,7 +181,7 @@ class RpmPM(PackageManager):
         os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
 
-    def install(self, pkgs, attempt_only = False):
+    def install(self, pkgs, attempt_only=False, hard_depends_only=False):
         if len(pkgs) == 0:
             return
         self._prepare_pkg_transaction()
@@ -192,7 +192,7 @@ class RpmPM(PackageManager):
         output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) +
                                   (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) +
-                                  (["--setopt=install_weak_deps=False"] if self.d.getVar('NO_RECOMMENDATIONS') == "1" else []) +
+                                  (["--setopt=install_weak_deps=False"] if (hard_depends_only or self.d.getVar('NO_RECOMMENDATIONS') == "1") else []) +
                                   (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) +
                                   ["install"] +
                                   pkgs)
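
For dnf the same request is expressed through the install_weak_deps option. A sketch of the argument list handed to _invoke_dnf(), with assumed inputs (the real method adds further options such as GPG handling):

    attempt_only = False
    hard_depends_only = True
    no_recommendations = "0"   # assumed value of NO_RECOMMENDATIONS
    exclude_pkgs = []
    pkgs = ["busybox"]

    args = ((["--skip-broken"] if attempt_only else []) +
            (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) +
            (["--setopt=install_weak_deps=False"]
             if (hard_depends_only or no_recommendations == "1") else []) +
            ["install"] + pkgs)
    print(args)   # ['--setopt=install_weak_deps=False', 'install', 'busybox']
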
diff --git a/poky/meta/lib/oe/patch.py b/poky/meta/lib/oe/patch.py
index 95b915a6ab..4ec9caed45 100644
--- a/poky/meta/lib/oe/patch.py
+++ b/poky/meta/lib/oe/patch.py
@@ -299,10 +299,10 @@ class GitApplyTree(PatchTree):
         PatchTree.__init__(self, dir, d)
         self.commituser = d.getVar('PATCH_GIT_USER_NAME')
         self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL')
-        if not self._isInitialized():
+        if not self._isInitialized(d):
             self._initRepo()
 
-    def _isInitialized(self):
+    def _isInitialized(self, d):
         cmd = "git rev-parse --show-toplevel"
         try:
             output = runcmd(cmd.split(), self.dir).strip()
@@ -310,8 +310,8 @@ class GitApplyTree(PatchTree):
             ## runcmd returned non-zero which most likely means 128
             ## Not a git directory
             return False
-        ## Make sure repo is in builddir to not break top-level git repos
-        return os.path.samefile(output, self.dir)
+        ## Make sure the repo is in the builddir or under the workdir, so we do not break top-level git repos
+        return os.path.samefile(output, self.dir) or oe.path.is_path_parent(d.getVar('WORKDIR'), output)
 
     def _initRepo(self):
         runcmd("git init".split(), self.dir)
diff --git a/poky/meta/lib/oe/recipeutils.py b/poky/meta/lib/oe/recipeutils.py
index 872ff97b89..b04992c66d 100644
--- a/poky/meta/lib/oe/recipeutils.py
+++ b/poky/meta/lib/oe/recipeutils.py
@@ -666,7 +666,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False):
     return (appendpath, pathok)
 
-def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None):
+def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None, params=None):
     """
     Writes a bbappend file for a recipe
     Parameters:
@@ -696,6 +696,9 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
         redirect_output:
             If specified, redirects writing the output file to the
             specified directory (for dry-run purposes)
+        params:
+            Parameters to use when adding entries to SRC_URI. If specified,
+            should be a list of dicts with the same length as srcfiles.
     """
 
     if not removevalues:
@@ -762,12 +765,14 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
     copyfiles = {}
     if srcfiles:
         instfunclines = []
-        for newfile, origsrcfile in srcfiles.items():
+        for i, (newfile, origsrcfile) in enumerate(srcfiles.items()):
             srcfile = origsrcfile
             srcurientry = None
             if not srcfile:
                 srcfile = os.path.basename(newfile)
                 srcurientry = 'file://%s' % srcfile
+                if params and params[i]:
+                    srcurientry = '%s;%s' % (srcurientry, ';'.join('%s=%s' % (k,v) for k,v in params[i].items()))
                 # Double-check it's not there already
                 # FIXME do we care if the entry is added by another bbappend that might go away?
                 if not srcurientry in rd.getVar('SRC_URI').split():
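
How the new params argument maps onto SRC_URI entries, using the same joining expression as the hunk above; the file names and parameters are hypothetical.

    import os

    srcfiles = {"0001-fix-build.patch": None, "defaults.cfg": None}
    params = [None, {"subdir": "files"}]

    for i, newfile in enumerate(srcfiles):
        srcurientry = 'file://%s' % os.path.basename(newfile)
        if params and params[i]:
            srcurientry = '%s;%s' % (srcurientry,
                                     ';'.join('%s=%s' % (k, v) for k, v in params[i].items()))
        print(srcurientry)
    # file://0001-fix-build.patch
    # file://defaults.cfg;subdir=files
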
diff --git a/poky/meta/lib/oe/sbom.py b/poky/meta/lib/oe/sbom.py
index 3372f13a9d..52bf51440e 100644
--- a/poky/meta/lib/oe/sbom.py
+++ b/poky/meta/lib/oe/sbom.py
@@ -32,7 +32,7 @@ def get_sdk_spdxid(sdk):
return "SPDXRef-SDK-%s" % sdk
-def write_doc(d, spdx_doc, subdir, spdx_deploy=None):
+def write_doc(d, spdx_doc, subdir, spdx_deploy=None, indent=None):
from pathlib import Path
if spdx_deploy is None:
@@ -41,7 +41,7 @@ def write_doc(d, spdx_doc, subdir, spdx_deploy=None):
     dest = spdx_deploy / subdir / (spdx_doc.name + ".spdx.json")
     dest.parent.mkdir(exist_ok=True, parents=True)
     with dest.open("wb") as f:
-        doc_sha1 = spdx_doc.to_json(f, sort_keys=True)
+        doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent)
 
     l = spdx_deploy / "by-namespace" / spdx_doc.documentNamespace.replace("/", "_")
     l.parent.mkdir(exist_ok=True, parents=True)
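
The new indent argument is presumably just handed on to the underlying JSON serializer, so callers can trade file size for readability; the effect is the same as with plain json.dumps():

    import json

    doc = {"spdxVersion": "SPDX-2.2", "name": "example-doc"}   # stand-in document
    print(json.dumps(doc, sort_keys=True))             # compact single line (indent=None)
    print(json.dumps(doc, sort_keys=True, indent=2))   # pretty-printed, as write_doc(..., indent=2)
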
diff --git a/poky/meta/lib/oe/utils.py b/poky/meta/lib/oe/utils.py
index 46fc76c261..1ee947d584 100644
--- a/poky/meta/lib/oe/utils.py
+++ b/poky/meta/lib/oe/utils.py
@@ -473,70 +473,6 @@ def get_multilib_datastore(variant, d):
         localdata.setVar("MLPREFIX", "")
     return localdata
 
-#
-# Python 2.7 doesn't have threaded pools (just multiprocessing)
-# so implement a version here
-#
-
-from queue import Queue
-from threading import Thread
-
-class ThreadedWorker(Thread):
-    """Thread executing tasks from a given tasks queue"""
-    def __init__(self, tasks, worker_init, worker_end, name=None):
-        Thread.__init__(self, name=name)
-        self.tasks = tasks
-        self.daemon = True
-
-        self.worker_init = worker_init
-        self.worker_end = worker_end
-
-    def run(self):
-        from queue import Empty
-
-        if self.worker_init is not None:
-            self.worker_init(self)
-
-        while True:
-            try:
-                func, args, kargs = self.tasks.get(block=False)
-            except Empty:
-                if self.worker_end is not None:
-                    self.worker_end(self)
-                break
-
-            try:
-                func(self, *args, **kargs)
-            except Exception as e:
-                # Eat all exceptions
-                bb.mainlogger.debug("Worker task raised %s" % e, exc_info=e)
-            finally:
-                self.tasks.task_done()
-
-class ThreadedPool:
-    """Pool of threads consuming tasks from a queue"""
-    def __init__(self, num_workers, num_tasks, worker_init=None, worker_end=None, name="ThreadedPool-"):
-        self.tasks = Queue(num_tasks)
-        self.workers = []
-
-        for i in range(num_workers):
-            worker = ThreadedWorker(self.tasks, worker_init, worker_end, name=name + str(i))
-            self.workers.append(worker)
-
-    def start(self):
-        for worker in self.workers:
-            worker.start()
-
-    def add_task(self, func, *args, **kargs):
-        """Add a task to the queue"""
-        self.tasks.put((func, args, kargs))
-
-    def wait_completion(self):
-        """Wait for completion of all the tasks in the queue"""
-        self.tasks.join()
-        for worker in self.workers:
-            worker.join()
-
 class ImageQAFailed(Exception):
     def __init__(self, description, name=None, logfile=None):
         self.description = description
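
The removed helpers predate concurrent.futures in the standard library (the header comment even refers to Python 2.7); a rough present-day equivalent, sketched without the worker_init/worker_end hooks, would be:

    from concurrent.futures import ThreadPoolExecutor

    def process(name):
        return "processed %s" % name

    with ThreadPoolExecutor(max_workers=4) as pool:
        results = list(pool.map(process, ["a", "b", "c"]))
    print(results)   # ['processed a', 'processed b', 'processed c']
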