author    jmbills <42755197+jmbills@users.noreply.github.com>  2019-10-25 19:18:16 +0300
committer GitHub <noreply@github.com>  2019-10-25 19:18:16 +0300
commit    0dbb60593ebb5a62190c0e6cff7f1770493303a2 (patch)
tree      0df2ce67404dbca3ddc4ee063dbfd9ae455be682 /poky/scripts/lib
parent    34a3942845ac3264ce27c648ae5486d302c3e6d8 (diff)
parent    cc9cea46d74d280de03c713c8b555153fd811f09 (diff)
download  openbmc-0dbb60593ebb5a62190c0e6cff7f1770493303a2.tar.xz
Merge branch 'intel' into intel2
Diffstat (limited to 'poky/scripts/lib')
-rw-r--r--  poky/scripts/lib/buildstats.py                              |   6
-rw-r--r--  poky/scripts/lib/checklayer/__init__.py                     |   2
-rw-r--r--  poky/scripts/lib/checklayer/cases/common.py                 |   2
-rw-r--r--  poky/scripts/lib/devtool/build.py                           |  13
-rw-r--r--  poky/scripts/lib/devtool/menuconfig.py                      |  79
-rw-r--r--  poky/scripts/lib/devtool/standard.py                        | 180
-rw-r--r--  poky/scripts/lib/devtool/upgrade.py                         |  29
-rw-r--r--  poky/scripts/lib/recipetool/create.py                       |   5
-rw-r--r--  poky/scripts/lib/recipetool/create_buildsys_python.py       |   9
-rw-r--r--  poky/scripts/lib/resulttool/log.py                          |  70
-rw-r--r--  poky/scripts/lib/resulttool/report.py                       |  60
-rw-r--r--  poky/scripts/lib/resulttool/resultutils.py                  |  49
-rw-r--r--  poky/scripts/lib/scriptutils.py                             |  43
-rw-r--r--  poky/scripts/lib/wic/__init__.py                            |   2
-rw-r--r--  poky/scripts/lib/wic/canned-wks/qemuriscv.wks               |   3
-rw-r--r--  poky/scripts/lib/wic/ksparser.py                            |   4
-rw-r--r--  poky/scripts/lib/wic/partition.py                           |  26
-rw-r--r--  poky/scripts/lib/wic/pluginbase.py                          |   2
-rw-r--r--  poky/scripts/lib/wic/plugins/imager/direct.py               |  74
-rw-r--r--  poky/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py  | 213
-rw-r--r--  poky/scripts/lib/wic/plugins/source/bootimg-efi.py          |  18
-rw-r--r--  poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py       |   6
-rw-r--r--  poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py   |  19
23 files changed, 758 insertions, 156 deletions
diff --git a/poky/scripts/lib/buildstats.py b/poky/scripts/lib/buildstats.py
index 1adab06ed..c69b5bf4d 100644
--- a/poky/scripts/lib/buildstats.py
+++ b/poky/scripts/lib/buildstats.py
@@ -261,13 +261,17 @@ class BuildStats(dict):
self[pkg].aggregate(data)
-def diff_buildstats(bs1, bs2, stat_attr, min_val=None, min_absdiff=None):
+def diff_buildstats(bs1, bs2, stat_attr, min_val=None, min_absdiff=None, only_tasks=[]):
"""Compare the tasks of two buildstats"""
tasks_diff = []
pkgs = set(bs1.keys()).union(set(bs2.keys()))
for pkg in pkgs:
tasks1 = bs1[pkg].tasks if pkg in bs1 else {}
tasks2 = bs2[pkg].tasks if pkg in bs2 else {}
+ if only_tasks:
+ tasks1 = {k: v for k, v in tasks1.items() if k in only_tasks}
+ tasks2 = {k: v for k, v in tasks2.items() if k in only_tasks}
+
if not tasks1:
pkg_op = '+'
elif not tasks2:
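Note: the new only_tasks argument lets callers restrict the comparison to a named subset of tasks before the per-package diff is computed. A minimal standalone sketch of that filtering step (the helper name and task data below are illustrative, not part of buildstats.py):

    def filter_tasks(tasks, only_tasks=None):
        """Limit a {task_name: task_data} dict to an allowlist; an empty allowlist keeps everything."""
        if not only_tasks:
            return tasks
        return {name: data for name, data in tasks.items() if name in only_tasks}

    tasks1 = {'do_compile': {'elapsed_time': 12.0}, 'do_install': {'elapsed_time': 3.0}}
    print(filter_tasks(tasks1, ['do_compile']))   # only do_compile survives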
diff --git a/poky/scripts/lib/checklayer/__init__.py b/poky/scripts/lib/checklayer/__init__.py
index 8244cf072..5aeec2f00 100644
--- a/poky/scripts/lib/checklayer/__init__.py
+++ b/poky/scripts/lib/checklayer/__init__.py
@@ -245,7 +245,7 @@ def get_signatures(builddir, failsafe=False, machine=None):
sigs = {}
tune2tasks = {}
- cmd = ''
+ cmd = 'BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
if machine:
cmd += 'MACHINE=%s ' % machine
cmd += 'bitbake '
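Note: get_signatures() now pins the signature handler to OEBasicHash and whitelists BB_SIGNATURE_HANDLER through BB_ENV_EXTRAWHITE so the setting actually reaches the bitbake environment. A sketch of how that command prefix is assembled (the trailing bitbake target is illustrative; the real function appends its own arguments):

    def signature_command(machine=None, target='world'):
        """Assemble the environment-prefixed bitbake call used to dump task signatures."""
        cmd = ('BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE BB_SIGNATURE_HANDLER" '
               'BB_SIGNATURE_HANDLER="OEBasicHash" ')
        if machine:
            cmd += 'MACHINE=%s ' % machine
        cmd += 'bitbake %s' % target
        return cmd

    print(signature_command(machine='qemux86-64'))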
diff --git a/poky/scripts/lib/checklayer/cases/common.py b/poky/scripts/lib/checklayer/cases/common.py
index 8ffe028b3..b82304e36 100644
--- a/poky/scripts/lib/checklayer/cases/common.py
+++ b/poky/scripts/lib/checklayer/cases/common.py
@@ -12,7 +12,7 @@ from checklayer.case import OECheckLayerTestCase
class CommonCheckLayer(OECheckLayerTestCase):
def test_readme(self):
# The top-level README file may have a suffix (like README.rst or README.txt).
- readme_files = glob.glob(os.path.join(self.tc.layer['path'], 'README*'))
+ readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*'))
self.assertTrue(len(readme_files) > 0,
msg="Layer doesn't contains README file.")
diff --git a/poky/scripts/lib/devtool/build.py b/poky/scripts/lib/devtool/build.py
index 7543398d9..935ffab46 100644
--- a/poky/scripts/lib/devtool/build.py
+++ b/poky/scripts/lib/devtool/build.py
@@ -11,7 +11,8 @@ import bb
import logging
import argparse
import tempfile
-from devtool import exec_build_env_command, check_workspace_recipe, DevtoolError
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
+from devtool import parse_recipe
logger = logging.getLogger('devtool')
@@ -43,12 +44,22 @@ def _get_build_tasks(config):
def build(args, config, basepath, workspace):
"""Entry point for the devtool 'build' subcommand"""
workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
+ if not rd:
+ return 1
+ deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
+ finally:
+ tinfoil.shutdown()
if args.clean:
# use clean instead of cleansstate to avoid messing things up in eSDK
build_tasks = ['do_clean']
else:
build_tasks = _get_build_tasks(config)
+ if deploytask:
+ build_tasks.append('do_deploy')
bbappend = workspace[workspacepn]['bbappend']
if args.disable_parallel_make:
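Note: build() now parses the recipe up front so it can tell whether do_deploy appears in the recipe's task list (__BBTASKS) and, if so, append it to the normal build tasks. A condensed sketch of that decision, with rd standing for the tinfoil recipe datastore obtained above:

    def assemble_build_tasks(rd, base_tasks, clean=False):
        """Return the bitbake tasks to run; include do_deploy only when the recipe defines it."""
        if clean:
            # clean rather than cleansstate, to avoid messing things up in the eSDK
            return ['do_clean']
        tasks = list(base_tasks)
        if 'do_deploy' in (rd.getVar('__BBTASKS') or []):
            tasks.append('do_deploy')
        return tasks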
diff --git a/poky/scripts/lib/devtool/menuconfig.py b/poky/scripts/lib/devtool/menuconfig.py
new file mode 100644
index 000000000..95384c533
--- /dev/null
+++ b/poky/scripts/lib/devtool/menuconfig.py
@@ -0,0 +1,79 @@
+# OpenEmbedded Development tool - menuconfig command plugin
+#
+# Copyright (C) 2018 Xilinx
+# Written by: Chandana Kalluri <ckalluri@xilinx.com>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""Devtool menuconfig plugin"""
+
+import os
+import bb
+import logging
+import argparse
+import re
+import glob
+from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
+from devtool import check_workspace_recipe
+logger = logging.getLogger('devtool')
+
+def menuconfig(args, config, basepath, workspace):
+ """Entry point for the devtool 'menuconfig' subcommand"""
+
+ rd = ""
+ kconfigpath = ""
+ pn_src = ""
+ localfilesdir = ""
+ workspace_dir = ""
+ tinfoil = setup_tinfoil(basepath=basepath)
+ try:
+ rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
+ if not rd:
+ return 1
+
+ check_workspace_recipe(workspace, args.component)
+ pn = rd.getVar('PN', True)
+
+ if not rd.getVarFlag('do_menuconfig','task'):
+ raise DevtoolError("This recipe does not support menuconfig option")
+
+ workspace_dir = os.path.join(config.workspace_path,'sources')
+ kconfigpath = rd.getVar('B')
+ pn_src = os.path.join(workspace_dir,pn)
+
+ # add check to see if oe_local_files exists or not
+ localfilesdir = os.path.join(pn_src,'oe-local-files')
+ if not os.path.exists(localfilesdir):
+ bb.utils.mkdirhier(localfilesdir)
+ # Add gitignore to ensure source tree is clean
+ gitignorefile = os.path.join(localfilesdir,'.gitignore')
+ with open(gitignorefile, 'w') as f:
+ f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
+ f.write('*\n')
+
+ finally:
+ tinfoil.shutdown()
+
+ logger.info('Launching menuconfig')
+ exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
+ fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
+ res = standard._create_kconfig_diff(pn_src,rd,fragment)
+
+ return 0
+
+def register_commands(subparsers, context):
+ """register devtool subcommands from this plugin"""
+ parser_menuconfig = subparsers.add_parser('menuconfig',help='Alter build-time configuration for a recipe', description='Launches the make menuconfig command (for recipes where do_menuconfig is available), allowing users to make changes to the build-time configuration. Creates a config fragment corresponding to changes made.', group='advanced')
+ parser_menuconfig.add_argument('component', help='component to alter config')
+ parser_menuconfig.set_defaults(func=menuconfig,fixed_setup=context.fixed_setup)
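Note: devtool loads this file as a plugin and calls register_commands() with its shared argparse subparsers; the handler then parses the recipe, checks that a do_menuconfig task exists, runs 'bitbake -c menuconfig', and captures the resulting kconfig delta as devtool-fragment.cfg. A self-contained sketch of just the registration/dispatch pattern (the stub handler is illustrative; the real one needs the tinfoil setup shown above):

    import argparse

    def menuconfig_stub(args, config, basepath, workspace):
        # Stand-in for the real handler, which runs 'bitbake -c menuconfig <component>'
        print("would run: bitbake -c menuconfig %s" % args.component)
        return 0

    def register_commands(subparsers, context):
        parser = subparsers.add_parser('menuconfig',
                                       help='Alter build-time configuration for a recipe')
        parser.add_argument('component', help='component to alter config')
        parser.set_defaults(func=menuconfig_stub)

    top = argparse.ArgumentParser(prog='devtool')
    register_commands(top.add_subparsers(dest='subcommand'), context=None)
    args = top.parse_args(['menuconfig', 'linux-yocto'])
    args.func(args, None, None, None)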
diff --git a/poky/scripts/lib/devtool/standard.py b/poky/scripts/lib/devtool/standard.py
index aca74b1fc..60c9a046f 100644
--- a/poky/scripts/lib/devtool/standard.py
+++ b/poky/scripts/lib/devtool/standard.py
@@ -461,11 +461,37 @@ def sync(args, config, basepath, workspace):
finally:
tinfoil.shutdown()
+def symlink_oelocal_files_srctree(rd,srctree):
+ import oe.patch
+ if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
+ # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
+ # (otherwise the recipe won't build as expected)
+ local_files_dir = os.path.join(srctree, 'oe-local-files')
+ addfiles = []
+ for root, _, files in os.walk(local_files_dir):
+ relpth = os.path.relpath(root, local_files_dir)
+ if relpth != '.':
+ bb.utils.mkdirhier(os.path.join(srctree, relpth))
+ for fn in files:
+ if fn == '.gitignore':
+ continue
+ destpth = os.path.join(srctree, relpth, fn)
+ if os.path.exists(destpth):
+ os.unlink(destpth)
+ os.symlink('oe-local-files/%s' % fn, destpth)
+ addfiles.append(os.path.join(relpth, fn))
+ if addfiles:
+ bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
+ useroptions = []
+ oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
+ bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
+
def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
"""Extract sources of a recipe"""
import oe.recipeutils
import oe.patch
+ import oe.path
pn = d.getVar('PN')
@@ -562,7 +588,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
with open(preservestampfile, 'w') as f:
f.write(d.getVar('STAMP'))
try:
- if bb.data.inherits_class('kernel-yocto', d):
+ if is_kernel_yocto:
# We need to generate the kernel config
task = 'do_configure'
else:
@@ -589,6 +615,23 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))
+ # Check if work-shared is empty, if yes
+ # find source and copy to work-shared
+ if is_kernel_yocto:
+ workshareddir = d.getVar('STAGING_KERNEL_DIR')
+ staging_kerVer = get_staging_kver(workshareddir)
+ kernelVersion = d.getVar('LINUX_VERSION')
+
+ # handle dangling symbolic link in work-shared:
+ if os.path.islink(workshareddir):
+ os.unlink(workshareddir)
+
+ if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
+ shutil.rmtree(workshareddir)
+ oe.path.copyhardlinktree(srcsubdir,workshareddir)
+ elif not os.path.exists(workshareddir):
+ oe.path.copyhardlinktree(srcsubdir,workshareddir)
+
tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
srctree_localdir = os.path.join(srctree, 'oe-local-files')
@@ -617,29 +660,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
shutil.move(tempdir_localdir, srcsubdir)
shutil.move(srcsubdir, srctree)
-
- if os.path.abspath(d.getVar('S')) == os.path.abspath(d.getVar('WORKDIR')):
- # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
- # (otherwise the recipe won't build as expected)
- local_files_dir = os.path.join(srctree, 'oe-local-files')
- addfiles = []
- for root, _, files in os.walk(local_files_dir):
- relpth = os.path.relpath(root, local_files_dir)
- if relpth != '.':
- bb.utils.mkdirhier(os.path.join(srctree, relpth))
- for fn in files:
- if fn == '.gitignore':
- continue
- destpth = os.path.join(srctree, relpth, fn)
- if os.path.exists(destpth):
- os.unlink(destpth)
- os.symlink('oe-local-files/%s' % fn, destpth)
- addfiles.append(os.path.join(relpth, fn))
- if addfiles:
- bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
- useroptions = []
- oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d)
- bb.process.run('git %s commit -a -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
+ symlink_oelocal_files_srctree(d,srctree)
if is_kernel_yocto:
logger.info('Copying kernel config to srctree')
@@ -707,11 +728,31 @@ def _check_preserve(config, recipename):
tf.write(line)
os.rename(newfile, origfile)
+def get_staging_kver(srcdir):
+ # Kernel version from work-shared
+ kerver = []
+ staging_kerVer=""
+ if os.path.exists(srcdir) and os.listdir(srcdir):
+ with open(os.path.join(srcdir,"Makefile")) as f:
+ version = [next(f) for x in range(5)][1:4]
+ for word in version:
+ kerver.append(word.split('= ')[1].split('\n')[0])
+ staging_kerVer = ".".join(kerver)
+ return staging_kerVer
+
+def get_staging_kbranch(srcdir):
+ staging_kbranch = ""
+ if os.path.exists(srcdir) and os.listdir(srcdir):
+ (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir)
+ staging_kbranch = "".join(branch.split('\n')[0])
+ return staging_kbranch
+
def modify(args, config, basepath, workspace):
"""Entry point for the devtool 'modify' subcommand"""
import bb
import oe.recipeutils
import oe.patch
+ import oe.path
if args.recipename in workspace:
raise DevtoolError("recipe %s is already in your workspace" %
@@ -753,6 +794,59 @@ def modify(args, config, basepath, workspace):
initial_rev = None
commits = []
check_commits = False
+
+ if bb.data.inherits_class('kernel-yocto', rd):
+ # Current set kernel version
+ kernelVersion = rd.getVar('LINUX_VERSION')
+ srcdir = rd.getVar('STAGING_KERNEL_DIR')
+ kbranch = rd.getVar('KBRANCH')
+
+ staging_kerVer = get_staging_kver(srcdir)
+ staging_kbranch = get_staging_kbranch(srcdir)
+ if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
+ oe.path.copyhardlinktree(srcdir,srctree)
+ workdir = rd.getVar('WORKDIR')
+ srcsubdir = rd.getVar('S')
+ localfilesdir = os.path.join(srctree,'oe-local-files')
+ # Move local source files into separate subdir
+ recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)]
+ local_files = oe.recipeutils.get_recipe_local_files(rd)
+
+ for key in local_files.copy():
+ if key.endswith('scc'):
+ sccfile = open(local_files[key], 'r')
+ for l in sccfile:
+ line = l.split()
+ if line and line[0] in ('kconf', 'patch'):
+ cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
+ if not cfg in local_files.values():
+ local_files[line[-1]] = cfg
+ shutil.copy2(cfg, workdir)
+ sccfile.close()
+
+ # Ignore local files with subdir={BP}
+ srcabspath = os.path.abspath(srcsubdir)
+ local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))]
+ if local_files:
+ for fname in local_files:
+ _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname))
+ with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f:
+ f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n')
+
+ symlink_oelocal_files_srctree(rd,srctree)
+
+ task = 'do_configure'
+ res = tinfoil.build_targets(pn, task, handle_events=True)
+
+ # Copy .config to workspace
+ kconfpath = rd.getVar('B')
+ logger.info('Copying kernel config to workspace')
+ shutil.copy2(os.path.join(kconfpath, '.config'),srctree)
+
+ # Set this to true, we still need to get initial_rev
+ # by parsing the git repo
+ args.no_extract = True
+
if not args.no_extract:
initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
if not initial_rev:
@@ -844,6 +938,11 @@ def modify(args, config, basepath, workspace):
' cp ${B}/.config ${S}/.config.baseline\n'
' ln -sfT ${B}/.config ${S}/.config.new\n'
'}\n')
+ if rd.getVarFlag('do_menuconfig','task'):
+ f.write('\ndo_configure_append() {\n'
+ ' cp ${B}/.config ${S}/.config.baseline\n'
+ ' ln -sfT ${B}/.config ${S}/.config.new\n'
+ '}\n')
if initial_rev:
f.write('\n# initial_rev: %s\n' % initial_rev)
for commit in commits:
@@ -1520,17 +1619,17 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
patches_dir, changed_revs)
logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
if filter_patches:
- new_p = {}
- upd_p = {k:v for k,v in upd_p.items() if k in filter_patches}
+ new_p = OrderedDict()
+ upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches)
remove_files = [f for f in remove_files if f in filter_patches]
updatefiles = False
updaterecipe = False
destpath = None
srcuri = (rd.getVar('SRC_URI', False) or '').split()
if appendlayerdir:
- files = dict((os.path.join(local_files_dir, key), val) for
+ files = OrderedDict((os.path.join(local_files_dir, key), val) for
key, val in list(upd_f.items()) + list(new_f.items()))
- files.update(dict((os.path.join(patches_dir, key), val) for
+ files.update(OrderedDict((os.path.join(patches_dir, key), val) for
key, val in list(upd_p.items()) + list(new_p.items())))
if files or remove_files:
removevalues = None
@@ -1866,13 +1965,27 @@ def reset(args, config, basepath, workspace):
def _get_layer(layername, d):
"""Determine the base layer path for the specified layer name/path"""
layerdirs = d.getVar('BBLAYERS').split()
- layers = {os.path.basename(p): p for p in layerdirs}
+ layers = {} # {basename: layer_paths}
+ for p in layerdirs:
+ bn = os.path.basename(p)
+ if bn not in layers:
+ layers[bn] = [p]
+ else:
+ layers[bn].append(p)
# Provide some shortcuts
if layername.lower() in ['oe-core', 'openembedded-core']:
- layerdir = layers.get('meta', None)
+ layername = 'meta'
+ layer_paths = layers.get(layername, None)
+ if not layer_paths:
+ return os.path.abspath(layername)
+ elif len(layer_paths) == 1:
+ return os.path.abspath(layer_paths[0])
else:
- layerdir = layers.get(layername, None)
- return os.path.abspath(layerdir or layername)
+ # multiple layers having the same base name
+ logger.warning("Multiple layers have the same base name '%s', use the first one '%s'." % (layername, layer_paths[0]))
+ logger.warning("Consider using path instead of base name to specify layer:\n\t\t%s" % '\n\t\t'.join(layer_paths))
+ return os.path.abspath(layer_paths[0])
+
def finish(args, config, basepath, workspace):
"""Entry point for the devtool 'finish' subcommand"""
@@ -1895,7 +2008,7 @@ def finish(args, config, basepath, workspace):
else:
raise DevtoolError('Source tree is not clean:\n\n%s\nEnsure you have committed your changes or use -f/--force if you are sure there\'s nothing that needs to be committed' % dirty)
- no_clean = False
+ no_clean = args.no_clean
tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
try:
rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -2169,6 +2282,7 @@ def register_commands(subparsers, context):
parser_finish.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
parser_finish.add_argument('--initial-rev', help='Override starting revision for patches')
parser_finish.add_argument('--force', '-f', action="store_true", help='Force continuing even if there are uncommitted changes in the source tree repository')
+ parser_finish.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
parser_finish.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
parser_finish.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
parser_finish.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
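Note: get_staging_kver() reads the kernel version from the first lines of the staged kernel's Makefile (VERSION, PATCHLEVEL, SUBLEVEL), and modify() compares it plus the git branch against LINUX_VERSION/KBRANCH to decide whether the work-shared tree can be hard-linked into the srctree instead of re-extracting. A slightly more defensive sketch of that Makefile parsing (illustrative, not the devtool helper itself):

    import os
    import re

    def staging_kernel_version(srcdir):
        """Return 'VERSION.PATCHLEVEL.SUBLEVEL' parsed from <srcdir>/Makefile, or '' if unknown."""
        makefile = os.path.join(srcdir, 'Makefile')
        if not os.path.isfile(makefile):
            return ''
        fields = {}
        with open(makefile) as f:
            for line in f:
                m = re.match(r'(VERSION|PATCHLEVEL|SUBLEVEL)\s*=\s*(\S+)', line)
                if m:
                    fields[m.group(1)] = m.group(2)
                if len(fields) == 3:
                    break
        if len(fields) < 3:
            return ''
        return '.'.join(fields[k] for k in ('VERSION', 'PATCHLEVEL', 'SUBLEVEL'))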
diff --git a/poky/scripts/lib/devtool/upgrade.py b/poky/scripts/lib/devtool/upgrade.py
index 62ec2f94c..18c5b66a2 100644
--- a/poky/scripts/lib/devtool/upgrade.py
+++ b/poky/scripts/lib/devtool/upgrade.py
@@ -122,18 +122,22 @@ def _cleanup_on_error(rf, srctree):
rfp = os.path.split(rf)[0] # recipe folder
rfpp = os.path.split(rfp)[0] # recipes folder
if os.path.exists(rfp):
- shutil.rmtree(b)
+ shutil.rmtree(rfp)
if not len(os.listdir(rfpp)):
os.rmdir(rfpp)
srctree = os.path.abspath(srctree)
if os.path.exists(srctree):
shutil.rmtree(srctree)
-def _upgrade_error(e, rf, srctree):
- if rf:
- cleanup_on_error(rf, srctree)
+def _upgrade_error(e, rf, srctree, keep_failure=False, extramsg=None):
+ if rf and not keep_failure:
+ _cleanup_on_error(rf, srctree)
logger.error(e)
- raise DevtoolError(e)
+ if extramsg:
+ logger.error(extramsg)
+ if keep_failure:
+ logger.info('Preserving failed upgrade files (--keep-failure)')
+ sys.exit(1)
def _get_uri(rd):
srcuris = rd.getVar('SRC_URI').split()
@@ -277,6 +281,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
logger.info('Preserving temporary directory %s' % tmpsrctree)
else:
shutil.rmtree(tmpsrctree)
+ shutil.rmtree(tmpdir)
return (rev, md5, sha256, srcbranch, srcsubdir_rel)
@@ -299,7 +304,7 @@ def _add_license_diff_to_recipe(path, diff):
f.write("\n#\n\n".encode())
f.write(orig_content)
-def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses):
+def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
"""Creates the new recipe under workspace"""
bpn = rd.getVar('BPN')
@@ -416,7 +421,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
newvalues["LIC_FILES_CHKSUM"] = newlicchksum
_add_license_diff_to_recipe(fullpath, license_diff)
- rd = tinfoil.parse_recipe_file(fullpath, False)
+ try:
+ rd = tinfoil.parse_recipe_file(fullpath, False)
+ except bb.tinfoil.TinfoilCommandFailed as e:
+ _upgrade_error(e, fullpath, srctree, keep_failure, 'Parsing of upgraded recipe failed')
oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
return fullpath, copied
@@ -548,11 +556,11 @@ def upgrade(args, config, basepath, workspace):
tinfoil, rd)
new_licenses = _extract_licenses(srctree, rd.getVar('LIC_FILES_CHKSUM'))
license_diff = _generate_license_diff(old_licenses, new_licenses)
- rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses)
+ rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
except bb.process.CmdError as e:
- _upgrade_error(e, rf, srctree)
+ _upgrade_error(e, rf, srctree, args.keep_failure)
except DevtoolError as e:
- _upgrade_error(e, rf, srctree)
+ _upgrade_error(e, rf, srctree, args.keep_failure)
standard._add_md5(config, pn, os.path.dirname(rf))
af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2,
@@ -623,6 +631,7 @@ def register_commands(subparsers, context):
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
+ parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)
parser_latest_version = subparsers.add_parser('latest-version', help='Report the latest version of an existing recipe',
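Note: _upgrade_error() gains a keep_failure flag (driven by the new --keep-failure option) so a failed upgrade can leave the partially written recipe and source tree in place for inspection, and it now logs and exits instead of re-raising. A condensed sketch of that error path, with a plain rmtree standing in for _cleanup_on_error():

    import logging
    import shutil
    import sys

    logger = logging.getLogger('devtool')

    def upgrade_error(err, recipe_file, srctree, keep_failure=False, extramsg=None):
        """Report a failed upgrade; clean up unless the user asked to keep the failure."""
        if recipe_file and not keep_failure:
            shutil.rmtree(srctree, ignore_errors=True)   # stand-in for _cleanup_on_error()
        logger.error(err)
        if extramsg:
            logger.error(extramsg)
        if keep_failure:
            logger.info('Preserving failed upgrade files (--keep-failure)')
        sys.exit(1)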
diff --git a/poky/scripts/lib/recipetool/create.py b/poky/scripts/lib/recipetool/create.py
index 98277f74c..1fb6b5553 100644
--- a/poky/scripts/lib/recipetool/create.py
+++ b/poky/scripts/lib/recipetool/create.py
@@ -60,7 +60,9 @@ class RecipeHandler(object):
if RecipeHandler.recipelibmap:
return
# First build up library->package mapping
- shlib_providers = oe.package.read_shlib_providers(d)
+ d2 = bb.data.createCopy(d)
+ d2.setVar("WORKDIR_PKGDATA", "${PKGDATA_DIR}")
+ shlib_providers = oe.package.read_shlib_providers(d2)
libdir = d.getVar('libdir')
base_libdir = d.getVar('base_libdir')
libpaths = list(set([base_libdir, libdir]))
@@ -1053,6 +1055,7 @@ def get_license_md5sums(d, static_only=False):
md5sums['3b83ef96387f14655fc854ddc3c6bd57'] = 'Apache-2.0'
md5sums['385c55653886acac3821999a3ccd17b3'] = 'Artistic-1.0 | GPL-2.0' # some perl modules
md5sums['54c7042be62e169199200bc6477f04d1'] = 'BSD-3-Clause'
+ md5sums['bfe1f75d606912a4111c90743d6c7325'] = 'MPL-1.1'
return md5sums
def crunch_license(licfile):
diff --git a/poky/scripts/lib/recipetool/create_buildsys_python.py b/poky/scripts/lib/recipetool/create_buildsys_python.py
index ac9bc9237..adfa37795 100644
--- a/poky/scripts/lib/recipetool/create_buildsys_python.py
+++ b/poky/scripts/lib/recipetool/create_buildsys_python.py
@@ -154,8 +154,13 @@ class PythonRecipeHandler(RecipeHandler):
if 'buildsystem' in handled:
return False
- if not RecipeHandler.checkfiles(srctree, ['setup.py']):
- return
+ # Check for non-zero size setup.py files
+ setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
+ for fn in setupfiles:
+ if os.path.getsize(fn):
+ break
+ else:
+ return False
# setup.py is always parsed to get at certain required information, such as
# distutils vs setuptools
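Note: the handler previously accepted any setup.py; it now requires at least one non-empty setup.py, using Python's for/else so the reject branch only runs when no file breaks out of the loop. Standalone sketch of the same check:

    import os

    def has_nonempty_setup_py(paths):
        """True if any of the given setup.py paths exists with a non-zero size."""
        for path in paths:
            if os.path.isfile(path) and os.path.getsize(path):
                break
        else:
            return False
        return True

    print(has_nonempty_setup_py(['/nonexistent/setup.py']))   # False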
diff --git a/poky/scripts/lib/resulttool/log.py b/poky/scripts/lib/resulttool/log.py
index 25c339671..f1bfd9950 100644
--- a/poky/scripts/lib/resulttool/log.py
+++ b/poky/scripts/lib/resulttool/log.py
@@ -8,14 +8,23 @@ import os
import resulttool.resultutils as resultutils
def show_ptest(result, ptest, logger):
- if 'ptestresult.sections' in result:
- if ptest in result['ptestresult.sections'] and 'log' in result['ptestresult.sections'][ptest]:
- print(result['ptestresult.sections'][ptest]['log'])
- return 0
+ logdata = resultutils.ptestresult_get_log(result, ptest)
+ if logdata is not None:
+ print(logdata)
+ return 0
- print("ptest '%s' not found" % ptest)
+ print("ptest '%s' log not found" % ptest)
return 1
+def show_reproducible(result, reproducible, logger):
+ try:
+ print(result['reproducible'][reproducible]['diffoscope.text'])
+ return 0
+
+ except KeyError:
+ print("reproducible '%s' not found" % reproducible)
+ return 1
+
def log(args, logger):
results = resultutils.load_resultsdata(args.source)
@@ -25,32 +34,43 @@ def log(args, logger):
return 1
for _, run_name, _, r in resultutils.test_run_results(results):
- if args.dump_ptest:
- if 'ptestresult.sections' in r:
- for name, ptest in r['ptestresult.sections'].items():
- if 'log' in ptest:
- dest_dir = args.dump_ptest
- if args.prepend_run:
- dest_dir = os.path.join(dest_dir, run_name)
+ if args.dump_ptest and 'ptestresult.sections' in r:
+ for name, ptest in r['ptestresult.sections'].items():
+ logdata = resultutils.ptestresult_get_log(r, name)
+ if logdata is not None:
+ dest_dir = args.dump_ptest
+ if args.prepend_run:
+ dest_dir = os.path.join(dest_dir, run_name)
- os.makedirs(dest_dir, exist_ok=True)
+ os.makedirs(dest_dir, exist_ok=True)
+ dest = os.path.join(dest_dir, '%s.log' % name)
+ print(dest)
+ with open(dest, 'w') as f:
+ f.write(logdata)
- dest = os.path.join(dest_dir, '%s.log' % name)
- print(dest)
- with open(dest, 'w') as f:
- f.write(ptest['log'])
+ if args.raw_ptest:
+ rawlog = resultutils.ptestresult_get_rawlogs(r)
+ if rawlog is not None:
+ print(rawlog)
+ else:
+ print('Raw ptest logs not found')
+ return 1
- if args.raw:
- if 'ptestresult.rawlogs' in r:
- print(r['ptestresult.rawlogs']['log'])
+ if args.raw_reproducible:
+ if 'reproducible.rawlogs' in r:
+ print(r['reproducible.rawlogs']['log'])
else:
- print('Raw logs not found')
+ print('Raw reproducible logs not found')
return 1
for ptest in args.ptest:
if not show_ptest(r, ptest, logger):
return 1
+ for reproducible in args.reproducible:
+ if not show_reproducible(r, reproducible, logger):
+ return 1
+
def register_commands(subparsers):
"""Register subcommands from this plugin"""
parser = subparsers.add_parser('log', help='show logs',
@@ -63,9 +83,15 @@ def register_commands(subparsers):
help='show logs for a ptest')
parser.add_argument('--dump-ptest', metavar='DIR',
help='Dump all ptest log files to the specified directory.')
+ parser.add_argument('--reproducible', action='append', default=[],
+ help='show logs for a reproducible test')
parser.add_argument('--prepend-run', action='store_true',
help='''Dump ptest results to a subdirectory named after the test run when using --dump-ptest.
Required if more than one test run is present in the result file''')
parser.add_argument('--raw', action='store_true',
- help='show raw logs')
+ help='show raw (ptest) logs. Deprecated. Alias for "--raw-ptest"', dest='raw_ptest')
+ parser.add_argument('--raw-ptest', action='store_true',
+ help='show raw ptest log')
+ parser.add_argument('--raw-reproducible', action='store_true',
+ help='show raw reproducible build logs')
diff --git a/poky/scripts/lib/resulttool/report.py b/poky/scripts/lib/resulttool/report.py
index a48c59f63..883b52517 100644
--- a/poky/scripts/lib/resulttool/report.py
+++ b/poky/scripts/lib/resulttool/report.py
@@ -19,9 +19,9 @@ class ResultsTextReport(object):
self.ptests = {}
self.ltptests = {}
self.ltpposixtests = {}
- self.result_types = {'passed': ['PASSED', 'passed'],
- 'failed': ['FAILED', 'failed', 'ERROR', 'error', 'UNKNOWN'],
- 'skipped': ['SKIPPED', 'skipped']}
+ self.result_types = {'passed': ['PASSED', 'passed', 'PASS', 'XFAIL'],
+ 'failed': ['FAILED', 'failed', 'FAIL', 'ERROR', 'error', 'UNKNOWN', 'XPASS'],
+ 'skipped': ['SKIPPED', 'skipped', 'UNSUPPORTED', 'UNTESTED', 'UNRESOLVED']}
def handle_ptest_result(self, k, status, result, machine):
@@ -32,16 +32,22 @@ class ResultsTextReport(object):
# Ensure tests without any test results still show up on the report
for suite in result['ptestresult.sections']:
if suite not in self.ptests[machine]:
- self.ptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+ self.ptests[machine][suite] = {
+ 'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
+ 'failed_testcases': [], "testcases": set(),
+ }
if 'duration' in result['ptestresult.sections'][suite]:
self.ptests[machine][suite]['duration'] = result['ptestresult.sections'][suite]['duration']
if 'timeout' in result['ptestresult.sections'][suite]:
self.ptests[machine][suite]['duration'] += " T"
- return
+ return True
+
+ # process test result
try:
_, suite, test = k.split(".", 2)
except ValueError:
- return
+ return True
+
# Handle 'glib-2.0'
if 'ptestresult.sections' in result and suite not in result['ptestresult.sections']:
try:
@@ -50,11 +56,23 @@ class ResultsTextReport(object):
suite = suite + "." + suite1
except ValueError:
pass
+
if suite not in self.ptests[machine]:
- self.ptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+ self.ptests[machine][suite] = {
+ 'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
+ 'failed_testcases': [], "testcases": set(),
+ }
+
+ # do not process duplicate results
+ if test in self.ptests[machine][suite]["testcases"]:
+ print("Warning duplicate ptest result '{}.{}' for {}".format(suite, test, machine))
+ return False
+
for tk in self.result_types:
if status in self.result_types[tk]:
self.ptests[machine][suite][tk] += 1
+ self.ptests[machine][suite]["testcases"].add(test)
+ return True
def handle_ltptest_result(self, k, status, result, machine):
if machine not in self.ltptests:
@@ -124,17 +142,20 @@ class ResultsTextReport(object):
result = testresult.get('result', [])
for k in result:
test_status = result[k].get('status', [])
+ if k.startswith("ptestresult."):
+ if not self.handle_ptest_result(k, test_status, result, machine):
+ continue
+ elif k.startswith("ltpresult."):
+ self.handle_ltptest_result(k, test_status, result, machine)
+ elif k.startswith("ltpposixresult."):
+ self.handle_ltpposixtest_result(k, test_status, result, machine)
+
+ # process result if it was not skipped by a handler
for tk in self.result_types:
if test_status in self.result_types[tk]:
test_count_report[tk] += 1
if test_status in self.result_types['failed']:
test_count_report['failed_testcases'].append(k)
- if k.startswith("ptestresult."):
- self.handle_ptest_result(k, test_status, result, machine)
- if k.startswith("ltpresult."):
- self.handle_ltptest_result(k, test_status, result, machine)
- if k.startswith("ltpposixresult."):
- self.handle_ltpposixtest_result(k, test_status, result, machine)
return test_count_report
def print_test_report(self, template_file_name, test_count_reports):
@@ -203,8 +224,21 @@ class ResultsTextReport(object):
testresults = resultutils.load_resultsdata(source_dir)
for testsuite in testresults:
for resultid in testresults[testsuite]:
+ skip = False
result = testresults[testsuite][resultid]
machine = result['configuration']['MACHINE']
+
+ # Check to see if there is already results for these kinds of tests for the machine
+ for key in result['result'].keys():
+ testtype = str(key).split('.')[0]
+ if ((machine in self.ltptests and testtype == "ltpiresult" and self.ltptests[machine]) or
+ (machine in self.ltpposixtests and testtype == "ltpposixresult" and self.ltpposixtests[machine])):
+ print("Already have test results for %s on %s, skipping %s" %(str(key).split('.')[0], machine, resultid))
+ skip = True
+ break
+ if skip:
+ break
+
test_count_report = self.get_aggregated_test_result(logger, result, machine)
test_count_report['machine'] = machine
test_count_report['testseries'] = result['configuration']['TESTSERIES']
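Note: handle_ptest_result() now records every test case in a per-suite set and returns False for duplicates so the aggregated totals are not double-counted; get_aggregated_test_result() then skips the generic counting when a handler rejects the result. A small sketch of that dedupe-and-tally idea (the counter layout mirrors the report's passed/failed/skipped buckets; names are illustrative):

    def tally_once(counts, seen, suite, test, status, result_types):
        """Count a ptest result once per (suite, test); duplicates are reported and ignored."""
        if test in seen:
            print("Warning: duplicate ptest result '%s.%s'" % (suite, test))
            return False
        seen.add(test)
        for bucket, statuses in result_types.items():
            if status in statuses:
                counts[bucket] += 1
        return True

    result_types = {'passed': ['PASS'], 'failed': ['FAIL'], 'skipped': ['SKIP']}
    counts, seen = {'passed': 0, 'failed': 0, 'skipped': 0}, set()
    tally_once(counts, seen, 'glib-2.0', 'test1', 'PASS', result_types)
    tally_once(counts, seen, 'glib-2.0', 'test1', 'PASS', result_types)   # ignored as duplicate
    print(counts)   # {'passed': 1, 'failed': 0, 'skipped': 0}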
diff --git a/poky/scripts/lib/resulttool/resultutils.py b/poky/scripts/lib/resulttool/resultutils.py
index e595c185d..7cb85a6aa 100644
--- a/poky/scripts/lib/resulttool/resultutils.py
+++ b/poky/scripts/lib/resulttool/resultutils.py
@@ -7,6 +7,8 @@
#
import os
+import base64
+import zlib
import json
import scriptpath
import copy
@@ -117,6 +119,38 @@ def strip_ptestresults(results):
del newresults[res]['result']['ptestresult.sections'][i]['log']
return newresults
+def decode_log(logdata):
+ if isinstance(logdata, str):
+ return logdata
+ elif isinstance(logdata, dict):
+ if "compressed" in logdata:
+ data = logdata.get("compressed")
+ data = base64.b64decode(data.encode("utf-8"))
+ data = zlib.decompress(data)
+ try:
+ return data.decode("utf-8")
+ except UnicodeDecodeError:
+ return data
+ return None
+
+def ptestresult_get_log(results, section):
+ if 'ptestresult.sections' not in results:
+ return None
+ if section not in results['ptestresult.sections']:
+ return None
+
+ ptest = results['ptestresult.sections'][section]
+ if 'log' not in ptest:
+ return None
+ return decode_log(ptest['log'])
+
+def ptestresult_get_rawlogs(results):
+ if 'ptestresult.rawlogs' not in results:
+ return None
+ if 'log' not in results['ptestresult.rawlogs']:
+ return None
+ return decode_log(results['ptestresult.rawlogs']['log'])
+
def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
for res in results:
if res:
@@ -131,14 +165,17 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p
f.write(json.dumps(resultsout, sort_keys=True, indent=4))
for res2 in results[res]:
if ptestlogs and 'result' in results[res][res2]:
- if 'ptestresult.rawlogs' in results[res][res2]['result']:
+ seriesresults = results[res][res2]['result']
+ rawlogs = ptestresult_get_rawlogs(seriesresults)
+ if rawlogs is not None:
with open(dst.replace(fn, "ptest-raw.log"), "w+") as f:
- f.write(results[res][res2]['result']['ptestresult.rawlogs']['log'])
- if 'ptestresult.sections' in results[res][res2]['result']:
- for i in results[res][res2]['result']['ptestresult.sections']:
- if 'log' in results[res][res2]['result']['ptestresult.sections'][i]:
+ f.write(rawlogs)
+ if 'ptestresult.sections' in seriesresults:
+ for i in seriesresults['ptestresult.sections']:
+ sectionlog = ptestresult_get_log(seriesresults, i)
+ if sectionlog is not None:
with open(dst.replace(fn, "ptest-%s.log" % i), "w+") as f:
- f.write(results[res][res2]['result']['ptestresult.sections'][i]['log'])
+ f.write(sectionlog)
def git_get_result(repo, tags):
git_objs = []
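Note: a log entry in the results JSON may now be either a plain string or a dict whose "compressed" key holds a base64-encoded, zlib-compressed payload; decode_log() above accepts both. A round-trip sketch of that storage format (the encoder side is an assumption inferred from the decoder, not code from this tree):

    import base64
    import zlib

    def encode_log(text):
        """Assumed writer side: zlib-compress, then base64 so the blob is JSON-safe."""
        blob = base64.b64encode(zlib.compress(text.encode('utf-8'))).decode('utf-8')
        return {'compressed': blob}

    def decode_log(logdata):
        if isinstance(logdata, str):
            return logdata
        if isinstance(logdata, dict) and 'compressed' in logdata:
            data = zlib.decompress(base64.b64decode(logdata['compressed'].encode('utf-8')))
            try:
                return data.decode('utf-8')
            except UnicodeDecodeError:
                return data
        return None

    print(decode_log(encode_log('ptest section output...')))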
diff --git a/poky/scripts/lib/scriptutils.py b/poky/scripts/lib/scriptutils.py
index e7e7021c2..c573dc7f6 100644
--- a/poky/scripts/lib/scriptutils.py
+++ b/poky/scripts/lib/scriptutils.py
@@ -16,12 +16,51 @@ import string
import subprocess
import sys
import tempfile
+import threading
import importlib
from importlib import machinery
-def logger_create(name, stream=None):
+class KeepAliveStreamHandler(logging.StreamHandler):
+ def __init__(self, keepalive=True, **kwargs):
+ super().__init__(**kwargs)
+ if keepalive is True:
+ keepalive = 5000 # default timeout
+ self._timeout = threading.Condition()
+ self._stop = False
+
+ # background thread waits on condition, if the condition does not
+ # happen emit a keep alive message
+ def thread():
+ while not self._stop:
+ with self._timeout:
+ if not self._timeout.wait(keepalive):
+ self.emit(logging.LogRecord("keepalive", logging.INFO,
+ None, None, "Keepalive message", None, None))
+
+ self._thread = threading.Thread(target = thread, daemon = True)
+ self._thread.start()
+
+ def close(self):
+ # mark the thread to stop and notify it
+ self._stop = True
+ with self._timeout:
+ self._timeout.notify()
+ # wait for it to join
+ self._thread.join()
+ super().close()
+
+ def emit(self, record):
+ super().emit(record)
+ # trigger timer reset
+ with self._timeout:
+ self._timeout.notify()
+
+def logger_create(name, stream=None, keepalive=None):
logger = logging.getLogger(name)
- loggerhandler = logging.StreamHandler(stream=stream)
+ if keepalive is not None:
+ loggerhandler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
+ else:
+ loggerhandler = logging.StreamHandler(stream=stream)
loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
logger.addHandler(loggerhandler)
logger.setLevel(logging.INFO)
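Note: KeepAliveStreamHandler uses a threading.Condition as a resettable timer: every real log record notifies the condition, and if nothing arrives within the keepalive interval (seconds, as passed to Condition.wait) the background thread emits a synthetic "Keepalive message", which keeps CI consoles with output timeouts from killing a quiet build. A trimmed, self-contained restatement of the pattern plus a usage example:

    import logging
    import threading
    import time

    class KeepAliveHandler(logging.StreamHandler):
        """Emit a heartbeat record whenever the log has been quiet for 'keepalive' seconds."""
        def __init__(self, keepalive=5, **kwargs):
            super().__init__(**kwargs)
            self._timeout = threading.Condition()   # Condition uses an RLock, so emit() may re-enter
            self._stop = False
            def heartbeat():
                while not self._stop:
                    with self._timeout:
                        if not self._timeout.wait(keepalive):
                            self.emit(logging.LogRecord('keepalive', logging.INFO, None, None,
                                                        'Keepalive message', None, None))
            self._thread = threading.Thread(target=heartbeat, daemon=True)
            self._thread.start()

        def emit(self, record):
            super().emit(record)
            with self._timeout:
                self._timeout.notify()   # real output resets the timer

    log = logging.getLogger('demo')
    log.addHandler(KeepAliveHandler(keepalive=1))
    log.setLevel(logging.INFO)
    log.info('long quiet step starting')
    time.sleep(3)   # a couple of keepalive messages should appear here
    log.info('done')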
diff --git a/poky/scripts/lib/wic/__init__.py b/poky/scripts/lib/wic/__init__.py
index ba2d61406..85567934a 100644
--- a/poky/scripts/lib/wic/__init__.py
+++ b/poky/scripts/lib/wic/__init__.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python -tt
+#!/usr/bin/env python3
#
# Copyright (c) 2007 Red Hat, Inc.
# Copyright (c) 2011 Intel, Inc.
diff --git a/poky/scripts/lib/wic/canned-wks/qemuriscv.wks b/poky/scripts/lib/wic/canned-wks/qemuriscv.wks
new file mode 100644
index 000000000..12c68b706
--- /dev/null
+++ b/poky/scripts/lib/wic/canned-wks/qemuriscv.wks
@@ -0,0 +1,3 @@
+# short-description: Create qcow2 image for RISC-V QEMU machines
+
+part / --source rootfs --fstype=ext4 --label root --align 4096 --size 5G
diff --git a/poky/scripts/lib/wic/ksparser.py b/poky/scripts/lib/wic/ksparser.py
index 62048213b..6a643ba3a 100644
--- a/poky/scripts/lib/wic/ksparser.py
+++ b/poky/scripts/lib/wic/ksparser.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python -tt
+#!/usr/bin/env python3
#
# Copyright (c) 2016 Intel, Inc.
#
@@ -151,6 +151,8 @@ class KickStart():
part.add_argument('--part-name')
part.add_argument('--part-type')
part.add_argument('--rootfs-dir')
+ part.add_argument('--type', default='primary',
+ choices = ('primary', 'logical'))
# --size and --fixed-size cannot be specified together; options
# ----extra-space and --overhead-factor should also raise a parser
diff --git a/poky/scripts/lib/wic/partition.py b/poky/scripts/lib/wic/partition.py
index 01466b258..d809408e1 100644
--- a/poky/scripts/lib/wic/partition.py
+++ b/poky/scripts/lib/wic/partition.py
@@ -50,6 +50,7 @@ class Partition():
self.use_uuid = args.use_uuid
self.uuid = args.uuid
self.fsuuid = args.fsuuid
+ self.type = args.type
self.lineno = lineno
self.source_file = ""
@@ -211,19 +212,24 @@ class Partition():
if os.path.isfile(rootfs):
os.remove(rootfs)
- # Get rootfs size from bitbake variable if it's not set in .ks file
if not self.size and real_rootfs:
- # Bitbake variable ROOTFS_SIZE is calculated in
- # Image._get_rootfs_size method from meta/lib/oe/image.py
- # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT,
- # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE
+ # The rootfs size is not set in .ks file so try to get it
+ # from bitbake variable
rsize_bb = get_bitbake_var('ROOTFS_SIZE')
- if rsize_bb:
- logger.warning('overhead-factor was specified, but size was not,'
- ' so bitbake variables will be used for the size.'
- ' In this case both IMAGE_OVERHEAD_FACTOR and '
- '--overhead-factor will be applied')
+ rdir = get_bitbake_var('IMAGE_ROOTFS')
+ if rsize_bb and rdir == rootfs_dir:
+ # Bitbake variable ROOTFS_SIZE is calculated in
+ # Image._get_rootfs_size method from meta/lib/oe/image.py
+ # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT,
+ # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE
self.size = int(round(float(rsize_bb)))
+ else:
+ # Bitbake variable ROOTFS_SIZE is not defined so compute it
+ # from the rootfs_dir size using the same logic found in
+ # get_rootfs_size() from meta/classes/image.bbclass
+ du_cmd = "du -ks %s" % rootfs_dir
+ out = exec_cmd(du_cmd)
+ self.size = int(out.split()[0])
prefix = "ext" if self.fstype.startswith("ext") else self.fstype
method = getattr(self, "prepare_rootfs_" + prefix)
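Note: when no --size is given and ROOTFS_SIZE is not applicable (the partition's rootfs-dir is not the main IMAGE_ROOTFS), the size now falls back to a 'du -ks' of the rootfs directory, mirroring get_rootfs_size() in image.bbclass. Standalone sketch of that fallback (the path is illustrative):

    import subprocess

    def rootfs_size_kb(rootfs_dir):
        """Approximate the partition size in KiB from the on-disk rootfs, like 'du -ks <dir>'."""
        out = subprocess.check_output(['du', '-ks', rootfs_dir], universal_newlines=True)
        return int(out.split()[0])

    # e.g. size_kb = rootfs_size_kb('/path/to/rootfs')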
diff --git a/poky/scripts/lib/wic/pluginbase.py b/poky/scripts/lib/wic/pluginbase.py
index bfb73ca61..f74d6430f 100644
--- a/poky/scripts/lib/wic/pluginbase.py
+++ b/poky/scripts/lib/wic/pluginbase.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python -tt
+#!/usr/bin/env python3
#
# Copyright (c) 2011 Intel, Inc.
#
diff --git a/poky/scripts/lib/wic/plugins/imager/direct.py b/poky/scripts/lib/wic/plugins/imager/direct.py
index 67addeff3..3ce6ad55b 100644
--- a/poky/scripts/lib/wic/plugins/imager/direct.py
+++ b/poky/scripts/lib/wic/plugins/imager/direct.py
@@ -49,7 +49,6 @@ class DirectPlugin(ImagerPlugin):
# parse possible 'rootfs=name' items
self.rootfs_dir = dict(rdir.split('=') for rdir in rootfs_dir.split(' '))
- self.replaced_rootfs_paths = {}
self.bootimg_dir = bootimg_dir
self.kernel_dir = kernel_dir
self.native_sysroot = native_sysroot
@@ -59,6 +58,7 @@ class DirectPlugin(ImagerPlugin):
self.compressor = options.compressor
self.bmap = options.bmap
self.no_fstab_update = options.no_fstab_update
+ self.original_fstab = None
self.name = "%s-%s" % (os.path.splitext(os.path.basename(wks_file))[0],
strftime("%Y%m%d%H%M"))
@@ -104,24 +104,13 @@ class DirectPlugin(ImagerPlugin):
with open(fstab_path) as fstab:
fstab_lines = fstab.readlines()
+ self.original_fstab = fstab_lines.copy()
if self._update_fstab(fstab_lines, self.parts):
- # copy rootfs dir to workdir to update fstab
- # as rootfs can be used by other tasks and can't be modified
- new_pseudo = os.path.realpath(os.path.join(self.workdir, "pseudo"))
- from_dir = os.path.join(os.path.join(image_rootfs, ".."), "pseudo")
- from_dir = os.path.realpath(from_dir)
- copyhardlinktree(from_dir, new_pseudo)
- new_rootfs = os.path.realpath(os.path.join(self.workdir, "rootfs_copy"))
- copyhardlinktree(image_rootfs, new_rootfs)
- fstab_path = os.path.join(new_rootfs, 'etc/fstab')
-
- os.unlink(fstab_path)
-
with open(fstab_path, "w") as fstab:
fstab.writelines(fstab_lines)
-
- return new_rootfs
+ else:
+ self.original_fstab = None
def _update_fstab(self, fstab_lines, parts):
"""Assume partition order same as in wks"""
@@ -170,14 +159,8 @@ class DirectPlugin(ImagerPlugin):
filesystems from the artifacts directly and combine them into
a partitioned image.
"""
- if self.no_fstab_update:
- new_rootfs = None
- else:
- new_rootfs = self._write_fstab(self.rootfs_dir.get("ROOTFS_DIR"))
- if new_rootfs:
- # rootfs was copied to update fstab
- self.replaced_rootfs_paths[new_rootfs] = self.rootfs_dir['ROOTFS_DIR']
- self.rootfs_dir['ROOTFS_DIR'] = new_rootfs
+ if not self.no_fstab_update:
+ self._write_fstab(self.rootfs_dir.get("ROOTFS_DIR"))
for part in self.parts:
# get rootfs size from bitbake variable if it's not set in .ks file
@@ -253,8 +236,6 @@ class DirectPlugin(ImagerPlugin):
else:
suffix = '["%s"]:' % (part.mountpoint or part.label)
rootdir = part.rootfs_dir
- if rootdir in self.replaced_rootfs_paths:
- rootdir = self.replaced_rootfs_paths[rootdir]
msg += ' ROOTFS_DIR%s%s\n' % (suffix.ljust(20), rootdir)
msg += ' BOOTIMG_DIR: %s\n' % self.bootimg_dir
@@ -292,6 +273,12 @@ class DirectPlugin(ImagerPlugin):
if os.path.isfile(path):
shutil.move(path, os.path.join(self.outdir, fname))
+ #Restore original fstab
+ if self.original_fstab:
+ fstab_path = self.rootfs_dir.get("ROOTFS_DIR") + "/etc/fstab"
+ with open(fstab_path, "w") as fstab:
+ fstab.writelines(self.original_fstab)
+
# remove work directory
shutil.rmtree(self.workdir, ignore_errors=True)
@@ -313,6 +300,10 @@ class PartitionedImage():
self.path = path # Path to the image file
self.numpart = 0 # Number of allocated partitions
self.realpart = 0 # Number of partitions in the partition table
+ self.primary_part_num = 0 # Number of primary partitions (msdos)
+ self.extendedpart = 0 # Create extended partition before this logical partition (msdos)
+ self.extended_size_sec = 0 # Size of extended partition (msdos)
+ self.logical_part_cnt = 0 # Number of total logical partitions (msdos)
self.offset = 0 # Offset of next partition (in sectors)
self.min_size = 0 # Minimum required disk size to fit
# all partitions (in bytes)
@@ -404,12 +395,16 @@ class PartitionedImage():
# Skip one sector required for the partitioning scheme overhead
self.offset += overhead
- if self.realpart > 3 and num_real_partitions > 4:
+ if self.ptable_format == "msdos":
+ if self.primary_part_num > 3 or \
+ (self.extendedpart == 0 and self.primary_part_num >= 3 and num_real_partitions > 4):
+ part.type = 'logical'
# Reserve a sector for EBR for every logical partition
# before alignment is performed.
- if self.ptable_format == "msdos":
+ if part.type == 'logical':
self.offset += 1
+ align_sectors = 0
if part.align:
# If not first partition and we do have alignment set we need
# to align the partition.
@@ -435,18 +430,25 @@ class PartitionedImage():
part.start = self.offset
self.offset += part.size_sec
- part.type = 'primary'
if not part.no_table:
part.num = self.realpart
else:
part.num = 0
- if self.ptable_format == "msdos":
- # only count the partitions that are in partition table
- if num_real_partitions > 4:
- if self.realpart > 3:
- part.type = 'logical'
- part.num = self.realpart + 1
+ if self.ptable_format == "msdos" and not part.no_table:
+ if part.type == 'logical':
+ self.logical_part_cnt += 1
+ part.num = self.logical_part_cnt + 4
+ if self.extendedpart == 0:
+ # Create extended partition as a primary partition
+ self.primary_part_num += 1
+ self.extendedpart = part.num
+ else:
+ self.extended_size_sec += align_sectors
+ self.extended_size_sec += part.size_sec + 1
+ else:
+ self.primary_part_num += 1
+ part.num = self.primary_part_num
logger.debug("Assigned %s to %s%d, sectors range %d-%d size %d "
"sectors (%d bytes).", part.mountpoint, part.disk,
@@ -496,7 +498,7 @@ class PartitionedImage():
if part.num == 0:
continue
- if self.ptable_format == "msdos" and part.num == 5:
+ if self.ptable_format == "msdos" and part.num == self.extendedpart:
# Create an extended partition (note: extended
# partition is described in MBR and contains all
# logical partitions). The logical partitions save a
@@ -510,7 +512,7 @@ class PartitionedImage():
# room for all logical partitions.
self._create_partition(self.path, "extended",
None, part.start - 1,
- self.offset - part.start + 1)
+ self.extended_size_sec)
if part.fstype == "swap":
parted_fs_type = "linux-swap"
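Note: instead of hard-link-copying the whole rootfs just to edit /etc/fstab (and remembering the replacement path), the imager now edits the file in place, keeps the original lines in self.original_fstab, and writes them back during cleanup. A standalone sketch of that save/modify/restore pattern (the class name is illustrative):

    import os

    class FstabEditor:
        """Edit a rootfs /etc/fstab in place and restore the original content afterwards."""
        def __init__(self, rootfs_dir):
            self.fstab_path = os.path.join(rootfs_dir, 'etc/fstab')
            self.original = None

        def update(self, new_lines):
            with open(self.fstab_path) as fstab:
                self.original = fstab.readlines()
            with open(self.fstab_path, 'w') as fstab:
                fstab.writelines(new_lines)

        def restore(self):
            if self.original is not None:
                with open(self.fstab_path, 'w') as fstab:
                    fstab.writelines(self.original)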
diff --git a/poky/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/poky/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py
new file mode 100644
index 000000000..5bd739068
--- /dev/null
+++ b/poky/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py
@@ -0,0 +1,213 @@
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# DESCRIPTION
+# This implements the 'bootimg-biosplusefi' source plugin class for 'wic'
+#
+# AUTHORS
+# William Bourque <wbourque [at) gmail.com>
+
+import types
+
+from wic.pluginbase import SourcePlugin
+from importlib.machinery import SourceFileLoader
+
+class BootimgBiosPlusEFIPlugin(SourcePlugin):
+ """
+ Create MBR + EFI boot partition
+
+ This plugin creates a boot partition that contains both
+ legacy BIOS and EFI content. It will be able to boot from both.
+ This is useful when managing PC fleet with some older machines
+ without EFI support.
+
+ Note it is possible to create an image that can boot from both
+ legacy BIOS and EFI by defining two partitions: one with arg
+ --source bootimg-efi and another one with --source bootimg-pcbios.
+ However, this method has the obvious downside that it requires TWO
+ partitions to be created on the storage device.
+ Both partitions will also be marked as "bootable" which does not work on
+ most BIOSes, as the BIOS often uses the "bootable" flag to determine
+ what to boot. If you have such a BIOS, you need to manually remove the
+ "bootable" flag from the EFI partition for the drive to be bootable.
+ Having two partitions also seems to confuse wic: the content of
+ the first partition will be duplicated into the second, even though it
+ will not be used at all.
+
+ Also, unlike "isoimage-isohybrid" that also does BIOS and EFI, this plugin
+ allows you to have more than a single rootfs partition and does
+ not turn the rootfs into an initramfs RAM image.
+
+ This plugin is made to put everything into a single /boot partition so it
+ does not have the limitations listed above.
+
+ The plugin is made so it tries not to reimplement what's already
+ been done in other plugins; as such it imports "bootimg-pcbios"
+ and "bootimg-efi".
+ Plugin "bootimg-pcbios" is used to generate legacy BIOS boot.
+ Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it
+ requires a --sourceparams argument to know which loader to use; refer
+ to "bootimg-efi" code/documentation for the list of loader.
+
+ Imports are handled with "SourceFileLoader" from importlib as it is
+ otherwise very difficult to import modules that have a hyphen "-" in their
+ filenames.
+ The SourcePlugin() methods used in the plugins (do_install_disk,
+ do_configure_partition, do_prepare_partition) are then called on both,
+ beginning by "bootimg-efi".
+
+ Plugin options, such as "--sourceparams" can still be passed to a
+ plugin, as long as they do not cause issues in the other plugin.
+
+ Example wic configuration:
+ part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\
+ --ondisk sda --label os_boot --active --align 1024 --use-uuid
+ """
+
+ name = 'bootimg-biosplusefi'
+
+ __PCBIOS_MODULE_NAME = "bootimg-pcbios"
+ __EFI_MODULE_NAME = "bootimg-efi"
+
+ __imgEFIObj = None
+ __imgBiosObj = None
+
+ @classmethod
+ def __init__(cls):
+ """
+ Constructor (init)
+ """
+
+ # XXX
+ # For some reason, the __init__ constructor is never called.
+ # Something to do with how pluginbase works?
+ cls.__instanciateSubClasses()
+
+ @classmethod
+ def __instanciateSubClasses(cls):
+ """
+
+ """
+
+ # Import bootimg-pcbios (class name "BootimgPcbiosPlugin")
+ modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ cls.__PCBIOS_MODULE_NAME + ".py")
+ loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath)
+ mod = types.ModuleType(loader.name)
+ loader.exec_module(mod)
+ cls.__imgBiosObj = mod.BootimgPcbiosPlugin()
+
+ # Import bootimg-efi (class name "BootimgEFIPlugin")
+ modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ cls.__EFI_MODULE_NAME + ".py")
+ loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath)
+ mod = types.ModuleType(loader.name)
+ loader.exec_module(mod)
+ cls.__imgEFIObj = mod.BootimgEFIPlugin()
+
+ @classmethod
+ def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir,
+ bootimg_dir, kernel_dir, native_sysroot):
+ """
+ Called after all partitions have been prepared and assembled into a
+ disk image.
+ """
+
+ if ( (not cls.__imgEFIObj) or (not cls.__imgBiosObj) ):
+ cls.__instanciateSubClasses()
+
+ cls.__imgEFIObj.do_install_disk(
+ disk,
+ disk_name,
+ creator,
+ workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ native_sysroot)
+
+ cls.__imgBiosObj.do_install_disk(
+ disk,
+ disk_name,
+ creator,
+ workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ native_sysroot)
+
+ @classmethod
+ def do_configure_partition(cls, part, source_params, creator, cr_workdir,
+ oe_builddir, bootimg_dir, kernel_dir,
+ native_sysroot):
+ """
+ Called before do_prepare_partition()
+ """
+
+ if ( (not cls.__imgEFIObj) or (not cls.__imgBiosObj) ):
+ cls.__instanciateSubClasses()
+
+ cls.__imgEFIObj.do_configure_partition(
+ part,
+ source_params,
+ creator,
+ cr_workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ native_sysroot)
+
+ cls.__imgBiosObj.do_configure_partition(
+ part,
+ source_params,
+ creator,
+ cr_workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ native_sysroot)
+
+ @classmethod
+ def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
+ oe_builddir, bootimg_dir, kernel_dir,
+ rootfs_dir, native_sysroot):
+ """
+ Called to do the actual content population for a partition i.e. it
+ 'prepares' the partition to be incorporated into the image.
+ """
+
+ if ( (not cls.__imgEFIObj) or (not cls.__imgBiosObj) ):
+ cls.__instanciateSubClasses()
+
+ cls.__imgEFIObj.do_prepare_partition(
+ part,
+ source_params,
+ creator,
+ cr_workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ rootfs_dir,
+ native_sysroot)
+
+ cls.__imgBiosObj.do_prepare_partition(
+ part,
+ source_params,
+ creator,
+ cr_workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ rootfs_dir,
+ native_sysroot)
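Note: the plugin filenames bootimg-pcbios.py and bootimg-efi.py contain a hyphen, so they cannot be pulled in with a normal import statement; the class above loads them through importlib's SourceFileLoader and instantiates the plugin classes from the resulting module objects. Minimal standalone sketch of that loading technique (the directory path is illustrative):

    import os
    import types
    from importlib.machinery import SourceFileLoader

    def load_hyphenated_module(name, directory):
        """Import a module whose filename (e.g. 'bootimg-pcbios.py') is not a valid identifier."""
        loader = SourceFileLoader(name, os.path.join(directory, name + '.py'))
        module = types.ModuleType(loader.name)
        loader.exec_module(module)
        return module

    # e.g. pcbios = load_hyphenated_module('bootimg-pcbios', '/path/to/wic/plugins/source')
    #      plugin = pcbios.BootimgPcbiosPlugin()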
diff --git a/poky/scripts/lib/wic/plugins/source/bootimg-efi.py b/poky/scripts/lib/wic/plugins/source/bootimg-efi.py
index 5cc5c8a6b..2cfdc10ec 100644
--- a/poky/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/poky/scripts/lib/wic/plugins/source/bootimg-efi.py
@@ -74,8 +74,10 @@ class BootimgEFIPlugin(SourcePlugin):
grubefi_conf += "menuentry '%s'{\n" % (title if title else "boot")
kernel = get_bitbake_var("KERNEL_IMAGETYPE")
- if not kernel:
- kernel = "bzImage"
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
label = source_params.get('label')
label_conf = "root=%s" % creator.rootdev
@@ -154,8 +156,10 @@ class BootimgEFIPlugin(SourcePlugin):
if not custom_cfg:
# Create systemd-boot configuration using parameters from wks file
kernel = get_bitbake_var("KERNEL_IMAGETYPE")
- if not kernel:
- kernel = "bzImage"
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
title = source_params.get('title')
@@ -225,8 +229,10 @@ class BootimgEFIPlugin(SourcePlugin):
hdddir = "%s/hdd/boot" % cr_workdir
kernel = get_bitbake_var("KERNEL_IMAGETYPE")
- if not kernel:
- kernel = "bzImage"
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
install_cmd = "install -m 0644 %s/%s %s/%s" % \
(staging_kernel_dir, kernel, hdddir, kernel)
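Note: with INITRAMFS_IMAGE_BUNDLE = "1" and INITRAMFS_IMAGE set, the deployed kernel artifact is named "<KERNEL_IMAGETYPE>-<INITRAMFS_LINK_NAME>.bin", so this hunk (and the matching ones in bootimg-pcbios and isoimage-isohybrid below) installs that file instead of the bare kernel image. Sketch of the selection as a helper, with a plain dict standing in for wic's get_bitbake_var() (the variable values are illustrative):

    def kernel_artifact_name(getvar):
        """Pick the kernel file wic should install, preferring a bundled-initramfs image."""
        kernel = getvar('KERNEL_IMAGETYPE')
        if getvar('INITRAMFS_IMAGE_BUNDLE') == '1' and getvar('INITRAMFS_IMAGE'):
            kernel = '%s-%s.bin' % (getvar('KERNEL_IMAGETYPE'), getvar('INITRAMFS_LINK_NAME'))
        return kernel

    bbvars = {'KERNEL_IMAGETYPE': 'bzImage',
              'INITRAMFS_IMAGE_BUNDLE': '1',
              'INITRAMFS_IMAGE': 'core-image-minimal-initramfs',
              'INITRAMFS_LINK_NAME': 'initramfs-qemux86-64'}
    print(kernel_artifact_name(bbvars.get))   # bzImage-initramfs-qemux86-64.bin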
diff --git a/poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
index 670d34774..f2639e700 100644
--- a/poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
@@ -150,8 +150,10 @@ class BootimgPcbiosPlugin(SourcePlugin):
hdddir = "%s/hdd/boot" % cr_workdir
kernel = get_bitbake_var("KERNEL_IMAGETYPE")
- if not kernel:
- kernel = "bzImage"
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
cmds = ("install -m 0644 %s/%s %s/vmlinuz" %
(staging_kernel_dir, kernel, hdddir),
diff --git a/poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
index 74d6f1451..24299c1ec 100644
--- a/poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ b/poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
@@ -71,8 +71,11 @@ class IsoImagePlugin(SourcePlugin):
syslinux_conf += "LABEL boot\n"
kernel = get_bitbake_var("KERNEL_IMAGETYPE")
- if not kernel:
- kernel = "bzImage"
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
+
syslinux_conf += "KERNEL /" + kernel + "\n"
syslinux_conf += "APPEND initrd=/initrd LABEL=boot %s\n" \
% bootloader.append
@@ -117,8 +120,10 @@ class IsoImagePlugin(SourcePlugin):
grubefi_conf += "menuentry 'boot'{\n"
kernel = get_bitbake_var("KERNEL_IMAGETYPE")
- if not kernel:
- kernel = "bzImage"
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
grubefi_conf += "linux /%s rootwait %s\n" \
% (kernel, bootloader.append)
@@ -273,8 +278,10 @@ class IsoImagePlugin(SourcePlugin):
os.remove("%s/initrd.cpio.gz" % cr_workdir)
kernel = get_bitbake_var("KERNEL_IMAGETYPE")
- if not kernel:
- kernel = "bzImage"
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
install_cmd = "install -m 0644 %s/%s %s/%s" % \
(kernel_dir, kernel, isodir, kernel)