Diffstat (limited to 'tools')
-rw-r--r--  tools/binman/README | 34
-rwxr-xr-x  tools/binman/binman.py | 2
-rwxr-xr-x  tools/binman/cbfs_util_test.py | 2
-rw-r--r--  tools/binman/elf.py | 2
-rw-r--r--  tools/binman/elf_test.py | 2
-rw-r--r--  tools/binman/entry.py | 25
-rw-r--r--  tools/binman/entry_test.py | 15
-rw-r--r--  tools/binman/etype/intel_fit.py | 2
-rw-r--r--  tools/binman/etype/section.py | 18
-rw-r--r--  tools/binman/ftest.py | 44
-rw-r--r--  tools/binman/image_test.py | 8
-rw-r--r--  tools/binman/test/155_symbols_tpl_x86.dts | 31
-rw-r--r--  tools/binman/test/Makefile | 6
l---------  tools/binman/test/u_boot_binman_syms_x86.c | 1
-rw-r--r--  tools/binman/test/u_boot_binman_syms_x86.lds | 30
-rw-r--r--  tools/buildman/board.py | 9
-rw-r--r--  tools/buildman/bsettings.py | 20
-rw-r--r--  tools/buildman/builder.py | 47
-rw-r--r--  tools/buildman/builderthread.py | 24
-rwxr-xr-x  tools/buildman/buildman.py | 10
-rw-r--r--  tools/buildman/control.py | 44
-rw-r--r--  tools/buildman/func_test.py | 16
-rw-r--r--  tools/buildman/test.py | 22
-rw-r--r--  tools/buildman/toolchain.py | 99
-rwxr-xr-x  tools/dtoc/dtoc.py | 2
-rw-r--r--  tools/dtoc/fdt.py | 17
-rwxr-xr-x [-rw-r--r--]  tools/dtoc/test_dtoc.py | 1
-rwxr-xr-x  tools/dtoc/test_fdt.py | 2
-rwxr-xr-x  tools/img2brec.sh | 388
-rw-r--r--  tools/libfdt/fdt_rw.c | 3
-rwxr-xr-x  tools/microcode-tool.py | 28
-rwxr-xr-x  tools/moveconfig.py | 82
-rw-r--r--  tools/patman/command.py | 31
-rw-r--r--  tools/patman/func_test.py | 8
-rw-r--r--  tools/patman/patchstream.py | 4
-rwxr-xr-x  tools/patman/patman.py | 2
-rw-r--r--  tools/patman/series.py | 2
-rw-r--r--  tools/patman/settings.py | 4
-rw-r--r--  tools/patman/test.py | 4
-rw-r--r--  tools/patman/tools.py | 29
-rwxr-xr-x  tools/rkmux.py | 16
41 files changed, 429 insertions, 707 deletions
diff --git a/tools/binman/README b/tools/binman/README
index c96a564226..20a80944e2 100644
--- a/tools/binman/README
+++ b/tools/binman/README
@@ -766,20 +766,38 @@ when SPL is finished.
Binman allows you to declare symbols in the SPL image which are filled in
with their correct values during the build. For example:
- binman_sym_declare(ulong, u_boot_any, offset);
+ binman_sym_declare(ulong, u_boot_any, image_pos);
-declares a ulong value which will be assigned to the offset of any U-Boot
+declares a ulong value which will be assigned to the image-pos of any U-Boot
image (u-boot.bin, u-boot.img, u-boot-nodtb.bin) that is present in the image.
You can access this value with something like:
- ulong u_boot_offset = binman_sym(ulong, u_boot_any, offset);
+ ulong u_boot_offset = binman_sym(ulong, u_boot_any, image_pos);
-Thus u_boot_offset will be set to the offset of U-Boot in memory, assuming that
-the whole image has been loaded, or is available in flash. You can then jump to
-that address to start U-Boot.
+Thus u_boot_offset will be set to the image-pos of U-Boot in memory, assuming
+that the whole image has been loaded, or is available in flash. You can then
+jump to that address to start U-Boot.
-At present this feature is only supported in SPL. In principle it is possible
-to fill in such symbols in U-Boot proper, as well.
+At present this feature is only supported in SPL and TPL. In principle it is
+possible to fill in such symbols in U-Boot proper, as well, but a future C
+library is planned for this instead, to read from the device tree.
+
+As well as image-pos, it is possible to read the size of an entry and its
+offset (which is the start position of the entry within its parent).
+
+A small technical note: Binman automatically adds the base address of the image
+(i.e. __image_copy_start) to the value of the image-pos symbol, so that when the
+image is loaded to its linked address, the value will be correct and actually
+point into the image.
+
+For example, say SPL is at the start of the image and linked to start at
+address 0x80108000. If U-Boot's image-pos is 0x8000 then binman will write an
+image-pos for U-Boot of 0x80110000 into the SPL binary, since it assumes the
+image is loaded to 0x80108000, with SPL at 0x80108000 and U-Boot at 0x80110000.
+
+For x86 devices (with the end-at-4gb property) this base address is not added
+since it is assumed that images are XIP and the offsets already include the
+address.
Access to binman entry offsets at run time (fdt)
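
To make the base-address rule above concrete, here is a minimal illustrative
sketch in plain Python (the function and argument names are hypothetical and
are not part of binman's code):

    def symbol_value(image_pos, base_addr, end_at_4gb):
        """Value written into SPL for an image-pos symbol (sketch only)."""
        if end_at_4gb:
            # x86 XIP case: the offsets already include the address
            return image_pos
        return base_addr + image_pos

    # Worked example from the text above: SPL linked at 0x80108000 and a
    # U-Boot image-pos of 0x8000 give a written value of 0x80110000.
    assert symbol_value(0x8000, 0x80108000, False) == 0x80110000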
diff --git a/tools/binman/binman.py b/tools/binman/binman.py
index 8bd5868df2..9e6fd72117 100755
--- a/tools/binman/binman.py
+++ b/tools/binman/binman.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
diff --git a/tools/binman/cbfs_util_test.py b/tools/binman/cbfs_util_test.py
index 772c794ece..ddc2e09e35 100755
--- a/tools/binman/cbfs_util_test.py
+++ b/tools/binman/cbfs_util_test.py
@@ -56,7 +56,7 @@ class TestCbfs(unittest.TestCase):
cls.have_lz4 = True
try:
tools.Run('lz4', '--no-frame-crc', '-c',
- tools.GetInputFilename('u-boot.bin'))
+ tools.GetInputFilename('u-boot.bin'), binary=True)
except:
cls.have_lz4 = False
diff --git a/tools/binman/elf.py b/tools/binman/elf.py
index 0c1a5b44b6..de1ce73f2a 100644
--- a/tools/binman/elf.py
+++ b/tools/binman/elf.py
@@ -134,7 +134,7 @@ def LookupAndWriteSymbols(elf_fname, entry, section):
(msg, sym.size))
# Look up the symbol in our entry tables.
- value = section.LookupSymbol(name, sym.weak, msg)
+ value = section.LookupSymbol(name, sym.weak, msg, base.address)
if value is None:
value = -1
pack_string = pack_string.lower()
diff --git a/tools/binman/elf_test.py b/tools/binman/elf_test.py
index c0c11cb340..ac26fd51e4 100644
--- a/tools/binman/elf_test.py
+++ b/tools/binman/elf_test.py
@@ -45,7 +45,7 @@ class FakeSection:
def GetPath(self):
return 'section_path'
- def LookupSymbol(self, name, weak, msg):
+ def LookupSymbol(self, name, weak, msg, base_addr):
"""Fake implementation which returns the same value for all symbols"""
return self.sym_value
diff --git a/tools/binman/entry.py b/tools/binman/entry.py
index 409c0dca93..b6f1b2c93f 100644
--- a/tools/binman/entry.py
+++ b/tools/binman/entry.py
@@ -7,16 +7,7 @@
from __future__ import print_function
from collections import namedtuple
-
-# importlib was introduced in Python 2.7 but there was a report of it not
-# working in 2.7.12, so we work around this:
-# http://lists.denx.de/pipermail/u-boot/2016-October/269729.html
-try:
- import importlib
- have_importlib = True
-except:
- have_importlib = False
-
+import importlib
import os
import sys
@@ -56,6 +47,8 @@ class Entry(object):
offset: Offset of entry within the section, None if not known yet (in
which case it will be calculated by Pack())
size: Entry size in bytes, None if not known
+ pre_reset_size: size as it was before ResetForPack(). This allows us to
+ keep track of the size we started with and detect size changes
uncomp_size: Size of uncompressed data in bytes, if the entry is
compressed, else None
contents_size: Size of contents in bytes, 0 by default
@@ -80,6 +73,7 @@ class Entry(object):
self.name = node and (name_prefix + node.name) or 'none'
self.offset = None
self.size = None
+ self.pre_reset_size = None
self.uncomp_size = None
self.data = None
self.contents_size = 0
@@ -119,10 +113,7 @@ class Entry(object):
old_path = sys.path
sys.path.insert(0, os.path.join(our_path, 'etype'))
try:
- if have_importlib:
- module = importlib.import_module(module_name)
- else:
- module = __import__(module_name)
+ module = importlib.import_module(module_name)
except ImportError as e:
raise ValueError("Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s'" %
(etype, node_path, module_name, e))
@@ -326,6 +317,7 @@ class Entry(object):
self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
(ToHex(self.offset), ToHex(self.orig_offset),
ToHex(self.size), ToHex(self.orig_size)))
+ self.pre_reset_size = self.size
self.offset = self.orig_offset
self.size = self.orig_size
@@ -769,7 +761,10 @@ features to produce new behaviours.
True if the data did not result in a resize of this entry, False if
the entry must be resized
"""
- self.contents_size = self.size
+ if self.size is not None:
+ self.contents_size = self.size
+ else:
+ self.contents_size = self.pre_reset_size
ok = self.ProcessContentsUpdate(data)
self.Detail('WriteData: size=%x, ok=%s' % (len(data), ok))
section_ok = self.section.WriteChildData(self)
diff --git a/tools/binman/entry_test.py b/tools/binman/entry_test.py
index 13f5864516..277e10b585 100644
--- a/tools/binman/entry_test.py
+++ b/tools/binman/entry_test.py
@@ -39,21 +39,6 @@ class TestEntry(unittest.TestCase):
else:
import entry
- def test1EntryNoImportLib(self):
- """Test that we can import Entry subclassess successfully"""
- sys.modules['importlib'] = None
- global entry
- self._ReloadEntry()
- entry.Entry.Create(None, self.GetNode(), 'u-boot')
- self.assertFalse(entry.have_importlib)
-
- def test2EntryImportLib(self):
- del sys.modules['importlib']
- global entry
- self._ReloadEntry()
- entry.Entry.Create(None, self.GetNode(), 'u-boot-spl')
- self.assertTrue(entry.have_importlib)
-
def testEntryContents(self):
"""Test the Entry bass class"""
import entry
diff --git a/tools/binman/etype/intel_fit.py b/tools/binman/etype/intel_fit.py
index 23606d27d0..2a34a05f95 100644
--- a/tools/binman/etype/intel_fit.py
+++ b/tools/binman/etype/intel_fit.py
@@ -27,6 +27,6 @@ class Entry_intel_fit(Entry_blob):
self.align = 16
def ObtainContents(self):
- data = struct.pack('<8sIHBB', '_FIT_ ', 1, 0x100, 0x80, 0x7d)
+ data = struct.pack('<8sIHBB', b'_FIT_ ', 1, 0x100, 0x80, 0x7d)
self.SetContents(data)
return True
diff --git a/tools/binman/etype/section.py b/tools/binman/etype/section.py
index ab0c42cee0..89b7bf67fa 100644
--- a/tools/binman/etype/section.py
+++ b/tools/binman/etype/section.py
@@ -290,13 +290,16 @@ class Entry_section(Entry):
return entry.GetData()
source_entry.Raise("Cannot find entry for node '%s'" % node.name)
- def LookupSymbol(self, sym_name, optional, msg):
+ def LookupSymbol(self, sym_name, optional, msg, base_addr):
"""Look up a symbol in an ELF file
Looks up a symbol in an ELF file. Only entry types which come from an
ELF image can be used by this function.
- At present the only entry property supported is offset.
+ At present the only entry properties supported are:
+ offset
+ image_pos - 'base_addr' is added if this is not an end-at-4gb image
+ size
Args:
sym_name: Symbol name in the ELF file to look up in the format
@@ -309,6 +312,12 @@ class Entry_section(Entry):
optional: True if the symbol is optional. If False this function
will raise if the symbol is not found
msg: Message to display if an error occurs
+ base_addr: Base address of image. This is added to the returned
+ image_pos in most cases so that the returned position indicates
where the targeted entry/binary has actually been loaded. But
+ if end-at-4gb is used, this is not done, since the binary is
+ already assumed to be linked to the ROM position and using
+ execute-in-place (XIP).
Returns:
Value that should be assigned to that symbol, or None if it was
@@ -343,7 +352,10 @@ class Entry_section(Entry):
if prop_name == 'offset':
return entry.offset
elif prop_name == 'image_pos':
- return entry.image_pos
+ value = entry.image_pos
+ if not self.GetImage()._end_4gb:
+ value += base_addr
+ return value
if prop_name == 'size':
return entry.size
else:
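
The symbol names passed to LookupSymbol() follow the
'_binman_<entry>_prop_<property>' convention used in the tests below. A
minimal illustrative parser (an assumption for clarity, not binman's actual
code) might look like:

    import re

    # Sketch: split a binman symbol name into entry and property parts.
    # A name with no property, such as '_binman_something_prop_', is
    # rejected, matching the "invalid format" error seen in image_test.py.
    _SYM_RE = re.compile(r'^_binman_(\w+)_prop_(\w+)$')

    def parse_sym(sym_name):
        m = _SYM_RE.match(sym_name)
        if not m:
            raise ValueError("Symbol '%s' has invalid format" % sym_name)
        return m.group(1), m.group(2)

    print(parse_sym('_binman_u_boot_any_prop_image_pos'))
    # -> ('u_boot_any', 'image_pos')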
diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py
index 494e218cbc..872b855444 100644
--- a/tools/binman/ftest.py
+++ b/tools/binman/ftest.py
@@ -174,7 +174,7 @@ class TestFunctional(unittest.TestCase):
cls.have_lz4 = True
try:
tools.Run('lz4', '--no-frame-crc', '-c',
- os.path.join(cls._indir, 'u-boot.bin'))
+ os.path.join(cls._indir, 'u-boot.bin'), binary=True)
except:
cls.have_lz4 = False
@@ -1242,7 +1242,7 @@ class TestFunctional(unittest.TestCase):
self._SetupSplElf('u_boot_binman_syms')
data = self._DoReadFile('053_symbols.dts')
- sym_values = struct.pack('<LQLL', 0, 28, 24, 4)
+ sym_values = struct.pack('<LQLL', 0x00, 0x1c, 0x28, 0x04)
expected = (sym_values + U_BOOT_SPL_DATA[20:] +
tools.GetBytes(0xff, 1) + U_BOOT_DATA + sym_values +
U_BOOT_SPL_DATA[20:])
@@ -2113,7 +2113,7 @@ class TestFunctional(unittest.TestCase):
data = self.data = self._DoReadFileRealDtb('115_fdtmap.dts')
fdtmap_data = data[len(U_BOOT_DATA):]
magic = fdtmap_data[:8]
- self.assertEqual('_FDTMAP_', magic)
+ self.assertEqual(b'_FDTMAP_', magic)
self.assertEqual(tools.GetBytes(0, 8), fdtmap_data[8:16])
fdt_data = fdtmap_data[16:]
@@ -2156,7 +2156,7 @@ class TestFunctional(unittest.TestCase):
dtb = fdt.Fdt.FromData(fdt_data)
fdt_size = dtb.GetFdtObj().totalsize()
hdr_data = data[-8:]
- self.assertEqual('BinM', hdr_data[:4])
+ self.assertEqual(b'BinM', hdr_data[:4])
offset = struct.unpack('<I', hdr_data[4:])[0] & 0xffffffff
self.assertEqual(fdtmap_pos - 0x400, offset - (1 << 32))
@@ -2165,7 +2165,7 @@ class TestFunctional(unittest.TestCase):
data = self.data = self._DoReadFileRealDtb('117_fdtmap_hdr_start.dts')
fdtmap_pos = 0x100 + len(U_BOOT_DATA)
hdr_data = data[:8]
- self.assertEqual('BinM', hdr_data[:4])
+ self.assertEqual(b'BinM', hdr_data[:4])
offset = struct.unpack('<I', hdr_data[4:])[0]
self.assertEqual(fdtmap_pos, offset)
@@ -2174,7 +2174,7 @@ class TestFunctional(unittest.TestCase):
data = self.data = self._DoReadFileRealDtb('118_fdtmap_hdr_pos.dts')
fdtmap_pos = 0x100 + len(U_BOOT_DATA)
hdr_data = data[0x80:0x88]
- self.assertEqual('BinM', hdr_data[:4])
+ self.assertEqual(b'BinM', hdr_data[:4])
offset = struct.unpack('<I', hdr_data[4:])[0]
self.assertEqual(fdtmap_pos, offset)
@@ -2435,9 +2435,9 @@ class TestFunctional(unittest.TestCase):
' section 100 %x section 100' % section_size,
' cbfs 100 400 cbfs 0',
' u-boot 138 4 u-boot 38',
-' u-boot-dtb 180 10f u-boot-dtb 80 3c9',
+' u-boot-dtb 180 105 u-boot-dtb 80 3c9',
' u-boot-dtb 500 %x u-boot-dtb 400 3c9' % fdt_size,
-' fdtmap %x 3b4 fdtmap %x' %
+' fdtmap %x 3bd fdtmap %x' %
(fdtmap_offset, fdtmap_offset),
' image-header bf8 8 image-header bf8',
]
@@ -2522,7 +2522,7 @@ class TestFunctional(unittest.TestCase):
data = self._RunExtractCmd('section')
cbfs_data = data[:0x400]
cbfs = cbfs_util.CbfsReader(cbfs_data)
- self.assertEqual(['u-boot', 'u-boot-dtb', ''], cbfs.files.keys())
+ self.assertEqual(['u-boot', 'u-boot-dtb', ''], list(cbfs.files.keys()))
dtb_data = data[0x400:]
dtb = self._decompress(dtb_data)
self.assertEqual(EXTRACT_DTB_SIZE, len(dtb))
@@ -3304,12 +3304,9 @@ class TestFunctional(unittest.TestCase):
self.assertIn("'intel-fit-ptr' section must have an 'intel-fit' sibling",
str(e.exception))
- def testSymbolsTplSection(self):
- """Test binman can assign symbols embedded in U-Boot TPL in a section"""
- self._SetupSplElf('u_boot_binman_syms')
- self._SetupTplElf('u_boot_binman_syms')
- data = self._DoReadFile('149_symbols_tpl.dts')
- sym_values = struct.pack('<LQLL', 4, 0x1c, 0x34, 4)
+ def _CheckSymbolsTplSection(self, dts, expected_vals):
+ data = self._DoReadFile(dts)
+ sym_values = struct.pack('<LQLL', *expected_vals)
upto1 = 4 + len(U_BOOT_SPL_DATA)
expected1 = tools.GetBytes(0xff, 4) + sym_values + U_BOOT_SPL_DATA[20:]
self.assertEqual(expected1, data[:upto1])
@@ -3323,7 +3320,22 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(expected3, data[upto2:upto3])
expected4 = sym_values + U_BOOT_TPL_DATA[20:]
- self.assertEqual(expected4, data[upto3:])
+ self.assertEqual(expected4, data[upto3:upto3 + len(U_BOOT_TPL_DATA)])
+
+ def testSymbolsTplSection(self):
+ """Test binman can assign symbols embedded in U-Boot TPL in a section"""
+ self._SetupSplElf('u_boot_binman_syms')
+ self._SetupTplElf('u_boot_binman_syms')
+ self._CheckSymbolsTplSection('149_symbols_tpl.dts',
+ [0x04, 0x1c, 0x10 + 0x34, 0x04])
+
+ def testSymbolsTplSectionX86(self):
+ """Test binman can assign symbols in a section with end-at-4gb"""
+ self._SetupSplElf('u_boot_binman_syms_x86')
+ self._SetupTplElf('u_boot_binman_syms_x86')
+ self._CheckSymbolsTplSection('155_symbols_tpl_x86.dts',
+ [0xffffff04, 0xffffff1c, 0xffffff34,
+ 0x04])
def testPackX86RomIfwiSectiom(self):
"""Test that a section can be placed in an IFWI region"""
diff --git a/tools/binman/image_test.py b/tools/binman/image_test.py
index 4004f789b7..10f85d1081 100644
--- a/tools/binman/image_test.py
+++ b/tools/binman/image_test.py
@@ -13,7 +13,7 @@ class TestImage(unittest.TestCase):
def testInvalidFormat(self):
image = Image('name', 'node', test=True)
with self.assertRaises(ValueError) as e:
- image.LookupSymbol('_binman_something_prop_', False, 'msg')
+ image.LookupSymbol('_binman_something_prop_', False, 'msg', 0)
self.assertIn(
"msg: Symbol '_binman_something_prop_' has invalid format",
str(e.exception))
@@ -22,7 +22,7 @@ class TestImage(unittest.TestCase):
image = Image('name', 'node', test=True)
image._entries = {}
with self.assertRaises(ValueError) as e:
- image.LookupSymbol('_binman_type_prop_pname', False, 'msg')
+ image.LookupSymbol('_binman_type_prop_pname', False, 'msg', 0)
self.assertIn("msg: Entry 'type' not found in list ()",
str(e.exception))
@@ -30,7 +30,7 @@ class TestImage(unittest.TestCase):
image = Image('name', 'node', test=True)
image._entries = {}
with capture_sys_output() as (stdout, stderr):
- val = image.LookupSymbol('_binman_type_prop_pname', True, 'msg')
+ val = image.LookupSymbol('_binman_type_prop_pname', True, 'msg', 0)
self.assertEqual(val, None)
self.assertEqual("Warning: msg: Entry 'type' not found in list ()\n",
stderr.getvalue())
@@ -40,5 +40,5 @@ class TestImage(unittest.TestCase):
image = Image('name', 'node', test=True)
image._entries = {'u-boot': 1}
with self.assertRaises(ValueError) as e:
- image.LookupSymbol('_binman_u_boot_prop_bad', False, 'msg')
+ image.LookupSymbol('_binman_u_boot_prop_bad', False, 'msg', 0)
self.assertIn("msg: No such property 'bad", str(e.exception))
diff --git a/tools/binman/test/155_symbols_tpl_x86.dts b/tools/binman/test/155_symbols_tpl_x86.dts
new file mode 100644
index 0000000000..72ca447447
--- /dev/null
+++ b/tools/binman/test/155_symbols_tpl_x86.dts
@@ -0,0 +1,31 @@
+/* SPDX-License-Identifier: GPL-2.0+ */
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ pad-byte = <0xff>;
+ end-at-4gb;
+ size = <0x100>;
+ u-boot-spl {
+ offset = <0xffffff04>;
+ };
+
+ u-boot-spl2 {
+ offset = <0xffffff1c>;
+ type = "u-boot-spl";
+ };
+
+ u-boot {
+ offset = <0xffffff34>;
+ };
+
+ section {
+ u-boot-tpl {
+ type = "u-boot-tpl";
+ };
+ };
+ };
+};
diff --git a/tools/binman/test/Makefile b/tools/binman/test/Makefile
index bdbb009874..e4fd97bb2e 100644
--- a/tools/binman/test/Makefile
+++ b/tools/binman/test/Makefile
@@ -14,10 +14,11 @@ CFLAGS := -march=i386 -m32 -nostdlib -I $(SRC)../../../include \
LDS_UCODE := -T $(SRC)u_boot_ucode_ptr.lds
LDS_BINMAN := -T $(SRC)u_boot_binman_syms.lds
LDS_BINMAN_BAD := -T $(SRC)u_boot_binman_syms_bad.lds
+LDS_BINMAN_X86 := -T $(SRC)u_boot_binman_syms_x86.lds
TARGETS = u_boot_ucode_ptr u_boot_no_ucode_ptr bss_data \
u_boot_binman_syms u_boot_binman_syms.bin u_boot_binman_syms_bad \
- u_boot_binman_syms_size
+ u_boot_binman_syms_size u_boot_binman_syms_x86
all: $(TARGETS)
@@ -36,6 +37,9 @@ u_boot_binman_syms.bin: u_boot_binman_syms
u_boot_binman_syms: CFLAGS += $(LDS_BINMAN)
u_boot_binman_syms: u_boot_binman_syms.c
+u_boot_binman_syms_x86: CFLAGS += $(LDS_BINMAN_X86)
+u_boot_binman_syms_x86: u_boot_binman_syms_x86.c
+
u_boot_binman_syms_bad: CFLAGS += $(LDS_BINMAN_BAD)
u_boot_binman_syms_bad: u_boot_binman_syms_bad.c
diff --git a/tools/binman/test/u_boot_binman_syms_x86.c b/tools/binman/test/u_boot_binman_syms_x86.c
new file mode 120000
index 0000000000..939b2e965f
--- /dev/null
+++ b/tools/binman/test/u_boot_binman_syms_x86.c
@@ -0,0 +1 @@
+u_boot_binman_syms.c \ No newline at end of file
diff --git a/tools/binman/test/u_boot_binman_syms_x86.lds b/tools/binman/test/u_boot_binman_syms_x86.lds
new file mode 100644
index 0000000000..9daf86f833
--- /dev/null
+++ b/tools/binman/test/u_boot_binman_syms_x86.lds
@@ -0,0 +1,30 @@
+/* SPDX-License-Identifier: GPL-2.0+ */
+/*
+ * Copyright (c) 2016 Google, Inc
+ */
+
+OUTPUT_FORMAT("elf32-i386", "elf32-i386", "elf32-i386")
+OUTPUT_ARCH(i386)
+ENTRY(_start)
+
+SECTIONS
+{
+ . = 0xffffff00;
+ _start = .;
+
+ . = ALIGN(4);
+ .text :
+ {
+ __image_copy_start = .;
+ *(.text*)
+ }
+
+ . = ALIGN(4);
+ .binman_sym_table : {
+ __binman_sym_start = .;
+ KEEP(*(SORT(.binman_sym*)));
+ __binman_sym_end = .;
+ }
+ .interp : { *(.interp*) }
+
+}
diff --git a/tools/buildman/board.py b/tools/buildman/board.py
index 2a1d021574..447aaabea8 100644
--- a/tools/buildman/board.py
+++ b/tools/buildman/board.py
@@ -1,6 +1,7 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2012 The Chromium OS Authors.
+from collections import OrderedDict
import re
class Expr:
@@ -120,7 +121,7 @@ class Boards:
Args:
fname: Filename of boards.cfg file
"""
- with open(fname, 'r') as fd:
+ with open(fname, 'r', encoding='utf-8') as fd:
for line in fd:
if line[0] == '#':
continue
@@ -155,7 +156,7 @@ class Boards:
key is board.target
value is board
"""
- board_dict = {}
+ board_dict = OrderedDict()
for board in self._boards:
board_dict[board.target] = board
return board_dict
@@ -166,7 +167,7 @@ class Boards:
Returns:
List of Board objects that are marked selected
"""
- board_dict = {}
+ board_dict = OrderedDict()
for board in self._boards:
if board.build_it:
board_dict[board.target] = board
@@ -259,7 +260,7 @@ class Boards:
due to each argument, arranged by argument.
List of errors found
"""
- result = {}
+ result = OrderedDict()
warnings = []
terms = self._BuildTerms(args)
diff --git a/tools/buildman/bsettings.py b/tools/buildman/bsettings.py
index 03d7439aa5..0b7208da37 100644
--- a/tools/buildman/bsettings.py
+++ b/tools/buildman/bsettings.py
@@ -1,9 +1,9 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2012 The Chromium OS Authors.
-import ConfigParser
+import configparser
import os
-import StringIO
+import io
def Setup(fname=''):
@@ -15,20 +15,20 @@ def Setup(fname=''):
global settings
global config_fname
- settings = ConfigParser.SafeConfigParser()
+ settings = configparser.SafeConfigParser()
if fname is not None:
config_fname = fname
if config_fname == '':
config_fname = '%s/.buildman' % os.getenv('HOME')
if not os.path.exists(config_fname):
- print 'No config file found ~/.buildman\nCreating one...\n'
+ print('No config file found ~/.buildman\nCreating one...\n')
CreateBuildmanConfigFile(config_fname)
- print 'To install tool chains, please use the --fetch-arch option'
+ print('To install tool chains, please use the --fetch-arch option')
if config_fname:
settings.read(config_fname)
def AddFile(data):
- settings.readfp(StringIO.StringIO(data))
+ settings.readfp(io.StringIO(data))
def GetItems(section):
"""Get the items from a section of the config.
@@ -41,7 +41,7 @@ def GetItems(section):
"""
try:
return settings.items(section)
- except ConfigParser.NoSectionError as e:
+ except configparser.NoSectionError as e:
return []
except:
raise
@@ -68,10 +68,10 @@ def CreateBuildmanConfigFile(config_fname):
try:
f = open(config_fname, 'w')
except IOError:
- print "Couldn't create buildman config file '%s'\n" % config_fname
+ print("Couldn't create buildman config file '%s'\n" % config_fname)
raise
- print >>f, '''[toolchain]
+ print('''[toolchain]
# name = path
# e.g. x86 = /opt/gcc-4.6.3-nolibc/x86_64-linux
@@ -93,5 +93,5 @@ openrisc = or1k
# snapper-boards=ENABLE_AT91_TEST=1
# snapper9260=${snapper-boards} BUILD_TAG=442
# snapper9g45=${snapper-boards} BUILD_TAG=443
-'''
+''', file=f)
f.close();
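
For reference, a minimal standalone Python 3 version of the pattern this file
migrates to, using the current configparser names rather than the deprecated
SafeConfigParser/readfp aliases (the sample section content is made up):

    import configparser
    import io

    # Parse buildman-style settings from an in-memory string.
    settings = configparser.ConfigParser()
    settings.read_file(io.StringIO('[toolchain]\nx86 = /opt/gcc/x86_64-linux\n'))
    print(settings.items('toolchain'))  # [('x86', '/opt/gcc/x86_64-linux')]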
diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py
index fbb236676c..cfbe4c26b1 100644
--- a/tools/buildman/builder.py
+++ b/tools/buildman/builder.py
@@ -9,7 +9,7 @@ from datetime import datetime, timedelta
import glob
import os
import re
-import Queue
+import queue
import shutil
import signal
import string
@@ -92,11 +92,10 @@ u-boot/ source directory
"""
# Possible build outcomes
-OUTCOME_OK, OUTCOME_WARNING, OUTCOME_ERROR, OUTCOME_UNKNOWN = range(4)
+OUTCOME_OK, OUTCOME_WARNING, OUTCOME_ERROR, OUTCOME_UNKNOWN = list(range(4))
# Translate a commit subject into a valid filename (and handle unicode)
-trans_valid_chars = string.maketrans('/: ', '---')
-trans_valid_chars = trans_valid_chars.decode('latin-1')
+trans_valid_chars = str.maketrans('/: ', '---')
BASE_CONFIG_FILENAMES = [
'u-boot.cfg', 'u-boot-spl.cfg', 'u-boot-tpl.cfg'
@@ -122,8 +121,8 @@ class Config:
def __hash__(self):
val = 0
for fname in self.config:
- for key, value in self.config[fname].iteritems():
- print key, value
+ for key, value in self.config[fname].items():
+ print(key, value)
val = val ^ hash(key) & hash(value)
return val
@@ -293,8 +292,8 @@ class Builder:
self._re_dtb_warning = re.compile('(.*): Warning .*')
self._re_note = re.compile('(.*):(\d*):(\d*): note: this is the location of the previous.*')
- self.queue = Queue.Queue()
- self.out_queue = Queue.Queue()
+ self.queue = queue.Queue()
+ self.out_queue = queue.Queue()
for i in range(self.num_threads):
t = builderthread.BuilderThread(self, i, incremental,
per_board_out_dir)
@@ -781,7 +780,7 @@ class Builder:
config = {}
environment = {}
- for board in boards_selected.itervalues():
+ for board in boards_selected.values():
outcome = self.GetBuildOutcome(commit_upto, board.target,
read_func_sizes, read_config,
read_environment)
@@ -814,13 +813,13 @@ class Builder:
tconfig = Config(self.config_filenames, board.target)
for fname in self.config_filenames:
if outcome.config:
- for key, value in outcome.config[fname].iteritems():
+ for key, value in outcome.config[fname].items():
tconfig.Add(fname, key, value)
config[board.target] = tconfig
tenvironment = Environment(board.target)
if outcome.environment:
- for key, value in outcome.environment.iteritems():
+ for key, value in outcome.environment.items():
tenvironment.Add(key, value)
environment[board.target] = tenvironment
@@ -1040,12 +1039,12 @@ class Builder:
# We now have a list of image size changes sorted by arch
# Print out a summary of these
- for arch, target_list in arch_list.iteritems():
+ for arch, target_list in arch_list.items():
# Get total difference for each type
totals = {}
for result in target_list:
total = 0
- for name, diff in result.iteritems():
+ for name, diff in result.items():
if name.startswith('_'):
continue
total += diff
@@ -1250,7 +1249,7 @@ class Builder:
if self._show_unknown:
self.AddOutcome(board_selected, arch_list, unknown_boards, '?',
self.col.MAGENTA)
- for arch, target_list in arch_list.iteritems():
+ for arch, target_list in arch_list.items():
Print('%10s: %s' % (arch, target_list))
self._error_lines += 1
if better_err:
@@ -1283,13 +1282,13 @@ class Builder:
environment_minus = {}
environment_change = {}
base = tbase.environment
- for key, value in tenvironment.environment.iteritems():
+ for key, value in tenvironment.environment.items():
if key not in base:
environment_plus[key] = value
- for key, value in base.iteritems():
+ for key, value in base.items():
if key not in tenvironment.environment:
environment_minus[key] = value
- for key, value in base.iteritems():
+ for key, value in base.items():
new_value = tenvironment.environment.get(key)
if new_value and value != new_value:
desc = '%s -> %s' % (value, new_value)
@@ -1342,15 +1341,15 @@ class Builder:
config_minus = {}
config_change = {}
base = tbase.config[name]
- for key, value in tconfig.config[name].iteritems():
+ for key, value in tconfig.config[name].items():
if key not in base:
config_plus[key] = value
all_config_plus[key] = value
- for key, value in base.iteritems():
+ for key, value in base.items():
if key not in tconfig.config[name]:
config_minus[key] = value
all_config_minus[key] = value
- for key, value in base.iteritems():
+ for key, value in base.items():
new_value = tconfig.config.get(key)
if new_value and value != new_value:
desc = '%s -> %s' % (value, new_value)
@@ -1368,7 +1367,7 @@ class Builder:
summary[target] = '\n'.join(lines)
lines_by_target = {}
- for target, lines in summary.iteritems():
+ for target, lines in summary.items():
if lines in lines_by_target:
lines_by_target[lines].append(target)
else:
@@ -1392,7 +1391,7 @@ class Builder:
Print('%s:' % arch)
_OutputConfigInfo(lines)
- for lines, targets in lines_by_target.iteritems():
+ for lines, targets in lines_by_target.items():
if not lines:
continue
Print('%s :' % ' '.join(sorted(targets)))
@@ -1463,7 +1462,7 @@ class Builder:
commits: Selected commits to build
"""
# First work out how many commits we will build
- count = (self.commit_count + self._step - 1) / self._step
+ count = (self.commit_count + self._step - 1) // self._step
self.count = len(board_selected) * count
self.upto = self.warned = self.fail = 0
self._timestamps = collections.deque()
@@ -1566,7 +1565,7 @@ class Builder:
self.ProcessResult(None)
# Create jobs to build all commits for each board
- for brd in board_selected.itervalues():
+ for brd in board_selected.values():
job = builderthread.BuilderJob()
job.board = brd
job.commits = commits
diff --git a/tools/buildman/builderthread.py b/tools/buildman/builderthread.py
index 8a9d47cd5e..570c1f6595 100644
--- a/tools/buildman/builderthread.py
+++ b/tools/buildman/builderthread.py
@@ -28,7 +28,7 @@ def Mkdir(dirname, parents = False):
except OSError as err:
if err.errno == errno.EEXIST:
if os.path.realpath('.') == os.path.realpath(dirname):
- print "Cannot create the current working directory '%s'!" % dirname
+ print("Cannot create the current working directory '%s'!" % dirname)
sys.exit(1)
pass
else:
@@ -291,15 +291,13 @@ class BuilderThread(threading.Thread):
outfile = os.path.join(build_dir, 'log')
with open(outfile, 'w') as fd:
if result.stdout:
- # We don't want unicode characters in log files
- fd.write(result.stdout.decode('UTF-8').encode('ASCII', 'replace'))
+ fd.write(result.stdout)
errfile = self.builder.GetErrFile(result.commit_upto,
result.brd.target)
if result.stderr:
with open(errfile, 'w') as fd:
- # We don't want unicode characters in log files
- fd.write(result.stderr.decode('UTF-8').encode('ASCII', 'replace'))
+ fd.write(result.stderr)
elif os.path.exists(errfile):
os.remove(errfile)
@@ -314,17 +312,17 @@ class BuilderThread(threading.Thread):
else:
fd.write('%s' % result.return_code)
with open(os.path.join(build_dir, 'toolchain'), 'w') as fd:
- print >>fd, 'gcc', result.toolchain.gcc
- print >>fd, 'path', result.toolchain.path
- print >>fd, 'cross', result.toolchain.cross
- print >>fd, 'arch', result.toolchain.arch
+ print('gcc', result.toolchain.gcc, file=fd)
+ print('path', result.toolchain.path, file=fd)
+ print('cross', result.toolchain.cross, file=fd)
+ print('arch', result.toolchain.arch, file=fd)
fd.write('%s' % result.return_code)
# Write out the image and function size information and an objdump
env = result.toolchain.MakeEnvironment(self.builder.full_path)
with open(os.path.join(build_dir, 'env'), 'w') as fd:
for var in sorted(env.keys()):
- print >>fd, '%s="%s"' % (var, env[var])
+ print('%s="%s"' % (var, env[var]), file=fd)
lines = []
for fname in ['u-boot', 'spl/u-boot-spl']:
cmd = ['%snm' % self.toolchain.cross, '--size-sort', fname]
@@ -335,7 +333,7 @@ class BuilderThread(threading.Thread):
nm = self.builder.GetFuncSizesFile(result.commit_upto,
result.brd.target, fname)
with open(nm, 'w') as fd:
- print >>fd, nm_result.stdout,
+ print(nm_result.stdout, end=' ', file=fd)
cmd = ['%sobjdump' % self.toolchain.cross, '-h', fname]
dump_result = command.RunPipe([cmd], capture=True,
@@ -346,7 +344,7 @@ class BuilderThread(threading.Thread):
objdump = self.builder.GetObjdumpFile(result.commit_upto,
result.brd.target, fname)
with open(objdump, 'w') as fd:
- print >>fd, dump_result.stdout,
+ print(dump_result.stdout, end=' ', file=fd)
for line in dump_result.stdout.splitlines():
fields = line.split()
if len(fields) > 5 and fields[1] == '.rodata':
@@ -378,7 +376,7 @@ class BuilderThread(threading.Thread):
sizes = self.builder.GetSizesFile(result.commit_upto,
result.brd.target)
with open(sizes, 'w') as fd:
- print >>fd, '\n'.join(lines)
+ print('\n'.join(lines), file=fd)
# Write out the configuration files, with a special case for SPL
for dirname in ['', 'spl', 'tpl']:
diff --git a/tools/buildman/buildman.py b/tools/buildman/buildman.py
index f17aa15e7c..30a8690f93 100755
--- a/tools/buildman/buildman.py
+++ b/tools/buildman/buildman.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
#
# Copyright (c) 2012 The Chromium OS Authors.
@@ -6,6 +6,8 @@
"""See README for more information"""
+from __future__ import print_function
+
import multiprocessing
import os
import re
@@ -46,11 +48,11 @@ def RunTests(skip_net_tests):
suite = unittest.TestLoader().loadTestsFromTestCase(module)
suite.run(result)
- print result
+ print(result)
for test, err in result.errors:
- print err
+ print(err)
for test, err in result.failures:
- print err
+ print(err)
options, args = cmdline.ParseArgs()
diff --git a/tools/buildman/control.py b/tools/buildman/control.py
index 9787b86747..216012d001 100644
--- a/tools/buildman/control.py
+++ b/tools/buildman/control.py
@@ -30,7 +30,7 @@ def GetActionSummary(is_summary, commits, selected, options):
"""
if commits:
count = len(commits)
- count = (count + options.step - 1) / options.step
+ count = (count + options.step - 1) // options.step
commit_str = '%d commit%s' % (count, GetPlural(count))
else:
commit_str = 'current source'
@@ -59,31 +59,31 @@ def ShowActions(series, why_selected, boards_selected, builder, options,
board_warnings: List of warnings obtained from board selected
"""
col = terminal.Color()
- print 'Dry run, so not doing much. But I would do this:'
- print
+ print('Dry run, so not doing much. But I would do this:')
+ print()
if series:
commits = series.commits
else:
commits = None
- print GetActionSummary(False, commits, boards_selected,
- options)
- print 'Build directory: %s' % builder.base_dir
+ print(GetActionSummary(False, commits, boards_selected,
+ options))
+ print('Build directory: %s' % builder.base_dir)
if commits:
for upto in range(0, len(series.commits), options.step):
commit = series.commits[upto]
- print ' ', col.Color(col.YELLOW, commit.hash[:8], bright=False),
- print commit.subject
- print
+ print(' ', col.Color(col.YELLOW, commit.hash[:8], bright=False), end=' ')
+ print(commit.subject)
+ print()
for arg in why_selected:
if arg != 'all':
- print arg, ': %d boards' % len(why_selected[arg])
+ print(arg, ': %d boards' % len(why_selected[arg]))
if options.verbose:
- print ' %s' % ' '.join(why_selected[arg])
- print ('Total boards to build for each commit: %d\n' %
- len(why_selected['all']))
+ print(' %s' % ' '.join(why_selected[arg]))
+ print(('Total boards to build for each commit: %d\n' %
+ len(why_selected['all'])))
if board_warnings:
for warning in board_warnings:
- print col.Color(col.YELLOW, warning)
+ print(col.Color(col.YELLOW, warning))
def CheckOutputDir(output_dir):
"""Make sure that the output directory is not within the current directory
@@ -146,17 +146,17 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
if options.fetch_arch:
if options.fetch_arch == 'list':
sorted_list = toolchains.ListArchs()
- print col.Color(col.BLUE, 'Available architectures: %s\n' %
- ' '.join(sorted_list))
+ print(col.Color(col.BLUE, 'Available architectures: %s\n' %
+ ' '.join(sorted_list)))
return 0
else:
fetch_arch = options.fetch_arch
if fetch_arch == 'all':
fetch_arch = ','.join(toolchains.ListArchs())
- print col.Color(col.CYAN, '\nDownloading toolchains: %s' %
- fetch_arch)
+ print(col.Color(col.CYAN, '\nDownloading toolchains: %s' %
+ fetch_arch))
for arch in fetch_arch.split(','):
- print
+ print()
ret = toolchains.FetchAndInstall(arch)
if ret:
return ret
@@ -167,7 +167,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
toolchains.Scan(options.list_tool_chains and options.verbose)
if options.list_tool_chains:
toolchains.List()
- print
+ print()
return 0
# Work out how many commits to build. We want to build everything on the
@@ -191,7 +191,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
sys.exit(col.Color(col.RED, "Range '%s' has no commits" %
options.branch))
if msg:
- print col.Color(col.YELLOW, msg)
+ print(col.Color(col.YELLOW, msg))
count += 1 # Build upstream commit also
if not count:
@@ -268,7 +268,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
options.threads = min(multiprocessing.cpu_count(), len(selected))
if not options.jobs:
options.jobs = max(1, (multiprocessing.cpu_count() +
- len(selected) - 1) / len(selected))
+ len(selected) - 1) // len(selected))
if not options.step:
options.step = len(series.commits) - 1
diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py
index f90b8ea7f5..4c3d497294 100644
--- a/tools/buildman/func_test.py
+++ b/tools/buildman/func_test.py
@@ -270,7 +270,7 @@ class TestFunctional(unittest.TestCase):
stdout=''.join(commit_log[:count]))
# Not handled, so abort
- print 'git log', args
+ print('git log', args)
sys.exit(1)
def _HandleCommandGitConfig(self, args):
@@ -286,7 +286,7 @@ class TestFunctional(unittest.TestCase):
stdout='refs/heads/master\n')
# Not handled, so abort
- print 'git config', args
+ print('git config', args)
sys.exit(1)
def _HandleCommandGit(self, in_args):
@@ -320,7 +320,7 @@ class TestFunctional(unittest.TestCase):
return command.CommandResult(return_code=0)
# Not handled, so abort
- print 'git', git_args, sub_cmd, args
+ print('git', git_args, sub_cmd, args)
sys.exit(1)
def _HandleCommandNm(self, args):
@@ -351,7 +351,7 @@ class TestFunctional(unittest.TestCase):
if pipe_list[1] == ['wc', '-l']:
wc = True
else:
- print 'invalid pipe', kwargs
+ print('invalid pipe', kwargs)
sys.exit(1)
cmd = pipe_list[0][0]
args = pipe_list[0][1:]
@@ -371,7 +371,7 @@ class TestFunctional(unittest.TestCase):
if not result:
# Not handled, so abort
- print 'unknown command', kwargs
+ print('unknown command', kwargs)
sys.exit(1)
if wc:
@@ -404,14 +404,14 @@ class TestFunctional(unittest.TestCase):
return command.CommandResult(return_code=0)
# Not handled, so abort
- print 'make', stage
+ print('make', stage)
sys.exit(1)
# Example function to print output lines
def print_lines(self, lines):
- print len(lines)
+ print(len(lines))
for line in lines:
- print line
+ print(line)
#self.print_lines(terminal.GetPrintTestLines())
def testNoBoards(self):
diff --git a/tools/buildman/test.py b/tools/buildman/test.py
index ed99b9375c..b4e28d6867 100644
--- a/tools/buildman/test.py
+++ b/tools/buildman/test.py
@@ -212,11 +212,11 @@ class TestBuild(unittest.TestCase):
self.assertEqual(lines[1].text, '02: %s' % commits[1][1])
col = terminal.Color()
- self.assertSummary(lines[2].text, 'sandbox', 'w+', ['board4'],
+ self.assertSummary(lines[2].text, 'arm', 'w+', ['board1'],
outcome=OUTCOME_WARN)
- self.assertSummary(lines[3].text, 'arm', 'w+', ['board1'],
+ self.assertSummary(lines[3].text, 'powerpc', 'w+', ['board2', 'board3'],
outcome=OUTCOME_WARN)
- self.assertSummary(lines[4].text, 'powerpc', 'w+', ['board2', 'board3'],
+ self.assertSummary(lines[4].text, 'sandbox', 'w+', ['board4'],
outcome=OUTCOME_WARN)
# Second commit: The warnings should be listed
@@ -226,10 +226,10 @@ class TestBuild(unittest.TestCase):
# Third commit: Still fails
self.assertEqual(lines[6].text, '03: %s' % commits[2][1])
- self.assertSummary(lines[7].text, 'sandbox', '+', ['board4'])
- self.assertSummary(lines[8].text, 'arm', '', ['board1'],
+ self.assertSummary(lines[7].text, 'arm', '', ['board1'],
outcome=OUTCOME_OK)
- self.assertSummary(lines[9].text, 'powerpc', '+', ['board2', 'board3'])
+ self.assertSummary(lines[8].text, 'powerpc', '+', ['board2', 'board3'])
+ self.assertSummary(lines[9].text, 'sandbox', '+', ['board4'])
# Expect a compiler error
self.assertEqual(lines[10].text, '+%s' %
@@ -237,8 +237,6 @@ class TestBuild(unittest.TestCase):
# Fourth commit: Compile errors are fixed, just have warning for board3
self.assertEqual(lines[11].text, '04: %s' % commits[3][1])
- self.assertSummary(lines[12].text, 'sandbox', 'w+', ['board4'],
- outcome=OUTCOME_WARN)
expect = '%10s: ' % 'powerpc'
expect += ' ' + col.Color(col.GREEN, '')
expect += ' '
@@ -246,7 +244,9 @@ class TestBuild(unittest.TestCase):
expect += ' ' + col.Color(col.YELLOW, 'w+')
expect += ' '
expect += col.Color(col.YELLOW, ' %s' % 'board3')
- self.assertEqual(lines[13].text, expect)
+ self.assertEqual(lines[12].text, expect)
+ self.assertSummary(lines[13].text, 'sandbox', 'w+', ['board4'],
+ outcome=OUTCOME_WARN)
# Compile error fixed
self.assertEqual(lines[14].text, '-%s' %
@@ -259,9 +259,9 @@ class TestBuild(unittest.TestCase):
# Fifth commit
self.assertEqual(lines[16].text, '05: %s' % commits[4][1])
- self.assertSummary(lines[17].text, 'sandbox', '+', ['board4'])
- self.assertSummary(lines[18].text, 'powerpc', '', ['board3'],
+ self.assertSummary(lines[17].text, 'powerpc', '', ['board3'],
outcome=OUTCOME_OK)
+ self.assertSummary(lines[18].text, 'sandbox', '+', ['board4'])
# The second line of errors[3] is a duplicate, so buildman will drop it
expect = errors[3].rstrip().split('\n')
diff --git a/tools/buildman/toolchain.py b/tools/buildman/toolchain.py
index a65737fdf8..cc26e2ede5 100644
--- a/tools/buildman/toolchain.py
+++ b/tools/buildman/toolchain.py
@@ -4,18 +4,19 @@
import re
import glob
-from HTMLParser import HTMLParser
+from html.parser import HTMLParser
import os
import sys
import tempfile
-import urllib2
+import urllib.request, urllib.error, urllib.parse
import bsettings
import command
import terminal
+import tools
(PRIORITY_FULL_PREFIX, PRIORITY_PREFIX_GCC, PRIORITY_PREFIX_GCC_PATH,
- PRIORITY_CALC) = range(4)
+ PRIORITY_CALC) = list(range(4))
# Simple class to collect links from a page
class MyHTMLParser(HTMLParser):
@@ -100,15 +101,15 @@ class Toolchain:
raise_on_error=False)
self.ok = result.return_code == 0
if verbose:
- print 'Tool chain test: ',
+ print('Tool chain test: ', end=' ')
if self.ok:
- print "OK, arch='%s', priority %d" % (self.arch,
- self.priority)
+ print("OK, arch='%s', priority %d" % (self.arch,
+ self.priority))
else:
- print 'BAD'
- print 'Command: ', cmd
- print result.stdout
- print result.stderr
+ print('BAD')
+ print('Command: ', cmd)
+ print(result.stdout)
+ print(result.stderr)
else:
self.ok = True
@@ -138,7 +139,7 @@ class Toolchain:
value = ''
for name, value in bsettings.GetItems('toolchain-wrapper'):
if not value:
- print "Warning: Wrapper not found"
+ print("Warning: Wrapper not found")
if value:
value = value + ' '
@@ -227,11 +228,11 @@ class Toolchains:
"""
toolchains = bsettings.GetItems('toolchain')
if show_warning and not toolchains:
- print ("Warning: No tool chains. Please run 'buildman "
+ print(("Warning: No tool chains. Please run 'buildman "
"--fetch-arch all' to download all available toolchains, or "
"add a [toolchain] section to your buildman config file "
"%s. See README for details" %
- bsettings.config_fname)
+ bsettings.config_fname))
paths = []
for name, value in toolchains:
@@ -272,10 +273,10 @@ class Toolchains:
if add_it:
self.toolchains[toolchain.arch] = toolchain
elif verbose:
- print ("Toolchain '%s' at priority %d will be ignored because "
+ print(("Toolchain '%s' at priority %d will be ignored because "
"another toolchain for arch '%s' has priority %d" %
(toolchain.gcc, toolchain.priority, toolchain.arch,
- self.toolchains[toolchain.arch].priority))
+ self.toolchains[toolchain.arch].priority)))
def ScanPath(self, path, verbose):
"""Scan a path for a valid toolchain
@@ -289,9 +290,9 @@ class Toolchains:
fnames = []
for subdir in ['.', 'bin', 'usr/bin']:
dirname = os.path.join(path, subdir)
- if verbose: print " - looking in '%s'" % dirname
+ if verbose: print(" - looking in '%s'" % dirname)
for fname in glob.glob(dirname + '/*gcc'):
- if verbose: print " - found '%s'" % fname
+ if verbose: print(" - found '%s'" % fname)
fnames.append(fname)
return fnames
@@ -321,9 +322,9 @@ class Toolchains:
Args:
verbose: True to print out progress information
"""
- if verbose: print 'Scanning for tool chains'
+ if verbose: print('Scanning for tool chains')
for name, value in self.prefixes:
- if verbose: print " - scanning prefix '%s'" % value
+ if verbose: print(" - scanning prefix '%s'" % value)
if os.path.exists(value):
self.Add(value, True, verbose, PRIORITY_FULL_PREFIX, name)
continue
@@ -335,10 +336,10 @@ class Toolchains:
for f in fname_list:
self.Add(f, True, verbose, PRIORITY_PREFIX_GCC_PATH, name)
if not fname_list:
- raise ValueError, ("No tool chain found for prefix '%s'" %
+ raise ValueError("No tool chain found for prefix '%s'" %
value)
for path in self.paths:
- if verbose: print " - scanning path '%s'" % path
+ if verbose: print(" - scanning path '%s'" % path)
fnames = self.ScanPath(path, verbose)
for fname in fnames:
self.Add(fname, True, verbose)
@@ -346,13 +347,13 @@ class Toolchains:
def List(self):
"""List out the selected toolchains for each architecture"""
col = terminal.Color()
- print col.Color(col.BLUE, 'List of available toolchains (%d):' %
- len(self.toolchains))
+ print(col.Color(col.BLUE, 'List of available toolchains (%d):' %
+ len(self.toolchains)))
if len(self.toolchains):
- for key, value in sorted(self.toolchains.iteritems()):
- print '%-10s: %s' % (key, value.gcc)
+ for key, value in sorted(self.toolchains.items()):
+ print('%-10s: %s' % (key, value.gcc))
else:
- print 'None'
+ print('None')
def Select(self, arch):
"""Returns the toolchain for a given architecture
@@ -370,7 +371,7 @@ class Toolchains:
return self.toolchains[alias]
if not arch in self.toolchains:
- raise ValueError, ("No tool chain found for arch '%s'" % arch)
+ raise ValueError("No tool chain found for arch '%s'" % arch)
return self.toolchains[arch]
def ResolveReferences(self, var_dict, args):
@@ -464,9 +465,9 @@ class Toolchains:
links = []
for version in versions:
url = '%s/%s/%s/' % (base, arch, version)
- print 'Checking: %s' % url
- response = urllib2.urlopen(url)
- html = response.read()
+ print('Checking: %s' % url)
+ response = urllib.request.urlopen(url)
+ html = tools.ToString(response.read())
parser = MyHTMLParser(fetch_arch)
parser.feed(html)
if fetch_arch == 'list':
@@ -488,14 +489,14 @@ class Toolchains:
Full path to the downloaded archive file in that directory,
or None if there was an error while downloading
"""
- print 'Downloading: %s' % url
+ print('Downloading: %s' % url)
leaf = url.split('/')[-1]
tmpdir = tempfile.mkdtemp('.buildman')
- response = urllib2.urlopen(url)
+ response = urllib.request.urlopen(url)
fname = os.path.join(tmpdir, leaf)
fd = open(fname, 'wb')
meta = response.info()
- size = int(meta.getheaders('Content-Length')[0])
+ size = int(meta.get('Content-Length'))
done = 0
block_size = 1 << 16
status = ''
@@ -504,19 +505,19 @@ class Toolchains:
while True:
buffer = response.read(block_size)
if not buffer:
- print chr(8) * (len(status) + 1), '\r',
+ print(chr(8) * (len(status) + 1), '\r', end=' ')
break
done += len(buffer)
fd.write(buffer)
- status = r'%10d MiB [%3d%%]' % (done / 1024 / 1024,
- done * 100 / size)
+ status = r'%10d MiB [%3d%%]' % (done // 1024 // 1024,
+ done * 100 // size)
status = status + chr(8) * (len(status) + 1)
- print status,
+ print(status, end=' ')
sys.stdout.flush()
fd.close()
if done != size:
- print 'Error, failed to download'
+ print('Error, failed to download')
os.remove(fname)
fname = None
return tmpdir, fname
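
A minimal Python 3 sketch of the download pattern the hunk above switches to,
using urllib.request and reading Content-Length via .get() (the URL below is a
placeholder, not a real toolchain location):

    import urllib.request

    url = 'https://example.com/toolchain.tar.xz'  # placeholder URL
    response = urllib.request.urlopen(url)
    # In Python 3 the header object has no getheaders(); use .get() instead.
    size = int(response.info().get('Content-Length', 0))
    data = response.read()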
@@ -565,11 +566,11 @@ class Toolchains:
"""
# Fist get the URL for this architecture
col = terminal.Color()
- print col.Color(col.BLUE, "Downloading toolchain for arch '%s'" % arch)
+ print(col.Color(col.BLUE, "Downloading toolchain for arch '%s'" % arch))
url = self.LocateArchUrl(arch)
if not url:
- print ("Cannot find toolchain for arch '%s' - use 'list' to list" %
- arch)
+ print(("Cannot find toolchain for arch '%s' - use 'list' to list" %
+ arch))
return 2
home = os.environ['HOME']
dest = os.path.join(home, '.buildman-toolchains')
@@ -580,28 +581,28 @@ class Toolchains:
tmpdir, tarfile = self.Download(url)
if not tarfile:
return 1
- print col.Color(col.GREEN, 'Unpacking to: %s' % dest),
+ print(col.Color(col.GREEN, 'Unpacking to: %s' % dest), end=' ')
sys.stdout.flush()
path = self.Unpack(tarfile, dest)
os.remove(tarfile)
os.rmdir(tmpdir)
- print
+ print()
# Check that the toolchain works
- print col.Color(col.GREEN, 'Testing')
+ print(col.Color(col.GREEN, 'Testing'))
dirpath = os.path.join(dest, path)
compiler_fname_list = self.ScanPath(dirpath, True)
if not compiler_fname_list:
- print 'Could not locate C compiler - fetch failed.'
+ print('Could not locate C compiler - fetch failed.')
return 1
if len(compiler_fname_list) != 1:
- print col.Color(col.RED, 'Warning, ambiguous toolchains: %s' %
- ', '.join(compiler_fname_list))
+ print(col.Color(col.RED, 'Warning, ambiguous toolchains: %s' %
+ ', '.join(compiler_fname_list)))
toolchain = Toolchain(compiler_fname_list[0], True, True)
# Make sure that it will be found by buildman
if not self.TestSettingsHasPath(dirpath):
- print ("Adding 'download' to config file '%s'" %
- bsettings.config_fname)
+ print(("Adding 'download' to config file '%s'" %
+ bsettings.config_fname))
bsettings.SetItem('toolchain', 'download', '%s/*/*' % dest)
return 0
diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py
index 514e0dd4a3..b3596a5918 100755
--- a/tools/dtoc/dtoc.py
+++ b/tools/dtoc/dtoc.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
#
# Copyright (C) 2016 Google, Inc
diff --git a/tools/dtoc/fdt.py b/tools/dtoc/fdt.py
index 6770be79fb..1b7b730359 100644
--- a/tools/dtoc/fdt.py
+++ b/tools/dtoc/fdt.py
@@ -56,9 +56,6 @@ def BytesToValue(data):
is_string = False
break
for ch in string:
- # Handle Python 2 treating bytes as str
- if type(ch) == str:
- ch = ord(ch)
if ch < 32 or ch > 127:
is_string = False
break
@@ -66,15 +63,9 @@ def BytesToValue(data):
is_string = False
if is_string:
if count == 1:
- if sys.version_info[0] >= 3: # pragma: no cover
- return TYPE_STRING, strings[0].decode()
- else:
- return TYPE_STRING, strings[0]
+ return TYPE_STRING, strings[0].decode()
else:
- if sys.version_info[0] >= 3: # pragma: no cover
- return TYPE_STRING, [s.decode() for s in strings[:-1]]
- else:
- return TYPE_STRING, strings[:-1]
+ return TYPE_STRING, [s.decode() for s in strings[:-1]]
if size % 4:
if size == 1:
return TYPE_BYTE, tools.ToChar(data[0])
@@ -415,8 +406,8 @@ class Node:
prop_name: Name of property to set
val: String value to set (will be \0-terminated in DT)
"""
- if sys.version_info[0] >= 3: # pragma: no cover
- val = bytes(val, 'utf-8')
+ if type(val) == str:
+ val = val.encode('utf-8')
self._CheckProp(prop_name).props[prop_name].SetData(val + b'\0')
def AddString(self, prop_name, val):
diff --git a/tools/dtoc/test_dtoc.py b/tools/dtoc/test_dtoc.py
index b915b27856..d733b70655 100644..100755
--- a/tools/dtoc/test_dtoc.py
+++ b/tools/dtoc/test_dtoc.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2012 The Chromium OS Authors.
#
diff --git a/tools/dtoc/test_fdt.py b/tools/dtoc/test_fdt.py
index 028c8cbaa8..3316757e61 100755
--- a/tools/dtoc/test_fdt.py
+++ b/tools/dtoc/test_fdt.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2018 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
diff --git a/tools/img2brec.sh b/tools/img2brec.sh
deleted file mode 100755
index 0fcdba27d4..0000000000
--- a/tools/img2brec.sh
+++ /dev/null
@@ -1,388 +0,0 @@
-#!/bin/sh
-
-# This script converts binary files (u-boot.bin) into so called
-# bootstrap records that are accepted by Motorola's MC9328MX1/L
-# (a.k.a. DragaonBall i.MX) in "Bootstrap Mode"
-#
-# The code for the SynchFlash programming routines is taken from
-# Bootloader\Bin\SyncFlash\programBoot_b.txt contained in
-# Motorolas LINUX_BSP_0_3_8.tar.gz
-#
-# The script could easily extended for AMD flash routines.
-#
-# 2004-06-23 - steven.scholz@imc-berlin.de
-
-#################################################################################
-# From the posting to the U-Boot-Users mailing list, 23 Jun 2004:
-# ===============================================================
-# I just hacked a simple script that converts u-boot.bin into a text file
-# containg processor init code, SynchFlash programming code and U-Boot data in
-# form of so called b-records.
-#
-# This can be used to programm U-Boot into (Synch)Flash using the Bootstrap
-# Mode of the MC9328MX1/L
-#
-# 0AFE1F3410202E2E2E000000002073756363656564/
-# 0AFE1F44102E0A0000206661696C656420210A0000/
-# 0AFE100000
-# ...
-# MX1ADS Sync-flash Programming Utility v0.5 2002/08/21
-#
-# Source address (stored in 0x0AFE0000): 0x0A000000
-# Target address (stored in 0x0AFE0004): 0x0C000000
-# Size (stored in 0x0AFE0008): 0x0001A320
-#
-# Press any key to start programming ...
-# Erasing ...
-# Blank checking ...
-# Programming ...
-# Verifying flash ... succeed.
-#
-# Programming finished.
-#
-# So no need for a BDI2000 anymore... ;-)
-#
-# This is working on my MX1ADS eval board. Hope this could be useful for
-# someone.
-#################################################################################
-
-if [ "$#" -lt 1 -o "$#" -gt 2 ] ; then
- echo "Usage: $0 infile [outfile]" >&2
- echo " $0 u-boot.bin [u-boot.brec]" >&2
- exit 1
-fi
-
-if [ "$#" -ge 1 ] ; then
- INFILE=$1
-fi
-
-if [ ! -f $INFILE ] ; then
- echo "Error: file '$INFILE' does not exist." >&2
- exit 1
-fi
-
-FILESIZE=`filesize $INFILE`
-
-output_init()
-{
-echo "\
-********************************************
-* Initialize I/O Pad Driving Strength *
-********************************************
-0021B80CC4000003AB
-********************************************
-* Initialize SDRAM *
-********************************************
-00221000C492120200 ; pre-charge command
-08200000E4 ; special read
-
-00221000C4A2120200 ; auto-refresh command
-08000000E4 ; 8 special read
-08000000E4 ; 8 special read
-08000000E4 ; 8 special read
-08000000E4 ; 8 special read
-08000000E4 ; 8 special read
-08000000E4 ; 8 special read
-08000000E4 ; 8 special read
-08000000E4 ; 8 special read
-
-00221000C4B2120200 ; set mode register
-08111800E4 ; special read
-
-00221000C482124200 ; set normal mode
- "
-}
-
-output_uboot()
-{
-echo "\
-********************************************
-* U-Boot image as bootstrap records *
-* will be stored in SDRAM at 0x0A000000 *
-********************************************
- "
-
-cat $INFILE | \
-hexdump -v -e "\"0A0%05.5_ax10\" 16/1 \"%02x\"\"\r\n\"" | \
-tr [:lower:] [:upper:]
-}
-
-output_flashprog()
-{
-echo "\
-********************************************
-* Address of arguments to flashProg *
-* ---------------------------------------- *
-* Source : 0x0A000000 *
-* Destination : 0x0C000000 * "
-
-# get the real size of the U-Boot image
-printf "* Size : 0x%08X *\r\n" $FILESIZE
-printf "********************************************\r\n"
-printf "0AFE0000CC0A0000000C000000%08X\r\n" $FILESIZE
-
-#;0AFE0000CC0A0000000C00000000006000
-
-echo "\
-********************************************
-* Flash Program *
-********************************************
-0AFE10001008D09FE5AC0000EA00F0A0E1A42DFE0A
-0AFE1010100080FE0A0DC0A0E100D82DE904B04CE2
-0AFE1020109820A0E318309FE5003093E5033082E0
-0AFE103010003093E5013003E2FF3003E20300A0E1
-0AFE10401000A81BE9A01DFE0A0DC0A0E100D82DE9
-0AFE10501004B04CE204D04DE20030A0E10D304BE5
-0AFE1060109820A0E330309FE5003093E5033082E0
-0AFE107010003093E5013903E2000053E3F7FFFF0A
-0AFE1080104020A0E310309FE5003093E5032082E0
-0AFE1090100D305BE5003082E500A81BE9A01DFE0A
-0AFE10A0100DC0A0E100D82DE904B04CE20000A0E1
-0AFE10B010D7FFFFEB0030A0E1FF3003E2000053E3
-0AFE10C010FAFFFF0A10309FE5003093E5003093E5
-0AFE10D010FF3003E20300A0E100A81BE9A01DFE0A
-0AFE10E0100DC0A0E100D82DE904B04CE204D04DE2
-0AFE10F0100030A0E10D304BE50D305BE52332A0E1
-0AFE1100100E304BE50E305BE5090053E30300009A
-0AFE1110100E305BE5373083E20E304BE5020000EA
-0AFE1120100E305BE5303083E20E304BE50E305BE5
-0AFE1130100300A0E1C3FFFFEB0D305BE50F3003E2
-0AFE1140100E304BE50E305BE5090053E30300009A
-0AFE1150100E305BE5373083E20E304BE5020000EA
-0AFE1160100E305BE5303083E20E304BE50E305BE5
-0AFE1170100300A0E1B3FFFFEB00A81BE90DC0A0E1
-0AFE11801000D82DE904B04CE21CD04DE210000BE5
-0AFE11901014100BE518200BE588009FE5E50200EB
-0AFE11A01010301BE51C300BE514301BE520300BE5
-0AFE11B0100030A0E324300BE524201BE518301BE5
-0AFE11C010030052E10000003A120000EA1C004BE2
-0AFE11D010002090E520104BE2003091E500C093E5
-0AFE11E010043083E2003081E5003092E5042082E2
-0AFE11F010002080E50C0053E10200000A0030A0E3
-0AFE12001028300BE5050000EA24301BE5043083E2
-0AFE12101024300BE5E7FFFFEA0130A0E328300BE5
-0AFE12201028001BE500A81BE9E81EFE0A0DC0A0E1
-0AFE12301000D82DE904B04CE214D04DE210000BE5
-0AFE12401014100BE56C009FE5BA0200EB10301BE5
-0AFE12501018300BE50030A0E31C300BE51C201BE5
-0AFE12601014301BE5030052E10000003A0D0000EA
-0AFE12701018304BE2002093E5001092E5042082E2
-0AFE128010002083E5010071E30200000A0030A0E3
-0AFE12901020300BE5050000EA1C301BE5043083E2
-0AFE12A0101C300BE5ECFFFFEA0130A0E320300BE5
-0AFE12B01020001BE500A81BE9001FFE0A0DC0A0E1
-0AFE12C01000D82DE904B04CE224D04DE20130A0E3
-0AFE12D01024300BE5A4229FE58139A0E3023A83E2
-0AFE12E010003082E59820A0E390329FE5003093E5
-0AFE12F010033082E0003093E5023903E2000053E3
-0AFE1300100300001A74229FE58139A0E3033A83E2
-0AFE131010003082E568029FE5860200EBAF36A0E3
-0AFE1320100E3883E2003093E510300BE554029FE5
-0AFE133010800200EB10301BE5233CA0E1FF3003E2
-0AFE1340100300A0E165FFFFEB10301BE52338A0E1
-0AFE135010FF3003E20300A0E160FFFFEB10301BE5
-0AFE1360102334A0E1FF3003E20300A0E15BFFFFEB
-0AFE13701010305BE50300A0E158FFFFEB0A00A0E3
-0AFE13801030FFFFEB0D00A0E32EFFFFEBAF36A0E3
-0AFE1390100E3883E2043083E2003093E514300BE5
-0AFE13A010E4019FE5630200EB14301BE5233CA0E1
-0AFE13B010FF3003E20300A0E148FFFFEB14301BE5
-0AFE13C0102338A0E1FF3003E20300A0E143FFFFEB
-0AFE13D01014301BE52334A0E1FF3003E20300A0E1
-0AFE13E0103EFFFFEB14305BE50300A0E13BFFFFEB
-0AFE13F0100A00A0E313FFFFEB0D00A0E311FFFFEB
-0AFE140010AF36A0E30E3883E2083083E2003093E5
-0AFE14101018300BE574019FE5460200EB18301BE5
-0AFE142010233CA0E1FF3003E20300A0E12BFFFFEB
-0AFE14301018301BE52338A0E1FF3003E20300A0E1
-0AFE14401026FFFFEB18301BE52334A0E1FF3003E2
-0AFE1450100300A0E121FFFFEB18305BE50300A0E1
-0AFE1460101EFFFFEB0A00A0E3F6FEFFEB0D00A0E3
-0AFE147010F4FEFFEBE6FEFFEB0030A0E1FF3003E2
-0AFE148010000053E30000001A020000EA03FFFFEB
-0AFE1490102D004BE5F6FFFFEAF4009FE5250200EB
-0AFE14A010FEFEFFEB2D004BE5CD0000EBC00000EB
-0AFE14B010E0009FE51F0200EB18301BE528300BE5
-0AFE14C01014301BE52C300BE52C001BE5100100EB
-0AFE14D01028301BE5013643E228300BE52C301BE5
-0AFE14E010013683E22C300BE528301BE5000053E3
-0AFE14F010F4FFFFCAAE0000EB14001BE518101BE5
-0AFE15001049FFFFEB0030A0E1FF3003E2000053E3
-0AFE151010E6FFFF0A80009FE5060200EB10001BE5
-0AFE15201014101BE518201BE5D00000EB10001BE5
-0AFE15301014101BE518201BE50FFFFFEB0030A0E1
-0AFE154010FF3003E2000053E30200000A4C009FE5
-0AFE155010F80100EB010000EA44009FE5F50100EB
-0AFE156010930000EB3C009FE5F20100EB0000A0E3
-0AFE157010A4FEFFEB0030A0E30300A0E100A81BE9
-0AFE158010A01DFE0AA41DFE0AE01DFE0A0C1EFE0A
-0AFE159010381EFE0A641EFE0A181FFE0A281FFE0A
-0AFE15A0103C1FFE0A481FFE0AB41EFE0A0DC0A0E1
-0AFE15B01000D82DE904B04CE204D04DE210000BE5
-0AFE15C01010301BE5013043E210300BE5010073E3
-0AFE15D010FAFFFF1A00A81BE90DC0A0E100D82DE9
-0AFE15E01004B04CE208D04DE210000BE510301BE5
-0AFE15F01014300BE514301BE50300A0E100A81BE9
-0AFE1600100DC0A0E100D82DE904B04CE204D04DE2
-0AFE1610102228A0E3012A82E2042082E2E134A0E3
-0AFE162010023883E2033C83E2003082E50333A0E3
-0AFE163010053983E2003093E510300BE500A81BE9
-0AFE1640100DC0A0E100D82DE904B04CE204D04DE2
-0AFE1650102228A0E3012A82E2042082E29134A0E3
-0AFE166010023883E2033C83E2003082E5C136A0E3
-0AFE167010003093E510300BE52228A0E3012A82E2
-0AFE168010042082E2E134A0E3023883E2033C83E2
-0AFE169010003082E50333A0E3073983E20020A0E3
-0AFE16A010002083E52228A0E3012A82E2042082E2
-0AFE16B0108134A0E3023883E2033C83E2003082E5
-0AFE16C0100333A0E3003093E510300BE5CBFFFFEB
-0AFE16D01010301BE50300A0E100A81BE90DC0A0E1
-0AFE16E01000D82DE904B04CE208D04DE2D3FFFFEB
-0AFE16F0100030A0E110300BE510301BE5023503E2
-0AFE170010000053E30500000A10301BE5073703E2
-0AFE171010000053E30100000A10001BE5ADFFFFEB
-0AFE17201010301BE5803003E2000053E30500000A
-0AFE17301010301BE51C3003E2000053E30100000A
-0AFE17401010001BE5A3FFFFEB10201BE50235A0E3
-0AFE175010803083E2030052E10200001A0130A0E3
-0AFE17601014300BE5010000EA0030A0E314300BE5
-0AFE17701014001BE500A81BE90DC0A0E100D82DE9
-0AFE17801004B04CE204D04DE22228A0E3012A82E2
-0AFE179010042082E29134A0E3023883E2033C83E2
-0AFE17A010003082E5C136A0E3003093E510300BE5
-0AFE17B01000A81BE90DC0A0E100D82DE904B04CE2
-0AFE17C010ECFFFFEB2228A0E3012A82E2042082E2
-0AFE17D0108134A0E3023883E2033C83E2003082E5
-0AFE17E01000A81BE90DC0A0E100D82DE904B04CE2
-0AFE17F01004D04DE22228A0E3012A82E2042082E2
-0AFE1800102238A0E3013A83E2043083E2003093E5
-0AFE181010023183E3003082E52228A0E3012A82E2
-0AFE1820102238A0E3013A83E2003093E5023183E3
-0AFE183010003082E5FA0FA0E35BFFFFEB2228A0E3
-0AFE184010012A82E2042082E2B134A0E3023883E2
-0AFE185010033C83E2003082E50333A0E3233983E2
-0AFE186010033B83E2003093E510300BE500A81BE9
-0AFE1870100DC0A0E100D82DE904B04CE21CD04DE2
-0AFE18801010000BE514100BE518200BE50030A0E3
-0AFE1890101C300BE51C201BE518301BE5030052E1
-0AFE18A0100000003A190000EAB2FFFFEB2228A0E3
-0AFE18B010012A82E2042082E2F134A0E3023883E2
-0AFE18C010033C83E2003082E514201BE51C301BE5
-0AFE18D010031082E010201BE51C301BE5033082E0
-0AFE18E010003093E5003081E57BFFFFEB0030A0E1
-0AFE18F010FF3003E2000053E3FAFFFF0AACFFFFEB
-0AFE1900101C301BE5043083E21C300BE5E0FFFFEA
-0AFE19101000A81BE90DC0A0E100D82DE904B04CE2
-0AFE1920100CD04DE210000BE52228A0E3012A82E2
-0AFE193010042082E28134A0E3023883E2033C83E2
-0AFE194010003082E510301BE5003093E514300BE5
-0AFE1950102228A0E3012A82E2042082E29134A0E3
-0AFE196010023883E2033C83E2003082E510301BE5
-0AFE197010003093E518300BE52228A0E3012A82E2
-0AFE198010042082E2E134A0E3023883E2033C83E2
-0AFE199010003082E50229A0E310301BE5032082E0
-0AFE19A0100030A0E3003082E52228A0E3012A82E2
-0AFE19B010042082E28134A0E3023883E2033C83E2
-0AFE19C010003082E510201BE50D3AA0E3D03083E2
-0AFE19D010033883E1003082E53FFFFFEB0030A0E1
-0AFE19E010FF3003E2000053E3FAFFFF0A70FFFFEB
-0AFE19F01000A81BE90DC0A0E100D82DE904B04CE2
-0AFE1A00105CFFFFEB2228A0E3012A82E2042082E2
-0AFE1A1010E134A0E3023883E2033C83E2003082E5
-0AFE1A20100333A0E3033983E20020A0E3002083E5
-0AFE1A30102228A0E3012A82E2042082E28134A0E3
-0AFE1A4010023883E2033C83E2003082E50323A0E3
-0AFE1A5010032982E20339A0E3C03083E2033883E1
-0AFE1A6010003082E500A81BE90DC0A0E100D82DE9
-0AFE1A701004B04CE23FFFFFEB2228A0E3012A82E2
-0AFE1A8010042082E2E134A0E3023883E2033C83E2
-0AFE1A9010003082E50333A0E30A3983E20020A0E3
-0AFE1AA010002083E52228A0E3012A82E2042082E2
-0AFE1AB0108134A0E3023883E2033C83E2003082E5
-0AFE1AC0100323A0E30A2982E20339A0E3C03083E2
-0AFE1AD010033883E1003082E500A81BE90DC0A0E1
-0AFE1AE01000D82DE904B04CE28729A0E3222E82E2
-0AFE1AF0108739A0E3223E83E2003093E51E3CC3E3
-0AFE1B0010003082E58729A0E38E2F82E28739A0E3
-0AFE1B10108E3F83E2003093E51E3CC3E3003082E5
-0AFE1B20108139A0E3823D83E20520A0E3002083E5
-0AFE1B30108129A0E3822D82E2042082E20139A0E3
-0AFE1B4010273083E2003082E58139A0E3823D83E2
-0AFE1B50100C3083E20120A0E3002083E58129A0E3
-0AFE1B6010822D82E2102082E22A3DA0E3013083E2
-0AFE1B7010003082E58139A0E3823D83E2243083E2
-0AFE1B80100F20A0E3002083E58139A0E3823D83E2
-0AFE1B9010283083E28A20A0E3002083E58139A0E3
-0AFE1BA010823D83E22C3083E20820A0E3002083E5
-0AFE1BB01000A81BE90DC0A0E100D82DE904B04CE2
-0AFE1BC0108139A0E3823D83E2183083E2003093E5
-0AFE1BD010013003E2FF3003E20300A0E100A81BE9
-0AFE1BE0100DC0A0E100D82DE904B04CE204D04DE2
-0AFE1BF0100030A0E10D304BE58139A0E3823D83E2
-0AFE1C0010183083E2003093E5013903E2000053E3
-0AFE1C1010F8FFFF0A8139A0E3813D83E20D205BE5
-0AFE1C2010002083E50D305BE50A0053E30A00001A
-0AFE1C30108139A0E3823D83E2183083E2003093E5
-0AFE1C4010013903E2000053E3F8FFFF0A8139A0E3
-0AFE1C5010813D83E20D20A0E3002083E500A81BE9
-0AFE1C60100DC0A0E100D82DE904B04CE20000A0E1
-0AFE1C7010CFFFFFEB0030A0E1FF3003E2000053E3
-0AFE1C8010FAFFFF0A8139A0E3023A83E2003093E5
-0AFE1C9010FF3003E20300A0E100A81BE90DC0A0E1
-0AFE1CA01000D82DE904B04CE204D04DE20030A0E1
-0AFE1CB0100D304BE50D305BE52332A0E10E304BE5
-0AFE1CC0100E305BE5090053E30300009A0E305BE5
-0AFE1CD010373083E20E304BE5020000EA0E305BE5
-0AFE1CE010303083E20E304BE50E305BE50300A0E1
-0AFE1CF010BAFFFFEB0D305BE50F3003E20E304BE5
-0AFE1D00100E305BE5090053E30300009A0E305BE5
-0AFE1D1010373083E20E304BE5020000EA0E305BE5
-0AFE1D2010303083E20E304BE50E305BE50300A0E1
-0AFE1D3010AAFFFFEB00A81BE90DC0A0E100D82DE9
-0AFE1D401004B04CE204D04DE210000BE510301BE5
-0AFE1D50100030D3E5000053E30000001A080000EA
-0AFE1D601010104BE2003091E50320A0E10020D2E5
-0AFE1D7010013083E2003081E50200A0E197FFFFEB
-0AFE1D8008F1FFFFEA00A81BE9
-0AFE1DA4100A0D4D58314144532053796E632D666C
-0AFE1DB4106173682050726F6772616D6D696E6720
-0AFE1DC4105574696C6974792076302E3520323030
-0AFE1DD410322F30382F32310A0D000000536F7572
-0AFE1DE41063652061646472657373202873746F72
-0AFE1DF410656420696E2030783041464530303030
-0AFE1E0410293A2030780000005461726765742061
-0AFE1E1410646472657373202873746F7265642069
-0AFE1E24106E2030783041464530303034293A2030
-0AFE1E34107800000053697A652020202020202020
-0AFE1E44102020202873746F72656420696E203078
-0AFE1E54103041464530303038293A203078000000
-0AFE1E6410507265737320616E79206B657920746F
-0AFE1E74102073746172742070726F6772616D6D69
-0AFE1E84106E67202E2E2E00000A0D45726173696E
-0AFE1E94106720666C617368202E2E2E000A0D5072
-0AFE1EA4106F6772616D6D696E67202E2E2E000000
-0AFE1EB4100A0D50726F6772616D6D696E67206669
-0AFE1EC4106E69736865642E0A0D50726573732027
-0AFE1ED410612720746F20636F6E74696E7565202E
-0AFE1EE4102E2E2E000A0D566572696679696E6720
-0AFE1EF410666C617368202E2E2E0000000A0D426C
-0AFE1F0410616E6B20636865636B696E67202E2E2E
-0AFE1F1410000000000A45726173696E67202E2E2E
-0AFE1F2410000000000A50726F6772616D6D696E67
-0AFE1F3410202E2E2E000000002073756363656564
-0AFE1F44102E0A0000206661696C656420210A0000
-0AFE100000
- "
-}
-
-#########################################################
-
-if [ "$#" -eq 2 ] ; then
- output_init > $2
- output_uboot >> $2
- output_flashprog >> $2
-else
- output_init;
- output_uboot;
- output_flashprog;
-fi
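
For reference, the record format that the deleted img2brec.sh generated with its
hexdump pipeline (an 8-digit hex load address, a 2-digit hex byte count, then up
to 16 data bytes in upper-case hex) can be sketched in Python 3 roughly as
follows. This is only an illustration of the removed script's output format; the
function name and base address below are invented, not part of any U-Boot tool.

    import sys

    def image_to_brec(data, base=0x0A000000):
        # One record per 16-byte chunk: 8 hex digits of address, 2 hex digits
        # of byte count, then the chunk itself in upper-case hex.
        lines = []
        for off in range(0, len(data), 16):
            chunk = data[off:off + 16]
            lines.append('%08X%02X%s' % (base + off, len(chunk),
                                         chunk.hex().upper()))
        return '\r\n'.join(lines)

    if __name__ == '__main__':
        with open(sys.argv[1], 'rb') as f:
            print(image_to_brec(f.read()))
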
diff --git a/tools/libfdt/fdt_rw.c b/tools/libfdt/fdt_rw.c
index 68fc7c8c88..7189f01429 100644
--- a/tools/libfdt/fdt_rw.c
+++ b/tools/libfdt/fdt_rw.c
@@ -11,6 +11,7 @@ int fdt_remove_unused_strings(const void *old, void *new)
const char *str;
int ret;
int tag = FDT_PROP;
+ int allocated;
/* Make a copy and remove the strings */
memcpy(new, old, size);
@@ -25,7 +26,7 @@ int fdt_remove_unused_strings(const void *old, void *new)
new_prop = (struct fdt_property *)(unsigned long)
fdt_get_property_by_offset(new, offset, NULL);
str = fdt_string(old, fdt32_to_cpu(old_prop->nameoff));
- ret = fdt_find_add_string_(new, str);
+ ret = fdt_find_add_string_(new, str, &allocated);
if (ret < 0)
return ret;
new_prop->nameoff = cpu_to_fdt32(ret);
diff --git a/tools/microcode-tool.py b/tools/microcode-tool.py
index 249a33b8ca..24c02c4fca 100755
--- a/tools/microcode-tool.py
+++ b/tools/microcode-tool.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
#
# Copyright (c) 2014 Google, Inc
@@ -126,15 +126,15 @@ def List(date, microcodes, model):
microcodes: Dict of Microcode objects indexed by name
model: Model string to search for, or None
"""
- print 'Date: %s' % date
+ print('Date: %s' % date)
if model:
mcode_list, tried = FindMicrocode(microcodes, model.lower())
- print 'Matching models %s:' % (', '.join(tried))
+ print('Matching models %s:' % (', '.join(tried)))
else:
- print 'All models:'
- mcode_list = [microcodes[m] for m in microcodes.keys()]
+ print('All models:')
+ mcode_list = [microcodes[m] for m in list(microcodes.keys())]
for mcode in mcode_list:
- print '%-20s: model %s' % (mcode.name, mcode.model)
+ print('%-20s: model %s' % (mcode.name, mcode.model))
def FindMicrocode(microcodes, model):
"""Find all the microcode chunks which match the given model.
@@ -164,7 +164,7 @@ def FindMicrocode(microcodes, model):
for i in range(3):
abbrev = model[:-i] if i else model
tried.append(abbrev)
- for mcode in microcodes.values():
+ for mcode in list(microcodes.values()):
if mcode.model.startswith(abbrev):
found.append(mcode)
if found:
@@ -229,17 +229,17 @@ data = <%s
args += [mcode.words[i] for i in range(7)]
args.append(words)
if outfile == '-':
- print out % tuple(args)
+ print(out % tuple(args))
else:
if not outfile:
if not os.path.exists(MICROCODE_DIR):
- print >> sys.stderr, "Creating directory '%s'" % MICROCODE_DIR
+ print("Creating directory '%s'" % MICROCODE_DIR, file=sys.stderr)
os.makedirs(MICROCODE_DIR)
outfile = os.path.join(MICROCODE_DIR, mcode.name + '.dtsi')
- print >> sys.stderr, "Writing microcode for '%s' to '%s'" % (
- ', '.join([mcode.name for mcode in mcodes]), outfile)
+ print("Writing microcode for '%s' to '%s'" % (
+ ', '.join([mcode.name for mcode in mcodes]), outfile), file=sys.stderr)
with open(outfile, 'w') as fd:
- print >> fd, out % tuple(args)
+ print(out % tuple(args), file=fd)
def MicrocodeTool():
"""Run the microcode tool"""
@@ -289,14 +289,14 @@ def MicrocodeTool():
if cmd == 'list':
List(date, microcodes, options.model)
elif cmd == 'license':
- print '\n'.join(license_text)
+ print('\n'.join(license_text))
elif cmd == 'create':
if not options.model:
parser.error('You must specify a model to create')
model = options.model.lower()
if options.model == 'all':
options.multiple = True
- mcode_list = microcodes.values()
+ mcode_list = list(microcodes.values())
tried = []
else:
mcode_list, tried = FindMicrocode(microcodes, model)
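
The microcode-tool.py hunks above are part of the tree-wide Python 3 conversion,
and the same handful of idioms recur in the scripts that follow. A minimal,
hypothetical sketch of those idioms (the dictionary contents below are made up):

    import sys

    microcodes = {'m12': 'model 12', 'm206a7': 'model 206a7'}   # made-up data

    print('All models:')                      # print is a function in Python 3
    for name in list(microcodes.keys()):      # list() materialises the dict view
        print('%-20s: %s' % (name, microcodes[name]))

    print('diagnostic message', file=sys.stderr)   # replaces "print >> sys.stderr, ..."
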
diff --git a/tools/moveconfig.py b/tools/moveconfig.py
index b99417e9d6..e2ff4cfc88 100755
--- a/tools/moveconfig.py
+++ b/tools/moveconfig.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
#
# Author: Masahiro Yamada <yamada.masahiro@socionext.com>
@@ -304,7 +304,7 @@ import glob
import multiprocessing
import optparse
import os
-import Queue
+import queue
import re
import shutil
import subprocess
@@ -450,8 +450,8 @@ def get_matched_defconfigs(defconfigs_file):
line = line.split(' ')[0] # handle 'git log' input
matched = get_matched_defconfig(line)
if not matched:
- print >> sys.stderr, "warning: %s:%d: no defconfig matched '%s'" % \
- (defconfigs_file, i + 1, line)
+ print("warning: %s:%d: no defconfig matched '%s'" % \
+ (defconfigs_file, i + 1, line), file=sys.stderr)
defconfigs += matched
@@ -494,11 +494,11 @@ def show_diff(a, b, file_path, color_enabled):
for line in diff:
if line[0] == '-' and line[1] != '-':
- print color_text(color_enabled, COLOR_RED, line),
+ print(color_text(color_enabled, COLOR_RED, line), end=' ')
elif line[0] == '+' and line[1] != '+':
- print color_text(color_enabled, COLOR_GREEN, line),
+ print(color_text(color_enabled, COLOR_GREEN, line), end=' ')
else:
- print line,
+ print(line, end=' ')
def extend_matched_lines(lines, matched, pre_patterns, post_patterns, extend_pre,
extend_post):
@@ -554,9 +554,9 @@ def extend_matched_lines(lines, matched, pre_patterns, post_patterns, extend_pre
def confirm(options, prompt):
if not options.yes:
while True:
- choice = raw_input('{} [y/n]: '.format(prompt))
+ choice = input('{} [y/n]: '.format(prompt))
choice = choice.lower()
- print choice
+ print(choice)
if choice == 'y' or choice == 'n':
break
@@ -809,10 +809,10 @@ def try_expand(line):
val= val.strip('\"')
if re.search("[*+-/]|<<|SZ_+|\(([^\)]+)\)", val):
newval = hex(eval(val, SIZES))
- print "\tExpanded expression %s to %s" % (val, newval)
+ print("\tExpanded expression %s to %s" % (val, newval))
return cfg+'='+newval
except:
- print "\tFailed to expand expression in %s" % line
+ print("\tFailed to expand expression in %s" % line)
return line
@@ -838,7 +838,7 @@ class Progress:
def show(self):
"""Display the progress."""
- print ' %d defconfigs out of %d\r' % (self.current, self.total),
+ print(' %d defconfigs out of %d\r' % (self.current, self.total), end=' ')
sys.stdout.flush()
@@ -1236,7 +1236,7 @@ class Slot:
"Tool chain for '%s' is missing. Do nothing.\n" % arch)
self.finish(False)
return
- env = toolchain.MakeEnvironment(False)
+ env = toolchain.MakeEnvironment(False)
cmd = list(self.make_cmd)
cmd.append('KCONFIG_IGNORE_DUPLICATES=1')
@@ -1312,7 +1312,7 @@ class Slot:
log += '\n'.join([ ' ' + s for s in self.log.split('\n') ])
# Some threads are running in parallel.
# Print log atomically to not mix up logs from different threads.
- print >> (sys.stdout if success else sys.stderr), log
+ print(log, file=(sys.stdout if success else sys.stderr))
if not success:
if self.options.exit_on_error:
@@ -1411,8 +1411,8 @@ class Slots:
msg = "The following boards were not processed due to error:\n"
msg += boards
msg += "(the list has been saved in %s)\n" % output_file
- print >> sys.stderr, color_text(self.options.color, COLOR_LIGHT_RED,
- msg)
+ print(color_text(self.options.color, COLOR_LIGHT_RED,
+ msg), file=sys.stderr)
with open(output_file, 'w') as f:
f.write(boards)
@@ -1431,8 +1431,8 @@ class Slots:
msg += "It is highly recommended to check them manually:\n"
msg += boards
msg += "(the list has been saved in %s)\n" % output_file
- print >> sys.stderr, color_text(self.options.color, COLOR_YELLOW,
- msg)
+ print(color_text(self.options.color, COLOR_YELLOW,
+ msg), file=sys.stderr)
with open(output_file, 'w') as f:
f.write(boards)
@@ -1448,11 +1448,11 @@ class ReferenceSource:
commit: commit to git-clone
"""
self.src_dir = tempfile.mkdtemp()
- print "Cloning git repo to a separate work directory..."
+ print("Cloning git repo to a separate work directory...")
subprocess.check_output(['git', 'clone', os.getcwd(), '.'],
cwd=self.src_dir)
- print "Checkout '%s' to build the original autoconf.mk." % \
- subprocess.check_output(['git', 'rev-parse', '--short', commit]).strip()
+        print("Checkout '%s' to build the original autoconf.mk." % \
+            subprocess.check_output(['git', 'rev-parse', '--short', commit]).strip().decode())
subprocess.check_output(['git', 'checkout', commit],
stderr=subprocess.STDOUT, cwd=self.src_dir)
@@ -1480,14 +1480,14 @@ def move_config(toolchains, configs, options, db_queue):
"""
if len(configs) == 0:
if options.force_sync:
- print 'No CONFIG is specified. You are probably syncing defconfigs.',
+ print('No CONFIG is specified. You are probably syncing defconfigs.', end=' ')
elif options.build_db:
- print 'Building %s database' % CONFIG_DATABASE
+ print('Building %s database' % CONFIG_DATABASE)
else:
- print 'Neither CONFIG nor --force-sync is specified. Nothing will happen.',
+ print('Neither CONFIG nor --force-sync is specified. Nothing will happen.', end=' ')
else:
- print 'Move ' + ', '.join(configs),
- print '(jobs: %d)\n' % options.jobs
+ print('Move ' + ', '.join(configs), end=' ')
+ print('(jobs: %d)\n' % options.jobs)
if options.git_ref:
reference_src = ReferenceSource(options.git_ref)
@@ -1517,7 +1517,7 @@ def move_config(toolchains, configs, options, db_queue):
while not slots.empty():
time.sleep(SLEEP_TIME)
- print ''
+ print('')
slots.show_failed_boards()
slots.show_suspicious_boards()
@@ -1691,15 +1691,15 @@ def do_imply_config(config_list, add_imply, imply_flags, skip_added,
for config in config_list:
defconfigs = defconfig_db.get(config)
if not defconfigs:
- print '%s not found in any defconfig' % config
+ print('%s not found in any defconfig' % config)
continue
# Get the set of defconfigs without this one (since a config cannot
# imply itself)
non_defconfigs = all_defconfigs - defconfigs
num_defconfigs = len(defconfigs)
- print '%s found in %d/%d defconfigs' % (config, num_defconfigs,
- len(all_configs))
+ print('%s found in %d/%d defconfigs' % (config, num_defconfigs,
+ len(all_configs)))
# This will hold the results: key=config, value=defconfigs containing it
imply_configs = {}
@@ -1736,7 +1736,7 @@ def do_imply_config(config_list, add_imply, imply_flags, skip_added,
if common_defconfigs:
skip = False
if find_superset:
- for prev in imply_configs.keys():
+ for prev in list(imply_configs.keys()):
prev_count = len(imply_configs[prev])
count = len(common_defconfigs)
if (prev_count > count and
@@ -1806,15 +1806,15 @@ def do_imply_config(config_list, add_imply, imply_flags, skip_added,
add_list[fname].append(linenum)
if show and kconfig_info != 'skip':
- print '%5d : %-30s%-25s %s' % (num_common, iconfig.ljust(30),
- kconfig_info, missing_str)
+ print('%5d : %-30s%-25s %s' % (num_common, iconfig.ljust(30),
+ kconfig_info, missing_str))
# Having collected a list of things to add, now we add them. We process
# each file from the largest line number to the smallest so that
# earlier additions do not affect our line numbers. E.g. if we added an
# imply at line 20 it would change the position of each line after
# that.
- for fname, linenums in add_list.iteritems():
+ for fname, linenums in add_list.items():
for linenum in sorted(linenums, reverse=True):
add_imply_rule(config[CONFIG_LEN:], fname, linenum)
@@ -1891,11 +1891,11 @@ def main():
for flag in options.imply_flags.split(','):
bad = flag not in IMPLY_FLAGS
if bad:
- print "Invalid flag '%s'" % flag
+ print("Invalid flag '%s'" % flag)
if flag == 'help' or bad:
- print "Imply flags: (separate with ',')"
- for name, info in IMPLY_FLAGS.iteritems():
- print ' %-15s: %s' % (name, info[1])
+ print("Imply flags: (separate with ',')")
+ for name, info in IMPLY_FLAGS.items():
+ print(' %-15s: %s' % (name, info[1]))
parser.print_usage()
sys.exit(1)
imply_flags |= IMPLY_FLAGS[flag][0]
@@ -1905,14 +1905,14 @@ def main():
return
config_db = {}
- db_queue = Queue.Queue()
+ db_queue = queue.Queue()
t = DatabaseThread(config_db, db_queue)
t.setDaemon(True)
t.start()
if not options.cleanup_headers_only:
check_clean_directory()
- bsettings.Setup('')
+ bsettings.Setup('')
toolchains = toolchain.Toolchains()
toolchains.GetSettings()
toolchains.Scan(verbose=False)
@@ -1939,7 +1939,7 @@ def main():
if options.build_db:
with open(CONFIG_DATABASE, 'w') as fd:
- for defconfig, configs in config_db.iteritems():
+ for defconfig, configs in config_db.items():
fd.write('%s\n' % defconfig)
for config in sorted(configs.keys()):
fd.write(' %s=%s\n' % (config, configs[config]))
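
moveconfig.py switches from the Python 2 'Queue' module to 'queue' and feeds
results to a database thread. A self-contained sketch of that producer/consumer
idiom follows; DatabaseThread itself is not shown in these hunks, so the worker
function and the sample entry here are invented for illustration only.

    import queue
    import threading

    def db_worker(q, db):
        # Drain (defconfig, configs) tuples from the queue into the database.
        while True:
            item = q.get()
            if item is None:          # sentinel: stop the worker
                break
            defconfig, configs = item
            db[defconfig] = configs

    config_db = {}
    db_queue = queue.Queue()
    t = threading.Thread(target=db_worker, args=(db_queue, config_db), daemon=True)
    t.start()
    db_queue.put(('sandbox_defconfig', {'CONFIG_SANDBOX': 'y'}))   # made-up entry
    db_queue.put(None)
    t.join()
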
diff --git a/tools/patman/command.py b/tools/patman/command.py
index 16299f3f5b..5fbd2c4a3e 100644
--- a/tools/patman/command.py
+++ b/tools/patman/command.py
@@ -4,6 +4,7 @@
import os
import cros_subprocess
+import tools
"""Shell command ease-ups for Python."""
@@ -31,6 +32,13 @@ class CommandResult:
self.return_code = return_code
self.exception = exception
+ def ToOutput(self, binary):
+ if not binary:
+ self.stdout = tools.ToString(self.stdout)
+ self.stderr = tools.ToString(self.stderr)
+ self.combined = tools.ToString(self.combined)
+ return self
+
# This permits interception of RunPipe for test purposes. If it is set to
# a function, then that function is called with the pipe list being
@@ -41,7 +49,7 @@ test_result = None
def RunPipe(pipe_list, infile=None, outfile=None,
capture=False, capture_stderr=False, oneline=False,
- raise_on_error=True, cwd=None, **kwargs):
+ raise_on_error=True, cwd=None, binary=False, **kwargs):
"""
Perform a command pipeline, with optional input/output filenames.
@@ -67,7 +75,7 @@ def RunPipe(pipe_list, infile=None, outfile=None,
else:
return test_result
# No result: fall through to normal processing
- result = CommandResult()
+ result = CommandResult(b'', b'', b'')
last_pipe = None
pipeline = list(pipe_list)
user_pipestr = '|'.join([' '.join(pipe) for pipe in pipe_list])
@@ -93,29 +101,36 @@ def RunPipe(pipe_list, infile=None, outfile=None,
if raise_on_error:
raise Exception("Error running '%s': %s" % (user_pipestr, str))
result.return_code = 255
- return result
+ return result.ToOutput(binary)
if capture:
result.stdout, result.stderr, result.combined = (
last_pipe.CommunicateFilter(None))
if result.stdout and oneline:
- result.output = result.stdout.rstrip('\r\n')
+ result.output = result.stdout.rstrip(b'\r\n')
result.return_code = last_pipe.wait()
else:
result.return_code = os.waitpid(last_pipe.pid, 0)[1]
if raise_on_error and result.return_code:
raise Exception("Error running '%s'" % user_pipestr)
- return result
+ return result.ToOutput(binary)
def Output(*cmd, **kwargs):
kwargs['raise_on_error'] = kwargs.get('raise_on_error', True)
return RunPipe([cmd], capture=True, **kwargs).stdout
def OutputOneLine(*cmd, **kwargs):
+ """Run a command and output it as a single-line string
+
+    The command is expected to produce a single line of output
+
+ Returns:
+ String containing output of command
+ """
raise_on_error = kwargs.pop('raise_on_error', True)
- return (RunPipe([cmd], capture=True, oneline=True,
- raise_on_error=raise_on_error,
- **kwargs).stdout.strip())
+ result = RunPipe([cmd], capture=True, oneline=True,
+ raise_on_error=raise_on_error, **kwargs).stdout.strip()
+ return result
def Run(*cmd, **kwargs):
return RunPipe([cmd], **kwargs).stdout
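
With the new 'binary' parameter, RunPipe() decodes captured output to str via
tools.ToString() by default and leaves it as raw bytes when binary=True. A
hypothetical usage sketch; the commands and the import path are illustrative
only (the real import depends on how sys.path is set up):

    import command   # tools/patman/command.py

    text = command.RunPipe([['echo', 'hello']], capture=True).stdout      # str
    raw = command.RunPipe([['cat', 'u-boot.bin']], capture=True,
                          binary=True).stdout                             # bytes
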
diff --git a/tools/patman/func_test.py b/tools/patman/func_test.py
index 2321f9e028..76319fff37 100644
--- a/tools/patman/func_test.py
+++ b/tools/patman/func_test.py
@@ -51,7 +51,7 @@ class TestFunctional(unittest.TestCase):
@classmethod
def GetText(self, fname):
- return open(self.GetPath(fname)).read()
+ return open(self.GetPath(fname), encoding='utf-8').read()
@classmethod
def GetPatchName(self, subject):
@@ -160,7 +160,7 @@ class TestFunctional(unittest.TestCase):
dry_run, not ignore_bad_tags, cc_file,
in_reply_to=in_reply_to, thread=None)
series.ShowActions(args, cmd, process_tags)
- cc_lines = open(cc_file).read().splitlines()
+ cc_lines = open(cc_file, encoding='utf-8').read().splitlines()
os.remove(cc_file)
lines = out[0].splitlines()
@@ -229,14 +229,14 @@ Simon Glass (2):
2.7.4
'''
- lines = open(cover_fname).read().splitlines()
+ lines = open(cover_fname, encoding='utf-8').read().splitlines()
self.assertEqual(
'Subject: [RFC PATCH v3 0/2] test: A test patch series',
lines[3])
self.assertEqual(expected.splitlines(), lines[7:])
for i, fname in enumerate(args):
- lines = open(fname).read().splitlines()
+ lines = open(fname, encoding='utf-8').read().splitlines()
subject = [line for line in lines if line.startswith('Subject')]
self.assertEqual('Subject: [RFC %d/%d]' % (i + 1, count),
subject[0][:18])
diff --git a/tools/patman/patchstream.py b/tools/patman/patchstream.py
index ef06606297..df3eb7483b 100644
--- a/tools/patman/patchstream.py
+++ b/tools/patman/patchstream.py
@@ -511,8 +511,8 @@ def FixPatch(backup_dir, fname, series, commit):
A list of errors, or [] if all ok.
"""
handle, tmpname = tempfile.mkstemp()
- outfd = os.fdopen(handle, 'w')
- infd = open(fname, 'r')
+ outfd = os.fdopen(handle, 'w', encoding='utf-8')
+ infd = open(fname, 'r', encoding='utf-8')
ps = PatchStream(series)
ps.commit = commit
ps.ProcessStream(infd, outfd)
diff --git a/tools/patman/patman.py b/tools/patman/patman.py
index 0187ebe1d4..cf53e532dd 100755
--- a/tools/patman/patman.py
+++ b/tools/patman/patman.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0+
#
# Copyright (c) 2011 The Chromium OS Authors.
diff --git a/tools/patman/series.py b/tools/patman/series.py
index d667d9b6d6..02a1113ad0 100644
--- a/tools/patman/series.py
+++ b/tools/patman/series.py
@@ -223,7 +223,7 @@ class Series(dict):
col = terminal.Color()
# Look for commit tags (of the form 'xxx:' at the start of the subject)
fname = '/tmp/patman.%d' % os.getpid()
- fd = open(fname, 'w')
+ fd = open(fname, 'w', encoding='utf-8')
all_ccs = []
for commit in self.commits:
cc = []
diff --git a/tools/patman/settings.py b/tools/patman/settings.py
index c98911d522..5dc83a8500 100644
--- a/tools/patman/settings.py
+++ b/tools/patman/settings.py
@@ -165,7 +165,7 @@ def ReadGitAliases(fname):
fname: Filename to read
"""
try:
- fd = open(fname, 'r')
+ fd = open(fname, 'r', encoding='utf-8')
except IOError:
print("Warning: Cannot find alias file '%s'" % fname)
return
@@ -259,7 +259,7 @@ def _ReadAliasFile(fname):
"""
if os.path.exists(fname):
bad_line = None
- with open(fname) as fd:
+ with open(fname, encoding='utf-8') as fd:
linenum = 0
for line in fd:
linenum += 1
diff --git a/tools/patman/test.py b/tools/patman/test.py
index cc61c20606..889e186606 100644
--- a/tools/patman/test.py
+++ b/tools/patman/test.py
@@ -72,12 +72,12 @@ Signed-off-by: Simon Glass <sjg@chromium.org>
'''
out = ''
inhandle, inname = tempfile.mkstemp()
- infd = os.fdopen(inhandle, 'w')
+ infd = os.fdopen(inhandle, 'w', encoding='utf-8')
infd.write(data)
infd.close()
exphandle, expname = tempfile.mkstemp()
- expfd = os.fdopen(exphandle, 'w')
+ expfd = os.fdopen(exphandle, 'w', encoding='utf-8')
expfd.write(expected)
expfd.close()
diff --git a/tools/patman/tools.py b/tools/patman/tools.py
index 4a7fcdad21..3feddb292f 100644
--- a/tools/patman/tools.py
+++ b/tools/patman/tools.py
@@ -186,7 +186,7 @@ def PathHasFile(path_spec, fname):
return True
return False
-def Run(name, *args):
+def Run(name, *args, **kwargs):
"""Run a tool with some arguments
This runs a 'tool', which is a program used by binman to process files and
@@ -201,13 +201,14 @@ def Run(name, *args):
CommandResult object
"""
try:
+ binary = kwargs.get('binary')
env = None
if tool_search_paths:
env = dict(os.environ)
env['PATH'] = ':'.join(tool_search_paths) + ':' + env['PATH']
all_args = (name,) + args
result = command.RunPipe([all_args], capture=True, capture_stderr=True,
- env=env, raise_on_error=False)
+ env=env, raise_on_error=False, binary=binary)
if result.return_code:
raise Exception("Error %d running '%s': %s" %
(result.return_code,' '.join(all_args),
@@ -375,7 +376,7 @@ def ToBytes(string):
"""Convert a str type into a bytes type
Args:
- string: string to convert value
+ string: string to convert
Returns:
Python 3: A bytes type
@@ -385,6 +386,18 @@ def ToBytes(string):
return string.encode('utf-8')
return string
+def ToString(bval):
+ """Convert a bytes type into a str type
+
+ Args:
+ bval: bytes value to convert
+
+ Returns:
+        Python 3: A str type
+ Python 2: A string type
+ """
+ return bval.decode('utf-8')
+
def Compress(indata, algo, with_header=True):
"""Compress some data using a given algorithm
@@ -406,14 +419,14 @@ def Compress(indata, algo, with_header=True):
fname = GetOutputFilename('%s.comp.tmp' % algo)
WriteFile(fname, indata)
if algo == 'lz4':
- data = Run('lz4', '--no-frame-crc', '-c', fname)
+ data = Run('lz4', '--no-frame-crc', '-c', fname, binary=True)
# cbfstool uses a very old version of lzma
elif algo == 'lzma':
outfname = GetOutputFilename('%s.comp.otmp' % algo)
Run('lzma_alone', 'e', fname, outfname, '-lc1', '-lp0', '-pb0', '-d8')
data = ReadFile(outfname)
elif algo == 'gzip':
- data = Run('gzip', '-c', fname)
+ data = Run('gzip', '-c', fname, binary=True)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
if with_header:
@@ -446,13 +459,13 @@ def Decompress(indata, algo, with_header=True):
with open(fname, 'wb') as fd:
fd.write(indata)
if algo == 'lz4':
- data = Run('lz4', '-dc', fname)
+ data = Run('lz4', '-dc', fname, binary=True)
elif algo == 'lzma':
outfname = GetOutputFilename('%s.decomp.otmp' % algo)
Run('lzma_alone', 'd', fname, outfname)
- data = ReadFile(outfname)
+ data = ReadFile(outfname, binary=True)
elif algo == 'gzip':
- data = Run('gzip', '-cd', fname)
+ data = Run('gzip', '-cd', fname, binary=True)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
return data
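
tools.py now pairs ToBytes() with the new ToString() helper and passes
binary=True where compressed output must remain raw bytes. A small illustrative
example (the import path is an assumption, and the lz4 call is shown only as a
comment to avoid requiring the tool to be installed):

    import tools   # tools/patman/tools.py

    data = tools.ToBytes('hello')    # b'hello' on Python 3
    text = tools.ToString(data)      # 'hello'
    assert text == 'hello'

    # Compression helpers now return undecoded bytes, e.g. (assuming lz4 exists):
    # compressed = tools.Run('lz4', '--no-frame-crc', '-c', fname, binary=True)
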
diff --git a/tools/rkmux.py b/tools/rkmux.py
index 11c192a073..1226ee201c 100755
--- a/tools/rkmux.py
+++ b/tools/rkmux.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# Script to create enums from datasheet register tables
#
@@ -43,8 +43,8 @@ class RegField:
self.desc.append(desc)
def Show(self):
- print self
- print
+ print(self)
+ print()
self.__init__()
def __str__(self):
@@ -65,11 +65,11 @@ class Printer:
self.output_footer()
def output_header(self):
- print '/* %s */' % self.name
- print 'enum {'
+ print('/* %s */' % self.name)
+ print('enum {')
def output_footer(self):
- print '};';
+ print('};');
def output_regfield(self, regfield):
lines = regfield.desc
@@ -97,7 +97,7 @@ class Printer:
self.first = False
self.output_header()
else:
- print
+ print()
out_enum(field, 'shift', bit_low)
out_enum(field, 'mask', mask)
next_val = -1
@@ -175,7 +175,7 @@ def out_enum(field, suffix, value, skip_val=False):
val_str = '%d' % value
str += '%s= %s' % ('\t' * tabs, val_str)
- print '\t%s,' % str
+ print('\t%s,' % str)
# Process a CSV file, e.g. from tabula
def process_csv(name, fd):