author     Tom Rini <trini@konsulko.com>    2022-08-22 12:41:07 -0400
committer  Tom Rini <trini@konsulko.com>    2022-08-22 12:41:07 -0400
commit     850ac7ceb75d2f86f0feae48ee77ee663dcd29d5 (patch)
tree       5d70681ddd31ad0cb69ad7900a9956aaae8aba51 /tools
parent     61e887fb9cbb6e1d64c2383a865f79ed3570bcf8 (diff)
parent     cd15b640b0b8d5a7ba5f1c0587e4f9c767e2d8fb (diff)
Merge tag 'dm-pull-20aug22' of https://source.denx.de/u-boot/custodians/u-boot-dm
binman fixes for various things
binman clean-up of compression and addition of utilities
Diffstat (limited to 'tools')
-rw-r--r--  tools/binman/binman.rst                                 |    2
-rw-r--r--  tools/binman/bintool.py                                 |  115
-rw-r--r--  tools/binman/btool/btool_gzip.py                        |   31
-rw-r--r--  tools/binman/btool/bzip2.py                             |   30
-rw-r--r--  tools/binman/btool/lzop.py                              |   30
-rw-r--r--  tools/binman/btool/xz.py                                |   31
-rw-r--r--  tools/binman/btool/zstd.py                              |   30
-rw-r--r--  tools/binman/cbfs_util.py                               |   20
-rwxr-xr-x  tools/binman/cbfs_util_test.py                          |    4
-rw-r--r--  tools/binman/comp_util.py                               |   76
-rw-r--r--  tools/binman/control.py                                 |   15
-rw-r--r--  tools/binman/elf_test.py                                |   14
-rw-r--r--  tools/binman/entries.rst                                |   70
-rw-r--r--  tools/binman/entry.py                                   |  122
-rw-r--r--  tools/binman/entry_test.py                              |    9
-rw-r--r--  tools/binman/etype/blob_dtb.py                          |   31
-rw-r--r--  tools/binman/etype/cbfs.py                              |    5
-rw-r--r--  tools/binman/etype/collection.py                        |    3
-rw-r--r--  tools/binman/etype/fill.py                              |    3
-rw-r--r--  tools/binman/etype/gbb.py                               |    1
-rw-r--r--  tools/binman/etype/intel_ifwi.py                        |    1
-rw-r--r--  tools/binman/etype/mkimage.py                           |  120
-rw-r--r--  tools/binman/etype/pre_load.py                          |    3
-rw-r--r--  tools/binman/etype/section.py                           |   12
-rw-r--r--  tools/binman/etype/vblock.py                            |    1
-rw-r--r--  tools/binman/ftest.py                                   |  260
-rw-r--r--  tools/binman/test/235_compress_dtb_prepend_invalid.dts  |   17
-rw-r--r--  tools/binman/test/235_mkimage_name.dts                  |   18
-rw-r--r--  tools/binman/test/236_compress_dtb_prepend_length.dts   |   19
-rw-r--r--  tools/binman/test/236_mkimage_image.dts                 |   21
-rw-r--r--  tools/binman/test/237_compress_dtb_invalid.dts          |   16
-rw-r--r--  tools/binman/test/237_mkimage_image_no_content.dts      |   22
-rw-r--r--  tools/binman/test/238_compress_dtb_zstd.dts             |   16
-rw-r--r--  tools/binman/test/238_mkimage_image_bad.dts             |   22
-rw-r--r--  tools/binman/test/239_collection_other.dts              |   29
-rw-r--r--  tools/binman/test/240_mkimage_coll.dts                  |   27
-rw-r--r--  tools/patman/test_util.py                               |    7
37 files changed, 1079 insertions, 174 deletions
diff --git a/tools/binman/binman.rst b/tools/binman/binman.rst
index 6e1fd3476f..f94fd7e2a5 100644
--- a/tools/binman/binman.rst
+++ b/tools/binman/binman.rst
@@ -1684,8 +1684,6 @@ Some ideas:
- Figure out how to make Fdt support changing the node order, so that
Node.AddSubnode() can support adding a node before another, existing node.
Perhaps it should completely regenerate the flat tree?
-- Put faked files into a separate subdir and remove them on start-up, to avoid
- seeing them as 'real' files on a subsequent run
--
Simon Glass <sjg@chromium.org>
diff --git a/tools/binman/bintool.py b/tools/binman/bintool.py
index 8435b29749..ec30ceff74 100644
--- a/tools/binman/bintool.py
+++ b/tools/binman/bintool.py
@@ -1,5 +1,7 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright 2022 Google LLC
+# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
+# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
#
"""Base class for all bintools
@@ -71,17 +73,25 @@ class Bintool:
# interested in the type.
module_name = btype.replace('-', '_')
module = modules.get(module_name)
+ class_name = f'Bintool{module_name}'
# Import the module if we have not already done so
if not module:
try:
module = importlib.import_module('binman.btool.' + module_name)
except ImportError as exc:
- return module_name, exc
+ try:
+ # Deal with classes which must be renamed due to conflicts
+ # with Python libraries
+ class_name = f'Bintoolbtool_{module_name}'
+ module = importlib.import_module('binman.btool.btool_' +
+ module_name)
+ except ImportError:
+ return module_name, exc
modules[module_name] = module
# Look up the expected class name
- return getattr(module, 'Bintool%s' % module_name)
+ return getattr(module, class_name)
@staticmethod
def create(name):
@@ -464,3 +474,104 @@ binaries. It is fairly easy to create new bintools. Just add a new file to the
str: Version string for this bintool
"""
return 'unknown'
+
+class BintoolPacker(Bintool):
+ """Tool which compression / decompression entry contents
+
+ This is a bintools base class for compression / decompression packer
+
+ Properties:
+ name: Name of packer tool
+ compression: Compression type (COMPRESS_...), value of 'name' property
+ if none
+ compress_args: List of positional args provided to tool for compress,
+ ['--compress'] if none
+ decompress_args: List of positional args provided to tool for
+ decompress, ['--decompress'] if none
+ fetch_package: Name of the package to install using apt, value of 'name'
+ property if none
+ version_regex: Regular expression used to extract the version from the
+ tool's version output, '(v[0-9.]+)' if none
+ """
+ def __init__(self, name, compression=None, compress_args=None,
+ decompress_args=None, fetch_package=None,
+ version_regex=r'(v[0-9.]+)'):
+ desc = '%s compression' % (compression if compression else name)
+ super().__init__(name, desc)
+ if compress_args is None:
+ compress_args = ['--compress']
+ self.compress_args = compress_args
+ if decompress_args is None:
+ decompress_args = ['--decompress']
+ self.decompress_args = decompress_args
+ if fetch_package is None:
+ fetch_package = name
+ self.fetch_package = fetch_package
+ self.version_regex = version_regex
+
+ def compress(self, indata):
+ """Compress data
+
+ Args:
+ indata (bytes): Data to compress
+
+ Returns:
+ bytes: Compressed data
+ """
+ with tempfile.NamedTemporaryFile(prefix='comp.tmp',
+ dir=tools.get_output_dir()) as tmp:
+ tools.write_file(tmp.name, indata)
+ args = self.compress_args + ['--stdout', tmp.name]
+ return self.run_cmd(*args, binary=True)
+
+ def decompress(self, indata):
+ """Decompress data
+
+ Args:
+ indata (bytes): Data to decompress
+
+ Returns:
+ bytes: Decompressed data
+ """
+ with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
+ dir=tools.get_output_dir()) as inf:
+ tools.write_file(inf.name, indata)
+ args = self.decompress_args + ['--stdout', inf.name]
+ return self.run_cmd(*args, binary=True)
+
+ def fetch(self, method):
+ """Fetch handler
+
+ This installs the tool's package using the apt utility.
+
+ Args:
+ method (FETCH_...): Method to use
+
+ Returns:
+ True if the file was fetched and now installed, None if a method
+ other than FETCH_BIN was requested
+
+ Raises:
+ ValueError: Fetching could not be completed
+ """
+ if method != FETCH_BIN:
+ return None
+ return self.apt_install(self.fetch_package)
+
+ def version(self):
+ """Version handler
+
+ Returns:
+ str: Version number
+ """
+ import re
+
+ result = self.run_cmd_result('-V')
+ out = result.stdout.strip()
+ if not out:
+ out = result.stderr.strip()
+ if not out:
+ return super().version()
+
+ m_version = re.search(self.version_regex, out)
+ return m_version.group(1) if m_version else out
diff --git a/tools/binman/btool/btool_gzip.py b/tools/binman/btool/btool_gzip.py
new file mode 100644
index 0000000000..7bea300b5d
--- /dev/null
+++ b/tools/binman/btool/btool_gzip.py
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
+# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
+#
+"""Bintool implementation for gzip
+
+gzip allows compression and decompression of files.
+
+Documentation is available via::
+
+ man gzip
+"""
+
+from binman import bintool
+
+# pylint: disable=C0103
+class Bintoolbtool_gzip(bintool.BintoolPacker):
+ """Compression/decompression using the gzip algorithm
+
+ This bintool supports running `gzip` to compress and decompress data, as
+ used by binman.
+
+ It is also possible to fetch the tool, which uses `apt` to install it.
+
+ Documentation is available via::
+
+ man gzip
+ """
+ def __init__(self, name):
+ super().__init__(name, compress_args=[],
+ version_regex=r'gzip ([0-9.]+)')
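[Editor's note] The module/class rename only affects the lookup; callers still use the plain tool name. A usage sketch, assuming binman's output directory has already been prepared (BintoolPacker stages data through temporary files there)::

    from binman import bintool

    gzip_tool = bintool.Bintool.create('gzip')  # resolves to Bintoolbtool_gzip
    if gzip_tool.is_present():
        packed = gzip_tool.compress(b'some data')
        assert gzip_tool.decompress(packed) == b'some data'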
diff --git a/tools/binman/btool/bzip2.py b/tools/binman/btool/bzip2.py
new file mode 100644
index 0000000000..9be87a621f
--- /dev/null
+++ b/tools/binman/btool/bzip2.py
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
+# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
+#
+"""Bintool implementation for bzip2
+
+bzip2 allows compression and decompression of files.
+
+Documentation is available via::
+
+ man bzip2
+"""
+
+from binman import bintool
+
+# pylint: disable=C0103
+class Bintoolbzip2(bintool.BintoolPacker):
+ """Compression/decompression using the bzip2 algorithm
+
+ This bintool supports running `bzip2` to compress and decompress data, as
+ used by binman.
+
+ It is also possible to fetch the tool, which uses `apt` to install it.
+
+ Documentation is available via::
+
+ man bzip2
+ """
+ def __init__(self, name):
+ super().__init__(name, version_regex=r'bzip2.*Version ([0-9.]+)')
diff --git a/tools/binman/btool/lzop.py b/tools/binman/btool/lzop.py
new file mode 100644
index 0000000000..f6903b4db7
--- /dev/null
+++ b/tools/binman/btool/lzop.py
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
+# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
+#
+"""Bintool implementation for lzop
+
+lzop allows compression and decompression of files.
+
+Documentation is available via::
+
+ man lzop
+"""
+
+from binman import bintool
+
+# pylint: disable=C0103
+class Bintoollzop(bintool.BintoolPacker):
+ """Compression/decompression using the lzop algorithm
+
+ This bintool supports running `lzop` to compress and decompress data, as
+ used by binman.
+
+ It is also possible to fetch the tool, which uses `apt` to install it.
+
+ Documentation is available via::
+
+ man lzop
+ """
+ def __init__(self, name):
+ super().__init__(name, 'lzo', compress_args=[])
diff --git a/tools/binman/btool/xz.py b/tools/binman/btool/xz.py
new file mode 100644
index 0000000000..e2b413d18b
--- /dev/null
+++ b/tools/binman/btool/xz.py
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
+# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
+#
+"""Bintool implementation for xz
+
+xz allows compression and decompression of files.
+
+Documentation is available via::
+
+ man xz
+"""
+
+from binman import bintool
+
+# pylint: disable=C0103
+class Bintoolxz(bintool.BintoolPacker):
+ """Compression/decompression using the xz algorithm
+
+ This bintool supports running `xz` to compress and decompress data, as
+ used by binman.
+
+ It is also possible to fetch the tool, which uses `apt` to install it.
+
+ Documentation is available via::
+
+ man xz
+ """
+ def __init__(self, name):
+ super().__init__(name, fetch_package='xz-utils',
+ version_regex=r'xz \(XZ Utils\) ([0-9.]+)')
diff --git a/tools/binman/btool/zstd.py b/tools/binman/btool/zstd.py
new file mode 100644
index 0000000000..299bd37126
--- /dev/null
+++ b/tools/binman/btool/zstd.py
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: GPL-2.0+
+# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
+# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
+#
+"""Bintool implementation for zstd
+
+zstd allows compression and decompression of files.
+
+Documentation is available via::
+
+ man zstd
+"""
+
+from binman import bintool
+
+# pylint: disable=C0103
+class Bintoolzstd(bintool.BintoolPacker):
+ """Compression/decompression using the zstd algorithm
+
+ This bintool supports running `zstd` to compress and decompress data, as
+ used by binman.
+
+ It is also possible to fetch the tool, which uses `apt` to install it.
+
+ Documentation is available via::
+
+ man zstd
+ """
+ def __init__(self, name):
+ super().__init__(name)
diff --git a/tools/binman/cbfs_util.py b/tools/binman/cbfs_util.py
index 9cad03886f..7bd3d89798 100644
--- a/tools/binman/cbfs_util.py
+++ b/tools/binman/cbfs_util.py
@@ -20,7 +20,7 @@ import io
import struct
import sys
-from binman import comp_util
+from binman import bintool
from binman import elf
from patman import command
from patman import tools
@@ -236,14 +236,18 @@ class CbfsFile(object):
self.data_len = len(data)
self.erase_byte = None
self.size = None
+ if self.compress == COMPRESS_LZ4:
+ self.comp_bintool = bintool.Bintool.create('lz4')
+ elif self.compress == COMPRESS_LZMA:
+ self.comp_bintool = bintool.Bintool.create('lzma_alone')
+ else:
+ self.comp_bintool = None
def decompress(self):
"""Handle decompressing data if necessary"""
indata = self.data
- if self.compress == COMPRESS_LZ4:
- data = comp_util.decompress(indata, 'lz4', with_header=False)
- elif self.compress == COMPRESS_LZMA:
- data = comp_util.decompress(indata, 'lzma', with_header=False)
+ if self.comp_bintool:
+ data = self.comp_bintool.decompress(indata)
else:
data = indata
self.memlen = len(data)
@@ -361,10 +365,8 @@ class CbfsFile(object):
data = elf_data.data
elif self.ftype == TYPE_RAW:
orig_data = data
- if self.compress == COMPRESS_LZ4:
- data = comp_util.compress(orig_data, 'lz4', with_header=False)
- elif self.compress == COMPRESS_LZMA:
- data = comp_util.compress(orig_data, 'lzma', with_header=False)
+ if self.comp_bintool:
+ data = self.comp_bintool.compress(orig_data)
self.memlen = len(orig_data)
self.data_len = len(data)
attr = struct.pack(ATTR_COMPRESSION_FORMAT,
diff --git a/tools/binman/cbfs_util_test.py b/tools/binman/cbfs_util_test.py
index f86b295149..e0f792fd34 100755
--- a/tools/binman/cbfs_util_test.py
+++ b/tools/binman/cbfs_util_test.py
@@ -19,7 +19,6 @@ import unittest
from binman import bintool
from binman import cbfs_util
from binman.cbfs_util import CbfsWriter
-from binman import comp_util
from binman import elf
from patman import test_util
from patman import tools
@@ -50,7 +49,8 @@ class TestCbfs(unittest.TestCase):
cls.cbfstool = bintool.Bintool.create('cbfstool')
cls.have_cbfstool = cls.cbfstool.is_present()
- cls.have_lz4 = comp_util.HAVE_LZ4
+ lz4 = bintool.Bintool.create('lz4')
+ cls.have_lz4 = lz4.is_present()
@classmethod
def tearDownClass(cls):
diff --git a/tools/binman/comp_util.py b/tools/binman/comp_util.py
deleted file mode 100644
index dc76adab35..0000000000
--- a/tools/binman/comp_util.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0+
-# Copyright 2022 Google LLC
-#
-"""Utilities to compress and decompress data"""
-
-import struct
-import tempfile
-
-from binman import bintool
-from patman import tools
-
-LZ4 = bintool.Bintool.create('lz4')
-HAVE_LZ4 = LZ4.is_present()
-
-LZMA_ALONE = bintool.Bintool.create('lzma_alone')
-HAVE_LZMA_ALONE = LZMA_ALONE.is_present()
-
-
-def compress(indata, algo, with_header=True):
- """Compress some data using a given algorithm
-
- Note that for lzma this uses an old version of the algorithm, not that
- provided by xz.
-
- This requires 'lz4' and 'lzma_alone' tools. It also requires an output
- directory to be previously set up, by calling PrepareOutputDir().
-
- Args:
- indata (bytes): Input data to compress
- algo (str): Algorithm to use ('none', 'lz4' or 'lzma')
-
- Returns:
- bytes: Compressed data
- """
- if algo == 'none':
- return indata
- if algo == 'lz4':
- data = LZ4.compress(indata)
- # cbfstool uses a very old version of lzma
- elif algo == 'lzma':
- data = LZMA_ALONE.compress(indata)
- else:
- raise ValueError("Unknown algorithm '%s'" % algo)
- if with_header:
- hdr = struct.pack('<I', len(data))
- data = hdr + data
- return data
-
-def decompress(indata, algo, with_header=True):
- """Decompress some data using a given algorithm
-
- Note that for lzma this uses an old version of the algorithm, not that
- provided by xz.
-
- This requires 'lz4' and 'lzma_alone' tools. It also requires an output
- directory to be previously set up, by calling PrepareOutputDir().
-
- Args:
- indata (bytes): Input data to decompress
- algo (str): Algorithm to use ('none', 'lz4' or 'lzma')
-
- Returns:
- (bytes) Compressed data
- """
- if algo == 'none':
- return indata
- if with_header:
- data_len = struct.unpack('<I', indata[:4])[0]
- indata = indata[4:4 + data_len]
- if algo == 'lz4':
- data = LZ4.decompress(indata)
- elif algo == 'lzma':
- data = LZMA_ALONE.decompress(indata)
- else:
- raise ValueError("Unknown algorithm '%s'" % algo)
- return data
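[Editor's note] The with_header handling from comp_util moves into the blob-dtb entry type (see blob_dtb.py below); other callers now use the bintools directly. A rough sketch of the old helper's behaviour expressed with the new API, assuming the output directory is prepared and lz4 is installed::

    import struct
    from binman import bintool

    lz4 = bintool.Bintool.create('lz4')

    def compress_with_length(indata):
        data = lz4.compress(indata)
        return struct.pack('<I', len(data)) + data   # 32-bit length header

    def decompress_with_length(indata):
        data_len = struct.unpack('<I', indata[:4])[0]
        return lz4.decompress(indata[4:4 + data_len])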
diff --git a/tools/binman/control.py b/tools/binman/control.py
index ce57dc7efc..bfe63a1520 100644
--- a/tools/binman/control.py
+++ b/tools/binman/control.py
@@ -16,8 +16,9 @@ from patman import tools
from binman import bintool
from binman import cbfs_util
-from binman import elf
from patman import command
+from binman import elf
+from binman import entry
from patman import tout
# These are imported if needed since they import libfdt
@@ -215,6 +216,7 @@ def ReadEntry(image_fname, entry_path, decomp=True):
from binman.image import Image
image = Image.FromFile(image_fname)
+ image.CollectBintools()
entry = image.FindEntryPath(entry_path)
return entry.ReadData(decomp)
@@ -251,6 +253,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
List of EntryInfo records that were written
"""
image = Image.FromFile(image_fname)
+ image.CollectBintools()
if alt_format == 'list':
ShowAltFormats(image)
@@ -370,6 +373,7 @@ def WriteEntry(image_fname, entry_path, data, do_compress=True,
"""
tout.info("Write entry '%s', file '%s'" % (entry_path, image_fname))
image = Image.FromFile(image_fname)
+ image.CollectBintools()
entry = image.FindEntryPath(entry_path)
WriteEntryToImage(image, entry, data, do_compress=do_compress,
allow_resize=allow_resize, write_map=write_map)
@@ -507,8 +511,8 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
# without changing the device-tree size, thus ensuring that our
# entry offsets remain the same.
for image in images.values():
- image.CollectBintools()
image.gen_entries()
+ image.CollectBintools()
if update_fdt:
image.AddMissingProperties(True)
image.ProcessFdt(dtb)
@@ -717,6 +721,13 @@ def Binman(args):
bintool.Bintool.set_missing_list(
args.force_missing_bintools.split(',') if
args.force_missing_bintools else None)
+
+ # Create the directory here instead of Entry.check_fake_fname()
+ # since that is called from a threaded context so different threads
+ # may race to create the directory
+ if args.fake_ext_blobs:
+ entry.Entry.create_fake_dir()
+
for image in images.values():
invalid |= ProcessImage(image, args.update_fdt, args.map,
allow_missing=args.allow_missing,
diff --git a/tools/binman/elf_test.py b/tools/binman/elf_test.py
index 5a51c64cfe..75b867c2be 100644
--- a/tools/binman/elf_test.py
+++ b/tools/binman/elf_test.py
@@ -122,6 +122,8 @@ class TestElf(unittest.TestCase):
def testOutsideFile(self):
"""Test a symbol which extends outside the entry area is detected"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
entry = FakeEntry(10)
section = FakeSection()
elf_fname = self.ElfTestFile('u_boot_binman_syms')
@@ -147,6 +149,8 @@ class TestElf(unittest.TestCase):
Only 32 and 64 bits are supported, since we need to store an offset
into the image.
"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
entry = FakeEntry(10)
section = FakeSection()
elf_fname =self.ElfTestFile('u_boot_binman_syms_size')
@@ -161,6 +165,8 @@ class TestElf(unittest.TestCase):
This should produce -1 values for all thress symbols, taking up the
first 16 bytes of the image.
"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
entry = FakeEntry(28)
section = FakeSection(sym_value=None)
elf_fname = self.ElfTestFile('u_boot_binman_syms')
@@ -172,6 +178,8 @@ class TestElf(unittest.TestCase):
def testDebug(self):
"""Check that enabling debug in the elf module produced debug output"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
try:
tout.init(tout.DEBUG)
entry = FakeEntry(24)
@@ -281,6 +289,8 @@ class TestElf(unittest.TestCase):
def test_read_segments_bad_data(self):
"""Test for read_loadable_segments() with an invalid ELF file"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
fname = self.ElfTestFile('embed_data')
with self.assertRaises(ValueError) as e:
elf.read_loadable_segments(tools.get_bytes(100, 100))
@@ -288,6 +298,8 @@ class TestElf(unittest.TestCase):
def test_get_file_offset(self):
"""Test GetFileOffset() gives the correct file offset for a symbol"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
fname = self.ElfTestFile('embed_data')
syms = elf.GetSymbols(fname, ['embed'])
addr = syms['embed'].address
@@ -314,6 +326,8 @@ class TestElf(unittest.TestCase):
def test_get_symbol_from_address(self):
"""Test GetSymbolFromAddress()"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
fname = self.ElfTestFile('elf_sections')
sym_name = 'calculate'
syms = elf.GetSymbols(fname, [sym_name])
diff --git a/tools/binman/entries.rst b/tools/binman/entries.rst
index 782bff1f8d..b3613d7cbd 100644
--- a/tools/binman/entries.rst
+++ b/tools/binman/entries.rst
@@ -216,6 +216,9 @@ This is a blob containing a device tree. The contents of the blob are
obtained from the list of available device-tree files, managed by the
'state' module.
+Additional Properties / Entry arguments:
+ - prepend: Header type to use:
+ length: 32-bit length header
.. _etype_blob_ext:
@@ -447,6 +450,9 @@ listed entries are combined to form this entry. This serves as a useful
base class for entry types which need to process data from elsewhere in
the image, not necessarily child entries.
+The entries can generally be anywhere in the same image, even if they are in
+a different section from this entry.
+
.. _etype_cros_ec_rw:
@@ -1166,11 +1172,12 @@ Entry: mkimage: Binary produced by mkimage
------------------------------------------
Properties / Entry arguments:
- - datafile: Filename for -d argument
- - args: Other arguments to pass
+ - args: Arguments to pass
+ - data-to-imagename: Indicates that the -d data should be passed in as
+ the image name also (-n)
-The data passed to mkimage is collected from subnodes of the mkimage node,
-e.g.::
+The data passed to mkimage via the -d flag is collected from subnodes of the
+mkimage node, e.g.::
mkimage {
args = "-n test -T imximage";
@@ -1179,9 +1186,24 @@ e.g.::
};
};
-This calls mkimage to create an imximage with u-boot-spl.bin as the input
-file. The output from mkimage then becomes part of the image produced by
-binman.
+This calls mkimage to create an imximage with `u-boot-spl.bin` as the data
+file, with mkimage being called like this::
+
+ mkimage -d <data_file> -n test -T imximage <output_file>
+
+The output from mkimage then becomes part of the image produced by
+binman. If you need to put multiple things in the data file, you can use
+a section, or just multiple subnodes like this::
+
+ mkimage {
+ args = "-n test -T imximage";
+
+ u-boot-spl {
+ };
+
+ u-boot-tpl {
+ };
+ };
To use CONFIG options in the arguments, use a string list instead, as in
this example which also produces four arguments::
@@ -1193,7 +1215,38 @@ this example which also produces four arguments::
};
};
+If you need to pass the input data in with the -n argument as well, then use
+the 'data-to-imagename' property::
+
+ mkimage {
+ args = "-T imximage";
+ data-to-imagename;
+
+ u-boot-spl {
+ };
+ };
+
+That will pass the data to mkimage both as the data file (with -d) and as
+the image name (with -n).
+
+
+If you need to pass different data in with -n, then use an imagename subnode::
+
+ mkimage {
+ args = "-T imximage";
+
+ imagename {
+ blob {
+ filename = "spl/u-boot-spl.cfgout"
+ };
+ };
+
+ u-boot-spl {
+ };
+ };
+This will pass in u-boot-spl as the input data and the .cfgout file as the
+-n data.
.. _etype_opensbi:
@@ -1231,7 +1284,8 @@ Entry: pre-load: Pre load image header
--------------------------------------
Properties / Entry arguments:
- - pre-load-key-path: Path of the directory that store key (provided by the environment variable PRE_LOAD_KEY_PATH)
+ - pre-load-key-path: Path of the directory that stores the key (provided by
+ the environment variable PRE_LOAD_KEY_PATH)
- content: List of phandles to entries to sign
- algo-name: Hash and signature algo to use for the signature
- padding-name: Name of the padding (pkcs-1.5 or pss)
diff --git a/tools/binman/entry.py b/tools/binman/entry.py
index e3767aefa7..63ec5cea3b 100644
--- a/tools/binman/entry.py
+++ b/tools/binman/entry.py
@@ -9,9 +9,9 @@ import importlib
import os
import pathlib
import sys
+import time
from binman import bintool
-from binman import comp_util
from dtoc import fdt_util
from patman import tools
from patman.tools import to_hex, to_hex_size
@@ -82,7 +82,13 @@ class Entry(object):
missing_bintools: List of missing bintools for this entry
update_hash: True if this entry's "hash" subnode should be
updated with a hash of the entry contents
+ comp_bintool: Bintool used to compress and decompress this entry's data
+ fake_fname: Fake filename, if one was created, else None
+ required_props (list of str): Properties which must be present. This can
+ be added to by subclasses
"""
+ fake_dir = None
+
def __init__(self, section, etype, node, name_prefix=''):
# Put this here to allow entry-docs and help to work without libfdt
global state
@@ -116,6 +122,9 @@ class Entry(object):
self.bintools = {}
self.missing_bintools = []
self.update_hash = True
+ self.fake_fname = None
+ self.required_props = []
+ self.comp_bintool = None
@staticmethod
def FindEntryClass(etype, expanded):
@@ -233,6 +242,7 @@ class Entry(object):
This reads all the fields we recognise from the node, ready for use.
"""
+ self.ensure_props()
if 'pos' in self._node.props:
self.Raise("Please use 'offset' instead of 'pos'")
if 'expand-size' in self._node.props:
@@ -670,6 +680,7 @@ class Entry(object):
self.WriteMapLine(fd, indent, self.name, self.offset, self.size,
self.image_pos)
+ # pylint: disable=assignment-from-none
def GetEntries(self):
"""Return a list of entries contained by this entry
@@ -679,6 +690,28 @@ class Entry(object):
"""
return None
+ def FindEntryByNode(self, find_node):
+ """Find a node in an entry, searching all subentries
+
+ This does a recursive search.
+
+ Args:
+ find_node (fdt.Node): Node to find
+
+ Returns:
+ Entry: entry, if found, else None
+ """
+ entries = self.GetEntries()
+ if entries:
+ for entry in entries.values():
+ if entry._node == find_node:
+ return entry
+ found = entry.FindEntryByNode(find_node)
+ if found:
+ return found
+
+ return None
+
def GetArg(self, name, datatype=str):
"""Get the value of an entry argument or device-tree-node property
@@ -1014,12 +1047,14 @@ features to produce new behaviours.
bool: True if the blob was faked, False if not
"""
if self.allow_fake and not pathlib.Path(fname).is_file():
- outfname = tools.get_output_filename(os.path.basename(fname))
- with open(outfname, "wb") as out:
- out.truncate(size)
+ if not self.fake_fname:
+ outfname = os.path.join(self.fake_dir, os.path.basename(fname))
+ with open(outfname, "wb") as out:
+ out.truncate(size)
+ tout.info(f"Entry '{self._node.path}': Faked blob '{outfname}'")
+ self.fake_fname = outfname
self.faked = True
- tout.info(f"Entry '{self._node.path}': Faked file '{outfname}'")
- return outfname, True
+ return self.fake_fname, True
return fname, False
def CheckFakedBlobs(self, faked_blobs_list):
@@ -1047,7 +1082,8 @@ features to produce new behaviours.
Args:
bintool (Bintool): Bintool that was missing
"""
- self.missing_bintools.append(bintool)
+ if bintool not in self.missing_bintools:
+ self.missing_bintools.append(bintool)
def check_missing_bintools(self, missing_list):
"""Check if any entries in this section have missing bintools
@@ -1057,7 +1093,10 @@ features to produce new behaviours.
Args:
missing_list: List of Bintool objects to be added to
"""
- missing_list += self.missing_bintools
+ for bintool in self.missing_bintools:
+ if bintool not in missing_list:
+ missing_list.append(bintool)
+
def GetHelpTags(self):
"""Get the tags use for missing-blob help
@@ -1074,12 +1113,39 @@ features to produce new behaviours.
indata: Data to compress
Returns:
- Compressed data (first word is the compressed size)
+ Compressed data
"""
self.uncomp_data = indata
if self.compress != 'none':
self.uncomp_size = len(indata)
- data = comp_util.compress(indata, self.compress)
+ if self.comp_bintool.is_present():
+ data = self.comp_bintool.compress(indata)
+ else:
+ self.record_missing_bintool(self.comp_bintool)
+ data = tools.get_bytes(0, 1024)
+ else:
+ data = indata
+ return data
+
+ def DecompressData(self, indata):
+ """Decompress data according to the entry's compression method
+
+ Args:
+ indata: Data to decompress
+
+ Returns:
+ Decompressed data
+ """
+ if self.compress != 'none':
+ if self.comp_bintool.is_present():
+ data = self.comp_bintool.decompress(indata)
+ self.uncomp_size = len(data)
+ else:
+ self.record_missing_bintool(self.comp_bintool)
+ data = tools.get_bytes(0, 1024)
+ else:
+ data = indata
+ self.uncomp_data = data
return data
@classmethod
@@ -1119,8 +1185,18 @@ features to produce new behaviours.
Args:
btools (dict of Bintool):
+
+ Raises:
+ ValueError if compression algorithm is not supported
"""
- pass
+ algo = self.compress
+ if algo != 'none':
+ algos = ['bzip2', 'gzip', 'lz4', 'lzma', 'lzo', 'xz', 'zstd']
+ if algo not in algos:
+ raise ValueError("Unknown algorithm '%s'" % algo)
+ names = {'lzma': 'lzma_alone', 'lzo': 'lzop'}
+ name = names.get(self.compress, self.compress)
+ self.comp_bintool = self.AddBintool(btools, name)
@classmethod
def AddBintool(self, tools, name):
@@ -1169,3 +1245,27 @@ features to produce new behaviours.
fname = tools.get_output_filename(f'{prefix}.{uniq}')
tools.write_file(fname, data)
return data, fname, uniq
+
+ @classmethod
+ def create_fake_dir(cls):
+ """Create the directory for fake files"""
+ cls.fake_dir = tools.get_output_filename('binman-fake')
+ if not os.path.exists(cls.fake_dir):
+ os.mkdir(cls.fake_dir)
+ tout.notice(f"Fake-blob dir is '{cls.fake_dir}'")
+
+ def ensure_props(self):
+ """Raise an exception if properties are missing
+
+ This checks the properties listed in self.required_props.
+
+ Raises:
+ ValueError: Any property is missing
+ """
+ not_present = []
+ for prop in self.required_props:
+ if not prop in self._node.props:
+ not_present.append(prop)
+ if not_present:
+ self.Raise(f"'{self.etype}' entry is missing properties: {' '.join(not_present)}")
diff --git a/tools/binman/entry_test.py b/tools/binman/entry_test.py
index 1d60076be1..aa470c5816 100644
--- a/tools/binman/entry_test.py
+++ b/tools/binman/entry_test.py
@@ -105,6 +105,15 @@ class TestEntry(unittest.TestCase):
self.assertTrue(isinstance(ent, Entry_blob))
self.assertEquals('missing', ent.etype)
+ def testDecompressData(self):
+ """Test the DecompressData() method of the base class"""
+ base = entry.Entry.Create(None, self.GetNode(), 'blob-dtb')
+ base.compress = 'lz4'
+ bintools = {}
+ base.comp_bintool = base.AddBintool(bintools, '_testing')
+ self.assertEquals(tools.get_bytes(0, 1024), base.CompressData(b'abc'))
+ self.assertEquals(tools.get_bytes(0, 1024), base.DecompressData(b'abc'))
+
if __name__ == "__main__":
unittest.main()
diff --git a/tools/binman/etype/blob_dtb.py b/tools/binman/etype/blob_dtb.py
index 4159e3032a..6a3fbc4775 100644
--- a/tools/binman/etype/blob_dtb.py
+++ b/tools/binman/etype/blob_dtb.py
@@ -7,6 +7,8 @@
from binman.entry import Entry
from binman.etype.blob import Entry_blob
+from dtoc import fdt_util
+import struct
# This is imported if needed
state = None
@@ -17,6 +19,9 @@ class Entry_blob_dtb(Entry_blob):
This is a blob containing a device tree. The contents of the blob are
obtained from the list of available device-tree files, managed by the
'state' module.
+
+ Additional attributes:
+ prepend: Header used (e.g. 'length')
"""
def __init__(self, section, etype, node):
# Put this here to allow entry-docs and help to work without libfdt
@@ -24,6 +29,14 @@ class Entry_blob_dtb(Entry_blob):
from binman import state
super().__init__(section, etype, node)
+ self.prepend = None
+
+ def ReadNode(self):
+ super().ReadNode()
+ self.prepend = fdt_util.GetString(self._node, 'prepend')
+ if self.prepend and self.prepend not in ['length']:
+ self.Raise("Invalid prepend in '%s': '%s'" %
+ (self._node.name, self.prepend))
def ObtainContents(self):
"""Get the device-tree from the list held by the 'state' module"""
@@ -34,6 +47,10 @@ class Entry_blob_dtb(Entry_blob):
def ProcessContents(self):
"""Re-read the DTB contents so that we get any calculated properties"""
_, indata = state.GetFdtContents(self.GetFdtEtype())
+
+ if self.compress == 'zstd' and self.prepend != 'length':
+ self.Raise('The zstd compression requires a length header')
+
data = self.CompressData(indata)
return self.ProcessContentsUpdate(data)
@@ -58,3 +75,17 @@ class Entry_blob_dtb(Entry_blob):
# will still return the old contents
state.UpdateFdtContents(self.GetFdtEtype(), data)
return ok
+
+ def CompressData(self, indata):
+ data = super().CompressData(indata)
+ if self.prepend == 'length':
+ hdr = struct.pack('<I', len(data))
+ data = hdr + data
+ return data
+
+ def DecompressData(self, indata):
+ if self.prepend == 'length':
+ data_len = struct.unpack('<I', indata[:4])[0]
+ indata = indata[4:4 + data_len]
+ data = super().DecompressData(indata)
+ return data
diff --git a/tools/binman/etype/cbfs.py b/tools/binman/etype/cbfs.py
index 4a1837f26c..832f8d038f 100644
--- a/tools/binman/etype/cbfs.py
+++ b/tools/binman/etype/cbfs.py
@@ -296,3 +296,8 @@ class Entry_cbfs(Entry):
# so that child.data is used to pack into the FIP.
self.ObtainContents(skip_entry=child)
return True
+
+ def AddBintools(self, btools):
+ super().AddBintools(btools)
+ for entry in self._entries.values():
+ entry.AddBintools(btools)
diff --git a/tools/binman/etype/collection.py b/tools/binman/etype/collection.py
index 442b40b48b..c532aafe3e 100644
--- a/tools/binman/etype/collection.py
+++ b/tools/binman/etype/collection.py
@@ -21,6 +21,9 @@ class Entry_collection(Entry):
listed entries are combined to form this entry. This serves as a useful
base class for entry types which need to process data from elsewhere in
the image, not necessarily child entries.
+
+ The entries can generally be anywhere in the same image, even if they are in
+ a different section from this entry.
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)
diff --git a/tools/binman/etype/fill.py b/tools/binman/etype/fill.py
index cd38279919..c91d0152a8 100644
--- a/tools/binman/etype/fill.py
+++ b/tools/binman/etype/fill.py
@@ -23,11 +23,10 @@ class Entry_fill(Entry):
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)
+ self.required_props = ['size']
def ReadNode(self):
super().ReadNode()
- if self.size is None:
- self.Raise("'fill' entry must have a size property")
self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0)
def ObtainContents(self):
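[Editor's note] With ensure_props() in the base class, entry types now declare mandatory properties instead of open-coding the check, as fill does above. A sketch of the pattern for a hypothetical entry type (Entry_myblob and its properties are illustrative)::

    from binman.entry import Entry

    class Entry_myblob(Entry):
        """Hypothetical entry type requiring two properties"""
        def __init__(self, section, etype, node):
            super().__init__(section, etype, node)
            # Entry.ReadNode() calls ensure_props(), which raises if any
            # of these properties are missing from the node
            self.required_props = ['size', 'filename']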
diff --git a/tools/binman/etype/gbb.py b/tools/binman/etype/gbb.py
index 7394e4e5d3..ba2a362bb5 100644
--- a/tools/binman/etype/gbb.py
+++ b/tools/binman/etype/gbb.py
@@ -100,4 +100,5 @@ class Entry_gbb(Entry):
return True
def AddBintools(self, btools):
+ super().AddBintools(btools)
self.futility = self.AddBintool(btools, 'futility')
diff --git a/tools/binman/etype/intel_ifwi.py b/tools/binman/etype/intel_ifwi.py
index 4fa7d636fe..04fad401ee 100644
--- a/tools/binman/etype/intel_ifwi.py
+++ b/tools/binman/etype/intel_ifwi.py
@@ -144,4 +144,5 @@ class Entry_intel_ifwi(Entry_blob_ext):
entry.WriteSymbols(self)
def AddBintools(self, btools):
+ super().AddBintools(btools)
self.ifwitool = self.AddBintool(btools, 'ifwitool')
diff --git a/tools/binman/etype/mkimage.py b/tools/binman/etype/mkimage.py
index 5f6def2287..ddbd9cec65 100644
--- a/tools/binman/etype/mkimage.py
+++ b/tools/binman/etype/mkimage.py
@@ -15,11 +15,12 @@ class Entry_mkimage(Entry):
"""Binary produced by mkimage
Properties / Entry arguments:
- - datafile: Filename for -d argument
- - args: Other arguments to pass
+ - args: Arguments to pass
+ - data-to-imagename: Indicates that the -d data should be passed in as
+ the image name also (-n)
- The data passed to mkimage is collected from subnodes of the mkimage node,
- e.g.::
+ The data passed to mkimage via the -d flag is collected from subnodes of the
+ mkimage node, e.g.::
mkimage {
args = "-n test -T imximage";
@@ -28,9 +29,27 @@ class Entry_mkimage(Entry):
};
};
- This calls mkimage to create an imximage with u-boot-spl.bin as the input
- file. The output from mkimage then becomes part of the image produced by
- binman.
+ This calls mkimage to create an imximage with `u-boot-spl.bin` as the data
+ file, with mkimage being called like this::
+
+ mkimage -d <data_file> -n test -T imximage <output_file>
+
+ The output from mkimage then becomes part of the image produced by
+ binman. If you need to put multiple things in the data file, you can use
+ a section, or just multiple subnodes like this::
+
+ mkimage {
+ args = "-n test -T imximage";
+
+ u-boot-spl {
+ };
+
+ u-boot-tpl {
+ };
+ };
+
+ Note that binman places the contents (here SPL and TPL) into a single file
+ and passes that to mkimage using the -d option.
To use CONFIG options in the arguments, use a string list instead, as in
this example which also produces four arguments::
@@ -42,23 +61,85 @@ class Entry_mkimage(Entry):
};
};
+ If you need to pass the input data in with the -n argument as well, then use
+ the 'data-to-imagename' property::
+
+ mkimage {
+ args = "-T imximage";
+ data-to-imagename;
+
+ u-boot-spl {
+ };
+ };
+
+ That will pass the data to mkimage both as the data file (with -d) and as
+ the image name (with -n). In both cases, a filename is passed as the
+ argument, with the actual data being in that file.
+
+ If you need to pass different data in with -n, then use an `imagename` subnode::
+
+ mkimage {
+ args = "-T imximage";
+
+ imagename {
+ blob {
+ filename = "spl/u-boot-spl.cfgout"
+ };
+ };
+
+ u-boot-spl {
+ };
+ };
+
+ This will pass in u-boot-spl as the input data and the .cfgout file as the
+ -n data.
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)
- self._args = fdt_util.GetArgs(self._node, 'args')
self._mkimage_entries = OrderedDict()
+ self._imagename = None
self.align_default = None
+
+ def ReadNode(self):
+ super().ReadNode()
+ self._args = fdt_util.GetArgs(self._node, 'args')
+ self._data_to_imagename = fdt_util.GetBool(self._node,
+ 'data-to-imagename')
+ if self._data_to_imagename and self._node.FindNode('imagename'):
+ self.Raise('Cannot use both imagename node and data-to-imagename')
self.ReadEntries()
+ def ReadEntries(self):
+ """Read the subnodes to find out what should go in this image"""
+ for node in self._node.subnodes:
+ entry = Entry.Create(self, node)
+ entry.ReadNode()
+ if entry.name == 'imagename':
+ self._imagename = entry
+ else:
+ self._mkimage_entries[entry.name] = entry
+
def ObtainContents(self):
# Use a non-zero size for any fake files to keep mkimage happy
+ # Note that testMkimageImagename() relies on this 'mkimage' parameter
data, input_fname, uniq = self.collect_contents_to_file(
self._mkimage_entries.values(), 'mkimage', 1024)
if data is None:
return False
+ if self._imagename:
+ image_data, imagename_fname, _ = self.collect_contents_to_file(
+ [self._imagename], 'mkimage-n', 1024)
+ if image_data is None:
+ return False
output_fname = tools.get_output_filename('mkimage-out.%s' % uniq)
- if self.mkimage.run_cmd('-d', input_fname, *self._args,
- output_fname) is not None:
+
+ args = ['-d', input_fname]
+ if self._data_to_imagename:
+ args += ['-n', input_fname]
+ elif self._imagename:
+ args += ['-n', imagename_fname]
+ args += self._args + [output_fname]
+ if self.mkimage.run_cmd(*args) is not None:
self.SetContents(tools.read_file(output_fname))
else:
# Bintool is missing; just use the input data as the output
@@ -67,12 +148,12 @@ class Entry_mkimage(Entry):
return True
- def ReadEntries(self):
- """Read the subnodes to find out what should go in this image"""
- for node in self._node.subnodes:
- entry = Entry.Create(self, node)
- entry.ReadNode()
- self._mkimage_entries[entry.name] = entry
+ def GetEntries(self):
+ # Make a copy so we don't change the original
+ entries = OrderedDict(self._mkimage_entries)
+ if self._imagename:
+ entries['imagename'] = self._imagename
+ return entries
def SetAllowMissing(self, allow_missing):
"""Set whether a section allows missing external blobs
@@ -83,6 +164,8 @@ class Entry_mkimage(Entry):
self.allow_missing = allow_missing
for entry in self._mkimage_entries.values():
entry.SetAllowMissing(allow_missing)
+ if self._imagename:
+ self._imagename.SetAllowMissing(allow_missing)
def SetAllowFakeBlob(self, allow_fake):
"""Set whether the sub nodes allows to create a fake blob
@@ -92,6 +175,8 @@ class Entry_mkimage(Entry):
"""
for entry in self._mkimage_entries.values():
entry.SetAllowFakeBlob(allow_fake)
+ if self._imagename:
+ self._imagename.SetAllowFakeBlob(allow_fake)
def CheckFakedBlobs(self, faked_blobs_list):
"""Check if any entries in this section have faked external blobs
@@ -103,6 +188,9 @@ class Entry_mkimage(Entry):
"""
for entry in self._mkimage_entries.values():
entry.CheckFakedBlobs(faked_blobs_list)
+ if self._imagename:
+ self._imagename.CheckFakedBlobs(faked_blobs_list)
def AddBintools(self, btools):
+ super().AddBintools(btools)
self.mkimage = self.AddBintool(btools, 'mkimage')
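[Editor's note] Putting the pieces together, the resulting mkimage invocation differs only in how -n is supplied (filenames are illustrative)::

    mkimage -d <data_file> <args...> <output_file>                      # default
    mkimage -d <data_file> -n <data_file> <args...> <output_file>       # data-to-imagename
    mkimage -d <data_file> -n <imagename_file> <args...> <output_file>  # imagename subnode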
diff --git a/tools/binman/etype/pre_load.py b/tools/binman/etype/pre_load.py
index 245ee75525..b622281159 100644
--- a/tools/binman/etype/pre_load.py
+++ b/tools/binman/etype/pre_load.py
@@ -37,7 +37,8 @@ class Entry_pre_load(Entry_collection):
"""Pre load image header
Properties / Entry arguments:
- - pre-load-key-path: Path of the directory that store key (provided by the environment variable PRE_LOAD_KEY_PATH)
+ - pre-load-key-path: Path of the directory that stores the key (provided by
+ the environment variable PRE_LOAD_KEY_PATH)
- content: List of phandles to entries to sign
- algo-name: Hash and signature algo to use for the signature
- padding-name: Name of the padding (pkcs-1.5 or pss)
diff --git a/tools/binman/etype/section.py b/tools/binman/etype/section.py
index bd67238b91..621950893f 100644
--- a/tools/binman/etype/section.py
+++ b/tools/binman/etype/section.py
@@ -13,7 +13,6 @@ import concurrent.futures
import re
import sys
-from binman import comp_util
from binman.entry import Entry
from binman import state
from dtoc import fdt_util
@@ -506,10 +505,10 @@ class Entry_section(Entry):
node = self._node.GetFdt().LookupPhandle(phandle)
if not node:
source_entry.Raise("Cannot find node for phandle %d" % phandle)
- for entry in self._entries.values():
- if entry._node == node:
- return entry.GetData(required)
- source_entry.Raise("Cannot find entry for node '%s'" % node.name)
+ entry = self.FindEntryByNode(node)
+ if not entry:
+ source_entry.Raise("Cannot find entry for node '%s'" % node.name)
+ return entry.GetData(required)
def LookupSymbol(self, sym_name, optional, msg, base_addr, entries=None):
"""Look up a symbol in an ELF file
@@ -777,7 +776,7 @@ class Entry_section(Entry):
data = parent_data[offset:offset + child.size]
if decomp:
indata = data
- data = comp_util.decompress(indata, child.compress)
+ data = child.DecompressData(indata)
if child.uncomp_size:
tout.info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
(child.GetPath(), len(indata), child.compress,
@@ -899,5 +898,6 @@ class Entry_section(Entry):
entry.CheckAltFormats(alt_formats)
def AddBintools(self, btools):
+ super().AddBintools(btools)
for entry in self._entries.values():
entry.AddBintools(btools)
diff --git a/tools/binman/etype/vblock.py b/tools/binman/etype/vblock.py
index c3ef08bbb2..04cb7228aa 100644
--- a/tools/binman/etype/vblock.py
+++ b/tools/binman/etype/vblock.py
@@ -98,4 +98,5 @@ class Entry_vblock(Entry_collection):
return self.ProcessContentsUpdate(data)
def AddBintools(self, btools):
+ super().AddBintools(btools)
self.futility = self.AddBintool(btools, 'futility')
diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py
index fa1f421c05..0b1774046f 100644
--- a/tools/binman/ftest.py
+++ b/tools/binman/ftest.py
@@ -23,7 +23,6 @@ import urllib.error
from binman import bintool
from binman import cbfs_util
from binman import cmdline
-from binman import comp_util
from binman import control
from binman import elf
from binman import elf_test
@@ -107,6 +106,8 @@ BASE_DTB_PROPS = ['offset', 'size', 'image-pos']
# Extra properties expected to be in the device tree when allow-repack is used
REPACK_DTB_PROPS = ['orig-offset', 'orig-size']
+# Supported compression bintools
+COMP_BINTOOLS = ['bzip2', 'gzip', 'lz4', 'lzma_alone', 'lzop', 'xz', 'zstd']
class TestFunctional(unittest.TestCase):
"""Functional tests for binman
@@ -212,7 +213,9 @@ class TestFunctional(unittest.TestCase):
TestFunctional._MakeInputFile('tee.elf',
tools.read_file(cls.ElfTestFile('elf_sections')))
- cls.have_lz4 = comp_util.HAVE_LZ4
+ cls.comp_bintools = {}
+ for name in COMP_BINTOOLS:
+ cls.comp_bintools[name] = bintool.Bintool.create(name)
@classmethod
def tearDownClass(cls):
@@ -242,9 +245,13 @@ class TestFunctional(unittest.TestCase):
cls.toolpath = toolpath
cls.verbosity = verbosity
+ def _CheckBintool(self, bintool):
+ if not bintool.is_present():
+ self.skipTest('%s not available' % bintool.name)
+
def _CheckLz4(self):
- if not self.have_lz4:
- self.skipTest('lz4 --no-frame-crc not available')
+ bintool = self.comp_bintools['lz4']
+ self._CheckBintool(bintool)
def _CleanupOutputDir(self):
"""Remove the temporary output directory"""
@@ -1693,7 +1700,7 @@ class TestFunctional(unittest.TestCase):
"""Test for an fill entry type with no size"""
with self.assertRaises(ValueError) as e:
self._DoReadFile('070_fill_no_size.dts')
- self.assertIn("'fill' entry must have a size property",
+ self.assertIn("'fill' entry is missing properties: size",
str(e.exception))
def _HandleGbbCommand(self, pipe_list):
@@ -1967,7 +1974,8 @@ class TestFunctional(unittest.TestCase):
self._ResetDtbs()
def _decompress(self, data):
- return comp_util.decompress(data, 'lz4')
+ bintool = self.comp_bintools['lz4']
+ return bintool.decompress(data)
def testCompress(self):
"""Test compression of blobs"""
@@ -2855,8 +2863,10 @@ class TestFunctional(unittest.TestCase):
def testExtractCbfsRaw(self):
"""Test extracting CBFS compressed data without decompressing it"""
+ bintool = self.comp_bintools['lzma_alone']
+ self._CheckBintool(bintool)
data = self._RunExtractCmd('section/cbfs/u-boot-dtb', decomp=False)
- dtb = comp_util.decompress(data, 'lzma', with_header=False)
+ dtb = bintool.decompress(data)
self.assertEqual(EXTRACT_DTB_SIZE, len(dtb))
def testExtractBadEntry(self):
@@ -4412,6 +4422,15 @@ class TestFunctional(unittest.TestCase):
}
self.assertEqual(expected, props)
+ def testLz4Missing(self):
+ """Test that binman still produces an image if lz4 is missing"""
+ with test_util.capture_sys_output() as (_, stderr):
+ self._DoTestFile('185_compress_section.dts',
+ force_missing_bintools='lz4')
+ err = stderr.getvalue()
+ self.assertRegex(err,
+ "Image 'main-section'.*missing bintools.*: lz4")
+
def testCompressExtra(self):
"""Test compression of a section with no fixed size"""
self._CheckLz4()
@@ -4427,15 +4446,18 @@ class TestFunctional(unittest.TestCase):
rest = base[len(U_BOOT_DATA):]
# Check compressed data
- section1 = self._decompress(rest)
- expect1 = comp_util.compress(COMPRESS_DATA + U_BOOT_DATA, 'lz4')
- self.assertEquals(expect1, rest[:len(expect1)])
+ bintool = self.comp_bintools['lz4']
+ expect1 = bintool.compress(COMPRESS_DATA + U_BOOT_DATA)
+ data1 = rest[:len(expect1)]
+ section1 = self._decompress(data1)
+ self.assertEquals(expect1, data1)
self.assertEquals(COMPRESS_DATA + U_BOOT_DATA, section1)
rest1 = rest[len(expect1):]
- section2 = self._decompress(rest1)
- expect2 = comp_util.compress(COMPRESS_DATA + COMPRESS_DATA, 'lz4')
- self.assertEquals(expect2, rest1[:len(expect2)])
+ expect2 = bintool.compress(COMPRESS_DATA + COMPRESS_DATA)
+ data2 = rest1[:len(expect2)]
+ section2 = self._decompress(data2)
+ self.assertEquals(expect2, data2)
self.assertEquals(COMPRESS_DATA + COMPRESS_DATA, section2)
rest2 = rest1[len(expect2):]
@@ -4807,6 +4829,8 @@ class TestFunctional(unittest.TestCase):
def testUpdateFdtInElf(self):
"""Test that we can update the devicetree in an ELF file"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
infile = elf_fname = self.ElfTestFile('u_boot_binman_embed')
outfile = os.path.join(self._indir, 'u-boot.out')
begin_sym = 'dtb_embed_begin'
@@ -4858,6 +4882,8 @@ class TestFunctional(unittest.TestCase):
def testUpdateFdtInElfNoSyms(self):
"""Test that missing symbols are detected with --update-fdt-in-elf"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
infile = elf_fname = self.ElfTestFile('u_boot_binman_embed')
outfile = ''
begin_sym = 'wrong_begin'
@@ -4871,6 +4897,8 @@ class TestFunctional(unittest.TestCase):
def testUpdateFdtInElfTooSmall(self):
"""Test that an over-large dtb is detected with --update-fdt-in-elf"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
infile = elf_fname = self.ElfTestFile('u_boot_binman_embed_sm')
outfile = os.path.join(self._indir, 'u-boot.out')
begin_sym = 'dtb_embed_begin'
@@ -5209,15 +5237,6 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self._DoBinman(*args)
self.assertIn('failed to fetch with all methods', stdout.getvalue())
- def testInvalidCompress(self):
- with self.assertRaises(ValueError) as e:
- comp_util.compress(b'', 'invalid')
- self.assertIn("Unknown algorithm 'invalid'", str(e.exception))
-
- with self.assertRaises(ValueError) as e:
- comp_util.decompress(b'1234', 'invalid')
- self.assertIn("Unknown algorithm 'invalid'", str(e.exception))
-
def testBintoolDocs(self):
"""Test for creation of bintool documentation"""
with test_util.capture_sys_output() as (stdout, stderr):
@@ -5344,6 +5363,8 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSplitElf(self):
"""Test an image with an FIT with an split-elf operation"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
'default-dt': 'test-fdt2',
@@ -5421,6 +5442,8 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSplitElfBadElf(self):
"""Test a FIT split-elf operation with an invalid ELF file"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
TestFunctional._MakeInputFile('bad.elf', tools.get_bytes(100, 100))
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
@@ -5440,6 +5463,8 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSplitElfBadDirective(self):
"""Test a FIT split-elf invalid fit,xxx directive in an image node"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
err = self._check_bad_fit('227_fit_bad_dir.dts')
self.assertIn(
"Node '/binman/fit': subnode 'images/@atf-SEQ': Unknown directive 'fit,something'",
@@ -5447,13 +5472,24 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSplitElfBadDirectiveConfig(self):
"""Test a FIT split-elf with invalid fit,xxx directive in config"""
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
err = self._check_bad_fit('228_fit_bad_dir_config.dts')
self.assertEqual(
"Node '/binman/fit': subnode 'configurations/@config-SEQ': Unknown directive 'fit,config'",
err)
def checkFitSplitElf(self, **kwargs):
- """Test an split-elf FIT with a missing ELF file"""
+ """Test an split-elf FIT with a missing ELF file
+
+ Args:
+ kwargs (dict of str): Arguments to pass to _DoTestFile()
+
+ Returns:
+ tuple:
+ str: stdout result
+ str: stderr result
+ """
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
'default-dt': 'test-fdt2',
@@ -5464,23 +5500,36 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
with test_util.capture_sys_output() as (stdout, stderr):
self._DoTestFile(
'226_fit_split_elf.dts', entry_args=entry_args,
- extra_indirs=[test_subdir], **kwargs)
- err = stderr.getvalue()
- return err
+ extra_indirs=[test_subdir], verbosity=3, **kwargs)
+ out = stdout.getvalue()
+ err = stderr.getvalue()
+ return out, err
def testFitSplitElfMissing(self):
"""Test an split-elf FIT with a missing ELF file"""
- err = self.checkFitSplitElf(allow_missing=True)
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
+ out, err = self.checkFitSplitElf(allow_missing=True)
self.assertRegex(
err,
"Image '.*' is missing external blobs and is non-functional: .*")
+ self.assertNotRegex(out, '.*Faked blob.*')
+ fname = tools.get_output_filename('binman-fake/missing.elf')
+ self.assertFalse(os.path.exists(fname))
def testFitSplitElfFaked(self):
"""Test an split-elf FIT with faked ELF file"""
- err = self.checkFitSplitElf(allow_missing=True, allow_fake_blobs=True)
+ if not elf.ELF_TOOLS:
+ self.skipTest('Python elftools not available')
+ out, err = self.checkFitSplitElf(allow_missing=True, allow_fake_blobs=True)
self.assertRegex(
err,
"Image '.*' is missing external blobs and is non-functional: .*")
+ self.assertRegex(
+ out,
+ "Entry '/binman/fit/images/@tee-SEQ/tee-os': Faked blob '.*binman-fake/missing.elf")
+ fname = tools.get_output_filename('binman-fake/missing.elf')
+ self.assertTrue(os.path.exists(fname))
def testPreLoad(self):
"""Test an image with a pre-load header"""
@@ -5694,14 +5743,159 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self.assertIsNotNone(path)
self.assertEqual(expected_fdtmap, fdtmap)
- @unittest.expectedFailure
def testReplaceSectionSimple(self):
"""Test replacing a simple section with arbitrary data"""
new_data = b'w' * len(COMPRESS_DATA + U_BOOT_DATA)
- data, expected_fdtmap, _ = self._RunReplaceCmd(
- 'section', new_data,
- dts='234_replace_section_simple.dts')
- self.assertEqual(new_data, data)
+ with self.assertRaises(ValueError) as exc:
+ self._RunReplaceCmd('section', new_data,
+ dts='234_replace_section_simple.dts')
+ self.assertIn(
+ "Node '/section': Replacing sections is not implemented yet",
+ str(exc.exception))
+
+ def testMkimageImagename(self):
+ """Test using mkimage with -n holding the data too"""
+ data = self._DoReadFile('235_mkimage_name.dts')
+
+ # Check that the data appears in the file somewhere
+ self.assertIn(U_BOOT_SPL_DATA, data)
+
+ # Get struct image_header -> ih_name
+ name = data[0x20:0x40]
+
+ # Build the filename that we expect to be placed in there, by virtue of
+ # the -n parameter
+ expect = os.path.join(tools.get_output_dir(), 'mkimage.mkimage')
+
+ # Check that the image name is set to the temporary filename used
+ self.assertEqual(expect.encode('utf-8')[:0x20], name)
+
+ def testMkimageImage(self):
+ """Test using mkimage with -n holding the data too"""
+ data = self._DoReadFile('236_mkimage_image.dts')
+
+ # Check that the data appears in the file somewhere
+ self.assertIn(U_BOOT_SPL_DATA, data)
+
+ # Get struct image_header -> ih_name
+ name = data[0x20:0x40]
+
+ # Build the filename that we expect to be placed in there, by virtue of
+ # the -n parameter
+ expect = os.path.join(tools.get_output_dir(), 'mkimage-n.mkimage')
+
+ # Check that the image name is set to the temporary filename used
+ self.assertEqual(expect.encode('utf-8')[:0x20], name)
+
+ # Check the correct data is in the imagename file
+ self.assertEqual(U_BOOT_DATA, tools.read_file(expect))
+
+ def testMkimageImageNoContent(self):
+ """Test using mkimage with -n and no data"""
+ with self.assertRaises(ValueError) as exc:
+ self._DoReadFile('237_mkimage_image_no_content.dts')
+ self.assertIn('Could not complete processing of contents',
+ str(exc.exception))
+
+ def testMkimageImageBad(self):
+ """Test using mkimage with imagename node and data-to-imagename"""
+ with self.assertRaises(ValueError) as exc:
+ self._DoReadFile('238_mkimage_image_bad.dts')
+ self.assertIn('Cannot use both imagename node and data-to-imagename',
+ str(exc.exception))
+
+ def testCollectionOther(self):
+ """Test a collection where the data comes from another section"""
+ data = self._DoReadFile('239_collection_other.dts')
+ self.assertEqual(U_BOOT_NODTB_DATA + U_BOOT_DTB_DATA +
+ tools.get_bytes(0xff, 2) + U_BOOT_NODTB_DATA +
+ tools.get_bytes(0xfe, 3) + U_BOOT_DTB_DATA,
+ data)
+
+ def testMkimageCollection(self):
+ """Test using a collection referring to an entry in a mkimage entry"""
+ data = self._DoReadFile('240_mkimage_coll.dts')
+ expect = U_BOOT_SPL_DATA + U_BOOT_DATA
+ self.assertEqual(expect, data[:len(expect)])
+
+ def testCompressDtbPrependInvalid(self):
+ """Test that invalid header is detected"""
+ with self.assertRaises(ValueError) as e:
+ self._DoReadFileDtb('235_compress_dtb_prepend_invalid.dts')
+ self.assertIn("Node '/binman/u-boot-dtb': Invalid prepend in "
+ "'u-boot-dtb': 'invalid'", str(e.exception))
+
+ def testCompressDtbPrependLength(self):
+        """Test that compression with a length header works as expected"""
+ data = self._DoReadFileRealDtb('236_compress_dtb_prepend_length.dts')
+ image = control.images['image']
+ entries = image.GetEntries()
+ self.assertIn('u-boot-dtb', entries)
+ u_boot_dtb = entries['u-boot-dtb']
+ self.assertIn('fdtmap', entries)
+ fdtmap = entries['fdtmap']
+
+ image_fname = tools.get_output_filename('image.bin')
+ orig = control.ReadEntry(image_fname, 'u-boot-dtb')
+ dtb = fdt.Fdt.FromData(orig)
+ dtb.Scan()
+ props = self._GetPropTree(dtb, ['size', 'uncomp-size'])
+ expected = {
+ 'u-boot:size': len(U_BOOT_DATA),
+ 'u-boot-dtb:uncomp-size': len(orig),
+ 'u-boot-dtb:size': u_boot_dtb.size,
+ 'fdtmap:size': fdtmap.size,
+ 'size': len(data),
+ }
+ self.assertEqual(expected, props)
+
+        # Check the layout: a 4-byte little-endian length precedes the compressed data
+ self.assertEqual(U_BOOT_DATA, data[:len(U_BOOT_DATA)])
+ rest = data[len(U_BOOT_DATA):]
+ comp_data_len = struct.unpack('<I', rest[:4])[0]
+ comp_data = rest[4:4 + comp_data_len]
+ orig2 = self._decompress(comp_data)
+ self.assertEqual(orig, orig2)
+
+ def testInvalidCompress(self):
+        """Test that an invalid compression algorithm is detected"""
+ with self.assertRaises(ValueError) as e:
+ self._DoTestFile('237_compress_dtb_invalid.dts')
+ self.assertIn("Unknown algorithm 'invalid'", str(e.exception))
+
+ def testCompUtilCompressions(self):
+ """Test compression algorithms"""
+ for bintool in self.comp_bintools.values():
+ self._CheckBintool(bintool)
+ data = bintool.compress(COMPRESS_DATA)
+ self.assertNotEqual(COMPRESS_DATA, data)
+ orig = bintool.decompress(data)
+            self.assertEqual(COMPRESS_DATA, orig)
+
+ def testCompUtilVersions(self):
+        """Test version reporting of the compression tools"""
+ for bintool in self.comp_bintools.values():
+ self._CheckBintool(bintool)
+ version = bintool.version()
+ self.assertRegex(version, '^v?[0-9]+[0-9.]*')
+
+ def testCompUtilPadding(self):
+ """Test padding of compression algorithms"""
+ # Skip zstd because it doesn't support padding
+        for bintool in [v for k, v in self.comp_bintools.items() if k != 'zstd']:
+ self._CheckBintool(bintool)
+ data = bintool.compress(COMPRESS_DATA)
+ self.assertNotEqual(COMPRESS_DATA, data)
+ data += tools.get_bytes(0, 64)
+ orig = bintool.decompress(data)
+            self.assertEqual(COMPRESS_DATA, orig)
+
+ def testCompressDtbZstd(self):
+        """Test that zstd compression of device-tree files fails"""
+ with self.assertRaises(ValueError) as e:
+ self._DoTestFile('238_compress_dtb_zstd.dts')
+ self.assertIn("Node '/binman/u-boot-dtb': The zstd compression "
+ "requires a length header", str(e.exception))
if __name__ == "__main__":
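
As a rough illustration of the prepend = "length" format exercised above (a hypothetical helper, not part of this patch): the compressed payload is prefixed with its size as an unsigned 32-bit little-endian value, so a reader can split the blob the same way testCompressDtbPrependLength does:

    import struct

    def split_length_prefixed(blob):
        """Split a prepend="length" blob into (compressed payload, trailing bytes)."""
        # The first four bytes hold the payload size, little-endian, exactly
        # as the test unpacks it with struct format '<I'.
        comp_len = struct.unpack('<I', blob[:4])[0]
        return blob[4:4 + comp_len], blob[4 + comp_len:]
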
diff --git a/tools/binman/test/235_compress_dtb_prepend_invalid.dts b/tools/binman/test/235_compress_dtb_prepend_invalid.dts
new file mode 100644
index 0000000000..ee32670a91
--- /dev/null
+++ b/tools/binman/test/235_compress_dtb_prepend_invalid.dts
@@ -0,0 +1,17 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ u-boot {
+ };
+ u-boot-dtb {
+ compress = "lz4";
+ prepend = "invalid";
+ };
+ };
+};
diff --git a/tools/binman/test/235_mkimage_name.dts b/tools/binman/test/235_mkimage_name.dts
new file mode 100644
index 0000000000..fbc82f1f8d
--- /dev/null
+++ b/tools/binman/test/235_mkimage_name.dts
@@ -0,0 +1,18 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ mkimage {
+ args = "-T script";
+ data-to-imagename;
+
+ u-boot-spl {
+ };
+ };
+ };
+};
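
For reference, the mkimage tests above read the image name straight from the legacy U-Boot image header, where ih_name is a 32-byte, NUL-padded field at offset 0x20. A minimal sketch of that lookup (hypothetical helper, not part of this patch):

    def read_legacy_image_name(data):
        # Bytes 0x20..0x40 of the 64-byte legacy header hold ih_name,
        # padded with NULs up to its 32-byte length.
        return data[0x20:0x40].rstrip(b'\0').decode('utf-8')
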
diff --git a/tools/binman/test/236_compress_dtb_prepend_length.dts b/tools/binman/test/236_compress_dtb_prepend_length.dts
new file mode 100644
index 0000000000..1570233637
--- /dev/null
+++ b/tools/binman/test/236_compress_dtb_prepend_length.dts
@@ -0,0 +1,19 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ u-boot {
+ };
+ u-boot-dtb {
+ compress = "lz4";
+ prepend = "length";
+ };
+ fdtmap {
+ };
+ };
+};
diff --git a/tools/binman/test/236_mkimage_image.dts b/tools/binman/test/236_mkimage_image.dts
new file mode 100644
index 0000000000..6b8f4a4a40
--- /dev/null
+++ b/tools/binman/test/236_mkimage_image.dts
@@ -0,0 +1,21 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ mkimage {
+ args = "-T script";
+
+ imagename {
+ type = "u-boot";
+ };
+
+ u-boot-spl {
+ };
+ };
+ };
+};
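
In this layout the imagename subnode supplies the data that ends up behind mkimage's -n option (written to a temporary file such as 'mkimage-n.mkimage' in the output directory), while the remaining subnodes provide the -d payload. A rough sketch of the resulting invocation, with hypothetical helper and file names rather than the mkimage etype's real internals:

    import subprocess

    def run_mkimage(outfile, data_fname, name_arg, extra_args=('-T', 'script')):
        # mkimage -T <type> -n <name> -d <datafile> <outfile>; in these tests
        # the path of the imagename data file is what gets passed as -n.
        cmd = ['mkimage', *extra_args, '-n', name_arg, '-d', data_fname, outfile]
        subprocess.run(cmd, check=True)
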
diff --git a/tools/binman/test/237_compress_dtb_invalid.dts b/tools/binman/test/237_compress_dtb_invalid.dts
new file mode 100644
index 0000000000..228139060b
--- /dev/null
+++ b/tools/binman/test/237_compress_dtb_invalid.dts
@@ -0,0 +1,16 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ u-boot {
+ };
+ u-boot-dtb {
+ compress = "invalid";
+ };
+ };
+};
diff --git a/tools/binman/test/237_mkimage_image_no_content.dts b/tools/binman/test/237_mkimage_image_no_content.dts
new file mode 100644
index 0000000000..7306c06af4
--- /dev/null
+++ b/tools/binman/test/237_mkimage_image_no_content.dts
@@ -0,0 +1,22 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ mkimage {
+ args = "-T script";
+
+ imagename {
+ type = "_testing";
+ return-unknown-contents;
+ };
+
+ u-boot-spl {
+ };
+ };
+ };
+};
diff --git a/tools/binman/test/238_compress_dtb_zstd.dts b/tools/binman/test/238_compress_dtb_zstd.dts
new file mode 100644
index 0000000000..90cf85d1e2
--- /dev/null
+++ b/tools/binman/test/238_compress_dtb_zstd.dts
@@ -0,0 +1,16 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ u-boot {
+ };
+ u-boot-dtb {
+ compress = "zstd";
+ };
+ };
+};
diff --git a/tools/binman/test/238_mkimage_image_bad.dts b/tools/binman/test/238_mkimage_image_bad.dts
new file mode 100644
index 0000000000..54d2c99d62
--- /dev/null
+++ b/tools/binman/test/238_mkimage_image_bad.dts
@@ -0,0 +1,22 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ mkimage {
+ args = "-T script";
+ data-to-imagename;
+
+ imagename {
+ type = "u-boot";
+ };
+
+ u-boot-spl {
+ };
+ };
+ };
+};
diff --git a/tools/binman/test/239_collection_other.dts b/tools/binman/test/239_collection_other.dts
new file mode 100644
index 0000000000..09de20e5bc
--- /dev/null
+++ b/tools/binman/test/239_collection_other.dts
@@ -0,0 +1,29 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ collection {
+ content = <&u_boot_nodtb &dtb>;
+ };
+ section {
+ fill {
+ size = <2>;
+ fill-byte = [ff];
+ };
+ u_boot_nodtb: u-boot-nodtb {
+ };
+ fill2 {
+ type = "fill";
+ size = <3>;
+ fill-byte = [fe];
+ };
+ };
+ dtb: u-boot-dtb {
+ };
+ };
+};
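
Conceptually, the collection entry just concatenates the data of whatever its 'content' phandles point at, even when those entries live under a different section, as this fixture shows. A toy sketch of that behaviour (hypothetical names, not binman's implementation):

    def collect(data_by_label, content_labels):
        # For 239_collection_other.dts this joins the u-boot-nodtb and
        # u-boot-dtb data, matching the prefix checked in testCollectionOther.
        return b''.join(data_by_label[label] for label in content_labels)
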
diff --git a/tools/binman/test/240_mkimage_coll.dts b/tools/binman/test/240_mkimage_coll.dts
new file mode 100644
index 0000000000..3086011886
--- /dev/null
+++ b/tools/binman/test/240_mkimage_coll.dts
@@ -0,0 +1,27 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+ #address-cells = <1>;
+ #size-cells = <1>;
+
+ binman {
+ collection {
+ content = <&spl &u_boot>;
+ };
+ mkimage {
+ args = "-T script";
+
+ spl: u-boot-spl {
+ };
+
+ imagename {
+ type = "section";
+
+ u_boot: u-boot {
+ };
+ };
+ };
+ };
+};
diff --git a/tools/patman/test_util.py b/tools/patman/test_util.py
index c27e0b39e5..0f6d1aa902 100644
--- a/tools/patman/test_util.py
+++ b/tools/patman/test_util.py
@@ -81,8 +81,7 @@ def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None
print(coverage)
if coverage != '100%':
print(stdout)
- print("Type 'python3-coverage html' to get a report in "
- 'htmlcov/index.html')
+ print("To get a report in 'htmlcov/index.html', type: python3-coverage html")
print('Coverage error: %s, but should be 100%%' % coverage)
ok = False
if not ok:
@@ -209,14 +208,14 @@ def run_test_suites(toolname, debug, verbosity, test_preserve_dirs, processes,
runner = unittest.TextTestRunner(
stream=sys.stdout,
verbosity=(1 if verbosity is None else verbosity),
- buffer=buffer_outputs,
+ buffer=False if test_name else buffer_outputs,
resultclass=FullTextTestResult,
)
if use_concurrent and processes != 1:
suite = ConcurrentTestSuite(suite,
fork_for_tests(processes or multiprocessing.cpu_count(),
- buffer=buffer_outputs))
+ buffer=False if test_name else buffer_outputs))
for module in class_and_module_list:
if isinstance(module, str) and (not test_name or test_name == module):