binman FIT improvements

various minor sandbox improvements
 -----BEGIN PGP SIGNATURE-----
 
 iQFFBAABCgAvFiEEslwAIq+Gp8wWVbYnfxc6PpAIreYFAmI1URkRHHNqZ0BjaHJv
 bWl1bS5vcmcACgkQfxc6PpAIreYglAf/eM5MtohjWDBQUyvPiQTIV0iS5Zu7NV7Z
 1qkrv4OuwZ+U3djtusP7VIUnmdnQlwbF1fFZzo+JUmtYg59K6ZvgI/S+oJzr17/D
 bMh+8FpDggEjJhzwSM5fdImfE1PLp161zUxTWMH25VmB+yxQMGWqZBQls+0GADN5
 jT4dGwTn098a2HLvdAqkNwafY0Q1cUQeYg8z2J3gdDdZos2PdQV3TqjbrAS6Pr+Z
 jAZV+TfTkU2rk6S7B34Ojzx2dF8e80sRK/o/2/+60kt+dUO+nfS/evOb1kqfj8dx
 ULNXEBjS1aMy7Z6TCg/E3uEZLM5xyaTZKTwiNeKMthmOK8ZQ3boRuA==
 =9FkF
 -----END PGP SIGNATURE-----

Merge tag 'dm-pull-18mar22' of https://source.denx.de/u-boot/custodians/u-boot-dm into next

binman FIT improvements
various minor sandbox improvements
Tom Rini 2022-03-19 09:09:58 -04:00
commit a958c58571
42 changed files with 1048 additions and 257 deletions


@ -6,6 +6,7 @@
#include <common.h>
#include <dm.h>
#include <hang.h>
#include <handoff.h>
#include <init.h>
#include <log.h>
#include <os.h>


@ -421,7 +421,8 @@ int state_uninit(void)
{
int err;
log_info("Writing sandbox state\n");
if (state->write_ram_buf || state->write_state)
log_info("Writing sandbox state\n");
state = &main_state;
/* Finish the bloblist, so that it is correct before writing memory */


@ -1334,16 +1334,16 @@ config TPL_SIZE_LIMIT
If this value is zero, it is ignored.
config TPL_BINMAN_SYMBOLS
bool "Declare binman symbols in SPL"
bool "Declare binman symbols in TPL"
depends on SPL_FRAMEWORK && BINMAN
default y
help
This enables use of symbols in TPL which refer to U-Boot, enabling SPL
This enables use of symbols in TPL which refer to U-Boot, enabling TPL
to obtain the location of U-Boot simply by calling spl_get_image_pos()
and spl_get_image_size().
For this to work, you must have a U-Boot image in the binman image, so
binman can update SPL with the location of it.
binman can update TPL with the location of it.
config TPL_FRAMEWORK
bool "Support TPL based upon the common SPL framework"


@ -125,9 +125,14 @@ int host_dev_bind(int devnum, char *filename, bool removable)
fd = os_open(filename, OS_O_RDWR);
if (fd == -1) {
printf("Failed to access host backing file '%s'\n", filename);
ret = -ENOENT;
goto err;
printf("Failed to access host backing file '%s', trying read-only\n",
filename);
fd = os_open(filename, OS_O_RDONLY);
if (fd == -1) {
printf("- still failed\n");
ret = -ENOENT;
goto err;
}
}
ret = blk_create_device(gd->dm_root, "sandbox_host_blk", str,
IF_TYPE_HOST, devnum, 512,


@ -204,7 +204,7 @@ int video_sync(struct udevice *vid, bool force)
struct video_priv *priv = dev_get_uclass_priv(vid);
static ulong last_sync;
if (force || get_timer(last_sync) > 10) {
if (force || get_timer(last_sync) > 100) {
sandbox_sdl_sync(priv->fb);
last_sync = get_timer(0);
}


@ -56,7 +56,6 @@ struct image_header *spl_get_load_buffer(ssize_t offset, size_t size)
static int spl_test_load(struct unit_test_state *uts)
{
const char *cur_prefix, *next_prefix;
struct spl_image_info image;
struct image_header *header;
struct text_ctx text_ctx;
@ -69,10 +68,7 @@ static int spl_test_load(struct unit_test_state *uts)
load.bl_len = 512;
load.read = read_fit_image;
cur_prefix = spl_phase_prefix(spl_phase());
next_prefix = spl_phase_prefix(spl_next_phase());
ret = os_find_u_boot(fname, sizeof(fname), true, cur_prefix,
next_prefix);
ret = sandbox_find_next_phase(fname, sizeof(fname), true);
if (ret) {
printf("(%s not found, error %d)\n", fname, ret);
return ret;


@ -480,8 +480,8 @@ image-pos:
for each entry. This makes it easy to find out exactly where the entry
ended up in the image, regardless of parent sections, etc.
expand-size:
Expand the size of this entry to fit available space. This space is only
extend-size:
Extend the size of this entry to fit available space. This space is only
limited by the size of the image/section and the position of the next
entry.
@ -1375,18 +1375,20 @@ Some entry types deal with data obtained from others. For example,
};
This shows mkimage being passed a file consisting of SPL and U-Boot proper. It
is create by calling `Entry.collect_contents_to_file()`. Note that in this case,
the data is passed to mkimage for processing but does not appear separately in
the image. It may not appear at all, depending on what mkimage does. The
contents of the `mkimage` entry are entirely dependent on the processing done
by the entry, with the provided subnodes (`u-boot-spl` and `u-boot`) simply
providing the input data for that processing.
is created by calling `Entry.collect_contents_to_file()`. Note that in this
case, the data is passed to mkimage for processing but does not appear
separately in the image. It may not appear at all, depending on what mkimage
does. The contents of the `mkimage` entry are entirely dependent on the
processing done by the entry, with the provided subnodes (`u-boot-spl` and
`u-boot`) simply providing the input data for that processing.
Note that `Entry.collect_contents_to_file()` simply concatenates the data from
the different entries together, with no control over alignment, etc. Another
approach is to subclass `Entry_section` so that those features become available,
such as `size` and `pad-byte`. Then the contents of the entry can be obtained by
calling `BuildSectionData()`.
calling `super().BuildSectionData()` in the entry's BuildSectionData()
implementation to get the input data, then write it to a file and process it
however is desired.
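
A minimal sketch of that approach (`Entry_my_tool` is a hypothetical entry
type; the helpers it uses are the same ones seen elsewhere in this series)::

    from binman.etype.section import Entry_section
    from patman import tools

    class Entry_my_tool(Entry_section):
        """Hypothetical entry which post-processes its subnode data"""

        def BuildSectionData(self, required):
            # Let Entry_section concatenate the subnodes, honouring
            # properties such as size, alignment and pad-byte
            data = super().BuildSectionData(required)

            # Write the input to a file in the output directory, then
            # process it however is desired before returning the result
            uniq = self.GetUniqueName()
            input_fname = tools.get_output_filename(f'my-tool.{uniq}')
            tools.write_file(input_fname, data)
            return data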
There are other ways to obtain data also, depending on the situation. If the
entry type is simply signing data which exists elsewhere in the image, then
@ -1396,6 +1398,7 @@ is used by `Entry_vblock`, for example::
u_boot: u-boot {
};
vblock {
content = <&u_boot &dtb>;
keyblock = "firmware.keyblock";
@ -1440,9 +1443,11 @@ The `soc-fw` node is a `blob-ext` (i.e. it reads in a named binary file) whereas
a known blob type.
When adding new entry types you are encouraged to use subnodes to provide the
data for processing, unless the `content` approach is more suitable. Ad-hoc
properties and other methods of obtaining data are discouraged, since it adds to
confusion for users.
data for processing, unless the `content` approach is more suitable. Consider
whether the input entries are contained within (or consumed by) the entry, vs
just being 'referenced' by the entry. In the latter case, the `content` approach
makes more sense. Ad-hoc properties and other methods of obtaining data are
discouraged, since they add to confusion for users.
History / Credits
-----------------
@ -1495,7 +1500,8 @@ Some ideas:
- Figure out how to make Fdt support changing the node order, so that
Node.AddSubnode() can support adding a node before another, existing node.
Perhaps it should completely regenerate the flat tree?
- Put faked files into a separate subdir and remove them on start-up, to avoid
seeing them as 'real' files on a subsequent run
--
Simon Glass <sjg@chromium.org>


@ -507,7 +507,7 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
# entry offsets remain the same.
for image in images.values():
image.CollectBintools()
image.ExpandEntries()
image.gen_entries()
if update_fdt:
image.AddMissingProperties(True)
image.ProcessFdt(dtb)


@ -112,7 +112,7 @@ def GetFileOffset(fname, addr):
int: Offset of that address in the ELF file, or None if not valid
"""
if not ELF_TOOLS:
raise ValueError('Python elftools package is not available')
raise ValueError("Python: No module named 'elftools'")
with open(fname, 'rb') as fd:
elf = ELFFile(fd)
return _GetFileOffset(elf, addr)
@ -128,7 +128,7 @@ def GetSymbolFromAddress(fname, addr):
str: Symbol name, or None if no symbol at that address
"""
if not ELF_TOOLS:
raise ValueError('Python elftools package is not available')
raise ValueError("Python: No module named 'elftools'")
with open(fname, 'rb') as fd:
elf = ELFFile(fd)
syms = GetSymbols(fname, None)
@ -149,7 +149,7 @@ def GetSymbolFileOffset(fname, patterns):
value: Hex value of symbol
"""
if not ELF_TOOLS:
raise ValueError('Python elftools package is not available')
raise ValueError("Python: No module named 'elftools'")
syms = {}
with open(fname, 'rb') as fd:
@ -415,7 +415,7 @@ def UpdateFile(infile, outfile, start_sym, end_sym, insert):
tools.write_file(outfile, newdata)
tout.info('Written to offset %#x' % syms[start_sym].offset)
def read_segments(data):
def read_loadable_segments(data):
"""Read segments from an ELF file
Args:
@ -433,7 +433,7 @@ def read_segments(data):
ValueError: elftools is not available
"""
if not ELF_TOOLS:
raise ValueError('Python elftools package is not available')
raise ValueError("Python: No module named 'elftools'")
with io.BytesIO(data) as inf:
try:
elf = ELFFile(inf)
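
As a usage sketch (mirroring the new tests in this series), the renamed
function returns a list of (seq, start, data) tuples, one per loadable
segment, plus the entry address::

    from binman import elf
    from patman import tools

    data = tools.read_file('bl31.elf')  # any ELF with loadable segments
    segments, entry = elf.read_loadable_segments(data)
    for seq, start, seg_data in segments:
        print(f'segment {seq}: load {start:#x}, {len(seg_data)} bytes')
    print(f'entry point: {entry:#x}')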


@ -243,7 +243,7 @@ class TestElf(unittest.TestCase):
fname = self.ElfTestFile('embed_data')
with self.assertRaises(ValueError) as e:
elf.GetSymbolFileOffset(fname, ['embed_start', 'embed_end'])
self.assertIn('Python elftools package is not available',
self.assertIn("Python: No module named 'elftools'",
str(e.exception))
finally:
elf.ELF_TOOLS = old_val
@ -257,33 +257,81 @@ class TestElf(unittest.TestCase):
offset = elf.GetSymbolFileOffset(fname, ['missing_sym'])
self.assertEqual({}, offset)
def test_read_segments(self):
"""Test for read_segments()"""
def test_read_loadable_segments(self):
"""Test for read_loadable_segments()"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
fname = self.ElfTestFile('embed_data')
segments, entry = elf.read_segments(tools.read_file(fname))
segments, entry = elf.read_loadable_segments(tools.read_file(fname))
def test_read_segments_fail(self):
"""Test for read_segments() without elftools"""
"""Test for read_loadable_segments() without elftools"""
try:
old_val = elf.ELF_TOOLS
elf.ELF_TOOLS = False
fname = self.ElfTestFile('embed_data')
with self.assertRaises(ValueError) as e:
elf.read_segments(tools.read_file(fname))
self.assertIn('Python elftools package is not available',
elf.read_loadable_segments(tools.read_file(fname))
self.assertIn("Python: No module named 'elftools'",
str(e.exception))
finally:
elf.ELF_TOOLS = old_val
def test_read_segments_bad_data(self):
"""Test for read_segments() with an invalid ELF file"""
"""Test for read_loadable_segments() with an invalid ELF file"""
fname = self.ElfTestFile('embed_data')
with self.assertRaises(ValueError) as e:
elf.read_segments(tools.get_bytes(100, 100))
elf.read_loadable_segments(tools.get_bytes(100, 100))
self.assertIn('Magic number does not match', str(e.exception))
def test_get_file_offset(self):
"""Test GetFileOffset() gives the correct file offset for a symbol"""
fname = self.ElfTestFile('embed_data')
syms = elf.GetSymbols(fname, ['embed'])
addr = syms['embed'].address
offset = elf.GetFileOffset(fname, addr)
data = tools.read_file(fname)
# Just use the first 4 bytes and assume it is little endian
embed_data = data[offset:offset + 4]
embed_value = struct.unpack('<I', embed_data)[0]
self.assertEqual(0x1234, embed_value)
def test_get_file_offset_fail(self):
"""Test calling GetFileOffset() without elftools"""
try:
old_val = elf.ELF_TOOLS
elf.ELF_TOOLS = False
fname = self.ElfTestFile('embed_data')
with self.assertRaises(ValueError) as e:
elf.GetFileOffset(fname, 0)
self.assertIn("Python: No module named 'elftools'",
str(e.exception))
finally:
elf.ELF_TOOLS = old_val
def test_get_symbol_from_address(self):
"""Test GetSymbolFromAddress()"""
fname = self.ElfTestFile('elf_sections')
sym_name = 'calculate'
syms = elf.GetSymbols(fname, [sym_name])
addr = syms[sym_name].address
sym = elf.GetSymbolFromAddress(fname, addr)
self.assertEqual(sym_name, sym)
def test_get_symbol_from_address_fail(self):
"""Test calling GetSymbolFromAddress() without elftools"""
try:
old_val = elf.ELF_TOOLS
elf.ELF_TOOLS = False
fname = self.ElfTestFile('embed_data')
with self.assertRaises(ValueError) as e:
elf.GetSymbolFromAddress(fname, 0x1000)
self.assertIn("Python: No module named 'elftools'",
str(e.exception))
finally:
elf.ELF_TOOLS = old_val
if __name__ == '__main__':
unittest.main()


@ -612,6 +612,9 @@ gen-fdt-nodes
Generate FDT nodes as above. This is the default if there is no
`fit,operation` property.
split-elf
Split an ELF file into a separate node for each segment.
Generating nodes from an FDT list (gen-fdt-nodes)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -655,6 +658,149 @@ for each of your two files.
Note that if no devicetree files are provided (with '-a of-list' as above)
then no nodes will be generated.
Generating nodes from an ELF file (split-elf)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This uses the node as a template to generate multiple nodes. The following
special properties are available:
split-elf
Split an ELF file into a separate node for each segment. This uses the
node as a template to generate multiple nodes. The following special
properties are available:
fit,load
Generates a `load = <...>` property with the load address of the
segment
fit,entry
Generates a `entry = <...>` property with the entry address of the
ELF. This is only produced for the first entry
fit,data
Generates a `data = <...>` property with the contents of the segment
fit,loadables
Generates a `loadable = <...>` property with a list of the generated
nodes (including all nodes if this operation is used multiple times)
Here is an example showing ATF, TEE and a device tree all combined::
fit {
description = "test-desc";
#address-cells = <1>;
fit,fdt-list = "of-list";
images {
u-boot {
description = "U-Boot (64-bit)";
type = "standalone";
os = "U-Boot";
arch = "arm64";
compression = "none";
load = <CONFIG_SYS_TEXT_BASE>;
u-boot-nodtb {
};
};
@fdt-SEQ {
description = "fdt-NAME.dtb";
type = "flat_dt";
compression = "none";
};
@atf-SEQ {
fit,operation = "split-elf";
description = "ARM Trusted Firmware";
type = "firmware";
arch = "arm64";
os = "arm-trusted-firmware";
compression = "none";
fit,load;
fit,entry;
fit,data;
atf-bl31 {
};
};
@tee-SEQ {
fit,operation = "split-elf";
description = "TEE";
type = "tee";
arch = "arm64";
os = "tee";
compression = "none";
fit,load;
fit,entry;
fit,data;
tee-os {
};
};
};
configurations {
default = "@config-DEFAULT-SEQ";
@config-SEQ {
description = "conf-NAME.dtb";
fdt = "fdt-SEQ";
firmware = "u-boot";
fit,loadables;
};
};
};
If ATF-BL31 is available, this generates a node for each segment in the
ELF file, for example::
images {
atf-1 {
data = <...contents of first segment...>;
data-offset = <0x00000000>;
entry = <0x00040000>;
load = <0x00040000>;
compression = "none";
os = "arm-trusted-firmware";
arch = "arm64";
type = "firmware";
description = "ARM Trusted Firmware";
};
atf-2 {
data = <...contents of second segment...>;
load = <0xff3b0000>;
compression = "none";
os = "arm-trusted-firmware";
arch = "arm64";
type = "firmware";
description = "ARM Trusted Firmware";
};
};
The same applies for OP-TEE if that is available.
If each binary is not available, the relevant template node (@atf-SEQ or
@tee-SEQ) is removed from the output.
This also generates a `config-xxx` node for each device tree in `of-list`.
Note that the U-Boot build system uses `-a of-list=$(CONFIG_OF_LIST)`
so you can use `CONFIG_OF_LIST` to define that list. In this example it is
set up for `firefly-rk3399` with a single device tree and the default set
with `-a default-dt=$(CONFIG_DEFAULT_DEVICE_TREE)`, so the resulting output
is::
configurations {
default = "config-1";
config-1 {
loadables = "atf-1", "atf-2", "atf-3", "tee-1", "tee-2";
description = "rk3399-firefly.dtb";
fdt = "fdt-1";
firmware = "u-boot";
};
};
U-Boot SPL can then load the firmware (U-Boot proper) and all the loadables
(ATF and TEE), then proceed with the boot.
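
As a sketch of how this can be exercised outside the U-Boot build system
(the exact flags depend on your setup), the entry arguments used by the
tests in this series can be passed to binman with `-a`::

    binman build -d u-boot.dtb \
        -a of-list="test-fdt1 test-fdt2" -a default-dt=test-fdt2 \
        -a atf-bl31-path=bl31.elf -a tee-os-path=tee.elf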
Entry: fmap: An entry which contains an Fmap section


@ -106,7 +106,7 @@ class Entry(object):
self.pad_after = 0
self.offset_unset = False
self.image_pos = None
self.expand_size = False
self.extend_size = False
self.compress = 'none'
self.missing = False
self.faked = False
@ -235,6 +235,8 @@ class Entry(object):
"""
if 'pos' in self._node.props:
self.Raise("Please use 'offset' instead of 'pos'")
if 'expand-size' in self._node.props:
self.Raise("Please use 'extend-size' instead of 'expand-size'")
self.offset = fdt_util.GetInt(self._node, 'offset')
self.size = fdt_util.GetInt(self._node, 'size')
self.orig_offset = fdt_util.GetInt(self._node, 'orig-offset')
@ -262,7 +264,7 @@ class Entry(object):
self.align_size)
self.align_end = fdt_util.GetInt(self._node, 'align-end')
self.offset_unset = fdt_util.GetBool(self._node, 'offset-unset')
self.expand_size = fdt_util.GetBool(self._node, 'expand-size')
self.extend_size = fdt_util.GetBool(self._node, 'extend-size')
self.missing_msg = fdt_util.GetString(self._node, 'missing-msg')
# This is only supported by blobs and sections at present
@ -284,8 +286,8 @@ class Entry(object):
"""
return {}
def ExpandEntries(self):
"""Expand out entries which produce other entries
def gen_entries(self):
"""Allow entries to generate other entries
Some entries generate subnodes automatically, from which sub-entries
are then created. This method allows those to be added to the binman
@ -415,9 +417,13 @@ class Entry(object):
self.SetContents(data)
return size_ok
def ObtainContents(self):
def ObtainContents(self, skip_entry=None, fake_size=0):
"""Figure out the contents of an entry.
Args:
skip_entry (Entry): Entry to skip when obtaining section contents
fake_size (int): Size of fake file to create if needed
Returns:
True if the contents were found, False if another call is needed
after the other entries are processed.
@ -774,8 +780,8 @@ features to produce new behaviours.
name = '%s.%s' % (node.name, name)
return name
def ExpandToLimit(self, limit):
"""Expand an entry so that it ends at the given offset limit"""
def extend_to_limit(self, limit):
"""Extend an entry so that it ends at the given offset limit"""
if self.offset + self.size < limit:
self.size = limit - self.offset
# Request the contents again, since changing the size requires that
@ -988,24 +994,28 @@ features to produce new behaviours.
if self.missing:
missing_list.append(self)
def check_fake_fname(self, fname):
def check_fake_fname(self, fname, size=0):
"""If the file is missing and the entry allows fake blobs, fake it
Sets self.faked to True if faked
Args:
fname (str): Filename to check
size (int): Size of fake file to create
Returns:
fname (str): Filename of faked file
tuple:
fname (str): Filename of faked file
bool: True if the blob was faked, False if not
"""
if self.allow_fake and not pathlib.Path(fname).is_file():
outfname = tools.get_output_filename(os.path.basename(fname))
with open(outfname, "wb") as out:
out.truncate(1024)
out.truncate(size)
self.faked = True
return outfname
return fname
tout.info(f"Entry '{self._node.path}': Faked file '{outfname}'")
return outfname, True
return fname, False
def CheckFakedBlobs(self, faked_blobs_list):
"""Check if any entries in this section have faked external blobs
@ -1099,11 +1109,11 @@ features to produce new behaviours.
"""
pass
def AddBintools(self, tools):
def AddBintools(self, btools):
"""Add the bintools used by this entry type
Args:
tools (dict of Bintool):
btools (dict of Bintool):
"""
pass
@ -1126,28 +1136,29 @@ features to produce new behaviours.
"""
self.update_hash = update_hash
def collect_contents_to_file(self, entries, prefix):
def collect_contents_to_file(self, entries, prefix, fake_size=0):
"""Put the contents of a list of entries into a file
Args:
entries (list of Entry): Entries to collect
prefix (str): Filename prefix of file to write to
fake_size (int): Size of fake file to create if needed
If any entry does not have contents yet, this function returns False
for the data.
Returns:
Tuple:
bytes: Concatenated data from all the entries (or False)
str: Filename of file written (or False if no data)
str: Unique portion of filename (or False if no data)
bytes: Concatenated data from all the entries (or None)
str: Filename of file written (or None if no data)
str: Unique portion of filename (or None if no data)
"""
data = b''
for entry in entries:
# First get the input data and put it in a file. If not available,
# try later.
if not entry.ObtainContents():
return False, False, False
if not entry.ObtainContents(fake_size=fake_size):
return None, None, None
data += entry.GetData()
uniq = self.GetUniqueName()
fname = tools.get_output_filename(f'{prefix}.{uniq}')
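
Callers now check for None rather than False; a usage sketch matching the
mkimage entry later in this series::

    data, input_fname, uniq = self.collect_contents_to_file(
        self._mkimage_entries.values(), 'mkimage', 1024)
    if data is None:
        return False  # contents not available yet; try again later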


@ -82,7 +82,7 @@ class Entry__testing(Entry):
self.return_contents = True
self.contents = b'aa'
def ObtainContents(self):
def ObtainContents(self, fake_size=0):
if self.return_unknown_contents or not self.return_contents:
return False
if self.return_contents_later:


@ -35,16 +35,18 @@ class Entry_blob(Entry):
super().__init__(section, etype, node)
self._filename = fdt_util.GetString(self._node, 'filename', self.etype)
def ObtainContents(self):
def ObtainContents(self, fake_size=0):
self._filename = self.GetDefaultFilename()
self._pathname = tools.get_input_filename(self._filename,
self.external and self.section.GetAllowMissing())
# Allow the file to be missing
if not self._pathname:
self._pathname = self.check_fake_fname(self._filename)
self.SetContents(b'')
self._pathname, faked = self.check_fake_fname(self._filename,
fake_size)
self.missing = True
return True
if not faked:
self.SetContents(b'')
return True
self.ReadBlobContents()
return True


@ -37,7 +37,7 @@ class Entry_blob_ext_list(Entry_blob):
missing = False
pathnames = []
for fname in self._filenames:
fname = self.check_fake_fname(fname)
fname, _ = self.check_fake_fname(fname)
pathname = tools.get_input_filename(
fname, self.external and self.section.GetAllowMissing())
# Allow the file to be missing


@ -42,7 +42,7 @@ class Entry_blob_phase(Entry_section):
self.dtb_file = dtb_file
self.bss_pad = bss_pad
def ExpandEntries(self):
def gen_entries(self):
"""Create the subnodes"""
names = [self.root_fname + '-nodtb', self.root_fname + '-dtb']
if self.bss_pad:


@ -48,7 +48,7 @@ class Entry_files(Entry_section):
self._require_matches = fdt_util.GetBool(self._node,
'require-matches')
def ExpandEntries(self):
def gen_entries(self):
files = tools.get_input_filename_glob(self._pattern)
if self._require_matches and not files:
self.Raise("Pattern '%s' matched no files" % self._pattern)


@ -2,22 +2,23 @@
# Copyright (c) 2016 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
#
# Entry-type module for producing a FIT
#
from collections import defaultdict, OrderedDict
"""Entry-type module for producing a FIT"""
import libfdt
from binman.entry import Entry, EntryArg
from binman.etype.section import Entry_section
from binman import elf
from dtoc import fdt_util
from dtoc.fdt import Fdt
from patman import tools
# Supported operations, with the fit,operation property
OP_GEN_FDT_NODES = range(1)
OP_GEN_FDT_NODES, OP_SPLIT_ELF = range(2)
OPERATIONS = {
'gen-fdt-nodes': OP_GEN_FDT_NODES,
'split-elf': OP_SPLIT_ELF,
}
class Entry_fit(Entry_section):
@ -113,6 +114,9 @@ class Entry_fit(Entry_section):
Generate FDT nodes as above. This is the default if there is no
`fit,operation` property.
split-elf
Split an ELF file into a separate node for each segment.
Generating nodes from an FDT list (gen-fdt-nodes)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -155,6 +159,149 @@ class Entry_fit(Entry_section):
Note that if no devicetree files are provided (with '-a of-list' as above)
then no nodes will be generated.
Generating nodes from an ELF file (split-elf)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This uses the node as a template to generate multiple nodes. The following
special properties are available:
split-elf
Split an ELF file into a separate node for each segment. This uses the
node as a template to generate multiple nodes. The following special
properties are available:
fit,load
Generates a `load = <...>` property with the load address of the
segment
fit,entry
Generates a `entry = <...>` property with the entry address of the
ELF. This is only produced for the first entry
fit,data
Generates a `data = <...>` property with the contents of the segment
fit,loadables
Generates a `loadable = <...>` property with a list of the generated
nodes (including all nodes if this operation is used multiple times)
Here is an example showing ATF, TEE and a device tree all combined::
fit {
description = "test-desc";
#address-cells = <1>;
fit,fdt-list = "of-list";
images {
u-boot {
description = "U-Boot (64-bit)";
type = "standalone";
os = "U-Boot";
arch = "arm64";
compression = "none";
load = <CONFIG_SYS_TEXT_BASE>;
u-boot-nodtb {
};
};
@fdt-SEQ {
description = "fdt-NAME.dtb";
type = "flat_dt";
compression = "none";
};
@atf-SEQ {
fit,operation = "split-elf";
description = "ARM Trusted Firmware";
type = "firmware";
arch = "arm64";
os = "arm-trusted-firmware";
compression = "none";
fit,load;
fit,entry;
fit,data;
atf-bl31 {
};
};
@tee-SEQ {
fit,operation = "split-elf";
description = "TEE";
type = "tee";
arch = "arm64";
os = "tee";
compression = "none";
fit,load;
fit,entry;
fit,data;
tee-os {
};
};
};
configurations {
default = "@config-DEFAULT-SEQ";
@config-SEQ {
description = "conf-NAME.dtb";
fdt = "fdt-SEQ";
firmware = "u-boot";
fit,loadables;
};
};
};
If ATF-BL31 is available, this generates a node for each segment in the
ELF file, for example::
images {
atf-1 {
data = <...contents of first segment...>;
data-offset = <0x00000000>;
entry = <0x00040000>;
load = <0x00040000>;
compression = "none";
os = "arm-trusted-firmware";
arch = "arm64";
type = "firmware";
description = "ARM Trusted Firmware";
};
atf-2 {
data = <...contents of second segment...>;
load = <0xff3b0000>;
compression = "none";
os = "arm-trusted-firmware";
arch = "arm64";
type = "firmware";
description = "ARM Trusted Firmware";
};
};
The same applies for OP-TEE if that is available.
If each binary is not available, the relevant template node (@atf-SEQ or
@tee-SEQ) is removed from the output.
This also generates a `config-xxx` node for each device tree in `of-list`.
Note that the U-Boot build system uses `-a of-list=$(CONFIG_OF_LIST)`
so you can use `CONFIG_OF_LIST` to define that list. In this example it is
set up for `firefly-rk3399` with a single device tree and the default set
with `-a default-dt=$(CONFIG_DEFAULT_DEVICE_TREE)`, so the resulting output
is::
configurations {
default = "config-1";
config-1 {
loadables = "atf-1", "atf-2", "atf-3", "tee-1", "tee-2";
description = "rk3399-firefly.dtb";
fdt = "fdt-1";
firmware = "u-boot";
};
};
U-Boot SPL can then load the firmware (U-Boot proper) and all the loadables
(ATF and TEE), then proceed with the boot.
"""
def __init__(self, section, etype, node):
"""
@ -164,16 +311,25 @@ class Entry_fit(Entry_section):
key: relative path to entry Node (from the base of the FIT)
value: Entry_section object comprising the contents of this
node
_priv_entries: Internal copy of _entries which includes 'generator'
entries which are used to create the FIT, but should not be
processed as real entries. This is set up once we have the
entries
_loadables: List of generated split-elf nodes, each a node name
"""
super().__init__(section, etype, node)
self._fit = None
self._fit_props = {}
self._fdts = None
self.mkimage = None
self._priv_entries = {}
self._loadables = []
def ReadNode(self):
super().ReadNode()
for pname, prop in self._node.props.items():
if pname.startswith('fit,'):
self._fit_props[pname] = prop
self._fdts = None
self._fit_list_prop = self._fit_props.get('fit,fdt-list')
if self._fit_list_prop:
fdts, = self.GetEntryArgsOrProps(
@ -182,17 +338,12 @@ class Entry_fit(Entry_section):
self._fdts = fdts.split()
self._fit_default_dt = self.GetEntryArgsOrProps([EntryArg('default-dt',
str)])[0]
self.mkimage = None
def ReadNode(self):
self.ReadEntries()
super().ReadNode()
def _get_operation(self, subnode):
def _get_operation(self, base_node, node):
"""Get the operation referenced by a subnode
Args:
subnode (Node): Subnode (of the FIT) to check
node (Node): Subnode (of the FIT) to check
Returns:
int: Operation to perform
@ -200,15 +351,107 @@ class Entry_fit(Entry_section):
Raises:
ValueError: Invalid operation name
"""
oper_name = subnode.props.get('fit,operation')
oper_name = node.props.get('fit,operation')
if not oper_name:
return OP_GEN_FDT_NODES
oper = OPERATIONS.get(oper_name.value)
if not oper:
self.Raise(f"Unknown operation '{oper_name.value}'")
if oper is None:
self._raise_subnode(node, f"Unknown operation '{oper_name.value}'")
return oper
def ReadEntries(self):
def _add_entries(base_node, depth, node):
"""Add entries for any nodes that need them
Args:
base_node: Base Node of the FIT (with 'description' property)
depth: Current node depth (0 is the base 'fit' node)
node: Current node to process
Here we only need to provide binman entries which are used to define
the 'data' for each image. We create an entry_Section for each.
"""
rel_path = node.path[len(base_node.path):]
in_images = rel_path.startswith('/images')
has_images = depth == 2 and in_images
if has_images:
# This node is a FIT subimage node (e.g. "/images/kernel")
# containing content nodes. We collect the subimage nodes and
# section entries for them here to merge the content subnodes
# together and put the merged contents in the subimage node's
# 'data' property later.
entry = Entry.Create(self.section, node, etype='section')
entry.ReadNode()
# The hash subnodes here are for mkimage, not binman.
entry.SetUpdateHash(False)
self._entries[rel_path] = entry
for subnode in node.subnodes:
_add_entries(base_node, depth + 1, subnode)
_add_entries(self._node, 0, self._node)
# Keep a copy of all entries, including generator entries, since these
# are removed from self._entries later.
self._priv_entries = dict(self._entries)
def BuildSectionData(self, required):
"""Build FIT entry contents
This adds the 'data' properties to the input ITB (Image-tree Binary)
then runs mkimage to process it.
Args:
required (bool): True if the data must be present, False if it is OK
to return None
Returns:
bytes: Contents of the section
"""
data = self._build_input()
uniq = self.GetUniqueName()
input_fname = tools.get_output_filename(f'{uniq}.itb')
output_fname = tools.get_output_filename(f'{uniq}.fit')
tools.write_file(input_fname, data)
tools.write_file(output_fname, data)
args = {}
ext_offset = self._fit_props.get('fit,external-offset')
if ext_offset is not None:
args = {
'external': True,
'pad': fdt_util.fdt32_to_cpu(ext_offset.value)
}
if self.mkimage.run(reset_timestamp=True, output_fname=output_fname,
**args) is None:
# Bintool is missing; just use empty data as the output
self.record_missing_bintool(self.mkimage)
return tools.get_bytes(0, 1024)
return tools.read_file(output_fname)
def _raise_subnode(self, node, msg):
"""Raise an error with a paticular FIT subnode
Args:
node (Node): FIT subnode containing the error
msg (str): Message to report
Raises:
ValueError, as requested
"""
rel_path = node.path[len(self._node.path) + 1:]
self.Raise(f"subnode '{rel_path}': {msg}")
def _build_input(self):
"""Finish the FIT by adding the 'data' properties to it
Arguments:
fdt: FIT to update
Returns:
bytes: New fdt contents
"""
def _process_prop(pname, prop):
"""Process special properties
@ -229,16 +472,22 @@ class Entry_fit(Entry_section):
if not self._fit_default_dt:
self.Raise("Generated 'default' node requires default-dt entry argument")
if self._fit_default_dt not in self._fdts:
self.Raise("default-dt entry argument '%s' not found in fdt list: %s" %
(self._fit_default_dt,
', '.join(self._fdts)))
self.Raise(
f"default-dt entry argument '{self._fit_default_dt}' "
f"not found in fdt list: {', '.join(self._fdts)}")
seq = self._fdts.index(self._fit_default_dt)
val = val[1:].replace('DEFAULT-SEQ', str(seq + 1))
fsw.property_string(pname, val)
return
elif pname.startswith('fit,'):
# Ignore these, which are commands for binman to process
return
elif pname in ['offset', 'size', 'image-pos']:
# Don't add binman's calculated properties
return
fsw.property(pname, prop.bytes)
def _scan_gen_fdt_nodes(subnode, depth, in_images):
def _gen_fdt_nodes(base_node, node, depth, in_images):
"""Generate FDT nodes
This creates one node for each member of self._fdts using the
@ -248,7 +497,7 @@ class Entry_fit(Entry_section):
first.
Args:
subnode (None): Generator node to process
node (Node): Generator node to process
depth: Current node depth (0 is the base 'fit' node)
in_images: True if this is inside the 'images' node, so that
'data' properties should be generated
@ -256,28 +505,77 @@ class Entry_fit(Entry_section):
if self._fdts:
# Generate nodes for each FDT
for seq, fdt_fname in enumerate(self._fdts):
node_name = subnode.name[1:].replace('SEQ', str(seq + 1))
node_name = node.name[1:].replace('SEQ', str(seq + 1))
fname = tools.get_input_filename(fdt_fname + '.dtb')
with fsw.add_node(node_name):
for pname, prop in subnode.props.items():
val = prop.bytes.replace(
b'NAME', tools.to_bytes(fdt_fname))
val = val.replace(
b'SEQ', tools.to_bytes(str(seq + 1)))
fsw.property(pname, val)
for pname, prop in node.props.items():
if pname == 'fit,loadables':
val = '\0'.join(self._loadables) + '\0'
fsw.property('loadables', val.encode('utf-8'))
elif pname == 'fit,operation':
pass
elif pname.startswith('fit,'):
self._raise_subnode(
node, f"Unknown directive '{pname}'")
else:
val = prop.bytes.replace(
b'NAME', tools.to_bytes(fdt_fname))
val = val.replace(
b'SEQ', tools.to_bytes(str(seq + 1)))
fsw.property(pname, val)
# Add data for 'images' nodes (but not 'config')
if depth == 1 and in_images:
fsw.property('data', tools.read_file(fname))
for subnode in node.subnodes:
with fsw.add_node(subnode.name):
_add_node(node, depth + 1, subnode)
else:
if self._fdts is None:
if self._fit_list_prop:
self.Raise("Generator node requires '%s' entry argument" %
self._fit_list_prop.value)
self.Raise('Generator node requires '
f"'{self._fit_list_prop.value}' entry argument")
else:
self.Raise("Generator node requires 'fit,fdt-list' property")
def _scan_node(subnode, depth, in_images):
def _gen_split_elf(base_node, node, elf_data, missing):
"""Add nodes for the ELF file, one per group of contiguous segments
Args:
base_node (Node): Template node from the binman definition
node (Node): Node to replace (in the FIT being built)
elf_data (bytes): ELF-format data to process (may be empty)
missing (bool): True if any of the data is missing
"""
# If any pieces are missing, skip this. The missing entries will
# show an error
if not missing:
try:
segments, entry = elf.read_loadable_segments(elf_data)
except ValueError as exc:
self._raise_subnode(node,
f'Failed to read ELF file: {str(exc)}')
for (seq, start, data) in segments:
node_name = node.name[1:].replace('SEQ', str(seq + 1))
with fsw.add_node(node_name):
loadables.append(node_name)
for pname, prop in node.props.items():
if not pname.startswith('fit,'):
fsw.property(pname, prop.bytes)
elif pname == 'fit,load':
fsw.property_u32('load', start)
elif pname == 'fit,entry':
if seq == 0:
fsw.property_u32('entry', entry)
elif pname == 'fit,data':
fsw.property('data', bytes(data))
elif pname != 'fit,operation':
self._raise_subnode(
node, f"Unknown directive '{pname}'")
def _gen_node(base_node, node, depth, in_images, entry):
"""Generate nodes from a template
This creates one node for each member of self._fdts using the
@ -287,124 +585,93 @@ class Entry_fit(Entry_section):
first.
Args:
subnode (None): Generator node to process
depth: Current node depth (0 is the base 'fit' node)
in_images: True if this is inside the 'images' node, so that
'data' properties should be generated
base_node (Node): Base Node of the FIT (with 'description'
property)
node (Node): Generator node to process
depth (int): Current node depth (0 is the base 'fit' node)
in_images (bool): True if this is inside the 'images' node, so
that 'data' properties should be generated
"""
oper = self._get_operation(subnode)
oper = self._get_operation(base_node, node)
if oper == OP_GEN_FDT_NODES:
_scan_gen_fdt_nodes(subnode, depth, in_images)
_gen_fdt_nodes(base_node, node, depth, in_images)
elif oper == OP_SPLIT_ELF:
# Entry_section.ObtainContents() either returns True or
# raises an exception.
data = None
missing_list = []
entry.ObtainContents()
entry.Pack(0)
data = entry.GetData()
entry.CheckMissing(missing_list)
def _AddNode(base_node, depth, node):
"""Add a node to the FIT
_gen_split_elf(base_node, node, data, bool(missing_list))
def _add_node(base_node, depth, node):
"""Add nodes to the output FIT
Args:
base_node: Base Node of the FIT (with 'description' property)
depth: Current node depth (0 is the base 'fit' node)
node: Current node to process
base_node (Node): Base Node of the FIT (with 'description'
property)
depth (int): Current node depth (0 is the base 'fit' node)
node (Node): Current node to process
There are two cases to deal with:
- hash and signature nodes which become part of the FIT
- binman entries which are used to define the 'data' for each
image
image, so don't appear in the FIT
"""
# Copy over all the relevant properties
for pname, prop in node.props.items():
if not pname.startswith('fit,'):
_process_prop(pname, prop)
_process_prop(pname, prop)
rel_path = node.path[len(base_node.path):]
in_images = rel_path.startswith('/images')
has_images = depth == 2 and in_images
if has_images:
# This node is a FIT subimage node (e.g. "/images/kernel")
# containing content nodes. We collect the subimage nodes and
# section entries for them here to merge the content subnodes
# together and put the merged contents in the subimage node's
# 'data' property later.
entry = Entry.Create(self.section, node, etype='section')
entry.ReadNode()
# The hash subnodes here are for mkimage, not binman.
entry.SetUpdateHash(False)
self._entries[rel_path] = entry
entry = self._priv_entries[rel_path]
data = entry.GetData()
fsw.property('data', bytes(data))
for subnode in node.subnodes:
subnode_path = f'{rel_path}/{subnode.name}'
if has_images and not (subnode.name.startswith('hash') or
subnode.name.startswith('signature')):
# This subnode is a content node not meant to appear in
# the FIT (e.g. "/images/kernel/u-boot"), so don't call
# fsw.add_node() or _AddNode() for it.
# fsw.add_node() or _add_node() for it.
pass
elif self.GetImage().generate and subnode.name.startswith('@'):
_scan_node(subnode, depth, in_images)
entry = self._priv_entries.get(subnode_path)
_gen_node(base_node, subnode, depth, in_images, entry)
# This is a generator (template) entry, so remove it from
# the list of entries used by PackEntries(), etc. Otherwise
# it will appear in the binman output
to_remove.append(subnode_path)
else:
with fsw.add_node(subnode.name):
_AddNode(base_node, depth + 1, subnode)
_add_node(base_node, depth + 1, subnode)
# Build a new tree with all nodes and properties starting from the
# entry node
fsw = libfdt.FdtSw()
fsw.finish_reservemap()
to_remove = []
loadables = []
with fsw.add_node(''):
_AddNode(self._node, 0, self._node)
_add_node(self._node, 0, self._node)
self._loadables = loadables
fdt = fsw.as_fdt()
# Remove generator entries from the main list
for path in to_remove:
if path in self._entries:
del self._entries[path]
# Pack this new FDT and scan it so we can add the data later
fdt.pack()
self._fdt = Fdt.FromData(fdt.as_bytearray())
self._fdt.Scan()
def BuildSectionData(self, required):
"""Build FIT entry contents
This adds the 'data' properties to the input ITB (Image-tree Binary)
then runs mkimage to process it.
Args:
required: True if the data must be present, False if it is OK to
return None
Returns:
Contents of the section (bytes)
"""
data = self._BuildInput(self._fdt)
uniq = self.GetUniqueName()
input_fname = tools.get_output_filename('%s.itb' % uniq)
output_fname = tools.get_output_filename('%s.fit' % uniq)
tools.write_file(input_fname, data)
tools.write_file(output_fname, data)
args = {}
ext_offset = self._fit_props.get('fit,external-offset')
if ext_offset is not None:
args = {
'external': True,
'pad': fdt_util.fdt32_to_cpu(ext_offset.value)
}
if self.mkimage.run(reset_timestamp=True, output_fname=output_fname,
**args) is None:
# Bintool is missing; just use empty data as the output
self.record_missing_bintool(self.mkimage)
return tools.get_bytes(0, 1024)
return tools.read_file(output_fname)
def _BuildInput(self, fdt):
"""Finish the FIT by adding the 'data' properties to it
Arguments:
fdt: FIT to update
Returns:
New fdt contents (bytes)
"""
for path, section in self._entries.items():
node = fdt.GetNode(path)
data = section.GetData()
node.AddData('data', data)
fdt.Sync(auto_resize=True)
data = fdt.GetContents()
data = fdt.as_bytearray()
return data
def SetImagePos(self, image_pos):
@ -414,7 +681,7 @@ class Entry_fit(Entry_section):
according to where they ended up in the packed FIT file.
Args:
image_pos: Position of this entry in the image
image_pos (int): Position of this entry in the image
"""
super().SetImagePos(image_pos)
@ -453,11 +720,18 @@ class Entry_fit(Entry_section):
# This should never happen
else: # pragma: no cover
self.Raise("%s: missing data properties" % (path))
self.Raise(f'{path}: missing data properties')
section.SetOffsetSize(offset, size)
section.SetImagePos(self.image_pos)
def AddBintools(self, tools):
super().AddBintools(tools)
self.mkimage = self.AddBintool(tools, 'mkimage')
def AddBintools(self, btools):
super().AddBintools(btools)
self.mkimage = self.AddBintool(btools, 'mkimage')
def CheckMissing(self, missing_list):
# We must use our private entry list for this since generator nodes
# which are removed from self._entries will otherwise not show up as
# missing
for entry in self._priv_entries.values():
entry.CheckMissing(missing_list)


@ -99,5 +99,5 @@ class Entry_gbb(Entry):
return True
def AddBintools(self, tools):
self.futility = self.AddBintool(tools, 'futility')
def AddBintools(self, btools):
self.futility = self.AddBintool(btools, 'futility')


@ -143,5 +143,5 @@ class Entry_intel_ifwi(Entry_blob_ext):
for entry in self._ifwi_entries.values():
entry.WriteSymbols(self)
def AddBintools(self, tools):
self.ifwitool = self.AddBintool(tools, 'ifwitool')
def AddBintools(self, btools):
self.ifwitool = self.AddBintool(btools, 'ifwitool')


@ -51,9 +51,10 @@ class Entry_mkimage(Entry):
self.ReadEntries()
def ObtainContents(self):
# Use a non-zero size for any fake files to keep mkimage happy
data, input_fname, uniq = self.collect_contents_to_file(
self._mkimage_entries.values(), 'mkimage')
if data is False:
self._mkimage_entries.values(), 'mkimage', 1024)
if data is None:
return False
output_fname = tools.get_output_filename('mkimage-out.%s' % uniq)
if self.mkimage.run_cmd('-d', input_fname, *self._args,
@ -73,6 +74,16 @@ class Entry_mkimage(Entry):
entry.ReadNode()
self._mkimage_entries[entry.name] = entry
def SetAllowMissing(self, allow_missing):
"""Set whether a section allows missing external blobs
Args:
allow_missing: True if allowed, False if not allowed
"""
self.allow_missing = allow_missing
for entry in self._mkimage_entries.values():
entry.SetAllowMissing(allow_missing)
def SetAllowFakeBlob(self, allow_fake):
"""Set whether the sub nodes allows to create a fake blob
@ -93,5 +104,5 @@ class Entry_mkimage(Entry):
for entry in self._mkimage_entries.values():
entry.CheckFakedBlobs(faked_blobs_list)
def AddBintools(self, tools):
self.mkimage = self.AddBintool(tools, 'mkimage')
def AddBintools(self, btools):
self.mkimage = self.AddBintool(btools, 'mkimage')


@ -234,10 +234,10 @@ class Entry_section(Entry):
todo)
return True
def ExpandEntries(self):
super().ExpandEntries()
def gen_entries(self):
super().gen_entries()
for entry in self._entries.values():
entry.ExpandEntries()
entry.gen_entries()
def AddMissingProperties(self, have_image_pos):
"""Add new properties to the device tree as needed for this entry"""
@ -247,7 +247,7 @@ class Entry_section(Entry):
for entry in self._entries.values():
entry.AddMissingProperties(have_image_pos)
def ObtainContents(self, skip_entry=None):
def ObtainContents(self, fake_size=0, skip_entry=None):
return self.GetEntryContents(skip_entry=skip_entry)
def GetPaddedDataForEntry(self, entry, entry_data):
@ -386,7 +386,7 @@ class Entry_section(Entry):
self._PackEntries()
if self._sort:
self._SortEntries()
self._ExpandEntries()
self._extend_entries()
data = self.BuildSectionData(True)
self.SetContents(data)
@ -404,17 +404,17 @@ class Entry_section(Entry):
offset = entry.Pack(offset)
return offset
def _ExpandEntries(self):
"""Expand any entries that are permitted to"""
def _extend_entries(self):
"""Extend any entries that are permitted to"""
exp_entry = None
for entry in self._entries.values():
if exp_entry:
exp_entry.ExpandToLimit(entry.offset)
exp_entry.extend_to_limit(entry.offset)
exp_entry = None
if entry.expand_size:
if entry.extend_size:
exp_entry = entry
if exp_entry:
exp_entry.ExpandToLimit(self.size)
exp_entry.extend_to_limit(self.size)
def _SortEntries(self):
"""Sort entries by offset"""
@ -895,6 +895,6 @@ class Entry_section(Entry):
for entry in self._entries.values():
entry.CheckAltFormats(alt_formats)
def AddBintools(self, tools):
def AddBintools(self, btools):
for entry in self._entries.values():
entry.AddBintools(tools)
entry.AddBintools(btools)


@ -97,5 +97,5 @@ class Entry_vblock(Entry_collection):
data = self.GetVblock(True)
return self.ProcessContentsUpdate(data)
def AddBintools(self, tools):
self.futility = self.AddBintool(tools, 'futility')
def AddBintools(self, btools):
self.futility = self.AddBintool(btools, 'futility')


@ -202,6 +202,13 @@ class TestFunctional(unittest.TestCase):
TestFunctional._MakeInputFile('env.txt', ENV_DATA)
# ELF file with two sections in different parts of memory, used for both
# ATF and OP_TEE
TestFunctional._MakeInputFile('bl31.elf',
tools.read_file(cls.ElfTestFile('elf_sections')))
TestFunctional._MakeInputFile('tee.elf',
tools.read_file(cls.ElfTestFile('elf_sections')))
cls.have_lz4 = comp_util.HAVE_LZ4
@classmethod
@ -2028,9 +2035,9 @@ class TestFunctional(unittest.TestCase):
self.assertIn("Node '/binman/files': Missing 'pattern' property",
str(e.exception))
def testExpandSize(self):
"""Test an expanding entry"""
data, _, map_data, _ = self._DoReadFileDtb('088_expand_size.dts',
def testExtendSize(self):
"""Test an extending entry"""
data, _, map_data, _ = self._DoReadFileDtb('088_extend_size.dts',
map=True)
expect = (tools.get_bytes(ord('a'), 8) + U_BOOT_DATA +
MRC_DATA + tools.get_bytes(ord('b'), 1) + U_BOOT_DATA +
@ -2050,11 +2057,11 @@ class TestFunctional(unittest.TestCase):
00000020 00000020 00000008 fill2
''', map_data)
def testExpandSizeBad(self):
"""Test an expanding entry which fails to provide contents"""
def testExtendSizeBad(self):
"""Test an extending entry which fails to provide contents"""
with test_util.capture_sys_output() as (stdout, stderr):
with self.assertRaises(ValueError) as e:
self._DoReadFileDtb('089_expand_size_bad.dts', map=True)
self._DoReadFileDtb('089_extend_size_bad.dts', map=True)
self.assertIn("Node '/binman/_testing': Cannot obtain contents when "
'expanding entry', str(e.exception))
@ -2487,22 +2494,22 @@ class TestFunctional(unittest.TestCase):
str(e.exception))
def testEntryExpand(self):
"""Test expanding an entry after it is packed"""
data = self._DoReadFile('121_entry_expand.dts')
"""Test extending an entry after it is packed"""
data = self._DoReadFile('121_entry_extend.dts')
self.assertEqual(b'aaa', data[:3])
self.assertEqual(U_BOOT_DATA, data[3:3 + len(U_BOOT_DATA)])
self.assertEqual(b'aaa', data[-3:])
def testEntryExpandBad(self):
"""Test expanding an entry after it is packed, twice"""
def testEntryExtendBad(self):
"""Test extending an entry after it is packed, twice"""
with self.assertRaises(ValueError) as e:
self._DoReadFile('122_entry_expand_twice.dts')
self._DoReadFile('122_entry_extend_twice.dts')
self.assertIn("Image '/binman': Entries changed size after packing",
str(e.exception))
def testEntryExpandSection(self):
"""Test expanding an entry within a section after it is packed"""
data = self._DoReadFile('123_entry_expand_section.dts')
def testEntryExtendSection(self):
"""Test extending an entry within a section after it is packed"""
data = self._DoReadFile('123_entry_extend_section.dts')
self.assertEqual(b'aaa', data[:3])
self.assertEqual(U_BOOT_DATA, data[3:3 + len(U_BOOT_DATA)])
self.assertEqual(b'aaa', data[-3:])
@ -3780,6 +3787,7 @@ class TestFunctional(unittest.TestCase):
dtb.Scan()
props = self._GetPropTree(dtb, BASE_DTB_PROPS + REPACK_DTB_PROPS)
self.maxDiff = None
self.assertEqual({
'image-pos': 0,
'offset': 0,
@ -3793,19 +3801,19 @@ class TestFunctional(unittest.TestCase):
'fit:offset': 4,
'fit:size': 1840,
'fit/images/kernel:image-pos': 160,
'fit/images/kernel:offset': 156,
'fit/images/kernel:image-pos': 304,
'fit/images/kernel:offset': 300,
'fit/images/kernel:size': 4,
'fit/images/kernel/u-boot:image-pos': 160,
'fit/images/kernel/u-boot:image-pos': 304,
'fit/images/kernel/u-boot:offset': 0,
'fit/images/kernel/u-boot:size': 4,
'fit/images/fdt-1:image-pos': 456,
'fit/images/fdt-1:offset': 452,
'fit/images/fdt-1:image-pos': 552,
'fit/images/fdt-1:offset': 548,
'fit/images/fdt-1:size': 6,
'fit/images/fdt-1/u-boot-spl-dtb:image-pos': 456,
'fit/images/fdt-1/u-boot-spl-dtb:image-pos': 552,
'fit/images/fdt-1/u-boot-spl-dtb:offset': 0,
'fit/images/fdt-1/u-boot-spl-dtb:size': 6,
@ -4029,6 +4037,7 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(expected_data, fnode.props['data'].bytes)
self.assertEqual('fdt-test-fdt%d.dtb' % seq,
fnode.props['description'].value)
self.assertEqual(fnode.subnodes[0].name, 'hash')
def _CheckConfig(seq, expected_data):
"""Check the configuration nodes
@ -5301,9 +5310,167 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
"""Check handling of an FDT map when the section cannot be found"""
with self.assertRaises(ValueError) as exc:
self._DoReadFileDtb('224_fit_bad_oper.dts')
self.assertIn("Node '/binman/fit': Unknown operation 'unknown'",
self.assertIn("Node '/binman/fit': subnode 'images/@fdt-SEQ': Unknown operation 'unknown'",
str(exc.exception))
def test_uses_expand_size(self):
"""Test that the 'expand-size' property cannot be used anymore"""
with self.assertRaises(ValueError) as e:
data = self._DoReadFile('225_expand_size_bad.dts')
self.assertIn(
"Node '/binman/u-boot': Please use 'extend-size' instead of 'expand-size'",
str(e.exception))
def testMkimageMissingBlob(self):
"""Test using mkimage to build an image"""
with test_util.capture_sys_output() as (stdout, stderr):
self._DoTestFile('229_mkimage_missing.dts', allow_missing=True,
allow_fake_blobs=True)
err = stderr.getvalue()
self.assertRegex(
err,
"Image '.*' has faked external blobs and is non-functional: .*")
def testFitSplitElf(self):
"""Test an image with an FIT with an split-elf operation"""
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
'default-dt': 'test-fdt2',
'atf-bl31-path': 'bl31.elf',
'tee-os-path': 'tee.elf',
}
test_subdir = os.path.join(self._indir, TEST_FDT_SUBDIR)
data = self._DoReadFileDtb(
'226_fit_split_elf.dts',
entry_args=entry_args,
extra_indirs=[test_subdir])[0]
self.assertEqual(U_BOOT_NODTB_DATA, data[-len(U_BOOT_NODTB_DATA):])
fit_data = data[len(U_BOOT_DATA):-len(U_BOOT_NODTB_DATA)]
base_keys = {'description', 'type', 'arch', 'os', 'compression',
'data', 'load'}
dtb = fdt.Fdt.FromData(fit_data)
dtb.Scan()
elf_data = tools.read_file(os.path.join(self._indir, 'bl31.elf'))
segments, entry = elf.read_loadable_segments(elf_data)
# We assume there are two segments
self.assertEquals(2, len(segments))
atf1 = dtb.GetNode('/images/atf-1')
_, start, data = segments[0]
self.assertEqual(base_keys | {'entry'}, atf1.props.keys())
self.assertEqual(entry,
fdt_util.fdt32_to_cpu(atf1.props['entry'].value))
self.assertEqual(start,
fdt_util.fdt32_to_cpu(atf1.props['load'].value))
self.assertEqual(data, atf1.props['data'].bytes)
atf2 = dtb.GetNode('/images/atf-2')
self.assertEqual(base_keys, atf2.props.keys())
_, start, data = segments[1]
self.assertEqual(start,
fdt_util.fdt32_to_cpu(atf2.props['load'].value))
self.assertEqual(data, atf2.props['data'].bytes)
conf = dtb.GetNode('/configurations')
self.assertEqual({'default'}, conf.props.keys())
for subnode in conf.subnodes:
self.assertEqual({'description', 'fdt', 'loadables'},
subnode.props.keys())
self.assertEqual(
['atf-1', 'atf-2', 'tee-1', 'tee-2'],
fdt_util.GetStringList(subnode, 'loadables'))
def _check_bad_fit(self, dts):
"""Check a bad FIT
This runs with the given dts and returns the assertion raised
Args:
dts (str): dts filename to use
Returns:
str: Assertion string raised
"""
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
'default-dt': 'test-fdt2',
'atf-bl31-path': 'bl31.elf',
'tee-os-path': 'tee.elf',
}
test_subdir = os.path.join(self._indir, TEST_FDT_SUBDIR)
with self.assertRaises(ValueError) as exc:
self._DoReadFileDtb(dts, entry_args=entry_args,
extra_indirs=[test_subdir])[0]
return str(exc.exception)
def testFitSplitElfBadElf(self):
"""Test a FIT split-elf operation with an invalid ELF file"""
TestFunctional._MakeInputFile('bad.elf', tools.get_bytes(100, 100))
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
'default-dt': 'test-fdt2',
'atf-bl31-path': 'bad.elf',
'tee-os-path': 'tee.elf',
}
test_subdir = os.path.join(self._indir, TEST_FDT_SUBDIR)
with self.assertRaises(ValueError) as exc:
self._DoReadFileDtb(
'226_fit_split_elf.dts',
entry_args=entry_args,
extra_indirs=[test_subdir])[0]
self.assertIn(
"Node '/binman/fit': subnode 'images/@atf-SEQ': Failed to read ELF file: Magic number does not match",
str(exc.exception))
def testFitSplitElfBadDirective(self):
"""Test a FIT split-elf invalid fit,xxx directive in an image node"""
err = self._check_bad_fit('227_fit_bad_dir.dts')
self.assertIn(
"Node '/binman/fit': subnode 'images/@atf-SEQ': Unknown directive 'fit,something'",
err)
def testFitSplitElfBadDirectiveConfig(self):
"""Test a FIT split-elf with invalid fit,xxx directive in config"""
err = self._check_bad_fit('228_fit_bad_dir_config.dts')
self.assertEqual(
"Node '/binman/fit': subnode 'configurations/@config-SEQ': Unknown directive 'fit,config'",
err)
def checkFitSplitElf(self, **kwargs):
"""Test an split-elf FIT with a missing ELF file"""
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
'default-dt': 'test-fdt2',
'atf-bl31-path': 'bl31.elf',
'tee-os-path': 'missing.elf',
}
test_subdir = os.path.join(self._indir, TEST_FDT_SUBDIR)
with test_util.capture_sys_output() as (stdout, stderr):
self._DoTestFile(
'226_fit_split_elf.dts', entry_args=entry_args,
extra_indirs=[test_subdir], **kwargs)
err = stderr.getvalue()
return err
def testFitSplitElfMissing(self):
"""Test an split-elf FIT with a missing ELF file"""
err = self.checkFitSplitElf(allow_missing=True)
self.assertRegex(
err,
"Image '.*' is missing external blobs and is non-functional: .*")
def testFitSplitElfFaked(self):
"""Test an split-elf FIT with faked ELF file"""
err = self.checkFitSplitElf(allow_missing=True, allow_fake_blobs=True)
self.assertRegex(
err,
"Image '.*' is missing external blobs and is non-functional: .*")
if __name__ == "__main__":
unittest.main()


@ -41,6 +41,7 @@ from patman import test_util
# Bring in the libfdt module
sys.path.insert(2, 'scripts/dtc/pylibfdt')
sys.path.insert(2, os.path.join(srctree, 'scripts/dtc/pylibfdt'))
sys.path.insert(2, os.path.join(srctree, 'build-sandbox/scripts/dtc/pylibfdt'))
sys.path.insert(2, os.path.join(srctree, 'build-sandbox_spl/scripts/dtc/pylibfdt'))
# When running under python-coverage on Ubuntu 16.04, the dist-packages


@ -5,7 +5,7 @@
binman {
size = <40>;
fill {
expand-size;
extend-size;
fill-byte = [61];
size = <0>;
};
@ -13,7 +13,7 @@
offset = <8>;
};
section {
expand-size;
extend-size;
pad-byte = <0x62>;
intel-mrc {
};
@ -25,7 +25,7 @@
section2 {
type = "section";
fill {
expand-size;
extend-size;
fill-byte = [63];
size = <0>;
};
@ -35,7 +35,7 @@
};
fill2 {
type = "fill";
expand-size;
extend-size;
fill-byte = [64];
size = <0>;
};


@ -4,7 +4,7 @@
/ {
binman {
_testing {
expand-size;
extend-size;
return-contents-once;
};
u-boot {


@ -36,6 +36,9 @@
description = "fdt-NAME.dtb";
type = "flat_dt";
compression = "none";
hash {
algo = "sha256";
};
};
};


@ -21,7 +21,5 @@
};
};
};
fdtmap {
};
};
};


@ -0,0 +1,10 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
binman {
u-boot {
expand-size;
};
};
};


@ -0,0 +1,67 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot {
};
fit {
description = "test-desc";
#address-cells = <1>;
fit,fdt-list = "of-list";
images {
@fdt-SEQ {
description = "fdt-NAME.dtb";
type = "flat_dt";
compression = "none";
};
atf: @atf-SEQ {
fit,operation = "split-elf";
description = "ARM Trusted Firmware";
type = "firmware";
arch = "arm64";
os = "arm-trusted-firmware";
compression = "none";
fit,load;
fit,entry;
fit,data;
atf-bl31 {
};
};
@tee-SEQ {
fit,operation = "split-elf";
description = "TEE";
type = "tee";
arch = "arm64";
os = "tee";
compression = "none";
fit,load;
fit,entry;
fit,data;
tee-os {
};
};
};
configurations {
default = "@config-DEFAULT-SEQ";
config: @config-SEQ {
description = "conf-NAME.dtb";
fdt = "fdt-SEQ";
fit,loadables;
};
};
};
u-boot-nodtb {
};
};
};


@ -0,0 +1,9 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
#include "226_fit_split_elf.dts"
&atf {
fit,something = "bad";
};


@ -0,0 +1,9 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
#include "226_fit_split_elf.dts"
&config {
fit,config = "bad";
};


@ -0,0 +1,18 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
mkimage {
args = "-n test -T script";
blob-ext {
filename = "missing.bin";
};
};
};
};


@ -516,9 +516,7 @@ class Node:
Returns:
Prop added
"""
out = b''
for string in val:
out += bytes(string, 'utf-8') + b'\0'
out = b'\0'.join(bytes(s, 'utf-8') for s in val) + b'\0' if val else b''
return self.AddData(prop_name, out)
def AddInt(self, prop_name, val):
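
The new expression produces the same encoding as the removed loop, but also
handles an empty list (matching the new fdt test in this series)::

    >>> val = ['123', '456']
    >>> b'\0'.join(bytes(s, 'utf-8') for s in val) + b'\0' if val else b''
    b'123\x00456\x00'
    >>> val = []
    >>> b'\0'.join(bytes(s, 'utf-8') for s in val) + b'\0' if val else b''
    b''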


@ -158,6 +158,8 @@ def GetString(node, propname, default=None):
if not prop:
return default
value = prop.value
if not prop.bytes:
return ''
if isinstance(value, list):
raise ValueError("Node '%s' property '%s' has list value: expecting "
"a single string" % (node.name, propname))
@ -179,6 +181,8 @@ def GetStringList(node, propname, default=None):
if not prop:
return default
value = prop.value
if not prop.bytes:
return []
if not isinstance(value, list):
strval = GetString(node, propname)
return [strval]
@ -192,8 +196,12 @@ def GetArgs(node, propname):
value = GetStringList(node, propname)
else:
value = []
lists = [v.split() for v in value]
args = [x for l in lists for x in l]
if not value:
args = []
elif len(value) == 1:
args = value[0].split()
else:
args = value
return args
def GetBool(node, propname, default=False):
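
The effect, matching the new test cases below: a single-string property is
split on whitespace, while a multi-string property is passed through
unchanged, so individual arguments may contain spaces::

    args = "-n first", "second", "-p", "123,456", "-x";
        -> ['-n first', 'second', '-p', '123,456', '-x']
    args3 = "-n first second -p 123,456 -x";
        -> ['-n', 'first', 'second', '-p', '123,456', '-x']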


@ -63,5 +63,7 @@
orig-node {
orig = <1 23 4>;
args = "-n first", "second", "-p", "123,456", "-x";
args2 = "a space", "there";
args3 = "-n first second -p 123,456 -x";
};
};


@ -550,6 +550,12 @@ class TestProp(unittest.TestCase):
data = self.fdt.getprop(self.node.Offset(), 'stringlist')
self.assertEqual(b'123\x00456\0', data)
val = []
self.node.AddStringList('stringlist', val)
self.dtb.Sync(auto_resize=True)
data = self.fdt.getprop(self.node.Offset(), 'stringlist')
self.assertEqual(b'', data)
def test_delete_node(self):
"""Test deleting a node"""
old_offset = self.fdt.path_offset('/spl-test')
@ -637,6 +643,7 @@ class TestFdtUtil(unittest.TestCase):
self.assertEqual('message', fdt_util.GetString(self.node, 'stringval'))
self.assertEqual('test', fdt_util.GetString(self.node, 'missing',
'test'))
self.assertEqual('', fdt_util.GetString(self.node, 'boolval'))
with self.assertRaises(ValueError) as e:
self.assertEqual(3, fdt_util.GetString(self.node, 'stringarray'))
@ -651,6 +658,7 @@ class TestFdtUtil(unittest.TestCase):
fdt_util.GetStringList(self.node, 'stringarray'))
self.assertEqual(['test'],
fdt_util.GetStringList(self.node, 'missing', ['test']))
self.assertEqual([], fdt_util.GetStringList(self.node, 'boolval'))
def testGetArgs(self):
node = self.dtb.GetNode('/orig-node')
@ -659,8 +667,12 @@ class TestFdtUtil(unittest.TestCase):
['multi-word', 'message'],
fdt_util.GetArgs(self.node, 'stringarray'))
self.assertEqual([], fdt_util.GetArgs(self.node, 'boolval'))
self.assertEqual(['-n', 'first', 'second', '-p', '123,456', '-x'],
self.assertEqual(['-n first', 'second', '-p', '123,456', '-x'],
fdt_util.GetArgs(node, 'args'))
self.assertEqual(['a space', 'there'],
fdt_util.GetArgs(node, 'args2'))
self.assertEqual(['-n', 'first', 'second', '-p', '123,456', '-x'],
fdt_util.GetArgs(node, 'args3'))
with self.assertRaises(ValueError) as exc:
fdt_util.GetArgs(self.node, 'missing')
self.assertIn(


@ -94,17 +94,6 @@ SIZES = {
RE_REMOVE_DEFCONFIG = re.compile(r'(.*)_defconfig')
### helper functions ###
def remove_defconfig(defc):
"""Drop the _defconfig suffix on a string
Args:
defc (str): String to convert
Returns:
str: string with the '_defconfig' suffix removed
"""
return RE_REMOVE_DEFCONFIG.match(defc)[1]
def check_top_directory():
"""Exit if we are not at the top of source directory."""
for fname in 'README', 'Licenses':
@ -1618,8 +1607,7 @@ def defconfig_matches(configs, re_match):
bool: True if any CONFIG matches the regex
"""
for cfg in configs:
m_cfg = re_match.match(cfg)
if m_cfg and m_cfg.span()[1] == len(cfg):
if re_match.fullmatch(cfg):
return True
return False
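
Why fullmatch: re.match() only anchors at the start of the string, so the
old code also had to check that the match spanned the whole CONFIG name;
re.fullmatch() does both in one call::

    import re

    pat = re.compile('CONFIG_CMD_I2C')
    assert pat.fullmatch('CONFIG_CMD_I2C')           # whole string matches
    assert not pat.fullmatch('CONFIG_CMD_I2C_GPIO')  # prefix-only, rejected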
@ -1668,7 +1656,7 @@ def do_find_config(config_list):
print(f"Error: Not in Kconfig: %s" % ' '.join(adhoc))
else:
print(f'{len(out)} matches')
print(' '.join([remove_defconfig(item) for item in out]))
print(' '.join(item.split('_defconfig')[0] for item in out))
def prefix_config(cfg):