binman fixes/improvements to FIT generator

binman SPL fixes
 moveconfig support regex matches
 -----BEGIN PGP SIGNATURE-----
 
 iQFFBAABCgAvFiEEslwAIq+Gp8wWVbYnfxc6PpAIreYFAmIVc+MRHHNqZ0BjaHJv
 bWl1bS5vcmcACgkQfxc6PpAIreY0qAf/QgR6FMENwspJinIYjahLdvgBuury1nKv
 bTDOvzpR8zyYaSoybDj3T3yLoksLzGvg4NA8noedw7vj4a4YRt7oM73HsXhHmNd9
 Ymoqv4Xt+6CyZRhO7DVRu8+/dIjQ/5pywf4tSyUh3x2qLmkoNdPaUcdFp+MY2zLQ
 uCnR5ndNFukOepPdubnchfRzuOYUYHT9ksOF/WlNNor5jol8B2gwrcC8GgGWBTmG
 Toka+j83NaYGZmVzYaVgdrtg/KgHGb4XGl/yMsIF9hPn5mghrr9fsG6VmJHsCv5B
 Vqs7JUY1Y9CWNm8io+4aznAjjkTM6iNMRxj+UemRz59StBSStotEPg==
 =b6mQ
 -----END PGP SIGNATURE-----

Merge tag 'dm-pull-22222' of https://gitlab.denx.de/u-boot/custodians/u-boot-dm

binman fixes/improvements to FIT generator
binman SPL fixes
moveconfig support regex matches
Tom Rini 2022-02-23 12:28:54 -05:00
commit 4cb9bd834e
31 changed files with 1151 additions and 162 deletions

View file

@ -1327,6 +1327,7 @@ cmd_binman = $(srctree)/tools/binman/binman $(if $(BINMAN_DEBUG),-D) \
-I arch/$(ARCH)/dts -a of-list=$(CONFIG_OF_LIST) \
$(foreach f,$(BINMAN_INDIRS),-I $(f)) \
-a atf-bl31-path=${BL31} \
-a tee-os-path=${TEE} \
-a opensbi-path=${OPENSBI} \
-a default-dt=$(default_dt) \
-a scp-path=$(SCP) \

View file

@ -37,7 +37,7 @@
u-boot-tpl-dtb {
};
#endif
spl {
u-boot-spl {
type = "u-boot-spl";
offset = <CONFIG_X86_OFFSET_SPL>;
};

View file

@ -101,6 +101,18 @@ config SPL_SHOW_ERRORS
This adds a small amount to SPL code size, perhaps 100 bytes.
config SPL_BINMAN_SYMBOLS
bool "Declare binman symbols in SPL"
depends on SPL_FRAMEWORK && BINMAN
default y
help
This enables use of symbols in SPL which refer to U-Boot, enabling SPL
to obtain the location of U-Boot simply by calling spl_get_image_pos()
and spl_get_image_size().
For this to work, you must have a U-Boot image in the binman image, so
binman can update SPL with the location of it.
menu "PowerPC and LayerScape SPL Boot options"
config SPL_NAND_BOOT
@ -1321,6 +1333,18 @@ config TPL_SIZE_LIMIT
Specifies the maximum length of the U-Boot TPL image.
If this value is zero, it is ignored.
config TPL_BINMAN_SYMBOLS
bool "Declare binman symbols in SPL"
depends on SPL_FRAMEWORK && BINMAN
default y
help
This enables use of symbols in TPL which refer to U-Boot, enabling TPL
to obtain the location of U-Boot simply by calling spl_get_image_pos()
and spl_get_image_size().
For this to work, you must have a U-Boot image in the binman image, so
binman can update SPL with the location of it.
config TPL_FRAMEWORK
bool "Support TPL based upon the common SPL framework"
default y if SPL_FRAMEWORK

View file

@ -49,13 +49,15 @@ DECLARE_GLOBAL_DATA_PTR;
u32 *boot_params_ptr = NULL;
#if CONFIG_IS_ENABLED(BINMAN_SYMBOLS)
/* See spl.h for information about this */
binman_sym_declare(ulong, u_boot_any, image_pos);
binman_sym_declare(ulong, u_boot_any, size);
#endif
#ifdef CONFIG_TPL
binman_sym_declare(ulong, spl, image_pos);
binman_sym_declare(ulong, spl, size);
binman_sym_declare(ulong, u_boot_spl, image_pos);
binman_sym_declare(ulong, u_boot_spl, size);
#endif
/* Define board data structure */
@ -140,19 +142,21 @@ void spl_fixup_fdt(void *fdt_blob)
#endif
}
#if CONFIG_IS_ENABLED(BINMAN_SYMBOLS)
ulong spl_get_image_pos(void)
{
return spl_phase() == PHASE_TPL ?
binman_sym(ulong, spl, image_pos) :
binman_sym(ulong, u_boot_spl, image_pos) :
binman_sym(ulong, u_boot_any, image_pos);
}
ulong spl_get_image_size(void)
{
return spl_phase() == PHASE_TPL ?
binman_sym(ulong, spl, size) :
binman_sym(ulong, u_boot_spl, size) :
binman_sym(ulong, u_boot_any, size);
}
#endif /* BINMAN_SYMBOLS */
ulong spl_get_image_text_base(void)
{

View file

@ -354,23 +354,31 @@ int generic_phy_configure(struct phy *phy, void *params)
int generic_phy_get_bulk(struct udevice *dev, struct phy_bulk *bulk)
{
int i, ret, count;
struct udevice *phydev = dev;
bulk->count = 0;
/* Return if no phy declared */
if (!dev_read_prop(dev, "phys", NULL))
return 0;
if (!dev_read_prop(dev, "phys", NULL)) {
phydev = dev->parent;
if (!dev_read_prop(phydev, "phys", NULL)) {
pr_err("%s : no phys property\n", __func__);
return 0;
}
}
count = dev_count_phandle_with_args(dev, "phys", "#phy-cells", 0);
if (count < 1)
count = dev_count_phandle_with_args(phydev, "phys", "#phy-cells", 0);
if (count < 1) {
pr_err("%s : no phys found %d\n", __func__, count);
return count;
}
bulk->phys = devm_kcalloc(dev, count, sizeof(struct phy), GFP_KERNEL);
bulk->phys = devm_kcalloc(phydev, count, sizeof(struct phy), GFP_KERNEL);
if (!bulk->phys)
return -ENOMEM;
for (i = 0; i < count; i++) {
ret = generic_phy_get_by_index(dev, i, &bulk->phys[i]);
ret = generic_phy_get_by_index(phydev, i, &bulk->phys[i]);
if (ret) {
pr_err("Failed to get PHY%d for %s\n", i, dev->name);
return ret;

View file

@ -269,8 +269,8 @@ struct spl_load_info {
*/
binman_sym_extern(ulong, u_boot_any, image_pos);
binman_sym_extern(ulong, u_boot_any, size);
binman_sym_extern(ulong, spl, image_pos);
binman_sym_extern(ulong, spl, size);
binman_sym_extern(ulong, u_boot_spl, image_pos);
binman_sym_extern(ulong, u_boot_spl, size);
/**
* spl_get_image_pos() - get the image position of the next phase

View file

@ -937,4 +937,10 @@ int fdt_check_full(const void *fdt, size_t bufsize)
}
}
}
#endif
#else
int fdt_check_full(const void __always_unused *fdt,
size_t __always_unused bufsize)
{
return 0;
}
#endif /* #if !defined(FDT_ASSUME_MASK) || FDT_ASSUME_MASK != 0xff */

View file

@ -90,9 +90,9 @@ tools_binman_cbfs_util_test 9.38
tools_binman_cmdline 9.03
tools_binman_comp_util 6.88
tools_binman_control 5.01
tools_binman_elf 6.73
tools_binman_elf_test 5.41
tools_binman_entry 3.36
tools_binman_elf 6.98
tools_binman_elf_test 5.62
tools_binman_entry 3.55
tools_binman_entry_test 5.34
tools_binman_etype__testing 0.83
tools_binman_etype_atf_bl31 -6.00
@ -109,7 +109,7 @@ tools_binman_etype_cros_ec_rw -6.00
tools_binman_etype_fdtmap -3.28
tools_binman_etype_files -7.43
tools_binman_etype_fill -6.43
tools_binman_etype_fit 4.64
tools_binman_etype_fit 6.31
tools_binman_etype_fmap -0.29
tools_binman_etype_gbb 0.83
tools_binman_etype_image_header 5.77
@ -127,11 +127,12 @@ tools_binman_etype_intel_mrc -10.00
tools_binman_etype_intel_refcode -10.00
tools_binman_etype_intel_vbt -12.50
tools_binman_etype_intel_vga -12.50
tools_binman_etype_mkimage 2.31
tools_binman_etype_mkimage 1.47
tools_binman_etype_opensbi -6.00
tools_binman_etype_powerpc_mpc85xx_bootpg_resetvec -10.00
tools_binman_etype_scp -6.00
tools_binman_etype_section 4.57
tools_binman_etype_tee_os -6.00
tools_binman_etype_text -0.48
tools_binman_etype_u_boot -15.71
tools_binman_etype_u_boot_dtb -12.22
@ -169,7 +170,7 @@ tools_binman_fdt_test 3.23
tools_binman_fip_util 9.85
tools_binman_fip_util_test 10.00
tools_binman_fmap_util 6.88
tools_binman_ftest 7.45
tools_binman_ftest 7.46
tools_binman_image 7.12
tools_binman_image_test 4.48
tools_binman_main 4.86
@ -189,19 +190,19 @@ tools_buildman_test 6.56
tools_buildman_toolchain 6.44
tools_concurrencytest_concurrencytest 7.26
tools_dtoc_dtb_platdata 7.90
tools_dtoc_fdt 4.50
tools_dtoc_fdt_util 6.70
tools_dtoc_fdt 4.46
tools_dtoc_fdt_util 6.80
tools_dtoc_main 7.78
tools_dtoc_setup 5.00
tools_dtoc_src_scan 8.91
tools_dtoc_test_dtoc 8.56
tools_dtoc_test_fdt 6.96
tools_dtoc_test_fdt 6.88
tools_dtoc_test_src_scan 9.43
tools_efivar 6.71
tools_endian-swap 9.29
tools_genboardscfg 7.95
tools_microcode-tool 7.25
tools_moveconfig 8.32
tools_moveconfig 8.34
tools_patman___init__ 0.00
tools_patman_checkpatch 8.48
tools_patman_command 5.51

View file

@ -163,8 +163,8 @@ Consider sunxi. It has the following steps:
Binman is intended to replace the last step. The U-Boot build system builds
u-boot.bin and sunxi-spl.bin. Binman can then take over creation of
sunxi-spl.bin (by calling mksunxiboot, or hopefully one day mkimage). In any
case, it would then create the image from the component parts.
sunxi-spl.bin by calling mksunxiboot or mkimage. In any case, it would then
create the image from the component parts.
This simplifies the U-Boot Makefile somewhat, since various pieces of logic
can be replaced by a call to binman.
@ -1358,6 +1358,92 @@ development, since dealing with exceptions and problems in threads is more
difficult. This avoids any use of ThreadPoolExecutor.
Collecting data for an entry type
---------------------------------
Some entry types deal with data obtained from others. For example,
`Entry_mkimage` calls the `mkimage` tool with data from its subnodes::
mkimage {
args = "-n test -T script";
u-boot-spl {
};
u-boot {
};
};
This shows mkimage being passed a file consisting of SPL and U-Boot proper. It
is created by calling `Entry.collect_contents_to_file()`. Note that in this case,
the data is passed to mkimage for processing but does not appear separately in
the image. It may not appear at all, depending on what mkimage does. The
contents of the `mkimage` entry are entirely dependent on the processing done
by the entry, with the provided subnodes (`u-boot-spl` and `u-boot`) simply
providing the input data for that processing.
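For example, here is a minimal sketch of a hypothetical entry type which
concatenates its subnodes and records the result as its contents, mirroring
what `Entry_mkimage` does (the `Entry_concat` class name and the `concat`
filename prefix are invented for illustration)::

    # Hypothetical example; Entry_concat is not a real binman entry type
    from collections import OrderedDict

    from binman.entry import Entry

    class Entry_concat(Entry):
        """Concatenate the contents of subnodes into this entry's data"""
        def __init__(self, section, etype, node):
            super().__init__(section, etype, node)
            self._concat_entries = OrderedDict()
            self.ReadEntries()

        def ReadEntries(self):
            # Each subnode becomes an Entry providing part of the input data
            for node in self._node.subnodes:
                entry = Entry.Create(self, node)
                entry.ReadNode()
                self._concat_entries[entry.name] = entry

        def ObtainContents(self):
            # Concatenate the subnode data; 'fname' is the file holding the
            # result, which could be passed on to an external tool
            data, fname, uniq = self.collect_contents_to_file(
                self._concat_entries.values(), 'concat')
            if data is False:
                return False    # Some contents not available yet; try later
            self.SetContents(data)
            return True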
Note that `Entry.collect_contents_to_file()` simply concatenates the data from
the different entries together, with no control over alignment, etc. Another
approach is to subclass `Entry_section` so that those features become available,
such as `size` and `pad-byte`. Then the contents of the entry can be obtained by
calling `BuildSectionData()`.
There are other ways to obtain data also, depending on the situation. If the
entry type is simply signing data which exists elsewhere in the image, then
you can use `Entry_collection` as a base class. It lets you use a property
called `content` which lists the entries containing data to be processed. This
is used by `Entry_vblock`, for example::
u_boot: u-boot {
};
vblock {
content = <&u_boot &dtb>;
keyblock = "firmware.keyblock";
signprivate = "firmware_data_key.vbprivk";
version = <1>;
kernelkey = "kernel_subkey.vbpubk";
preamble-flags = <1>;
};
dtb: u-boot-dtb {
};
which shows an image containing `u-boot` and `u-boot-dtb`, with the `vblock`
image collecting their contents to produce input for its signing process,
without affecting those entries, which still appear in the final image
untouched.
Another example is where an entry type needs several independent pieces of input
to function. For example, `Entry_fip` allows a number of different binary blobs
to be placed in their own individual places in a custom data structure in the
output image. To make that work you can add subnodes for each of them and call
`Entry.Create()` on each subnode, as `Entry_fip` does. Then the data for each
blob can come from any suitable place, such as an `Entry_u_boot` or an
`Entry_blob` or anything else::
atf-fip {
fip-hdr-flags = /bits/ 64 <0x123>;
soc-fw {
fip-flags = /bits/ 64 <0x123456789abcdef>;
filename = "bl31.bin";
};
u-boot {
fip-uuid = [fc 65 13 92 4a 5b 11 ec
94 35 ff 2d 1c fc 79 9c];
};
};
The `soc-fw` node is a `blob-ext` (i.e. it reads in a named binary file) whereas
`u-boot` is a normal entry type. This works because `Entry_fip` selects the
`blob-ext` entry type if the node name (here `soc-fw`) is recognised as being
a known blob type.
When adding new entry types you are encouraged to use subnodes to provide the
data for processing, unless the `content` approach is more suitable. Ad-hoc
properties and other methods of obtaining data are discouraged, since they add
to confusion for users.
History / Credits
-----------------
@ -1402,6 +1488,14 @@ Some ideas:
- Detect invalid properties in nodes
- Sort the fdtmap by offset
- Output temporary files to a different directory
- Rationalise the fdt, fdt_util and pylibfdt modules which currently have some
overlapping and confusing functionality
- Update the fdt library to use a better format for Prop.value (the current one
is useful for dtoc but not much else)
- Figure out how to make Fdt support changing the node order, so that
Node.AddSubnode() can support adding a node before another, existing node.
Perhaps it should completely regenerate the flat tree?
--
Simon Glass <sjg@chromium.org>

View file

@ -20,6 +20,7 @@ from patman import tout
ELF_TOOLS = True
try:
from elftools.elf.elffile import ELFFile
from elftools.elf.elffile import ELFError
from elftools.elf.sections import SymbolTableSection
except: # pragma: no cover
ELF_TOOLS = False
@ -369,3 +370,39 @@ def UpdateFile(infile, outfile, start_sym, end_sym, insert):
newdata += data[syms[end_sym].offset:]
tools.write_file(outfile, newdata)
tout.info('Written to offset %#x' % syms[start_sym].offset)
def read_segments(data):
"""Read segments from an ELF file
Args:
data (bytes): Contents of file
Returns:
tuple:
list of segments, each:
int: Segment number (0 = first)
int: Start address of segment in memory
bytes: Contents of segment
int: entry address for image
Raises:
ValueError: elftools is not available
"""
if not ELF_TOOLS:
raise ValueError('Python elftools package is not available')
with io.BytesIO(data) as inf:
try:
elf = ELFFile(inf)
except ELFError as err:
raise ValueError(err)
entry = elf.header['e_entry']
segments = []
for i in range(elf.num_segments()):
segment = elf.get_segment(i)
if segment['p_type'] != 'PT_LOAD' or not segment['p_memsz']:
skipped = 1 # To make code-coverage see this line
continue
start = segment['p_offset']
rend = start + segment['p_filesz']
segments.append((i, segment['p_paddr'], data[start:rend]))
return segments, entry

View file

@ -56,8 +56,8 @@ class FakeSection:
def BuildElfTestFiles(target_dir):
"""Build ELF files used for testing in binman
This compiles and links the test files into the specified directory. It the
Makefile and source files in the binman test/ directory.
This compiles and links the test files into the specified directory. It uses
the Makefile and source files in the binman test/ directory.
Args:
target_dir: Directory to put the files into
@ -258,6 +258,33 @@ class TestElf(unittest.TestCase):
offset = elf.GetSymbolFileOffset(fname, ['missing_sym'])
self.assertEqual({}, offset)
def test_read_segments(self):
"""Test for read_segments()"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
fname = self.ElfTestFile('embed_data')
segments, entry = elf.read_segments(tools.read_file(fname))
def test_read_segments_fail(self):
"""Test for read_segments() without elftools"""
try:
old_val = elf.ELF_TOOLS
elf.ELF_TOOLS = False
fname = self.ElfTestFile('embed_data')
with self.assertRaises(ValueError) as e:
elf.read_segments(tools.read_file(fname))
self.assertIn('Python elftools package is not available',
str(e.exception))
finally:
elf.ELF_TOOLS = old_val
def test_read_segments_bad_data(self):
"""Test for read_segments() with an invalid ELF file"""
fname = self.ElfTestFile('embed_data')
with self.assertRaises(ValueError) as e:
elf.read_segments(tools.get_bytes(100, 100))
self.assertIn('Magic number does not match', str(e.exception))
if __name__ == '__main__':
unittest.main()

View file

@ -553,11 +553,73 @@ For example, this creates an image containing a FIT with U-Boot SPL::
};
};
More complex setups can be created, with generated nodes, as described
below.
Properties (in the 'fit' node itself)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Special properties have a `fit,` prefix, indicating that they should be
processed but not included in the final FIT.
The top-level 'fit' node supports the following special properties:
fit,external-offset
Indicates that the contents of the FIT are external and provides the
external offset. This is passed to mkimage via the -E and -p flags.
fit,fdt-list
Indicates the entry argument which provides the list of device tree
files for the gen-fdt-nodes operation (as below). This is often
`of-list` meaning that `-a of-list="dtb1 dtb2..."` should be passed
to binman.
Substitutions
~~~~~~~~~~~~~
Node names and property values support a basic string-substitution feature.
Available substitutions for '@' nodes (and property values) are:
SEQ:
Sequence number of the generated fdt (1, 2, ...)
NAME
Name of the dtb as provided (i.e. without adding '.dtb')
The `default` property, if present, will be automatically set to the name
of the configuration whose devicetree matches the `default-dt` entry
argument, e.g. with `-a default-dt=sun50i-a64-pine64-lts`.
Available substitutions for property values in these nodes are:
DEFAULT-SEQ:
Sequence number of the default fdt, as provided by the 'default-dt'
entry argument
Available operations
~~~~~~~~~~~~~~~~~~~~
You can add an operation to an '@' node to indicate which operation is
required::
@fdt-SEQ {
fit,operation = "gen-fdt-nodes";
...
};
Available operations are:
gen-fdt-nodes
Generate FDT nodes as above. This is the default if there is no
`fit,operation` property.
Generating nodes from an FDT list (gen-fdt-nodes)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
U-Boot supports creating fdt and config nodes automatically. To do this,
pass an of-list property (e.g. -a of-list=file1 file2). This tells binman
that you want to generates nodes for two files: file1.dtb and file2.dtb
The fit,fdt-list property (see above) indicates that of-list should be used.
If the property is missing you will get an error.
pass an `of-list` property (e.g. `-a of-list=file1 file2`). This tells
binman that you want to generate nodes for two files: `file1.dtb` and
`file2.dtb`. The `fit,fdt-list` property (see above) indicates that
`of-list` should be used. If the property is missing you will get an error.
Then add a 'generator node', a node with a name starting with '@'::
@ -569,10 +631,11 @@ Then add a 'generator node', a node with a name starting with '@'::
};
};
This tells binman to create nodes fdt-1 and fdt-2 for each of your two
This tells binman to create nodes `fdt-1` and `fdt-2` for each of your two
files. All the properties you specify will be included in the node. This
node acts like a template to generate the nodes. The generator node itself
does not appear in the output - it is replaced with what binman generates.
A 'data' property is created with the contents of the FDT file.
You can create config nodes in a similar way::
@ -586,35 +649,12 @@ You can create config nodes in a similar way::
};
};
This tells binman to create nodes config-1 and config-2, i.e. a config for
each of your two files.
Available substitutions for '@' nodes are:
SEQ:
Sequence number of the generated fdt (1, 2, ...)
NAME
Name of the dtb as provided (i.e. without adding '.dtb')
This tells binman to create nodes `config-1` and `config-2`, i.e. a config
for each of your two files.
Note that if no devicetree files are provided (with '-a of-list' as above)
then no nodes will be generated.
The 'default' property, if present, will be automatically set to the name
if of configuration whose devicetree matches the 'default-dt' entry
argument, e.g. with '-a default-dt=sun50i-a64-pine64-lts'.
Available substitutions for '@' property values are
DEFAULT-SEQ:
Sequence number of the default fdt,as provided by the 'default-dt' entry
argument
Properties (in the 'fit' node itself):
fit,external-offset: Indicates that the contents of the FIT are external
and provides the external offset. This is passsed to mkimage via
the -E and -p flags.
Entry: fmap: An entry which contains an Fmap section
@ -931,6 +971,17 @@ This calls mkimage to create an imximage with u-boot-spl.bin as the input
file. The output from mkimage then becomes part of the image produced by
binman.
To use CONFIG options in the arguments, use a string list instead, as in
this example which also produces four arguments::
mkimage {
args = "-n", CONFIG_SYS_SOC, "-T imximage";
u-boot-spl {
};
};
Entry: opensbi: RISC-V OpenSBI fw_dynamic blob
@ -1103,6 +1154,19 @@ available. This is set by the `SetAllowMissing()` method, if
Entry: tee-os: Entry containing an OP-TEE Trusted OS (TEE) blob
---------------------------------------------------------------
Properties / Entry arguments:
- tee-os-path: Filename of file to read into entry. This is typically
called tee-pager.bin
This entry holds the run-time firmware, typically started by U-Boot SPL.
See the U-Boot README for your architecture or board for how to use it. See
https://github.com/OP-TEE/optee_os for more information about OP-TEE.
Entry: text: An entry which contains text
-----------------------------------------

View file

@ -78,6 +78,8 @@ class Entry(object):
external: True if this entry contains an external binary blob
bintools: Bintools used by this entry (only populated for Image)
missing_bintools: List of missing bintools for this entry
update_hash: True if this entry's "hash" subnode should be
updated with a hash of the entry contents
"""
def __init__(self, section, etype, node, name_prefix=''):
# Put this here to allow entry-docs and help to work without libfdt
@ -111,6 +113,7 @@ class Entry(object):
self.allow_fake = False
self.bintools = {}
self.missing_bintools = []
self.update_hash = True
@staticmethod
def FindEntryClass(etype, expanded):
@ -315,9 +318,11 @@ class Entry(object):
if self.compress != 'none':
state.AddZeroProp(self._node, 'uncomp-size')
err = state.CheckAddHashProp(self._node)
if err:
self.Raise(err)
if self.update_hash:
err = state.CheckAddHashProp(self._node)
if err:
self.Raise(err)
def SetCalculatedProperties(self):
"""Set the value of device-tree properties calculated by binman"""
@ -333,7 +338,9 @@ class Entry(object):
state.SetInt(self._node, 'orig-size', self.orig_size, True)
if self.uncomp_size is not None:
state.SetInt(self._node, 'uncomp-size', self.uncomp_size)
state.CheckSetHashValue(self._node, self.GetData)
if self.update_hash:
state.CheckSetHashValue(self._node, self.GetData)
def ProcessFdt(self, fdt):
"""Allow entries to adjust the device tree
@ -1108,3 +1115,39 @@ features to produce new behaviours.
btool = bintool.Bintool.create(name)
tools[name] = btool
return btool
def SetUpdateHash(self, update_hash):
"""Set whether this entry's "hash" subnode should be updated
Args:
update_hash: True if hash should be updated, False if not
"""
self.update_hash = update_hash
def collect_contents_to_file(self, entries, prefix):
"""Put the contents of a list of entries into a file
Args:
entries (list of Entry): Entries to collect
prefix (str): Filename prefix of file to write to
If any entry does not have contents yet, this function returns False
for the data.
Returns:
Tuple:
bytes: Concatenated data from all the entries (or False)
str: Filename of file written (or False if no data)
str: Unique portion of filename (or False if no data)
"""
data = b''
for entry in entries:
# First get the input data and put it in a file. If not available,
# try later.
if not entry.ObtainContents():
return False, False, False
data += entry.GetData()
uniq = self.GetUniqueName()
fname = tools.get_output_filename(f'{prefix}.{uniq}')
tools.write_file(fname, data)
return data, fname, uniq

View file

@ -14,7 +14,14 @@ from dtoc import fdt_util
from dtoc.fdt import Fdt
from patman import tools
# Supported operations, with the fit,operation property
OP_GEN_FDT_NODES = range(1)
OPERATIONS = {
'gen-fdt-nodes': OP_GEN_FDT_NODES,
}
class Entry_fit(Entry_section):
"""Flat Image Tree (FIT)
This calls mkimage to create a FIT (U-Boot Flat Image Tree) based on the
@ -47,11 +54,73 @@ class Entry_fit(Entry_section):
};
};
More complex setups can be created, with generated nodes, as described
below.
Properties (in the 'fit' node itself)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Special properties have a `fit,` prefix, indicating that they should be
processed but not included in the final FIT.
The top-level 'fit' node supports the following special properties:
fit,external-offset
Indicates that the contents of the FIT are external and provides the
external offset. This is passed to mkimage via the -E and -p flags.
fit,fdt-list
Indicates the entry argument which provides the list of device tree
files for the gen-fdt-nodes operation (as below). This is often
`of-list` meaning that `-a of-list="dtb1 dtb2..."` should be passed
to binman.
Substitutions
~~~~~~~~~~~~~
Node names and property values support a basic string-substitution feature.
Available substitutions for '@' nodes (and property values) are:
SEQ:
Sequence number of the generated fdt (1, 2, ...)
NAME
Name of the dtb as provided (i.e. without adding '.dtb')
The `default` property, if present, will be automatically set to the name
of the configuration whose devicetree matches the `default-dt` entry
argument, e.g. with `-a default-dt=sun50i-a64-pine64-lts`.
Available substitutions for property values in these nodes are:
DEFAULT-SEQ:
Sequence number of the default fdt, as provided by the 'default-dt'
entry argument
Available operations
~~~~~~~~~~~~~~~~~~~~
You can add an operation to an '@' node to indicate which operation is
required::
@fdt-SEQ {
fit,operation = "gen-fdt-nodes";
...
};
Available operations are:
gen-fdt-nodes
Generate FDT nodes as above. This is the default if there is no
`fit,operation` property.
Generating nodes from an FDT list (gen-fdt-nodes)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
U-Boot supports creating fdt and config nodes automatically. To do this,
pass an of-list property (e.g. -a of-list=file1 file2). This tells binman
that you want to generates nodes for two files: file1.dtb and file2.dtb
The fit,fdt-list property (see above) indicates that of-list should be used.
If the property is missing you will get an error.
pass an `of-list` property (e.g. `-a of-list=file1 file2`). This tells
binman that you want to generate nodes for two files: `file1.dtb` and
`file2.dtb`. The `fit,fdt-list` property (see above) indicates that
`of-list` should be used. If the property is missing you will get an error.
Then add a 'generator node', a node with a name starting with '@'::
@ -63,10 +132,11 @@ class Entry_fit(Entry_section):
};
};
This tells binman to create nodes fdt-1 and fdt-2 for each of your two
This tells binman to create nodes `fdt-1` and `fdt-2` for each of your two
files. All the properties you specify will be included in the node. This
node acts like a template to generate the nodes. The generator node itself
does not appear in the output - it is replaced with what binman generates.
A 'data' property is created with the contents of the FDT file.
You can create config nodes in a similar way::
@ -80,34 +150,11 @@ class Entry_fit(Entry_section):
};
};
This tells binman to create nodes config-1 and config-2, i.e. a config for
each of your two files.
Available substitutions for '@' nodes are:
SEQ:
Sequence number of the generated fdt (1, 2, ...)
NAME
Name of the dtb as provided (i.e. without adding '.dtb')
This tells binman to create nodes `config-1` and `config-2`, i.e. a config
for each of your two files.
Note that if no devicetree files are provided (with '-a of-list' as above)
then no nodes will be generated.
The 'default' property, if present, will be automatically set to the name
if of configuration whose devicetree matches the 'default-dt' entry
argument, e.g. with '-a default-dt=sun50i-a64-pine64-lts'.
Available substitutions for '@' property values are
DEFAULT-SEQ:
Sequence number of the default fdt,as provided by the 'default-dt' entry
argument
Properties (in the 'fit' node itself):
fit,external-offset: Indicates that the contents of the FIT are external
and provides the external offset. This is passsed to mkimage via
the -E and -p flags.
"""
def __init__(self, section, etype, node):
"""
@ -141,13 +188,120 @@ class Entry_fit(Entry_section):
self.ReadEntries()
super().ReadNode()
def _get_operation(self, subnode):
"""Get the operation referenced by a subnode
Args:
subnode (Node): Subnode (of the FIT) to check
Returns:
int: Operation to perform
Raises:
ValueError: Invalid operation name
"""
oper_name = subnode.props.get('fit,operation')
if not oper_name:
return OP_GEN_FDT_NODES
oper = OPERATIONS.get(oper_name.value)
if not oper:
self.Raise(f"Unknown operation '{oper_name.value}'")
return oper
def ReadEntries(self):
def _process_prop(pname, prop):
"""Process special properties
Handles properties with generated values. At present the only
supported property is 'default', i.e. the default device tree in
the configurations node.
Args:
pname (str): Name of property
prop (Prop): Property to process
"""
if pname == 'default':
val = prop.value
# Handle the 'default' property
if val.startswith('@'):
if not self._fdts:
return
if not self._fit_default_dt:
self.Raise("Generated 'default' node requires default-dt entry argument")
if self._fit_default_dt not in self._fdts:
self.Raise("default-dt entry argument '%s' not found in fdt list: %s" %
(self._fit_default_dt,
', '.join(self._fdts)))
seq = self._fdts.index(self._fit_default_dt)
val = val[1:].replace('DEFAULT-SEQ', str(seq + 1))
fsw.property_string(pname, val)
return
fsw.property(pname, prop.bytes)
def _scan_gen_fdt_nodes(subnode, depth, in_images):
"""Generate FDT nodes
This creates one node for each member of self._fdts using the
provided template. If a property value contains 'NAME' it is
replaced with the filename of the FDT. If a property value contains
SEQ it is replaced with the node sequence number, where 1 is the
first.
Args:
subnode (Node): Generator node to process
depth: Current node depth (0 is the base 'fit' node)
in_images: True if this is inside the 'images' node, so that
'data' properties should be generated
"""
if self._fdts:
# Generate nodes for each FDT
for seq, fdt_fname in enumerate(self._fdts):
node_name = subnode.name[1:].replace('SEQ', str(seq + 1))
fname = tools.get_input_filename(fdt_fname + '.dtb')
with fsw.add_node(node_name):
for pname, prop in subnode.props.items():
val = prop.bytes.replace(
b'NAME', tools.to_bytes(fdt_fname))
val = val.replace(
b'SEQ', tools.to_bytes(str(seq + 1)))
fsw.property(pname, val)
# Add data for 'images' nodes (but not 'config')
if depth == 1 and in_images:
fsw.property('data', tools.read_file(fname))
else:
if self._fdts is None:
if self._fit_list_prop:
self.Raise("Generator node requires '%s' entry argument" %
self._fit_list_prop.value)
else:
self.Raise("Generator node requires 'fit,fdt-list' property")
def _scan_node(subnode, depth, in_images):
"""Generate nodes from a template
This creates one node for each member of self._fdts using the
provided template. If a property value contains 'NAME' it is
replaced with the filename of the FDT. If a property value contains
SEQ it is replaced with the node sequence number, where 1 is the
first.
Args:
subnode (Node): Generator node to process
depth: Current node depth (0 is the base 'fit' node)
in_images: True if this is inside the 'images' node, so that
'data' properties should be generated
"""
oper = self._get_operation(subnode)
if oper == OP_GEN_FDT_NODES:
_scan_gen_fdt_nodes(subnode, depth, in_images)
def _AddNode(base_node, depth, node):
"""Add a node to the FIT
Args:
base_node: Base Node of the FIT (with 'description' property)
depth: Current node depth (0 is the base node)
depth: Current node depth (0 is the base 'fit' node)
node: Current node to process
There are two cases to deal with:
@ -157,23 +311,7 @@ class Entry_fit(Entry_section):
"""
for pname, prop in node.props.items():
if not pname.startswith('fit,'):
if pname == 'default':
val = prop.value
# Handle the 'default' property
if val.startswith('@'):
if not self._fdts:
continue
if not self._fit_default_dt:
self.Raise("Generated 'default' node requires default-dt entry argument")
if self._fit_default_dt not in self._fdts:
self.Raise("default-dt entry argument '%s' not found in fdt list: %s" %
(self._fit_default_dt,
', '.join(self._fdts)))
seq = self._fdts.index(self._fit_default_dt)
val = val[1:].replace('DEFAULT-SEQ', str(seq + 1))
fsw.property_string(pname, val)
continue
fsw.property(pname, prop.bytes)
_process_prop(pname, prop)
rel_path = node.path[len(base_node.path):]
in_images = rel_path.startswith('/images')
@ -186,6 +324,8 @@ class Entry_fit(Entry_section):
# 'data' property later.
entry = Entry.Create(self.section, node, etype='section')
entry.ReadNode()
# The hash subnodes here are for mkimage, not binman.
entry.SetUpdateHash(False)
self._entries[rel_path] = entry
for subnode in node.subnodes:
@ -196,31 +336,7 @@ class Entry_fit(Entry_section):
# fsw.add_node() or _AddNode() for it.
pass
elif self.GetImage().generate and subnode.name.startswith('@'):
if self._fdts:
# Generate notes for each FDT
for seq, fdt_fname in enumerate(self._fdts):
node_name = subnode.name[1:].replace('SEQ',
str(seq + 1))
fname = tools.get_input_filename(fdt_fname + '.dtb')
with fsw.add_node(node_name):
for pname, prop in subnode.props.items():
val = prop.bytes.replace(
b'NAME', tools.to_bytes(fdt_fname))
val = val.replace(
b'SEQ', tools.to_bytes(str(seq + 1)))
fsw.property(pname, val)
# Add data for 'fdt' nodes (but not 'config')
if depth == 1 and in_images:
fsw.property('data',
tools.read_file(fname))
else:
if self._fdts is None:
if self._fit_list_prop:
self.Raise("Generator node requires '%s' entry argument" %
self._fit_list_prop.value)
else:
self.Raise("Generator node requires 'fit,fdt-list' property")
_scan_node(subnode, depth, in_images)
else:
with fsw.add_node(subnode.name):
_AddNode(base_node, depth + 1, subnode)
@ -291,16 +407,57 @@ class Entry_fit(Entry_section):
data = fdt.GetContents()
return data
def SetImagePos(self, image_pos):
"""Set the position in the image
This sets each subentry's offsets, sizes and positions-in-image
according to where they ended up in the packed FIT file.
Args:
image_pos: Position of this entry in the image
"""
super().SetImagePos(image_pos)
# If mkimage is missing we'll have empty data,
# which will cause a FDT_ERR_BADMAGIC error
if self.mkimage in self.missing_bintools:
return
fdt = Fdt.FromData(self.GetData())
fdt.Scan()
for path, section in self._entries.items():
node = fdt.GetNode(path)
data_prop = node.props.get("data")
data_pos = fdt_util.GetInt(node, "data-position")
data_offset = fdt_util.GetInt(node, "data-offset")
data_size = fdt_util.GetInt(node, "data-size")
# Contents are inside the FIT
if data_prop is not None:
# GetOffset() returns offset of a fdt_property struct,
# which has 3 fdt32_t members before the actual data.
offset = data_prop.GetOffset() + 12
size = len(data_prop.bytes)
# External offset from the base of the FIT
elif data_pos is not None:
offset = data_pos
size = data_size
# External offset from the end of the FIT, not used in binman
elif data_offset is not None: # pragma: no cover
offset = fdt.GetFdtObj().totalsize() + data_offset
size = data_size
# This should never happen
else: # pragma: no cover
self.Raise("%s: missing data properties" % (path))
section.SetOffsetSize(offset, size)
section.SetImagePos(self.image_pos)
def AddBintools(self, tools):
super().AddBintools(tools)
self.mkimage = self.AddBintool(tools, 'mkimage')
def AddMissingProperties(self, have_image_pos):
# We don't want to interfere with any hash properties in the FIT, so
# disable this for now.
pass
def SetCalculatedProperties(self):
# We don't want to interfere with any hash properties in the FIT, so
# disable this for now.
pass

View file

@ -31,25 +31,30 @@ class Entry_mkimage(Entry):
This calls mkimage to create an imximage with u-boot-spl.bin as the input
file. The output from mkimage then becomes part of the image produced by
binman.
To use CONFIG options in the arguments, use a string list instead, as in
this example which also produces four arguments::
mkimage {
args = "-n", CONFIG_SYS_SOC, "-T imximage";
u-boot-spl {
};
};
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)
self._args = fdt_util.GetString(self._node, 'args').split(' ')
self._args = fdt_util.GetArgs(self._node, 'args')
self._mkimage_entries = OrderedDict()
self.align_default = None
self.ReadEntries()
def ObtainContents(self):
data = b''
for entry in self._mkimage_entries.values():
# First get the input data and put it in a file. If not available,
# try later.
if not entry.ObtainContents():
return False
data += entry.GetData()
uniq = self.GetUniqueName()
input_fname = tools.get_output_filename('mkimage.%s' % uniq)
tools.write_file(input_fname, data)
data, input_fname, uniq = self.collect_contents_to_file(
self._mkimage_entries.values(), 'mkimage')
if data is False:
return False
output_fname = tools.get_output_filename('mkimage-out.%s' % uniq)
if self.mkimage.run_cmd('-d', input_fname, *self._args,
output_fname) is not None:

View file

@ -0,0 +1,22 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (C) 2022 Texas Instruments Incorporated - https://www.ti.com/
#
# Entry-type module for OP-TEE Trusted OS firmware blob
#
from binman.etype.blob_named_by_arg import Entry_blob_named_by_arg
class Entry_tee_os(Entry_blob_named_by_arg):
"""Entry containing an OP-TEE Trusted OS (TEE) blob
Properties / Entry arguments:
- tee-os-path: Filename of file to read into entry. This is typically
called tee-pager.bin
This entry holds the run-time firmware, typically started by U-Boot SPL.
See the U-Boot README for your architecture or board for how to use it. See
https://github.com/OP-TEE/optee_os for more information about OP-TEE.
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node, 'tee-os')
self.external = True

View file

@ -84,6 +84,7 @@ FSP_M_DATA = b'fsp_m'
FSP_S_DATA = b'fsp_s'
FSP_T_DATA = b'fsp_t'
ATF_BL31_DATA = b'bl31'
TEE_OS_DATA = b'this is some tee OS data'
ATF_BL2U_DATA = b'bl2u'
OPENSBI_DATA = b'opensbi'
SCP_DATA = b'scp'
@ -188,6 +189,7 @@ class TestFunctional(unittest.TestCase):
TestFunctional._MakeInputFile('compress', COMPRESS_DATA)
TestFunctional._MakeInputFile('compress_big', COMPRESS_DATA_BIG)
TestFunctional._MakeInputFile('bl31.bin', ATF_BL31_DATA)
TestFunctional._MakeInputFile('tee-pager.bin', TEE_OS_DATA)
TestFunctional._MakeInputFile('bl2u.bin', ATF_BL2U_DATA)
TestFunctional._MakeInputFile('fw_dynamic.bin', OPENSBI_DATA)
TestFunctional._MakeInputFile('scp.bin', SCP_DATA)
@ -2076,7 +2078,7 @@ class TestFunctional(unittest.TestCase):
def testHashBadAlgo(self):
with self.assertRaises(ValueError) as e:
self._DoReadFileDtb('092_hash_bad_algo.dts', update_dtb=True)
self.assertIn("Node '/binman/u-boot': Unknown hash algorithm",
self.assertIn("Node '/binman/u-boot': Unknown hash algorithm 'invalid'",
str(e.exception))
def testHashSection(self):
@ -3770,6 +3772,62 @@ class TestFunctional(unittest.TestCase):
self._CheckSimpleFitData(fit_data, U_BOOT_EXP_DATA, U_BOOT_SPL_DTB_DATA)
def testSimpleFitImagePos(self):
"""Test that we have correct image-pos for FIT subentries"""
data, _, _, out_dtb_fname = self._DoReadFileDtb('161_fit.dts',
update_dtb=True)
dtb = fdt.Fdt(out_dtb_fname)
dtb.Scan()
props = self._GetPropTree(dtb, BASE_DTB_PROPS + REPACK_DTB_PROPS)
self.assertEqual({
'image-pos': 0,
'offset': 0,
'size': 1890,
'u-boot:image-pos': 0,
'u-boot:offset': 0,
'u-boot:size': 4,
'fit:image-pos': 4,
'fit:offset': 4,
'fit:size': 1840,
'fit/images/kernel:image-pos': 160,
'fit/images/kernel:offset': 156,
'fit/images/kernel:size': 4,
'fit/images/kernel/u-boot:image-pos': 160,
'fit/images/kernel/u-boot:offset': 0,
'fit/images/kernel/u-boot:size': 4,
'fit/images/fdt-1:image-pos': 456,
'fit/images/fdt-1:offset': 452,
'fit/images/fdt-1:size': 6,
'fit/images/fdt-1/u-boot-spl-dtb:image-pos': 456,
'fit/images/fdt-1/u-boot-spl-dtb:offset': 0,
'fit/images/fdt-1/u-boot-spl-dtb:size': 6,
'u-boot-nodtb:image-pos': 1844,
'u-boot-nodtb:offset': 1844,
'u-boot-nodtb:size': 46,
}, props)
# Actually check the data is where we think it is
for node, expected in [
("u-boot", U_BOOT_DATA),
("fit/images/kernel", U_BOOT_DATA),
("fit/images/kernel/u-boot", U_BOOT_DATA),
("fit/images/fdt-1", U_BOOT_SPL_DTB_DATA),
("fit/images/fdt-1/u-boot-spl-dtb", U_BOOT_SPL_DTB_DATA),
("u-boot-nodtb", U_BOOT_NODTB_DATA),
]:
image_pos = props[f"{node}:image-pos"]
size = props[f"{node}:size"]
self.assertEqual(len(expected), size)
self.assertEqual(expected, data[image_pos:image_pos+size])
def testFitExternal(self):
"""Test an image with an FIT with external images"""
data = self._DoReadFile('162_fit_external.dts')
@ -3798,6 +3856,62 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(U_BOOT_DATA + b'aa',
data[actual_pos:actual_pos + external_data_size])
def testFitExternalImagePos(self):
"""Test that we have correct image-pos for external FIT subentries"""
data, _, _, out_dtb_fname = self._DoReadFileDtb('162_fit_external.dts',
update_dtb=True)
dtb = fdt.Fdt(out_dtb_fname)
dtb.Scan()
props = self._GetPropTree(dtb, BASE_DTB_PROPS + REPACK_DTB_PROPS)
self.assertEqual({
'image-pos': 0,
'offset': 0,
'size': 1082,
'u-boot:image-pos': 0,
'u-boot:offset': 0,
'u-boot:size': 4,
'fit:size': 1032,
'fit:offset': 4,
'fit:image-pos': 4,
'fit/images/kernel:size': 4,
'fit/images/kernel:offset': 1024,
'fit/images/kernel:image-pos': 1028,
'fit/images/kernel/u-boot:size': 4,
'fit/images/kernel/u-boot:offset': 0,
'fit/images/kernel/u-boot:image-pos': 1028,
'fit/images/fdt-1:size': 2,
'fit/images/fdt-1:offset': 1028,
'fit/images/fdt-1:image-pos': 1032,
'fit/images/fdt-1/_testing:size': 2,
'fit/images/fdt-1/_testing:offset': 0,
'fit/images/fdt-1/_testing:image-pos': 1032,
'u-boot-nodtb:image-pos': 1036,
'u-boot-nodtb:offset': 1036,
'u-boot-nodtb:size': 46,
}, props)
# Actually check the data is where we think it is
for node, expected in [
("u-boot", U_BOOT_DATA),
("fit/images/kernel", U_BOOT_DATA),
("fit/images/kernel/u-boot", U_BOOT_DATA),
("fit/images/fdt-1", b'aa'),
("fit/images/fdt-1/_testing", b'aa'),
("u-boot-nodtb", U_BOOT_NODTB_DATA),
]:
image_pos = props[f"{node}:image-pos"]
size = props[f"{node}:size"]
self.assertEqual(len(expected), size)
self.assertEqual(expected, data[image_pos:image_pos+size])
def testFitMissing(self):
"""Test that binman still produces a FIT image if mkimage is missing"""
with test_util.capture_sys_output() as (_, stderr):
@ -5160,5 +5274,53 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self.assertRegex(err,
"Image 'main-section'.*missing bintools.*: futility")
def testFitSubentryHashSubnode(self):
"""Test an image with a FIT inside"""
data, _, _, out_dtb_name = self._DoReadFileDtb(
'221_fit_subentry_hash.dts', use_real_dtb=True, update_dtb=True)
mkimage_dtb = fdt.Fdt.FromData(data)
mkimage_dtb.Scan()
binman_dtb = fdt.Fdt(out_dtb_name)
binman_dtb.Scan()
# Check that binman didn't add hash values
fnode = binman_dtb.GetNode('/binman/fit/images/kernel/hash')
self.assertNotIn('value', fnode.props)
fnode = binman_dtb.GetNode('/binman/fit/images/fdt-1/hash')
self.assertNotIn('value', fnode.props)
# Check that mkimage added hash values
fnode = mkimage_dtb.GetNode('/images/kernel/hash')
self.assertIn('value', fnode.props)
fnode = mkimage_dtb.GetNode('/images/fdt-1/hash')
self.assertIn('value', fnode.props)
def testPackTeeOs(self):
"""Test that an image with an TEE binary can be created"""
data = self._DoReadFile('222_tee_os.dts')
self.assertEqual(TEE_OS_DATA, data[:len(TEE_OS_DATA)])
def testFitFdtOper(self):
"""Check handling of a specified FIT operation"""
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
'default-dt': 'test-fdt2',
}
self._DoReadFileDtb(
'223_fit_fdt_oper.dts',
entry_args=entry_args,
extra_indirs=[os.path.join(self._indir, TEST_FDT_SUBDIR)])[0]
def testFitFdtBadOper(self):
"""Check handling of an FDT map when the section cannot be found"""
with self.assertRaises(ValueError) as exc:
self._DoReadFileDtb('224_fit_bad_oper.dts')
self.assertIn("Node '/binman/fit': Unknown operation 'unknown'",
str(exc.exception))
if __name__ == "__main__":
unittest.main()

View file

@ -33,3 +33,7 @@ k3-rti-wdt-firmware:
If CONFIG_WDT_K3_RTI_LOAD_FW is enabled, a firmware image is needed for
the R5F core(s) to trigger the system reset. One possible source is
https://github.com/siemens/k3-rti-wdt.
tee-os:
See the documentation for your board. You may need to build Open Portable
Trusted Execution Environment (OP-TEE) with TEE=/path/to/tee.bin

View file

@ -397,7 +397,7 @@ def CheckAddHashProp(node):
if algo.value == 'sha256':
size = 32
else:
return "Unknown hash algorithm '%s'" % algo
return "Unknown hash algorithm '%s'" % algo.value
for n in GetUpdateNodes(hash_node):
n.AddEmptyProp('value', size)

View file

@ -0,0 +1,52 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
fit {
description = "test-desc";
#address-cells = <1>;
images {
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
arch = "ppc";
os = "linux";
compression = "gzip";
load = <00000000>;
entry = <00000000>;
hash {
algo = "sha1";
};
u-boot {
};
};
fdt-1 {
description = "Flattened Device Tree blob";
type = "flat_dt";
arch = "ppc";
compression = "none";
hash {
algo = "crc32";
};
u-boot-spl-dtb {
};
};
};
configurations {
default = "conf-1";
conf-1 {
description = "Boot Linux kernel with FDT blob";
kernel = "kernel";
fdt = "fdt-1";
};
};
};
};
};

View file

@ -0,0 +1,14 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
tee-os {
filename = "tee-pager.bin";
};
};
};

View file

@ -0,0 +1,56 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot {
};
fit {
description = "test-desc";
#address-cells = <1>;
fit,fdt-list = "of-list";
images {
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
arch = "ppc";
os = "linux";
compression = "gzip";
load = <00000000>;
entry = <00000000>;
hash-1 {
algo = "crc32";
};
hash-2 {
algo = "sha1";
};
u-boot {
};
};
@fdt-SEQ {
fit,operation = "gen-fdt-nodes";
description = "fdt-NAME.dtb";
type = "flat_dt";
compression = "none";
};
};
configurations {
default = "@config-DEFAULT-SEQ";
@config-SEQ {
description = "conf-NAME.dtb";
firmware = "uboot";
loadables = "atf";
fdt = "fdt-SEQ";
};
};
};
u-boot-nodtb {
};
};
};

View file

@ -0,0 +1,27 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
fit {
description = "test-desc";
#address-cells = <1>;
fit,fdt-list = "of-list";
images {
@fdt-SEQ {
fit,operation = "unknown";
description = "fdt-NAME.dtb";
type = "flat_dt";
compression = "none";
};
};
};
fdtmap {
};
};
};

View file

@ -29,11 +29,12 @@ LDS_BINMAN := -T $(SRC)u_boot_binman_syms.lds
LDS_BINMAN_BAD := -T $(SRC)u_boot_binman_syms_bad.lds
LDS_BINMAN_X86 := -T $(SRC)u_boot_binman_syms_x86.lds
LDS_BINMAN_EMBED := -T $(SRC)u_boot_binman_embed.lds
LDS_EFL_SECTIONS := -T $(SRC)elf_sections.lds
TARGETS = u_boot_ucode_ptr u_boot_no_ucode_ptr bss_data \
u_boot_binman_syms u_boot_binman_syms.bin u_boot_binman_syms_bad \
u_boot_binman_syms_size u_boot_binman_syms_x86 embed_data \
u_boot_binman_embed u_boot_binman_embed_sm
u_boot_binman_embed u_boot_binman_embed_sm elf_sections
all: $(TARGETS)
@ -70,6 +71,9 @@ u_boot_binman_embed: u_boot_binman_embed.c
u_boot_binman_embed_sm: CFLAGS += $(LDS_BINMAN_EMBED)
u_boot_binman_embed_sm: u_boot_binman_embed_sm.c
elf_sections: CFLAGS += $(LDS_EFL_SECTIONS)
elf_sections: elf_sections.c
clean:
rm -f $(TARGETS)

View file

@ -0,0 +1,20 @@
// SPDX-License-Identifier: GPL-2.0+
/*
* Copyright 2022 Google LLC
*
* Program containing two text sections
*/
int __attribute__((section(".sram_data"))) data[29];
int __attribute__((section(".sram_code"))) calculate(int x)
{
data[0] = x;
return x * x;
}
int main(void)
{
return calculate(123);
}

View file

@ -0,0 +1,31 @@
/* SPDX-License-Identifier: GPL-2.0+ */
/*
* Copyright (c) 2016 Google, Inc
*/
OUTPUT_FORMAT("elf32-i386", "elf32-i386", "elf32-i386")
OUTPUT_ARCH(i386)
ENTRY(_start)
SECTIONS
{
. = 0x00000010;
_start = .;
. = ALIGN(4);
.text :
{
*(.text*)
}
. = 0x00001000;
.sram :
{
*(.sram*)
}
/DISCARD/ : {
*(.comment)
*(.dyn*)
}
}

View file

@ -356,6 +356,8 @@ class Node:
offset = fdt_obj.first_subnode(self._offset, QUIET_NOTFOUND)
for subnode in self.subnodes:
if subnode._offset is None:
continue
if subnode.name != fdt_obj.get_name(offset):
raise ValueError('Internal error, node name mismatch %s != %s' %
(subnode.name, fdt_obj.get_name(offset)))
@ -501,6 +503,24 @@ class Node:
val = bytes(val, 'utf-8')
return self.AddData(prop_name, val + b'\0')
def AddStringList(self, prop_name, val):
"""Add a new string-list property to a node
The device tree is marked dirty so that the value will be written to
the blob on the next sync.
Args:
prop_name: Name of property to add
val (list of str): List of strings to add
Returns:
Prop added
"""
out = b''
for string in val:
out += bytes(string, 'utf-8') + b'\0'
return self.AddData(prop_name, out)
def AddInt(self, prop_name, val):
"""Add a new integer property to a node
@ -530,6 +550,23 @@ class Node:
self.subnodes.append(subnode)
return subnode
def Delete(self):
"""Delete a node
The node is deleted and the offset cache is invalidated.
Args:
node (Node): Node to delete
Raises:
ValueError if the node does not exist
"""
CheckErr(self._fdt._fdt_obj.del_node(self.Offset()),
"Node '%s': delete" % self.path)
parent = self.parent
self._fdt.Invalidate()
parent.subnodes.remove(self)
def Sync(self, auto_resize=False):
"""Sync node changes back to the device tree

View file

@ -184,6 +184,18 @@ def GetStringList(node, propname, default=None):
return [strval]
return value
def GetArgs(node, propname):
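"""Get a list of arguments from a property
The property value is split on whitespace, so args = "-n first", "second"
yields ['-n', 'first', 'second']. An empty (boolean) property yields an
empty list.
Args:
node (Node): Node containing the property
propname (str): Name of the property to read
Returns:
list of str: Arguments obtained from the property value
Raises:
ValueError: if the node does not have that property
"""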
prop = node.props.get(propname)
if not prop:
raise ValueError(f"Node '{node.path}': Expected property '{propname}'")
if prop.bytes:
value = GetStringList(node, propname)
else:
value = []
lists = [v.split() for v in value]
args = [x for l in lists for x in l]
return args
def GetBool(node, propname, default=False):
"""Get an boolean from a property

View file

@ -62,5 +62,6 @@
orig-node {
orig = <1 23 4>;
args = "-n first", "second", "-p", "123,456", "-x";
};
};

View file

@ -272,6 +272,17 @@ class TestNode(unittest.TestCase):
self.dtb.Sync(auto_resize=True)
def testAddOneNode(self):
"""Testing deleting and adding a subnode before syncing"""
subnode = self.node.AddSubnode('subnode')
self.node.AddSubnode('subnode2')
self.dtb.Sync(auto_resize=True)
# Delete a node and add a new one
subnode.Delete()
self.node.AddSubnode('subnode3')
self.dtb.Sync()
def testRefreshNameMismatch(self):
"""Test name mismatch when syncing nodes and properties"""
prop = self.node.AddInt('integer-a', 12)
@ -531,6 +542,23 @@ class TestProp(unittest.TestCase):
self.node.AddData('data', tools.get_bytes(65, 20000))
self.dtb.Sync(auto_resize=True)
def test_string_list(self):
"""Test adding string-list property to a node"""
val = ['123', '456']
self.node.AddStringList('stringlist', val)
self.dtb.Sync(auto_resize=True)
data = self.fdt.getprop(self.node.Offset(), 'stringlist')
self.assertEqual(b'123\x00456\0', data)
def test_delete_node(self):
"""Test deleting a node"""
old_offset = self.fdt.path_offset('/spl-test')
self.assertGreater(old_offset, 0)
self.node.Delete()
self.dtb.Sync()
new_offset = self.fdt.path_offset('/spl-test', libfdt.QUIET_NOTFOUND)
self.assertEqual(-libfdt.NOTFOUND, new_offset)
def testFromData(self):
dtb2 = fdt.Fdt.FromData(self.dtb.GetContents())
self.assertEqual(dtb2.GetContents(), self.dtb.GetContents())
@ -624,6 +652,21 @@ class TestFdtUtil(unittest.TestCase):
self.assertEqual(['test'],
fdt_util.GetStringList(self.node, 'missing', ['test']))
def testGetArgs(self):
node = self.dtb.GetNode('/orig-node')
self.assertEqual(['message'], fdt_util.GetArgs(self.node, 'stringval'))
self.assertEqual(
['multi-word', 'message'],
fdt_util.GetArgs(self.node, 'stringarray'))
self.assertEqual([], fdt_util.GetArgs(self.node, 'boolval'))
self.assertEqual(['-n', 'first', 'second', '-p', '123,456', '-x'],
fdt_util.GetArgs(node, 'args'))
with self.assertRaises(ValueError) as exc:
fdt_util.GetArgs(self.node, 'missing')
self.assertIn(
"Node '/spl-test': Expected property 'missing'",
str(exc.exception))
def testGetBool(self):
self.assertEqual(True, fdt_util.GetBool(self.node, 'boolval'))
self.assertEqual(False, fdt_util.GetBool(self.node, 'missing'))

View file

@ -91,7 +91,20 @@ SIZES = {
'SZ_4G': 0x100000000
}
RE_REMOVE_DEFCONFIG = re.compile(r'(.*)_defconfig')
### helper functions ###
def remove_defconfig(defc):
"""Drop the _defconfig suffix on a string
Args:
defc (str): String to convert
Returns:
str: string with the '_defconfig' suffix removed
"""
return RE_REMOVE_DEFCONFIG.match(defc)[1]
def check_top_directory():
"""Exit if we are not at the top of source directory."""
for fname in 'README', 'Licenses':
@ -1593,12 +1606,31 @@ def do_imply_config(config_list, add_imply, imply_flags, skip_added,
for linenum in sorted(linenums, reverse=True):
add_imply_rule(config[CONFIG_LEN:], fname, linenum)
def defconfig_matches(configs, re_match):
"""Check if any CONFIG option matches a regex
The match must be complete, i.e. from the start to end of the CONFIG option.
Args:
configs (dict): Dict of CONFIG options:
key: CONFIG option
value: Value of option
re_match (re.Pattern): Match to check
Returns:
bool: True if any CONFIG matches the regex
"""
for cfg in configs:
m_cfg = re_match.match(cfg)
if m_cfg and m_cfg.span()[1] == len(cfg):
return True
return False
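# For illustration: the regex must cover the whole option name, so partial
# names do not match unless written as a regex. With hypothetical values
# configs = {'CONFIG_CMD_I2C': 'y', 'CONFIG_CMD_I2C_EXTRA': 'y'}:
#
#   defconfig_matches(configs, re.compile('CONFIG_CMD_I2C'))    -> True
#   defconfig_matches(configs, re.compile('CONFIG_CMD_I'))      -> False
#   defconfig_matches(configs, re.compile('CONFIG_CMD_I2C.*'))  -> True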
def do_find_config(config_list):
"""Find boards with a given combination of CONFIGs
Params:
config_list: List of CONFIG options to check (each a string consisting
config_list: List of CONFIG options to check (each a regex consisting
of a config option, with or without a CONFIG_ prefix. If an option
is preceded by a tilde (~) then it must be false, otherwise it must
be true)
@ -1630,15 +1662,16 @@ def do_find_config(config_list):
# running for the next stage
in_list = out
out = set()
re_match = re.compile(cfg)
for defc in in_list:
has_cfg = cfg in config_db[defc]
has_cfg = defconfig_matches(config_db[defc], re_match)
if has_cfg == want:
out.add(defc)
if adhoc:
print(f"Error: Not in Kconfig: %s" % ' '.join(adhoc))
else:
print(f'{len(out)} matches')
print(' '.join(out))
print(' '.join([remove_defconfig(item) for item in out]))
def prefix_config(cfg):