binman mkimage and template enhancements

misc fixes
 -----BEGIN PGP SIGNATURE-----
 
 iQFFBAABCgAvFiEEslwAIq+Gp8wWVbYnfxc6PpAIreYFAmS5lQQRHHNqZ0BjaHJv
 bWl1bS5vcmcACgkQfxc6PpAIreYiowgAzKVER9eU9rWDCP76GXpxwBmNUDYeS/hQ
 ivzmzcvs7luo78LpSbVHGzTNM4cQ/KUViCl4nxvBlLlKLoOTKFf9R0wv7AnBM16X
 cNARdffqfTdoptBBoJnhuHPpvVQ9M22YcTQSMaD9FvlCmAHdBaiP4T4wN04Ulckb
 u7zeQYiy/nQfFP4KxsPw3looHGNz3LaKgXw3fvbBVOsGcVR5avASLpxoEOwNbWA4
 +NcvpAqdkJ1CRjJRrVPjGqNudef69E4xjPzmoRd9Ni2HXJsSD3FoEelfKbhgovpD
 Ss23tUycbQrJyFnQj1lYiKeZRtEGBKxQUvZqhdaDg6cileRXP803HA==
 =o9IH
 -----END PGP SIGNATURE-----

Merge tag 'dm-pull-20jul23' of https://source.denx.de/u-boot/custodians/u-boot-dm

binman mkimage and template enhancements
misc fixes
Tom Rini 2023-07-20 21:31:31 -04:00
commit e896279ac3
44 changed files with 1246 additions and 161 deletions


@ -226,6 +226,7 @@
    mkimage {
        args = "-T stm32image -a 0x2ffc2500 -e 0x2ffc2500";
        u-boot-spl {
+           no-write-symbols;
        };
    };
};


@ -7,6 +7,7 @@
#define __SANDBOX_SDL_H

#include <errno.h>
+#include <video.h>

#ifdef CONFIG_SANDBOX_SDL
@ -87,6 +88,22 @@ int sandbox_sdl_sound_stop(void);
 */
int sandbox_sdl_sound_init(int rate, int channels);

+/**
+ * sandbox_sdl_set_bpp() - Set the depth of the sandbox display
+ *
+ * The device must not be active when this function is called. It activates it
+ * before returning.
+ *
+ * This updates the depth value and adjusts a few other settings accordingly.
+ * It must be called before the display is probed.
+ *
+ * @dev: Device to adjust
+ * @l2bpp: depth to set
+ * Return: 0 if the device was already active, other error if it fails to probe
+ * after the change
+ */
+int sandbox_sdl_set_bpp(struct udevice *dev, enum video_log2_bpp l2bpp);
+
#else

static inline int sandbox_sdl_init_display(int width, int height, int log2_bpp,
                                           bool double_size)
@ -134,6 +151,12 @@ static inline int sandbox_sdl_sound_init(int rate, int channels)
    return -ENODEV;
}

+static inline int sandbox_sdl_set_bpp(struct udevice *dev,
+                                      enum video_log2_bpp l2bpp)
+{
+   return -ENOSYS;
+}
+
#endif

#endif


@ -8,7 +8,6 @@
#ifndef __ASM_TEST_H
#define __ASM_TEST_H

-#include <video.h>
#include <pci_ids.h>

struct unit_test_state;
@ -300,30 +299,6 @@ void sandbox_cros_ec_set_test_flags(struct udevice *dev, uint flags);
 */
int sandbox_cros_ec_get_pwm_duty(struct udevice *dev, uint index, uint *duty);

-#if IS_ENABLED(CONFIG_SANDBOX_SDL)
-/**
- * sandbox_sdl_set_bpp() - Set the depth of the sandbox display
- *
- * The device must not be active when this function is called. It activiates it
- * before returning.
- *
- * This updates the depth value and adjusts a few other settings accordingly.
- * It must be called before the display is probed.
- *
- * @dev: Device to adjust
- * @l2bpp: depth to set
- * Return: 0 if the device was already active, other error if it fails to probe
- * after the change
- */
-int sandbox_sdl_set_bpp(struct udevice *dev, enum video_log2_bpp l2bpp);
-#else
-static inline int sandbox_sdl_set_bpp(struct udevice *dev,
-                                      enum video_log2_bpp l2bpp)
-{
-   return -ENOSYS;
-}
-#endif
/**
 * sandbox_set_fake_efi_mgr_dev() - Control EFI bootmgr producing valid bootflow
 *


@ -190,6 +190,9 @@ static int script_boot(struct udevice *dev, struct bootflow *bflow)
    ulong addr;
    int ret;

-   ret = env_set("devtype", blk_get_devtype(bflow->blk));
+   if (desc->uclass_id == UCLASS_USB)
+       ret = env_set("devtype", "usb");
+   else
+       ret = env_set("devtype", blk_get_devtype(bflow->blk));
    if (!ret)
        ret = env_set_hex("devnum", desc->devnum);


@ -181,13 +181,17 @@ static ulong load_serial(long offset)
    } else
#endif
    {
+       void *dst;
+
        ret = lmb_reserve(&lmb, store_addr, binlen);
        if (ret) {
            printf("\nCannot overwrite reserved area (%08lx..%08lx)\n",
                   store_addr, store_addr + binlen);
            return ret;
        }
-       memcpy((char *)(store_addr), binbuf, binlen);
+       dst = map_sysmem(store_addr, binlen);
+       memcpy(dst, binbuf, binlen);
+       unmap_sysmem(dst);
        lmb_free(&lmb, store_addr, binlen);
    }
    if ((store_addr) < start_addr)
@ -350,8 +354,11 @@ static int save_serial(ulong address, ulong count)
    if(write_record(SREC3_START))   /* write the header */
        return (-1);
    do {
+       volatile uchar *src;
+
+       src = map_sysmem(address, count);
        if (count) {    /* collect hex data in the buffer */
-           c = *(volatile uchar*)(address + reclen);   /* get one byte */
+           c = src[reclen];    /* get one byte */
            checksum += c;  /* accumulate checksum */
            data[2*reclen] = hex[(c>>4)&0x0f];
            data[2*reclen+1] = hex[c & 0x0f];
@ -359,6 +366,7 @@ static int save_serial(ulong address, ulong count)
            ++reclen;
            --count;
        }
+       unmap_sysmem((void *)src);
        if(reclen == SREC_BYTES_PER_RECORD || count == 0) {
            /* enough data collected for one record: dump it */
            if(reclen) {    /* build & write a data record: */


@ -593,11 +593,14 @@ int of_read_u64(const struct device_node *np, const char *propname, u64 *outp)
int of_property_match_string(const struct device_node *np, const char *propname,
                             const char *string)
{
-   const struct property *prop = of_find_property(np, propname, NULL);
+   int len = 0;
+   const struct property *prop = of_find_property(np, propname, &len);
    size_t l;
    int i;
    const char *p, *end;

+   if (!prop && len == -FDT_ERR_NOTFOUND)
+       return -ENOENT;
    if (!prop)
        return -EINVAL;
    if (!prop->value)


@ -211,10 +211,9 @@ void *dev_remap_addr(const struct udevice *dev)
    return dev_remap_addr_index(dev, 0);
}

-fdt_addr_t dev_read_addr_size(const struct udevice *dev, const char *property,
-                             fdt_size_t *sizep)
+fdt_addr_t dev_read_addr_size(const struct udevice *dev, fdt_size_t *sizep)
{
-   return ofnode_get_addr_size(dev_ofnode(dev), property, sizep);
+   return dev_read_addr_size_index(dev, 0, sizep);
}

const char *dev_read_name(const struct udevice *dev)


@ -97,7 +97,7 @@ static int rockchip_reset_probe(struct udevice *dev)
    fdt_addr_t addr;
    fdt_size_t size;

-   addr = dev_read_addr_size(dev, "reg", &size);
+   addr = dev_read_addr_size(dev, &size);
    if (addr == FDT_ADDR_T_NONE)
        return -EINVAL;


@ -347,18 +347,13 @@ fdt_addr_t dev_read_addr_pci(const struct udevice *dev);
void *dev_remap_addr(const struct udevice *dev);

/**
- * dev_read_addr_size() - get address and size from a device property
- *
- * This does no address translation. It simply reads an property that contains
- * an address and a size value, one after the other.
+ * dev_read_addr_size() - Get the reg property of a device
 *
 * @dev: Device to read from
- * @propname: property to read
 * @sizep: place to put size value (on success)
 * Return: address value, or FDT_ADDR_T_NONE on error
 */
-fdt_addr_t dev_read_addr_size(const struct udevice *dev, const char *propname,
-                             fdt_size_t *sizep);
+fdt_addr_t dev_read_addr_size(const struct udevice *dev, fdt_size_t *sizep);

/**
 * dev_read_name() - get the name of a device's node
@ -1002,10 +997,9 @@ static inline void *dev_remap_addr_name(const struct udevice *dev,
}

static inline fdt_addr_t dev_read_addr_size(const struct udevice *dev,
-                                           const char *propname,
                                            fdt_size_t *sizep)
{
-   return ofnode_get_addr_size(dev_ofnode(dev), propname, sizep);
+   return dev_read_addr_size_index(dev, 0, sizep);
}

static inline const char *dev_read_name(const struct udevice *dev)


@ -15,6 +15,7 @@
#include <video.h>
#include <video_console.h>
#include <asm/test.h>
#include <asm/sdl.h>
#include <dm/test.h>
#include <dm/uclass-internal.h>
#include <test/test.h>


@ -727,6 +727,13 @@ optional:
Note that missing, optional blobs do not produce a non-zero exit code from
binman, although it does show a warning about the missing external blob.
insert-template:
This is not strictly speaking an entry property, since it is processed early
in Binman before the entries are read. It is a list of phandles of nodes to
include in the current (target) node. For each node, its subnodes and their
properties are brought into the target node. See Templates_ below for
more information.
The attributes supported for images and sections are described below. Several
are similar to those for entries.
@ -831,6 +838,13 @@ write-symbols:
binman. This is automatic for certain entry types, e.g. `u-boot-spl`. See
binman_syms_ for more information.
no-write-symbols:
Disables symbol writing for this entry. This can be used in entry types
where symbol writing is automatic. For example, if `u-boot-spl` refers to
the `u_boot_any_image_pos` symbol but U-Boot is not available in the image
containing SPL, this can be used to disable the writing. Quite likely this
indicates a bug in your setup.
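
For example, a minimal sketch (the surrounding mkimage node and its arguments
are illustrative only, taken from the stm32image example earlier in this
change)::

    mkimage {
        args = "-T stm32image -a 0x2ffc2500 -e 0x2ffc2500";

        u-boot-spl {
            no-write-symbols;
        };
    };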
elf-filename:
Sets the file name of a blob's associated ELF file. For example, if the
blob is `zephyr.bin` then the ELF file may be `zephyr.elf`. This allows
@ -1165,6 +1179,86 @@ If you are having trouble figuring out what is going on, you can use
arch/arm/dts/u-boot.dtsi ... found: "arch/arm/dts/juno-r2-u-boot.dtsi"
Templates
=========
Sometimes multiple images need to be created which all have a common
part. For example, a board may generate SPI and eMMC images which both include
a FIT. Since the FIT includes many entries, it is tedious to repeat them twice
in the image description.
Templates provide a simple way to handle this::
binman {
multiple-images;
common_part: template-1 {
some-property;
fit {
... lots of entries in here
};
text {
text = "base image";
};
};
spi-image {
filename = "image-spi.bin";
insert-template = <&common_part>;
/* things specific to SPI follow */
footer {
};
text {
text = "SPI image";
};
};
mmc-image {
filename = "image-mmc.bin";
insert-template = <&common_part>;
/* things specific to MMC follow */
footer {
};
text {
text = "MMC image";
};
};
};
The template node name must start with 'template', so it is not considered to be
an image itself.
The mechanism is very simple. For each phandle in the 'insert-template'
property, the source node is looked up. Then the subnodes of that source node
are copied into the target node, i.e. the one containing the `insert-template`
property.
If the target node has a node with the same name as a template, its properties
override corresponding properties in the template. This allows the template to
be used as a base, with the node providing updates to the properties as needed.
The overriding happens recursively.
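
A condensed sketch of this overriding, reusing the node names from the example
above (only the overridden property is shown)::

    common_part: template-1 {
        text {
            text = "base image";
        };
    };

    spi-image {
        insert-template = <&common_part>;

        text {
            /* this value replaces the template's "base image" */
            text = "SPI image";
        };
    };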
Template nodes appear first in each node that they are inserted into and
ordering of template nodes is preserved. Other nodes come afterwards. If a
template node also appears in the target node, then the template node sets the
order. Thus the template can be used to set the ordering, even if the target
node provides all the properties. In the above example, `fit` and `text` appear
first in the `spi-image` and `mmc-image` images, followed by `footer`.
Where there are multiple template nodes, they are inserted in that order, so
the first template node appears first, then the second.
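
For example (the labels here are hypothetical)::

    insert-template = <&first_part &second_part>;

Subnodes copied from `first_part` are placed before those copied from
`second_part`.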
Properties in the template node are inserted into the destination node if they
do not exist there. In the example above, `some-property` is added to each of
`spi-image` and `mmc-image`.
Note that template nodes are not removed from the binman description at present.
Updating an ELF file
====================


@ -288,7 +288,7 @@ class Bintool:
        name = os.path.expanduser(self.name)  # Expand paths containing ~
        all_args = (name,) + args
        env = tools.get_env_with_path()
-        tout.detail(f"bintool: {' '.join(all_args)}")
+        tout.debug(f"bintool: {' '.join(all_args)}")
        result = command.run_pipe(
            [all_args], capture=True, capture_stderr=True, env=env,
            raise_on_error=False, binary=binary)


@ -22,6 +22,7 @@ from binman import bintool
from binman import cbfs_util
from binman import elf
from binman import entry
from dtoc import fdt_util
from u_boot_pylib import command
from u_boot_pylib import tools
from u_boot_pylib import tout
@ -56,6 +57,7 @@ def _ReadImageDesc(binman_node, use_expanded):
    images = OrderedDict()
    if 'multiple-images' in binman_node.props:
        for node in binman_node.subnodes:
-            images[node.name] = Image(node.name, node,
-                                      use_expanded=use_expanded)
+            if 'template' not in node.name:
+                images[node.name] = Image(node.name, node,
+                                          use_expanded=use_expanded)
    else:
@ -478,6 +480,30 @@ def SignEntries(image_fname, input_fname, privatekey_fname, algo, entry_paths,
AfterReplace(image, allow_resize=True, write_map=write_map) AfterReplace(image, allow_resize=True, write_map=write_map)
def _ProcessTemplates(parent):
"""Handle any templates in the binman description
Args:
parent: Binman node to process (typically /binman)
Search through each target node looking for those with an 'insert-template'
property. Use that as a list of references to template nodes to use to
adjust the target node.
Processing involves copying each subnode of the template node into the
target node.
This is done recursively, so templates can be at any level of the binman
image, e.g. inside a section.
See 'Templates' in the Binman documentation for details.
"""
for node in parent.subnodes:
tmpl = fdt_util.GetPhandleList(node, 'insert-template')
if tmpl:
node.copy_subnodes_from_phandles(tmpl)
_ProcessTemplates(node)
def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
    """Prepare the images to be processed and select the device tree
@ -520,6 +546,8 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
        raise ValueError("Device tree '%s' does not have a 'binman' "
                         "node" % dtb_fname)
    _ProcessTemplates(node)
    images = _ReadImageDesc(node, use_expanded)
    if select_images:


@ -248,6 +248,9 @@ def LookupAndWriteSymbols(elf_fname, entry, section, is_elf=False,
        entry: Entry to process
        section: Section which can be used to lookup symbol values
        base_sym: Base symbol marking the start of the image
+
+    Returns:
+        int: Number of symbols written
    """
    if not base_sym:
        base_sym = '__image_copy_start'
@ -269,12 +272,13 @@
    if not syms:
        tout.debug('LookupAndWriteSymbols: no syms')
-        return
+        return 0
    base = syms.get(base_sym)
    if not base and not is_elf:
        tout.debug('LookupAndWriteSymbols: no base')
-        return
+        return 0
    base_addr = 0 if is_elf else base.address
+    count = 0
    for name, sym in syms.items():
        if name.startswith('_binman'):
            msg = ("Section '%s': Symbol '%s'\n   in entry '%s'" %
@ -307,6 +311,11 @@
                  (msg, name, offset, value, len(value_bytes)))
            entry.data = (entry.data[:offset] + value_bytes +
                          entry.data[offset + sym.size:])
+            count += 1
+    if count:
+        tout.detail(
+            f"Section '{section.GetPath()}': entry '{entry.GetPath()}' : {count} symbols")
+    return count

def GetSymbolValue(sym, data, msg):
    """Get the value of a symbol


@ -141,7 +141,8 @@ class TestElf(unittest.TestCase):
entry = FakeEntry(10) entry = FakeEntry(10)
section = FakeSection() section = FakeSection()
elf_fname = self.ElfTestFile('u_boot_binman_syms_bad') elf_fname = self.ElfTestFile('u_boot_binman_syms_bad')
-        elf.LookupAndWriteSymbols(elf_fname, entry, section)
+        count = elf.LookupAndWriteSymbols(elf_fname, entry, section)
+        self.assertEqual(0, count)
def testBadSymbolSize(self): def testBadSymbolSize(self):
"""Test that an attempt to use an 8-bit symbol are detected """Test that an attempt to use an 8-bit symbol are detected
@ -162,7 +163,7 @@ class TestElf(unittest.TestCase):
def testNoValue(self): def testNoValue(self):
"""Test the case where we have no value for the symbol """Test the case where we have no value for the symbol
-        This should produce -1 values for all thress symbols, taking up the
+        This should produce -1 values for all three symbols, taking up the
first 16 bytes of the image. first 16 bytes of the image.
""" """
if not elf.ELF_TOOLS: if not elf.ELF_TOOLS:
@ -170,7 +171,8 @@ class TestElf(unittest.TestCase):
entry = FakeEntry(28) entry = FakeEntry(28)
section = FakeSection(sym_value=None) section = FakeSection(sym_value=None)
elf_fname = self.ElfTestFile('u_boot_binman_syms') elf_fname = self.ElfTestFile('u_boot_binman_syms')
-        elf.LookupAndWriteSymbols(elf_fname, entry, section)
+        count = elf.LookupAndWriteSymbols(elf_fname, entry, section)
+        self.assertEqual(5, count)
expected = (struct.pack('<L', elf.BINMAN_SYM_MAGIC_VALUE) + expected = (struct.pack('<L', elf.BINMAN_SYM_MAGIC_VALUE) +
tools.get_bytes(255, 20) + tools.get_bytes(255, 20) +
tools.get_bytes(ord('a'), 4)) tools.get_bytes(ord('a'), 4))
@ -369,6 +371,11 @@ class TestElf(unittest.TestCase):
elf.GetSymbolOffset(fname, 'embed') elf.GetSymbolOffset(fname, 'embed')
self.assertIn('__image_copy_start', str(e.exception)) self.assertIn('__image_copy_start', str(e.exception))
def test_get_symbol_address(self):
fname = self.ElfTestFile('embed_data')
addr = elf.GetSymbolAddress(fname, 'region_size')
self.assertEqual(0, addr)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()


@ -615,6 +615,12 @@ The top-level 'fit' node supports the following special properties:
`of-list` meaning that `-a of-list="dtb1 dtb2..."` should be passed
to binman.
fit,fdt-list-val
As an alternative to fit,fdt-list the list of device tree files
can be provided in this property as a string list, e.g.::
fit,fdt-list-val = "dtb1", "dtb2";
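
A slightly fuller sketch, showing the property inside a fit entry (the device
tree names here are illustrative)::

    fit {
        description = "example";
        fit,fdt-list-val = "test-fdt1", "test-fdt2";

        ... images and configurations as usual
    };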
Substitutions
~~~~~~~~~~~~~


@ -158,6 +158,7 @@ class Entry(object):
self.offset_from_elf = None self.offset_from_elf = None
self.preserve = False self.preserve = False
self.build_done = False self.build_done = False
self.no_write_symbols = False
@staticmethod @staticmethod
def FindEntryClass(etype, expanded): def FindEntryClass(etype, expanded):
@ -321,6 +322,7 @@ class Entry(object):
'offset-from-elf') 'offset-from-elf')
self.preserve = fdt_util.GetBool(self._node, 'preserve') self.preserve = fdt_util.GetBool(self._node, 'preserve')
self.no_write_symbols = fdt_util.GetBool(self._node, 'no-write-symbols')
def GetDefaultFilename(self): def GetDefaultFilename(self):
return None return None
@ -472,6 +474,9 @@ class Entry(object):
def ObtainContents(self, skip_entry=None, fake_size=0): def ObtainContents(self, skip_entry=None, fake_size=0):
"""Figure out the contents of an entry. """Figure out the contents of an entry.
For missing blobs (where allow-missing is enabled), the contents are set
to b'' and self.missing is set to True.
Args: Args:
skip_entry (Entry): Entry to skip when obtaining section contents skip_entry (Entry): Entry to skip when obtaining section contents
fake_size (int): Size of fake file to create if needed fake_size (int): Size of fake file to create if needed
@ -695,7 +700,7 @@ class Entry(object):
Args: Args:
section: Section containing the entry section: Section containing the entry
""" """
-        if self.auto_write_symbols:
+        if self.auto_write_symbols and not self.no_write_symbols:
# Check if we are writing symbols into an ELF file # Check if we are writing symbols into an ELF file
is_elf = self.GetDefaultFilename() == self.elf_fname is_elf = self.GetDefaultFilename() == self.elf_fname
elf.LookupAndWriteSymbols(self.elf_fname, self, section.GetImage(), elf.LookupAndWriteSymbols(self.elf_fname, self, section.GetImage(),
@ -1309,10 +1314,6 @@ features to produce new behaviours.
""" """
data = b'' data = b''
        for entry in entries:
-            # First get the input data and put it in a file. If not available,
-            # try later.
-            if not entry.ObtainContents(fake_size=fake_size):
-                return None, None, None
            data += entry.GetData()
uniq = self.GetUniqueName() uniq = self.GetUniqueName()
fname = tools.get_output_filename(f'{prefix}.{uniq}') fname = tools.get_output_filename(f'{prefix}.{uniq}')


@ -52,3 +52,8 @@ class Entry_blob_phase(Entry_section):
# Read entries again, now that we have some
self.ReadEntries()
# Propagate the no-write-symbols property
if self.no_write_symbols:
for entry in self._entries.values():
entry.no_write_symbols = True


@ -81,6 +81,12 @@ class Entry_fit(Entry_section):
`of-list` meaning that `-a of-list="dtb1 dtb2..."` should be passed `of-list` meaning that `-a of-list="dtb1 dtb2..."` should be passed
to binman. to binman.
fit,fdt-list-val
As an alternative to fit,fdt-list the list of device tree files
can be provided in this property as a string list, e.g.::
fit,fdt-list-val = "dtb1", "dtb2";
Substitutions Substitutions
~~~~~~~~~~~~~ ~~~~~~~~~~~~~
@ -361,6 +367,9 @@ class Entry_fit(Entry_section):
[EntryArg(self._fit_list_prop.value, str)]) [EntryArg(self._fit_list_prop.value, str)])
if fdts is not None: if fdts is not None:
self._fdts = fdts.split() self._fdts = fdts.split()
else:
self._fdts = fdt_util.GetStringList(self._node, 'fit,fdt-list-val')
self._fit_default_dt = self.GetEntryArgsOrProps([EntryArg('default-dt', self._fit_default_dt = self.GetEntryArgsOrProps([EntryArg('default-dt',
str)])[0] str)])[0]


@ -8,10 +8,11 @@
from collections import OrderedDict

from binman.entry import Entry
+from binman.etype.section import Entry_section
from dtoc import fdt_util
from u_boot_pylib import tools

-class Entry_mkimage(Entry):
+class Entry_mkimage(Entry_section):
    """Binary produced by mkimage

    Properties / Entry arguments:
@ -121,54 +122,67 @@
    """
    def __init__(self, section, etype, node):
        super().__init__(section, etype, node)
-        self._multiple_data_files = fdt_util.GetBool(self._node, 'multiple-data-files')
-        self._mkimage_entries = OrderedDict()
        self._imagename = None
-        self._filename = fdt_util.GetString(self._node, 'filename')
+        self._multiple_data_files = False
+        self.align_default = None
    def ReadNode(self):
        super().ReadNode()
+        self._multiple_data_files = fdt_util.GetBool(self._node,
+                                                     'multiple-data-files')
        self._args = fdt_util.GetArgs(self._node, 'args')
        self._data_to_imagename = fdt_util.GetBool(self._node,
                                                   'data-to-imagename')
        if self._data_to_imagename and self._node.FindNode('imagename'):
            self.Raise('Cannot use both imagename node and data-to-imagename')
-        self.ReadEntries()

    def ReadEntries(self):
        """Read the subnodes to find out what should go in this image"""
        for node in self._node.subnodes:
-            entry = Entry.Create(self, node)
+            if self.IsSpecialSubnode(node):
+                continue
+            entry = Entry.Create(self, node,
+                                 expanded=self.GetImage().use_expanded,
+                                 missing_etype=self.GetImage().missing_etype)
            entry.ReadNode()
+            entry.SetPrefix(self._name_prefix)
            if entry.name == 'imagename':
                self._imagename = entry
            else:
-                self._mkimage_entries[entry.name] = entry
+                self._entries[entry.name] = entry
-    def ObtainContents(self):
+    def BuildSectionData(self, required):
+        """Build mkimage entry contents
+
+        Runs mkimage to build the entry contents
+
+        Args:
+            required (bool): True if the data must be present, False if it is OK
+                to return None
+
+        Returns:
+            bytes: Contents of the section
+        """
        # Use a non-zero size for any fake files to keep mkimage happy
        # Note that testMkimageImagename() relies on this 'mkimage' parameter
        fake_size = 1024
        if self._multiple_data_files:
            fnames = []
            uniq = self.GetUniqueName()
-            for entry in self._mkimage_entries.values():
-                if not entry.ObtainContents(fake_size=fake_size):
-                    return False
-                if entry._pathname:
-                    fnames.append(entry._pathname)
+            for entry in self._entries.values():
+                # Put the contents in a temporary file
+                ename = f'mkimage-in-{uniq}-{entry.name}'
+                fname = tools.get_output_filename(ename)
+                data = entry.GetData(required)
+                tools.write_file(fname, data)
+                fnames.append(fname)
            input_fname = ":".join(fnames)
-            data = b''
        else:
            data, input_fname, uniq = self.collect_contents_to_file(
-                self._mkimage_entries.values(), 'mkimage', fake_size)
-            if data is None:
-                return False
+                self._entries.values(), 'mkimage', fake_size)
        if self._imagename:
            image_data, imagename_fname, _ = self.collect_contents_to_file(
                [self._imagename], 'mkimage-n', 1024)
-            if image_data is None:
-                return False
        outfile = self._filename if self._filename else 'mkimage-out.%s' % uniq
        output_fname = tools.get_output_filename(outfile)
@ -176,8 +190,7 @@ class Entry_mkimage(Entry):
        self.CheckMissing(missing_list)
        self.missing = bool(missing_list)
        if self.missing:
-            self.SetContents(b'')
-            return self.allow_missing
+            return b''
        args = ['-d', input_fname]
        if self._data_to_imagename:
@ -186,71 +199,58 @@ class Entry_mkimage(Entry):
            args += ['-n', imagename_fname]
        args += self._args + [output_fname]
        if self.mkimage.run_cmd(*args) is not None:
-            self.SetContents(tools.read_file(output_fname))
+            return tools.read_file(output_fname)
        else:
            # Bintool is missing; just use the input data as the output
            self.record_missing_bintool(self.mkimage)
-            self.SetContents(data)
-        return True
+            return data
    def GetEntries(self):
        # Make a copy so we don't change the original
-        entries = OrderedDict(self._mkimage_entries)
+        entries = OrderedDict(self._entries)
        if self._imagename:
            entries['imagename'] = self._imagename
        return entries
def SetAllowMissing(self, allow_missing):
"""Set whether a section allows missing external blobs
Args:
allow_missing: True if allowed, False if not allowed
"""
self.allow_missing = allow_missing
for entry in self._mkimage_entries.values():
entry.SetAllowMissing(allow_missing)
if self._imagename:
self._imagename.SetAllowMissing(allow_missing)
def SetAllowFakeBlob(self, allow_fake):
"""Set whether the sub nodes allows to create a fake blob
Args:
allow_fake: True if allowed, False if not allowed
"""
for entry in self._mkimage_entries.values():
entry.SetAllowFakeBlob(allow_fake)
if self._imagename:
self._imagename.SetAllowFakeBlob(allow_fake)
def CheckMissing(self, missing_list):
"""Check if any entries in this section have missing external blobs
If there are missing (non-optional) blobs, the entries are added to the
list
Args:
missing_list: List of Entry objects to be added to
"""
for entry in self._mkimage_entries.values():
entry.CheckMissing(missing_list)
if self._imagename:
self._imagename.CheckMissing(missing_list)
def CheckFakedBlobs(self, faked_blobs_list):
"""Check if any entries in this section have faked external blobs
If there are faked blobs, the entries are added to the list
Args:
faked_blobs_list: List of Entry objects to be added to
"""
for entry in self._mkimage_entries.values():
entry.CheckFakedBlobs(faked_blobs_list)
if self._imagename:
self._imagename.CheckFakedBlobs(faked_blobs_list)
def AddBintools(self, btools): def AddBintools(self, btools):
super().AddBintools(btools) super().AddBintools(btools)
self.mkimage = self.AddBintool(btools, 'mkimage') self.mkimage = self.AddBintool(btools, 'mkimage')
def CheckEntries(self):
pass
def ProcessContents(self):
# The blob may have changed due to WriteSymbols()
ok = super().ProcessContents()
data = self.BuildSectionData(True)
ok2 = self.ProcessContentsUpdate(data)
return ok and ok2
def SetImagePos(self, image_pos):
"""Set the position in the image
This sets each subentry's offsets, sizes and positions-in-image
according to where they ended up in the packed mkimage file.
NOTE: This assumes a legacy mkimage and assumes that the images are
written to the output in order. SoC-specific mkimage handling may not
conform to this, in which case these values may be wrong.
Args:
image_pos (int): Position of this entry in the image
"""
# The mkimage header consists of 0x40 bytes, followed by a table of
# offsets for each file
upto = 0x40
# Skip the 0-terminated list of offsets (assume a single image)
upto += 4 + 4
for entry in self.GetEntries().values():
entry.SetOffsetSize(upto, None)
# Give up if any entries lack a size
if entry.size is None:
return
upto += entry.size
super().SetImagePos(image_pos)


@ -168,6 +168,7 @@ class Entry_section(Entry):
self._end_4gb = False self._end_4gb = False
self._ignore_missing = False self._ignore_missing = False
self._filename = None self._filename = None
self.align_default = 0
def IsSpecialSubnode(self, node): def IsSpecialSubnode(self, node):
"""Check if a node is a special one used by the section itself """Check if a node is a special one used by the section itself
@ -178,7 +179,8 @@ class Entry_section(Entry):
Returns: Returns:
bool: True if the node is a special one, else False bool: True if the node is a special one, else False
""" """
-        return node.name.startswith('hash') or node.name.startswith('signature')
+        start_list = ('hash', 'signature', 'template')
+        return any(node.name.startswith(name) for name in start_list)
def ReadNode(self): def ReadNode(self):
"""Read properties from the section node""" """Read properties from the section node"""
@ -315,12 +317,15 @@ class Entry_section(Entry):
This should be overridden by subclasses which want to build their own This should be overridden by subclasses which want to build their own
data structure for the section. data structure for the section.
Missing entries will be given empty (or fake) data, so are
processed normally here.
Args: Args:
required: True if the data must be present, False if it is OK to required: True if the data must be present, False if it is OK to
return None return None
Returns: Returns:
-            Contents of the section (bytes)
+            Contents of the section (bytes), None if not available
""" """
section_data = bytearray() section_data = bytearray()
@ -710,6 +715,33 @@ class Entry_section(Entry):
def GetEntryContents(self, skip_entry=None): def GetEntryContents(self, skip_entry=None):
"""Call ObtainContents() for each entry in the section """Call ObtainContents() for each entry in the section
The overall goal of this function is to read in any available data in
this entry and any subentries. This includes reading in blobs, setting
up objects which have predefined contents, etc.
Since entry types which contain entries call ObtainContents() on all
those entries too, the result is that ObtainContents() is called
recursively for the whole tree below this one.
Entries with subentries are generally not *themselves* processed here,
i.e. their ObtainContents() implementation simply obtains contents of
their subentries, skipping their own contents. For example, the
implementation here (for entry_Section) does not attempt to pack the
entries into a final result. That is handled later.
Generally, calling this results in SetContents() being called for each
entry, so that the 'data' and 'contents_size' properties are set, and
subsequent calls to GetData() will return valid data.
Where 'allow_missing' is set, this can result in the 'missing' property
being set to True if there is no data. This is handled by setting the
data to b''. This function will still return success. Future calls to
GetData() for this entry will return b'', or in the case where the data
is faked, GetData() will return that fake data.
Args:
skip_entry: (single) Entry to skip, or None to process all entries
Note that this may set entry.absent to True if the entry is not Note that this may set entry.absent to True if the entry is not
actually needed actually needed
""" """
@ -719,7 +751,7 @@ class Entry_section(Entry):
next_todo.append(entry) next_todo.append(entry)
return entry return entry
-        todo = self._entries.values()
+        todo = self.GetEntries().values()
for passnum in range(3): for passnum in range(3):
threads = state.GetThreads() threads = state.GetThreads()
next_todo = [] next_todo = []
@ -892,7 +924,7 @@ class Entry_section(Entry):
allow_missing: True if allowed, False if not allowed allow_missing: True if allowed, False if not allowed
""" """
self.allow_missing = allow_missing self.allow_missing = allow_missing
-        for entry in self._entries.values():
+        for entry in self.GetEntries().values():
entry.SetAllowMissing(allow_missing) entry.SetAllowMissing(allow_missing)
def SetAllowFakeBlob(self, allow_fake): def SetAllowFakeBlob(self, allow_fake):
@ -902,7 +934,7 @@ class Entry_section(Entry):
allow_fake: True if allowed, False if not allowed allow_fake: True if allowed, False if not allowed
""" """
super().SetAllowFakeBlob(allow_fake) super().SetAllowFakeBlob(allow_fake)
-        for entry in self._entries.values():
+        for entry in self.GetEntries().values():
entry.SetAllowFakeBlob(allow_fake) entry.SetAllowFakeBlob(allow_fake)
def CheckMissing(self, missing_list): def CheckMissing(self, missing_list):
@ -914,7 +946,7 @@ class Entry_section(Entry):
Args: Args:
missing_list: List of Entry objects to be added to missing_list: List of Entry objects to be added to
""" """
-        for entry in self._entries.values():
+        for entry in self.GetEntries().values():
entry.CheckMissing(missing_list) entry.CheckMissing(missing_list)
def CheckFakedBlobs(self, faked_blobs_list): def CheckFakedBlobs(self, faked_blobs_list):
@ -925,7 +957,7 @@ class Entry_section(Entry):
Args: Args:
faked_blobs_list: List of Entry objects to be added to faked_blobs_list: List of Entry objects to be added to
""" """
-        for entry in self._entries.values():
+        for entry in self.GetEntries().values():
entry.CheckFakedBlobs(faked_blobs_list) entry.CheckFakedBlobs(faked_blobs_list)
def CheckOptional(self, optional_list): def CheckOptional(self, optional_list):
@ -936,7 +968,7 @@ class Entry_section(Entry):
Args: Args:
optional_list (list): List of Entry objects to be added to optional_list (list): List of Entry objects to be added to
""" """
-        for entry in self._entries.values():
+        for entry in self.GetEntries().values():
entry.CheckOptional(optional_list) entry.CheckOptional(optional_list)
def check_missing_bintools(self, missing_list): def check_missing_bintools(self, missing_list):
@ -948,7 +980,7 @@ class Entry_section(Entry):
missing_list: List of Bintool objects to be added to missing_list: List of Bintool objects to be added to
""" """
super().check_missing_bintools(missing_list) super().check_missing_bintools(missing_list)
-        for entry in self._entries.values():
+        for entry in self.GetEntries().values():
entry.check_missing_bintools(missing_list) entry.check_missing_bintools(missing_list)
def _CollectEntries(self, entries, entries_by_name, add_entry): def _CollectEntries(self, entries, entries_by_name, add_entry):
@ -998,12 +1030,12 @@ class Entry_section(Entry):
entry.Raise(f'Missing required properties/entry args: {missing}') entry.Raise(f'Missing required properties/entry args: {missing}')
def CheckAltFormats(self, alt_formats): def CheckAltFormats(self, alt_formats):
-        for entry in self._entries.values():
+        for entry in self.GetEntries().values():
entry.CheckAltFormats(alt_formats) entry.CheckAltFormats(alt_formats)
def AddBintools(self, btools): def AddBintools(self, btools):
super().AddBintools(btools) super().AddBintools(btools)
-        for entry in self._entries.values():
+        for entry in self.GetEntries().values():
entry.AddBintools(btools) entry.AddBintools(btools)
def read_elf_segments(self): def read_elf_segments(self):


@ -38,7 +38,7 @@ class Entry_u_boot_spl_bss_pad(Entry_blob):
    def ObtainContents(self):
        fname = tools.get_input_filename('spl/u-boot-spl')
        bss_size = elf.GetSymbolAddress(fname, '__bss_size')
-        if not bss_size:
+        if bss_size is None:
            self.Raise('Expected __bss_size symbol in spl/u-boot-spl')
        self.SetContents(tools.get_bytes(0, bss_size))
        return True


@ -38,7 +38,7 @@ class Entry_u_boot_tpl_bss_pad(Entry_blob):
    def ObtainContents(self):
        fname = tools.get_input_filename('tpl/u-boot-tpl')
        bss_size = elf.GetSymbolAddress(fname, '__bss_size')
-        if not bss_size:
+        if bss_size is None:
            self.Raise('Expected __bss_size symbol in tpl/u-boot-tpl')
        self.SetContents(tools.get_bytes(0, bss_size))
        return True


@ -38,7 +38,7 @@ class Entry_u_boot_vpl_bss_pad(Entry_blob):
    def ObtainContents(self):
        fname = tools.get_input_filename('vpl/u-boot-vpl')
        bss_size = elf.GetSymbolAddress(fname, '__bss_size')
-        if not bss_size:
+        if bss_size is None:
            self.Raise('Expected __bss_size symbol in vpl/u-boot-vpl')
        self.SetContents(tools.get_bytes(0, bss_size))
        return True


@ -1103,6 +1103,7 @@ class TestFunctional(unittest.TestCase):
def testPackZeroOffset(self): def testPackZeroOffset(self):
"""Test that an entry at offset 0 is not given a new offset""" """Test that an entry at offset 0 is not given a new offset"""
self._SetupSplElf()
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
self._DoTestFile('025_pack_zero_size.dts') self._DoTestFile('025_pack_zero_size.dts')
self.assertIn("Node '/binman/u-boot-spl': Offset 0x0 (0) overlaps " self.assertIn("Node '/binman/u-boot-spl': Offset 0x0 (0) overlaps "
@ -1116,6 +1117,7 @@ class TestFunctional(unittest.TestCase):
def testPackX86RomNoSize(self): def testPackX86RomNoSize(self):
"""Test that the end-at-4gb property requires a size property""" """Test that the end-at-4gb property requires a size property"""
self._SetupSplElf()
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
self._DoTestFile('027_pack_4gb_no_size.dts') self._DoTestFile('027_pack_4gb_no_size.dts')
self.assertIn("Image '/binman': Section size must be provided when " self.assertIn("Image '/binman': Section size must be provided when "
@ -1124,6 +1126,7 @@ class TestFunctional(unittest.TestCase):
def test4gbAndSkipAtStartTogether(self): def test4gbAndSkipAtStartTogether(self):
"""Test that the end-at-4gb and skip-at-size property can't be used """Test that the end-at-4gb and skip-at-size property can't be used
together""" together"""
self._SetupSplElf()
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
self._DoTestFile('098_4gb_and_skip_at_start_together.dts') self._DoTestFile('098_4gb_and_skip_at_start_together.dts')
self.assertIn("Image '/binman': Provide either 'end-at-4gb' or " self.assertIn("Image '/binman': Provide either 'end-at-4gb' or "
@ -1131,6 +1134,7 @@ class TestFunctional(unittest.TestCase):
def testPackX86RomOutside(self): def testPackX86RomOutside(self):
"""Test that the end-at-4gb property checks for offset boundaries""" """Test that the end-at-4gb property checks for offset boundaries"""
self._SetupSplElf()
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
self._DoTestFile('028_pack_4gb_outside.dts') self._DoTestFile('028_pack_4gb_outside.dts')
self.assertIn("Node '/binman/u-boot': Offset 0x0 (0) size 0x4 (4) " self.assertIn("Node '/binman/u-boot': Offset 0x0 (0) size 0x4 (4) "
@ -1423,6 +1427,7 @@ class TestFunctional(unittest.TestCase):
def testPackUbootSplMicrocode(self): def testPackUbootSplMicrocode(self):
"""Test that x86 microcode can be handled correctly in SPL""" """Test that x86 microcode can be handled correctly in SPL"""
self._SetupSplElf()
self._PackUbootSplMicrocode('049_x86_ucode_spl.dts') self._PackUbootSplMicrocode('049_x86_ucode_spl.dts')
def testPackUbootSplMicrocodeReorder(self): def testPackUbootSplMicrocodeReorder(self):
@ -1442,6 +1447,7 @@ class TestFunctional(unittest.TestCase):
def testSplDtb(self): def testSplDtb(self):
"""Test that an image with spl/u-boot-spl.dtb can be created""" """Test that an image with spl/u-boot-spl.dtb can be created"""
self._SetupSplElf()
data = self._DoReadFile('051_u_boot_spl_dtb.dts') data = self._DoReadFile('051_u_boot_spl_dtb.dts')
self.assertEqual(U_BOOT_SPL_DTB_DATA, data[:len(U_BOOT_SPL_DTB_DATA)]) self.assertEqual(U_BOOT_SPL_DTB_DATA, data[:len(U_BOOT_SPL_DTB_DATA)])
@ -1452,7 +1458,7 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(U_BOOT_SPL_NODTB_DATA, data[:len(U_BOOT_SPL_NODTB_DATA)]) self.assertEqual(U_BOOT_SPL_NODTB_DATA, data[:len(U_BOOT_SPL_NODTB_DATA)])
    def checkSymbols(self, dts, base_data, u_boot_offset, entry_args=None,
-                     use_expanded=False):
+                     use_expanded=False, no_write_symbols=False):
"""Check the image contains the expected symbol values """Check the image contains the expected symbol values
Args: Args:
@ -1481,6 +1487,11 @@ class TestFunctional(unittest.TestCase):
sym_values = struct.pack('<LLQLL', elf.BINMAN_SYM_MAGIC_VALUE, sym_values = struct.pack('<LLQLL', elf.BINMAN_SYM_MAGIC_VALUE,
0x00, u_boot_offset + len(U_BOOT_DATA), 0x00, u_boot_offset + len(U_BOOT_DATA),
0x10 + u_boot_offset, 0x04) 0x10 + u_boot_offset, 0x04)
        if no_write_symbols:
            expected = (base_data +
                        tools.get_bytes(0xff, 0x38 - len(base_data)) +
                        U_BOOT_DATA + base_data)
        else:
            expected = (sym_values + base_data[24:] +
                        tools.get_bytes(0xff, 1) + U_BOOT_DATA + sym_values +
                        base_data[24:])
@ -1957,6 +1968,8 @@ class TestFunctional(unittest.TestCase):
def testUpdateFdtAll(self): def testUpdateFdtAll(self):
"""Test that all device trees are updated with offset/size info""" """Test that all device trees are updated with offset/size info"""
self._SetupSplElf()
self._SetupTplElf()
data = self._DoReadFileRealDtb('082_fdt_update_all.dts') data = self._DoReadFileRealDtb('082_fdt_update_all.dts')
base_expected = { base_expected = {
@ -3279,6 +3292,8 @@ class TestFunctional(unittest.TestCase):
def testUpdateFdtAllRepack(self): def testUpdateFdtAllRepack(self):
"""Test that all device trees are updated with offset/size info""" """Test that all device trees are updated with offset/size info"""
self._SetupSplElf()
self._SetupTplElf()
data = self._DoReadFileRealDtb('134_fdt_update_all_repack.dts') data = self._DoReadFileRealDtb('134_fdt_update_all_repack.dts')
SECTION_SIZE = 0x300 SECTION_SIZE = 0x300
DTB_SIZE = 602 DTB_SIZE = 602
@ -3732,6 +3747,7 @@ class TestFunctional(unittest.TestCase):
def testMkimage(self): def testMkimage(self):
"""Test using mkimage to build an image""" """Test using mkimage to build an image"""
self._SetupSplElf()
data = self._DoReadFile('156_mkimage.dts') data = self._DoReadFile('156_mkimage.dts')
# Just check that the data appears in the file somewhere # Just check that the data appears in the file somewhere
@ -3739,6 +3755,7 @@ class TestFunctional(unittest.TestCase):
def testMkimageMissing(self): def testMkimageMissing(self):
"""Test that binman still produces an image if mkimage is missing""" """Test that binman still produces an image if mkimage is missing"""
self._SetupSplElf()
with test_util.capture_sys_output() as (_, stderr): with test_util.capture_sys_output() as (_, stderr):
self._DoTestFile('156_mkimage.dts', self._DoTestFile('156_mkimage.dts',
force_missing_bintools='mkimage') force_missing_bintools='mkimage')
@ -3851,6 +3868,7 @@ class TestFunctional(unittest.TestCase):
def testSimpleFit(self): def testSimpleFit(self):
"""Test an image with a FIT inside""" """Test an image with a FIT inside"""
self._SetupSplElf()
data = self._DoReadFile('161_fit.dts') data = self._DoReadFile('161_fit.dts')
self.assertEqual(U_BOOT_DATA, data[:len(U_BOOT_DATA)]) self.assertEqual(U_BOOT_DATA, data[:len(U_BOOT_DATA)])
self.assertEqual(U_BOOT_NODTB_DATA, data[-len(U_BOOT_NODTB_DATA):]) self.assertEqual(U_BOOT_NODTB_DATA, data[-len(U_BOOT_NODTB_DATA):])
@ -5370,6 +5388,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSubentryHashSubnode(self): def testFitSubentryHashSubnode(self):
"""Test an image with a FIT inside""" """Test an image with a FIT inside"""
self._SetupSplElf()
data, _, _, out_dtb_name = self._DoReadFileDtb( data, _, _, out_dtb_name = self._DoReadFileDtb(
'221_fit_subentry_hash.dts', use_real_dtb=True, update_dtb=True) '221_fit_subentry_hash.dts', use_real_dtb=True, update_dtb=True)
@ -5888,6 +5907,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkimageImagename(self): def testMkimageImagename(self):
"""Test using mkimage with -n holding the data too""" """Test using mkimage with -n holding the data too"""
self._SetupSplElf()
data = self._DoReadFile('242_mkimage_name.dts') data = self._DoReadFile('242_mkimage_name.dts')
# Check that the data appears in the file somewhere # Check that the data appears in the file somewhere
@ -5905,6 +5925,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkimageImage(self): def testMkimageImage(self):
"""Test using mkimage with -n holding the data too""" """Test using mkimage with -n holding the data too"""
self._SetupSplElf()
data = self._DoReadFile('243_mkimage_image.dts') data = self._DoReadFile('243_mkimage_image.dts')
# Check that the data appears in the file somewhere # Check that the data appears in the file somewhere
@ -5925,6 +5946,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkimageImageNoContent(self): def testMkimageImageNoContent(self):
"""Test using mkimage with -n and no data""" """Test using mkimage with -n and no data"""
self._SetupSplElf()
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self._DoReadFile('244_mkimage_image_no_content.dts') self._DoReadFile('244_mkimage_image_no_content.dts')
self.assertIn('Could not complete processing of contents', self.assertIn('Could not complete processing of contents',
@ -5932,6 +5954,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkimageImageBad(self): def testMkimageImageBad(self):
"""Test using mkimage with imagename node and data-to-imagename""" """Test using mkimage with imagename node and data-to-imagename"""
self._SetupSplElf()
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self._DoReadFile('245_mkimage_image_bad.dts') self._DoReadFile('245_mkimage_image_bad.dts')
self.assertIn('Cannot use both imagename node and data-to-imagename', self.assertIn('Cannot use both imagename node and data-to-imagename',
@ -5947,6 +5970,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkimageCollection(self): def testMkimageCollection(self):
"""Test using a collection referring to an entry in a mkimage entry""" """Test using a collection referring to an entry in a mkimage entry"""
self._SetupSplElf()
data = self._DoReadFile('247_mkimage_coll.dts') data = self._DoReadFile('247_mkimage_coll.dts')
expect = U_BOOT_SPL_DATA + U_BOOT_DATA expect = U_BOOT_SPL_DATA + U_BOOT_DATA
self.assertEqual(expect, data[:len(expect)]) self.assertEqual(expect, data[:len(expect)])
@ -6032,6 +6056,8 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkimageMultipleDataFiles(self): def testMkimageMultipleDataFiles(self):
"""Test passing multiple files to mkimage in a mkimage entry""" """Test passing multiple files to mkimage in a mkimage entry"""
self._SetupSplElf()
self._SetupTplElf()
data = self._DoReadFile('252_mkimage_mult_data.dts') data = self._DoReadFile('252_mkimage_mult_data.dts')
# Size of files are packed in their 4B big-endian format # Size of files are packed in their 4B big-endian format
expect = struct.pack('>I', len(U_BOOT_TPL_DATA)) expect = struct.pack('>I', len(U_BOOT_TPL_DATA))
@ -6046,8 +6072,42 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
expect += U_BOOT_SPL_DATA expect += U_BOOT_SPL_DATA
self.assertEqual(expect, data[-len(expect):]) self.assertEqual(expect, data[-len(expect):])
def testMkimageMultipleExpanded(self):
"""Test passing multiple files to mkimage in a mkimage entry"""
self._SetupSplElf()
self._SetupTplElf()
entry_args = {
'spl-bss-pad': 'y',
'spl-dtb': 'y',
}
data = self._DoReadFileDtb('252_mkimage_mult_data.dts',
use_expanded=True, entry_args=entry_args)[0]
pad_len = 10
tpl_expect = U_BOOT_TPL_DATA
spl_expect = U_BOOT_SPL_NODTB_DATA + tools.get_bytes(0, pad_len)
spl_expect += U_BOOT_SPL_DTB_DATA
content = data[0x40:]
lens = struct.unpack('>III', content[:12])
# Size of files are packed in their 4B big-endian format
# Size info is always followed by a 4B zero value.
self.assertEqual(len(tpl_expect), lens[0])
self.assertEqual(len(spl_expect), lens[1])
self.assertEqual(0, lens[2])
rest = content[12:]
self.assertEqual(tpl_expect, rest[:len(tpl_expect)])
rest = rest[len(tpl_expect):]
align_pad = len(tpl_expect) % 4
self.assertEqual(tools.get_bytes(0, align_pad), rest[:align_pad])
rest = rest[align_pad:]
self.assertEqual(spl_expect, rest)
def testMkimageMultipleNoContent(self): def testMkimageMultipleNoContent(self):
"""Test passing multiple data files to mkimage with one data file having no content""" """Test passing multiple data files to mkimage with one data file having no content"""
self._SetupSplElf()
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self._DoReadFile('253_mkimage_mult_no_content.dts') self._DoReadFile('253_mkimage_mult_no_content.dts')
self.assertIn('Could not complete processing of contents', self.assertIn('Could not complete processing of contents',
@ -6055,6 +6115,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testMkimageFilename(self): def testMkimageFilename(self):
"""Test using mkimage to build a binary with a filename""" """Test using mkimage to build a binary with a filename"""
self._SetupSplElf()
retcode = self._DoTestFile('254_mkimage_filename.dts') retcode = self._DoTestFile('254_mkimage_filename.dts')
self.assertEqual(0, retcode) self.assertEqual(0, retcode)
fname = tools.get_output_filename('mkimage-test.bin') fname = tools.get_output_filename('mkimage-test.bin')
@ -6529,6 +6590,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testReplaceFitSibling(self): def testReplaceFitSibling(self):
"""Test an image with a FIT inside where we replace its sibling""" """Test an image with a FIT inside where we replace its sibling"""
self._SetupSplElf()
fname = TestFunctional._MakeInputFile('once', b'available once') fname = TestFunctional._MakeInputFile('once', b'available once')
self._DoReadFileRealDtb('277_replace_fit_sibling.dts') self._DoReadFileRealDtb('277_replace_fit_sibling.dts')
os.remove(fname) os.remove(fname)
@ -6603,7 +6665,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
Private key Private key
DTB DTB
""" """
self._SetupSplElf()
data = self._DoReadFileRealDtb(dts) data = self._DoReadFileRealDtb(dts)
updated_fname = tools.get_output_filename('image-updated.bin') updated_fname = tools.get_output_filename('image-updated.bin')
tools.write_file(updated_fname, data) tools.write_file(updated_fname, data)
@ -6676,6 +6738,152 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
['fit']) ['fit'])
self.assertIn("Node '/fit': Missing tool: 'mkimage'", str(e.exception)) self.assertIn("Node '/fit': Missing tool: 'mkimage'", str(e.exception))
def testSymbolNoWrite(self):
"""Test disabling of symbol writing"""
self._SetupSplElf()
self.checkSymbols('282_symbols_disable.dts', U_BOOT_SPL_DATA, 0x1c,
no_write_symbols=True)
def testSymbolNoWriteExpanded(self):
"""Test disabling of symbol writing in expanded entries"""
entry_args = {
'spl-dtb': '1',
}
self.checkSymbols('282_symbols_disable.dts', U_BOOT_SPL_NODTB_DATA +
U_BOOT_SPL_DTB_DATA, 0x38,
entry_args=entry_args, use_expanded=True,
no_write_symbols=True)
def testMkimageSpecial(self):
"""Test mkimage ignores special hash-1 node"""
data = self._DoReadFile('283_mkimage_special.dts')
# Just check that the data appears in the file somewhere
self.assertIn(U_BOOT_DATA, data)
def testFitFdtList(self):
"""Test an image with an FIT with the fit,fdt-list-val option"""
entry_args = {
'default-dt': 'test-fdt2',
}
data = self._DoReadFileDtb(
'284_fit_fdt_list.dts',
entry_args=entry_args,
extra_indirs=[os.path.join(self._indir, TEST_FDT_SUBDIR)])[0]
self.assertEqual(U_BOOT_NODTB_DATA, data[-len(U_BOOT_NODTB_DATA):])
fit_data = data[len(U_BOOT_DATA):-len(U_BOOT_NODTB_DATA)]
def testSplEmptyBss(self):
"""Test an expanded SPL with a zero-size BSS"""
# ELF file with a '__bss_size' symbol
self._SetupSplElf(src_fname='bss_data_zero')
entry_args = {
'spl-bss-pad': 'y',
'spl-dtb': 'y',
}
data = self._DoReadFileDtb('285_spl_expand.dts',
use_expanded=True, entry_args=entry_args)[0]
def testTemplate(self):
"""Test using a template"""
TestFunctional._MakeInputFile('vga2.bin', b'#' + VGA_DATA)
data = self._DoReadFile('286_template.dts')
first = U_BOOT_DATA + VGA_DATA + U_BOOT_DTB_DATA
second = U_BOOT_DATA + b'#' + VGA_DATA + U_BOOT_DTB_DATA
self.assertEqual(U_BOOT_IMG_DATA + first + second, data)
def testTemplateBlobMulti(self):
"""Test using a template with 'multiple-images' enabled"""
TestFunctional._MakeInputFile('my-blob.bin', b'blob')
TestFunctional._MakeInputFile('my-blob2.bin', b'other')
retcode = self._DoTestFile('287_template_multi.dts')
self.assertEqual(0, retcode)
image = control.images['image']
image_fname = tools.get_output_filename('my-image.bin')
data = tools.read_file(image_fname)
self.assertEqual(b'blob@@@@other', data)
def testTemplateFit(self):
"""Test using a template in a FIT"""
fit_data = self._DoReadFile('288_template_fit.dts')
fname = os.path.join(self._indir, 'fit_data.fit')
tools.write_file(fname, fit_data)
out = tools.run('dumpimage', '-l', fname)
def testTemplateSection(self):
"""Test using a template in a section (not at top level)"""
TestFunctional._MakeInputFile('vga2.bin', b'#' + VGA_DATA)
data = self._DoReadFile('289_template_section.dts')
first = U_BOOT_DATA + VGA_DATA + U_BOOT_DTB_DATA
second = U_BOOT_DATA + b'#' + VGA_DATA + U_BOOT_DTB_DATA
self.assertEqual(U_BOOT_IMG_DATA + first + second + first, data)
def testMkimageSymbols(self):
"""Test using mkimage to build an image with symbols in it"""
self._SetupSplElf('u_boot_binman_syms')
data = self._DoReadFile('290_mkimage_sym.dts')
image = control.images['image']
entries = image.GetEntries()
self.assertIn('u-boot', entries)
u_boot = entries['u-boot']
mkim = entries['mkimage']
mkim_entries = mkim.GetEntries()
self.assertIn('u-boot-spl', mkim_entries)
spl = mkim_entries['u-boot-spl']
self.assertIn('u-boot-spl2', mkim_entries)
spl2 = mkim_entries['u-boot-spl2']
# skip the mkimage header and the area sizes
mk_data = data[mkim.offset + 0x40:]
size, term = struct.unpack('>LL', mk_data[:8])
# There should be only one image, so check that the zero terminator is
# present
self.assertEqual(0, term)
content = mk_data[8:8 + size]
# The image should contain the symbols from u_boot_binman_syms.c
# Note that image_pos is adjusted by the base address of the image,
# which is 0x10 in our test image
spl_data = content[:0x18]
content = content[0x1b:]
# After the header is a table of offsets for each image. There should
# only be one image, then a 0 terminator, so figure out the real start
# of the image data
base = 0x40 + 8
# Check symbols in both u-boot-spl and u-boot-spl2
for i in range(2):
vals = struct.unpack('<LLQLL', spl_data)
# The image should contain the symbols from u_boot_binman_syms.c
# Note that image_pos is adjusted by the base address of the image,
# which is 0x10 in our 'u_boot_binman_syms' test image
self.assertEqual(elf.BINMAN_SYM_MAGIC_VALUE, vals[0])
self.assertEqual(base, vals[1])
self.assertEqual(spl2.offset, vals[2])
# figure out the internal positions of its components
self.assertEqual(0x10 + u_boot.image_pos, vals[3])
# Check that spl and spl2 are actually at the indicated positions
self.assertEqual(
elf.BINMAN_SYM_MAGIC_VALUE,
struct.unpack('<I', data[spl.image_pos:spl.image_pos + 4])[0])
self.assertEqual(
elf.BINMAN_SYM_MAGIC_VALUE,
struct.unpack('<I', data[spl2.image_pos:spl2.image_pos + 4])[0])
self.assertEqual(len(U_BOOT_DATA), vals[4])
# Move to next
spl_data = content[:0x18]
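The 0x18-byte block unpacked in the loop above is the symbol area binman writes at the start of each SPL copy: a 32-bit magic value followed by the resolved symbol values, packed here as '<LLQLL'. A hedged helper that mirrors the test's own slicing (the offsets and format string are taken from this test, not from a documented interface):

    import struct

    def read_binman_syms(image, image_pos, fmt='<LLQLL'):
        """Unpack the binman symbol block at the start of an SPL copy.

        Returns (magic, values); sketch only, the record layout is the one
        assumed by testMkimageSymbols() above.
        """
        size = struct.calcsize(fmt)                  # 0x18 for '<LLQLL'
        vals = struct.unpack(fmt, image[image_pos:image_pos + size])
        return vals[0], list(vals[1:])

    # magic, syms = read_binman_syms(data, spl.image_pos)
    # magic should equal elf.BINMAN_SYM_MAGIC_VALUE for a correctly
    # written SPL copy, as the test asserts.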
if __name__ == "__main__":
unittest.main()


@ -385,7 +385,7 @@ def SetInt(node, prop, value, for_repack=False):
for_repack: True if this property is only needed for repacking
"""
for n in GetUpdateNodes(node, for_repack):
tout.debug("File %s: Update node '%s' prop '%s' to %#x" %
(n.GetFdt().name, n.path, prop, value))
n.SetInt(prop, value)


@ -0,0 +1,25 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
pad-byte = <0xff>;
u-boot-spl {
no-write-symbols;
};
u-boot {
offset = <0x38>;
no-expanded;
};
u-boot-spl2 {
type = "u-boot-spl";
no-write-symbols;
};
};
};


@ -0,0 +1,24 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
mkimage {
args = "-T script";
u-boot {
};
hash {
};
imagename {
type = "u-boot";
};
};
};
};


@ -0,0 +1,58 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot {
};
fit {
description = "test-desc";
#address-cells = <1>;
fit,fdt-list-val = "test-fdt1", "test-fdt2";
images {
kernel {
description = "Vanilla Linux kernel";
type = "kernel";
arch = "ppc";
os = "linux";
compression = "gzip";
load = <00000000>;
entry = <00000000>;
hash-1 {
algo = "crc32";
};
hash-2 {
algo = "sha1";
};
u-boot {
};
};
@fdt-SEQ {
description = "fdt-NAME.dtb";
type = "flat_dt";
compression = "none";
hash {
algo = "sha256";
};
};
};
configurations {
default = "@config-DEFAULT-SEQ";
@config-SEQ {
description = "conf-NAME.dtb";
firmware = "uboot";
loadables = "atf";
fdt = "fdt-SEQ";
};
};
};
u-boot-nodtb {
};
};
};
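For context on the generator nodes above: binman's fit entry expands each '@fdt-SEQ' and '@config-SEQ' node once per device tree named in fit,fdt-list-val, substituting a sequence number for SEQ and the device tree name for NAME. A rough illustration of that substitution (not binman's actual code; assumes the sequence starts at 1):

    def expand_generator(node_name, props, fdt_names):
        """Illustrate how a '@fdt-SEQ' generator node is expanded.

        Sketch only: SEQ is replaced with a sequence number and NAME with
        the device tree name, for every entry in fit,fdt-list-val.
        """
        out = []
        for seq, fdt in enumerate(fdt_names, start=1):
            name = node_name.lstrip('@').replace('SEQ', str(seq))
            expanded = {k: v.replace('NAME', fdt).replace('SEQ', str(seq))
                        for k, v in props.items()}
            out.append((name, expanded))
        return out

    # expand_generator('@fdt-SEQ', {'description': 'fdt-NAME.dtb'},
    #                  ['test-fdt1', 'test-fdt2'])
    # -> [('fdt-1', {'description': 'fdt-test-fdt1.dtb'}),
    #     ('fdt-2', {'description': 'fdt-test-fdt2.dtb'})]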


@ -0,0 +1,13 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot-spl {
};
};
};


@ -0,0 +1,42 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot-img {
};
common_part: template {
u-boot {
};
intel-vga {
filename = "vga.bin";
};
};
first {
type = "section";
insert-template = <&common_part>;
u-boot-dtb {
};
};
second {
type = "section";
insert-template = <&common_part>;
u-boot-dtb {
};
intel-vga {
filename = "vga2.bin";
};
};
};
};


@ -0,0 +1,27 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
binman: binman {
multiple-images;
my_template: template {
blob-ext@0 {
filename = "my-blob.bin";
offset = <0>;
};
blob-ext@8 {
offset = <8>;
};
};
image {
pad-byte = <0x40>;
filename = "my-image.bin";
insert-template = <&my_template>;
blob-ext@8 {
filename = "my-blob2.bin";
};
};
};
};
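A quick sanity check on the template above: blob-ext@0 carries the four bytes of my-blob.bin, the gap up to offset 8 is filled with the pad byte 0x40 ('@'), and blob-ext@8 is overridden in the image node to use my-blob2.bin. That is exactly the b'blob@@@@other' asserted by testTemplateBlobMulti(). The arithmetic, spelled out:

    pad_byte = b'\x40'                  # pad-byte property, ASCII '@'
    blob = b'blob'                      # my-blob.bin at offset 0
    blob2 = b'other'                    # my-blob2.bin at offset 8

    image = blob + pad_byte * (8 - len(blob)) + blob2
    assert image == b'blob@@@@other'    # matches the expected image contents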


@ -0,0 +1,37 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
binman: binman {
multiple-images;
my_template: template {
fit@0 {
images {
kernel-1 {
};
kernel-2 {
};
};
};
};
image {
filename = "image.bin";
insert-template = <&my_template>;
fit@0 {
description = "desc";
configurations {
};
images {
kernel-3 {
};
kernel-4 {
};
};
};
};
};
};


@ -0,0 +1,52 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot-img {
};
common_part: template {
u-boot {
};
intel-vga {
filename = "vga.bin";
};
};
first {
type = "section";
insert-template = <&common_part>;
u-boot-dtb {
};
};
section {
second {
type = "section";
insert-template = <&common_part>;
u-boot-dtb {
};
intel-vga {
filename = "vga2.bin";
};
};
};
second {
type = "section";
insert-template = <&common_part>;
u-boot-dtb {
};
};
};
};


@ -0,0 +1,27 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot-dtb {
};
mkimage {
args = "-n test -T script";
u-boot-spl {
};
u-boot-spl2 {
type = "u-boot-spl";
};
};
u-boot {
};
};
};


@ -32,7 +32,7 @@ LDS_BINMAN_EMBED := -T $(SRC)u_boot_binman_embed.lds
LDS_EFL_SECTIONS := -T $(SRC)elf_sections.lds
LDS_BLOB := -T $(SRC)blob_syms.lds
TARGETS = u_boot_ucode_ptr u_boot_no_ucode_ptr bss_data bss_data_zero \
u_boot_binman_syms u_boot_binman_syms.bin u_boot_binman_syms_bad \
u_boot_binman_syms_size u_boot_binman_syms_x86 embed_data \
u_boot_binman_embed u_boot_binman_embed_sm elf_sections blob_syms.bin
@ -48,6 +48,9 @@ u_boot_ucode_ptr: u_boot_ucode_ptr.c
bss_data: CFLAGS += $(SRC)bss_data.lds
bss_data: bss_data.c
bss_data_zero: CFLAGS += $(SRC)bss_data_zero.lds
bss_data_zero: bss_data_zero.c
embed_data: CFLAGS += $(SRC)embed_data.lds
embed_data: embed_data.c


@ -7,9 +7,8 @@
*/
int bss_data[10];
int __bss_size = sizeof(bss_data);
int main(void)
{
bss_data[2] = 2;


@ -0,0 +1,16 @@
// SPDX-License-Identifier: GPL-2.0+
/*
* Copyright (c) 2016 Google, Inc
*
* Simple program to create a bss_data region so the symbol can be read
* by binutils. This is used by binman tests.
*/
int bss_data[10];
int main(void)
{
bss_data[2] = 2;
return 0;
}


@ -0,0 +1,15 @@
/* SPDX-License-Identifier: GPL-2.0+ */
/*
* Copyright (c) 2016 Google, Inc
*/
OUTPUT_FORMAT("elf32-i386", "elf32-i386", "elf32-i386")
OUTPUT_ARCH(i386)
ENTRY(_start)
SECTIONS
{
. = 0xfffffdf0;
_start = .;
__bss_size = 0;
}
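The linker script above pins __bss_size to zero so that binman's spl-bss-pad handling sees an empty BSS for this test binary. If you want to double-check which value a built test ELF carries, something like the following works; it uses pyelftools, which is only an assumption here (the binman tests themselves read symbols through their own elf module):

    from elftools.elf.elffile import ELFFile    # pyelftools, assumed available

    def bss_size(fname):
        """Return the value of the __bss_size symbol in an ELF, or None."""
        with open(fname, 'rb') as fd:
            symtab = ELFFile(fd).get_section_by_name('.symtab')
            if symtab is None:
                return None
            for sym in symtab.iter_symbols():
                if sym.name == '__bss_size':
                    return sym['st_value']
        return None

    # bss_size('bss_data_zero') is expected to report 0 for this test binary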


@ -17,6 +17,7 @@ SECTIONS
embed_start = .;
*(.embed*)
embed_end = .;
region_size = 0;
. = ALIGN(32);
*(.data*)
}


@ -13,6 +13,7 @@ from dtoc import fdt_util
import libfdt
from libfdt import QUIET_NOTFOUND
from u_boot_pylib import tools
from u_boot_pylib import tout
# This deals with a device tree, presenting it as an assortment of Node and
# Prop objects, representing nodes and properties, respectively. This file
@ -264,6 +265,13 @@ class Prop:
fdt_obj.setprop(node.Offset(), self.name, self.bytes)
self.dirty = False
def purge(self):
"""Set a property offset to None
The property remains in the tree structure and will be recreated when
the FDT is synced
"""
self._offset = None
class Node:
"""A device tree node
@ -534,8 +542,8 @@ class Node:
""" """
return self.AddData(prop_name, struct.pack('>I', val)) return self.AddData(prop_name, struct.pack('>I', val))
def Subnode(self, name):
"""Create new subnode for the node
Args:
name: name of node to add
@ -544,10 +552,72 @@ class Node:
New subnode that was created
"""
path = self.path + '/' + name
return Node(self._fdt, self, None, name, path)
def AddSubnode(self, name):
"""Add a new subnode to the node, after all other subnodes
Args:
name: name of node to add
Returns:
New subnode that was created
"""
subnode = self.Subnode(name)
self.subnodes.append(subnode)
return subnode
def insert_subnode(self, name):
"""Add a new subnode to the node, before all other subnodes
This deletes other subnodes and sets their offset to None, so that they
will be recreated after this one.
Args:
name: name of node to add
Returns:
New subnode that was created
"""
# Deleting a node invalidates the offsets of all following nodes, so
# process in reverse order so that the offset of each node remains valid
# until deletion.
for subnode in reversed(self.subnodes):
subnode.purge(True)
subnode = self.Subnode(name)
self.subnodes.insert(0, subnode)
return subnode
def purge(self, delete_it=False):
"""Purge this node, setting offset to None and deleting from FDT"""
if self._offset is not None:
if delete_it:
CheckErr(self._fdt._fdt_obj.del_node(self.Offset()),
"Node '%s': delete" % self.path)
self._offset = None
self._fdt.Invalidate()
for prop in self.props.values():
prop.purge()
for subnode in self.subnodes:
subnode.purge(False)
def move_to_first(self):
"""Move the current node to first in its parent's node list"""
parent = self.parent
if parent.subnodes and parent.subnodes[0] == self:
return
for subnode in reversed(parent.subnodes):
subnode.purge(True)
new_subnodes = [self]
for subnode in parent.subnodes:
#subnode.purge(False)
if subnode != self:
new_subnodes.append(subnode)
parent.subnodes = new_subnodes
def Delete(self):
"""Delete a node
@ -635,6 +705,71 @@ class Node:
prop.Sync(auto_resize)
return added
def merge_props(self, src):
"""Copy missing properties (except 'phandle') from another node
Args:
src (Node): Node containing properties to copy
Adds properties which are present in src but not in this node. Any
'phandle' property is not copied since this might result in two nodes
with the same phandle, thus making phandle references ambiguous.
"""
for name, src_prop in src.props.items():
if name != 'phandle' and name not in self.props:
self.props[name] = Prop(self, None, name, src_prop.bytes)
def copy_node(self, src):
"""Copy a node and all its subnodes into this node
Args:
src (Node): Node to copy
Returns:
Node: Resulting destination node
This works recursively.
The new node is put before all other nodes. If the node already
exists, just its subnodes and properties are copied, placing them before
any existing subnodes. Properties which exist in the destination node
already are not copied.
"""
dst = self.FindNode(src.name)
if dst:
dst.move_to_first()
else:
dst = self.insert_subnode(src.name)
dst.merge_props(src)
# Process in reverse order so that they appear correctly in the result,
# since copy_node() puts the node first in the list
for node in reversed(src.subnodes):
dst.copy_node(node)
return dst
def copy_subnodes_from_phandles(self, phandle_list):
"""Copy subnodes of a list of nodes into another node
Args:
phandle_list (list of int): List of phandles of nodes to copy
For each node in the phandle list, its subnodes and their properties are
copied recursively. Note that it does not copy the node itself, nor its
properties.
"""
# Process in reverse order, since new nodes are inserted at the start of
# the destination's node list. We want them to appear in order of the
# phandle list
for phandle in reversed(phandle_list):
parent = self.GetFdt().LookupPhandle(phandle)
tout.debug(f'adding template {parent.path} to node {self.path}')
for node in reversed(parent.subnodes):
dst = self.copy_node(node)
tout.debug(f'merge props from {parent.path} to {dst.path}')
self.merge_props(parent)
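Taken together, the new helpers give Node a small template-expansion API: copy_node() clones one node (with its properties and subnodes) to the front of a destination node, and copy_subnodes_from_phandles() pulls in the contents of every node named in a phandle list. A hedged usage sketch, based on the unit tests below (the .dts fixture is theirs; the compiled .dtb path is assumed):

    from dtoc import fdt, fdt_util

    dtb = fdt.FdtScan('dtoc_test_copy.dtb')    # compiled test fixture (path assumed)
    dest = dtb.GetNode('/dest')

    # Clone /base (properties and subnodes) to the front of /dest
    dest.copy_node(dtb.GetNode('/base'))

    # Or copy the subnodes of every node listed in a phandle property
    phandles = fdt_util.GetPhandleList(dtb.GetNode('/'), 'copy-list')
    dest.copy_subnodes_from_phandles(phandles)

    dtb.Sync(auto_resize=True)                 # write the changes back to the blob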
class Fdt:
"""Provides simple access to a flat device tree blob using libfdts.


@ -0,0 +1,88 @@
// SPDX-License-Identifier: GPL-2.0+
/*
* Test device tree file for dtoc
*
* Copyright 2017 Google, Inc
*/
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
reference = <&over>; /* make sure that the 'over' phandle exists */
copy-list = <&another &base>;
dest {
bootph-all;
compatible = "sandbox,spl-test";
stringarray = "one";
longbytearray = [09 0a 0b 0c 0d 0e 0f 10];
maybe-empty-int = <1>;
first@0 {
a-prop = <456>;
b-prop = <1>;
};
existing {
};
base {
second {
second3 {
};
second2 {
new-prop;
};
second1 {
new-prop;
};
second4 {
};
};
};
};
base: base {
compatible = "sandbox,i2c";
bootph-all;
#address-cells = <1>;
#size-cells = <0>;
over: over {
compatible = "sandbox,pmic";
bootph-all;
reg = <9>;
low-power;
};
first@0 {
reg = <0>;
a-prop = <123>;
};
second: second {
second1 {
some-prop;
};
second2 {
some-prop;
};
};
};
another: another {
new-prop = "hello";
earlier {
wibble = <2>;
};
later {
fibble = <3>;
};
};
};


@ -306,6 +306,119 @@ class TestNode(unittest.TestCase):
self.assertIn("Internal error, node '/spl-test' name mismatch 'i2c@0'", self.assertIn("Internal error, node '/spl-test' name mismatch 'i2c@0'",
str(exc.exception)) str(exc.exception))
def test_copy_node(self):
"""Test copy_node() function"""
def do_copy_checks(dtb, dst, expect_none):
self.assertEqual(
['/dest/base', '/dest/first@0', '/dest/existing'],
[n.path for n in dst.subnodes])
chk = dtb.GetNode('/dest/base')
self.assertTrue(chk)
self.assertEqual(
{'compatible', 'bootph-all', '#address-cells', '#size-cells'},
chk.props.keys())
# Check the first property
prop = chk.props['bootph-all']
self.assertEqual('bootph-all', prop.name)
self.assertEqual(True, prop.value)
self.assertEqual(chk.path, prop._node.path)
# Check the second property
prop2 = chk.props['compatible']
self.assertEqual('compatible', prop2.name)
self.assertEqual('sandbox,i2c', prop2.value)
self.assertEqual(chk.path, prop2._node.path)
base = chk.FindNode('base')
self.assertTrue(chk)
first = dtb.GetNode('/dest/base/first@0')
self.assertTrue(first)
over = dtb.GetNode('/dest/base/over')
self.assertTrue(over)
# Make sure that the phandle for 'over' is not copied
self.assertNotIn('phandle', over.props.keys())
second = dtb.GetNode('/dest/base/second')
self.assertTrue(second)
self.assertEqual([over.name, first.name, second.name],
[n.name for n in chk.subnodes])
self.assertEqual(chk, over.parent)
self.assertEqual(
{'bootph-all', 'compatible', 'reg', 'low-power'},
over.props.keys())
if expect_none:
self.assertIsNone(prop._offset)
self.assertIsNone(prop2._offset)
self.assertIsNone(over._offset)
else:
self.assertTrue(prop._offset)
self.assertTrue(prop2._offset)
self.assertTrue(over._offset)
# Now check ordering of the subnodes
self.assertEqual(
['second1', 'second2', 'second3', 'second4'],
[n.name for n in second.subnodes])
dtb = fdt.FdtScan(find_dtb_file('dtoc_test_copy.dts'))
tmpl = dtb.GetNode('/base')
dst = dtb.GetNode('/dest')
dst.copy_node(tmpl)
do_copy_checks(dtb, dst, expect_none=True)
dtb.Sync(auto_resize=True)
# Now check that the FDT looks correct
new_dtb = fdt.Fdt.FromData(dtb.GetContents())
new_dtb.Scan()
dst = new_dtb.GetNode('/dest')
do_copy_checks(new_dtb, dst, expect_none=False)
def test_copy_subnodes_from_phandles(self):
"""Test copy_node() function"""
dtb = fdt.FdtScan(find_dtb_file('dtoc_test_copy.dts'))
orig = dtb.GetNode('/')
node_list = fdt_util.GetPhandleList(orig, 'copy-list')
dst = dtb.GetNode('/dest')
dst.copy_subnodes_from_phandles(node_list)
pmic = dtb.GetNode('/dest/over')
self.assertTrue(pmic)
subn = dtb.GetNode('/dest/first@0')
self.assertTrue(subn)
self.assertEqual({'a-prop', 'b-prop', 'reg'}, subn.props.keys())
self.assertEqual(
['/dest/earlier', '/dest/later', '/dest/over', '/dest/first@0',
'/dest/second', '/dest/existing', '/dest/base'],
[n.path for n in dst.subnodes])
# Make sure that the phandle for 'over' is not copied
over = dst.FindNode('over')
print('keys', over.props.keys())
self.assertNotIn('phandle', over.props.keys())
# Check the merged properties, first the base ones in '/dest'
expect = {'bootph-all', 'compatible', 'stringarray', 'longbytearray',
'maybe-empty-int'}
# Properties from 'base'
expect.update({'#address-cells', '#size-cells'})
# Properties from 'another'
expect.add('new-prop')
self.assertEqual(expect, set(dst.props.keys()))
class TestProp(unittest.TestCase):
"""Test operation of the Prop class"""


@ -9,7 +9,7 @@ authors = [
{ name="Simon Glass", email="sjg@chromium.org" }, { name="Simon Glass", email="sjg@chromium.org" },
] ]
description = "U-Boot python library" description = "U-Boot python library"
readme = "README.md" readme = "README.rst"
requires-python = ">=3.7" requires-python = ">=3.7"
classifiers = [ classifiers = [
"Programming Language :: Python :: 3", "Programming Language :: Python :: 3",
@ -20,3 +20,7 @@ classifiers = [
[project.urls]
"Homepage" = "https://u-boot.readthedocs.io"
"Bug Tracker" = "https://source.denx.de/groups/u-boot/-/issues"
[tool.setuptools.packages.find]
where = [".."]
include = ["u_boot_pylib*"]