#!/usr/bin/python
# SPDX-License-Identifier: GPL-2.0+
#
# Copyright (C) 2017 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
#
|
2017-06-19 04:08:59 +00:00
|
|
|
"""Device tree to platform data class

This supports converting device tree data to C structure definitions and
static data.
"""
|
|
|
|
|
2017-08-29 20:15:55 +00:00
|
|
|
import collections
|
2017-06-19 04:08:58 +00:00
|
|
|
import copy
|
2020-07-03 11:07:17 +00:00
|
|
|
import os
|
|
|
|
import re
|
2017-06-19 04:08:59 +00:00
|
|
|
import sys
|
2017-06-19 04:08:58 +00:00
|
|
|
|
2020-04-18 00:09:04 +00:00
|
|
|
from dtoc import fdt
|
|
|
|
from dtoc import fdt_util
|
|
|
|
from patman import tools
|
2017-06-19 04:08:58 +00:00
|
|
|
|
|
|
|
# When we see these properties we ignore them - i.e. do not create a
# structure member
PROP_IGNORE_LIST = [
    '#address-cells',
    '#gpio-cells',
    '#size-cells',
    'compatible',
    'linux,phandle',
    'status',
    'phandle',
    'u-boot,dm-pre-reloc',
    'u-boot,dm-tpl',
    'u-boot,dm-spl',
]
|
|
|
|
|
|
|
|
# C type declarations for the types we support
TYPE_NAMES = {
    fdt.TYPE_INT: 'fdt32_t',
    fdt.TYPE_BYTE: 'unsigned char',
    fdt.TYPE_STRING: 'const char *',
    fdt.TYPE_BOOL: 'bool',
    fdt.TYPE_INT64: 'fdt64_t',
}
|
2017-06-19 04:08:58 +00:00
|
|
|
|
|
|
|
# Prefixes used for the generated C identifiers: struct types become
# 'struct dtd_<name>' and the static data variables become 'dtv_<name>'
STRUCT_PREFIX = 'dtd_'
VAL_PREFIX = 'dtv_'

# This holds information about a property which includes phandles.
#
# max_args: integer: Maximum number of arguments that any phandle uses (int).
# args: Number of args for each phandle in the property. The total number of
#     phandles is len(args). This is a list of integers.
PhandleInfo = collections.namedtuple('PhandleInfo', ['max_args', 'args'])
|
|
|
|
|
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
def conv_name_to_c(name):
    """Convert a device-tree name to a C identifier

    This uses multiple replace() calls instead of re.sub() since it is faster
    (400ms for 1m calls versus 1000ms for the 're' version).

    Args:
        name: Name to convert
    Return:
        String containing the C version of this name
    """
    # '@' becomes '_at_' so unit addresses stay readable; the other
    # characters map to a plain underscore
    result = name.replace('@', '_at_')
    for char in ('-', ',', '.'):
        result = result.replace(char, '_')
    return result
|
|
|
|
|
|
|
|
def tab_to(num_tabs, line):
    """Append tabs to a line of text to reach a tab stop.

    Args:
        num_tabs: Tab stop to obtain (0 = column 0, 1 = column 8, etc.)
        line: Line of text to append to

    Returns:
        line with the correct number of tabs appended. If the line already
        extends past that tab stop then a single space is appended.
    """
    if len(line) >= num_tabs * 8:
        # Already past the tab stop - just separate with a single space
        return line + ' '
    return line + '\t' * (num_tabs - len(line) // 8)
|
|
|
|
|
2017-06-19 04:09:02 +00:00
|
|
|
def get_value(ftype, value):
    """Get a value as a C expression

    For integers this returns a byte-swapped (little-endian) hex string
    For bytes this returns a hex string, e.g. 0x12
    For strings this returns a literal string enclosed in quotes
    For booleans this return 'true'

    Args:
        ftype: Data type (fdt_util)
        value: Data value, as a string of bytes
    """
    # Guard-style dispatch on the property type; an unknown type falls
    # through and yields None, as before
    if ftype == fdt.TYPE_INT:
        return '%#x' % fdt_util.fdt32_to_cpu(value)
    if ftype == fdt.TYPE_BYTE:
        return '%#x' % tools.ToByte(value[0])
    if ftype == fdt.TYPE_STRING:
        # Handle evil ACPI backslashes by adding another backslash before them.
        # So "\\_SB.GPO0" in the device tree effectively stays like that in C
        return '"%s"' % value.replace('\\', '\\\\')
    if ftype == fdt.TYPE_BOOL:
        return 'true'
    if ftype == fdt.TYPE_INT64:
        return '%#x' % value
|
2017-06-19 04:09:02 +00:00
|
|
|
|
|
|
|
def get_compat_name(node):
    """Get the node's list of compatible string as a C identifiers

    Args:
        node: Node object to check
    Return:
        List of C identifiers for all the compatible strings
    """
    compat_val = node.props['compatible'].value
    # A single compatible string arrives as a bare value; normalise to a list
    if isinstance(compat_val, list):
        compat_strings = compat_val
    else:
        compat_strings = [compat_val]
    return [conv_name_to_c(compat) for compat in compat_strings]
|
2017-06-19 04:09:02 +00:00
|
|
|
|
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
class DtbPlatdata(object):
    """Provide a means to convert device tree binary data to platform data

    The output of this process is C structures which can be used in space-
    constrained environments where the ~3KB code overhead of device tree
    code is not affordable.

    Properties:
        _fdt: Fdt object, referencing the device tree
        _dtb_fname: Filename of the input device tree binary file
        _valid_nodes: A list of Node object with compatible strings
        _include_disabled: true to include nodes marked status = "disabled"
        _outfile: The current output file (sys.stdout or a real file)
        _warning_disabled: true to disable warnings about driver names not found
        _lines: Stashed list of output lines for outputting in the future
        _drivers: List of valid driver names found in drivers/
        _driver_aliases: Dict that holds aliases for driver names
            key: Driver alias declared with
                U_BOOT_DRIVER_ALIAS(driver_alias, driver_name)
            value: Driver name declared with U_BOOT_DRIVER(driver_name)
        _links: List of links to be included in dm_populate_phandle_data()
        _drivers_additional: List of additional drivers to use during scanning
    """
    def __init__(self, dtb_fname, include_disabled, warning_disabled,
                 drivers_additional=None):
        self._fdt = None
        self._dtb_fname = dtb_fname
        self._valid_nodes = None
        self._include_disabled = include_disabled
        self._outfile = None
        self._warning_disabled = warning_disabled
        self._lines = []
        self._drivers = []
        self._driver_aliases = {}
        self._links = []
        # Use None as the default (not []) to avoid sharing one mutable list
        # across all instances created with the default argument
        self._drivers_additional = drivers_additional or []
|
2020-07-03 11:07:17 +00:00
|
|
|
|
|
|
|
def get_normalized_compat_name(self, node):
|
|
|
|
"""Get a node's normalized compat name
|
|
|
|
|
2020-07-23 03:22:03 +00:00
|
|
|
Returns a valid driver name by retrieving node's list of compatible
|
2020-07-03 11:07:17 +00:00
|
|
|
string as a C identifier and performing a check against _drivers
|
|
|
|
and a lookup in driver_aliases printing a warning in case of failure.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
node: Node object to check
|
|
|
|
Return:
|
|
|
|
Tuple:
|
|
|
|
Driver name associated with the first compatible string
|
|
|
|
List of C identifiers for all the other compatible strings
|
|
|
|
(possibly empty)
|
|
|
|
In case of no match found, the return will be the same as
|
|
|
|
get_compat_name()
|
|
|
|
"""
|
2020-07-23 03:22:03 +00:00
|
|
|
compat_list_c = get_compat_name(node)
|
|
|
|
|
|
|
|
for compat_c in compat_list_c:
|
|
|
|
if not compat_c in self._drivers:
|
|
|
|
compat_c = self._driver_aliases.get(compat_c)
|
|
|
|
if not compat_c:
|
|
|
|
continue
|
|
|
|
|
|
|
|
aliases_c = compat_list_c
|
|
|
|
if compat_c in aliases_c:
|
|
|
|
aliases_c.remove(compat_c)
|
|
|
|
return compat_c, aliases_c
|
|
|
|
|
|
|
|
if not self._warning_disabled:
|
|
|
|
print('WARNING: the driver %s was not found in the driver list'
|
|
|
|
% (compat_list_c[0]))
|
|
|
|
|
|
|
|
return compat_list_c[0], compat_list_c[1:]
|
2017-06-19 04:08:58 +00:00
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
def setup_output(self, fname):
|
2017-06-19 04:08:58 +00:00
|
|
|
"""Set up the output destination
|
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
Once this is done, future calls to self.out() will output to this
|
2017-06-19 04:08:58 +00:00
|
|
|
file.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
fname: Filename to send output to, or '-' for stdout
|
|
|
|
"""
|
|
|
|
if fname == '-':
|
|
|
|
self._outfile = sys.stdout
|
|
|
|
else:
|
|
|
|
self._outfile = open(fname, 'w')
|
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
    def out(self, line):
        """Output a string to the output file

        This writes immediately; use buf() to defer output until later.

        Args:
            line: String to output
        """
        self._outfile.write(line)
|
2017-06-19 04:08:58 +00:00
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
    def buf(self, line):
        """Buffer up a string to send later

        The buffered strings can be retrieved (and cleared) with get_buf().

        Args:
            line: String to add to our 'buffer' list
        """
        self._lines.append(line)
|
2017-06-19 04:08:58 +00:00
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
def get_buf(self):
|
2017-06-19 04:08:58 +00:00
|
|
|
"""Get the contents of the output buffer, and clear it
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
The output buffer, which is then cleared for future use
|
|
|
|
"""
|
|
|
|
lines = self._lines
|
|
|
|
self._lines = []
|
|
|
|
return lines
|
|
|
|
|
2017-08-29 20:16:01 +00:00
|
|
|
    def out_header(self):
        """Output a message indicating that this is an auto-generated file"""
        self.out('''/*
 * DO NOT MODIFY
 *
 * This file was generated by dtoc from a .dtb (device tree binary) file.
 */

''')
|
|
|
|
|
2017-08-29 20:15:55 +00:00
|
|
|
    def get_phandle_argc(self, prop, node_name):
        """Check if a node contains phandles

        We have no reliable way of detecting whether a node uses a phandle
        or not. As an interim measure, use a list of known property names.

        Args:
            prop: Prop object to check
            node_name: Name of the node containing the property (used only in
                error messages)
        Return:
            PhandleInfo with the number of argument cells if this is a
            phandle, else None
        Raises:
            ValueError: if a phandle target cannot be found, or the target
                has no '#clock-cells' / '#gpio-cells' property
        """
        if prop.name in ['clocks', 'cd-gpios']:
            # Normalise a single cell to a one-element list
            if not isinstance(prop.value, list):
                prop.value = [prop.value]
            val = prop.value
            i = 0

            max_args = 0
            args = []
            # Walk the cells: each entry is one phandle cell followed by
            # that target's argument cells
            while i < len(val):
                phandle = fdt_util.fdt32_to_cpu(val[i])
                # If we get to the end of the list, stop. This can happen
                # since some nodes have more phandles in the list than others,
                # but we allocate enough space for the largest list. So those
                # nodes with shorter lists end up with zeroes at the end.
                if not phandle:
                    break
                target = self._fdt.phandle_to_node.get(phandle)
                if not target:
                    raise ValueError("Cannot parse '%s' in node '%s'" %
                                     (prop.name, node_name))
                # The target's '#...-cells' property says how many argument
                # cells follow this phandle cell
                cells = None
                for prop_name in ['#clock-cells', '#gpio-cells']:
                    cells = target.props.get(prop_name)
                    if cells:
                        break
                if not cells:
                    raise ValueError("Node '%s' has no cells property" %
                                     (target.name))
                num_args = fdt_util.fdt32_to_cpu(cells.value)
                max_args = max(max_args, num_args)
                args.append(num_args)
                i += 1 + num_args
            return PhandleInfo(max_args, args)
        return None
|
2017-08-29 20:15:54 +00:00
|
|
|
|
2020-07-03 11:07:17 +00:00
|
|
|
def scan_driver(self, fn):
|
|
|
|
"""Scan a driver file to build a list of driver names and aliases
|
|
|
|
|
|
|
|
This procedure will populate self._drivers and self._driver_aliases
|
|
|
|
|
|
|
|
Args
|
|
|
|
fn: Driver filename to scan
|
|
|
|
"""
|
|
|
|
with open(fn, encoding='utf-8') as fd:
|
|
|
|
try:
|
|
|
|
buff = fd.read()
|
|
|
|
except UnicodeDecodeError:
|
|
|
|
# This seems to happen on older Python versions
|
|
|
|
print("Skipping file '%s' due to unicode error" % fn)
|
|
|
|
return
|
|
|
|
|
|
|
|
# The following re will search for driver names declared as
|
|
|
|
# U_BOOT_DRIVER(driver_name)
|
|
|
|
drivers = re.findall('U_BOOT_DRIVER\((.*)\)', buff)
|
|
|
|
|
|
|
|
for driver in drivers:
|
|
|
|
self._drivers.append(driver)
|
|
|
|
|
|
|
|
# The following re will search for driver aliases declared as
|
|
|
|
# U_BOOT_DRIVER_ALIAS(alias, driver_name)
|
|
|
|
driver_aliases = re.findall('U_BOOT_DRIVER_ALIAS\(\s*(\w+)\s*,\s*(\w+)\s*\)',
|
|
|
|
buff)
|
|
|
|
|
|
|
|
for alias in driver_aliases: # pragma: no cover
|
|
|
|
if len(alias) != 2:
|
|
|
|
continue
|
|
|
|
self._driver_aliases[alias[1]] = alias[0]
|
|
|
|
|
|
|
|
def scan_drivers(self):
|
|
|
|
"""Scan the driver folders to build a list of driver names and aliases
|
|
|
|
|
|
|
|
This procedure will populate self._drivers and self._driver_aliases
|
|
|
|
|
|
|
|
"""
|
|
|
|
basedir = sys.argv[0].replace('tools/dtoc/dtoc', '')
|
|
|
|
if basedir == '':
|
|
|
|
basedir = './'
|
|
|
|
for (dirpath, dirnames, filenames) in os.walk(basedir):
|
|
|
|
for fn in filenames:
|
|
|
|
if not fn.endswith('.c'):
|
|
|
|
continue
|
|
|
|
self.scan_driver(dirpath + '/' + fn)
|
|
|
|
|
2020-07-28 22:06:23 +00:00
|
|
|
for fn in self._drivers_additional:
|
|
|
|
if not isinstance(fn, str) or len(fn) == 0:
|
|
|
|
continue
|
|
|
|
if fn[0] == '/':
|
|
|
|
self.scan_driver(fn)
|
|
|
|
else:
|
|
|
|
self.scan_driver(basedir + '/' + fn)
|
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
    def scan_dtb(self):
        """Scan the device tree to obtain a tree of nodes and properties

        Once this is done, self._fdt.GetRoot() can be called to obtain the
        device tree root node, and progress from there.
        """
        self._fdt = fdt.FdtScan(self._dtb_fname)
|
|
|
|
|
|
|
|
def scan_node(self, root):
|
|
|
|
"""Scan a node and subnodes to build a tree of node and phandle info
|
|
|
|
|
2017-08-29 20:15:53 +00:00
|
|
|
This adds each node to self._valid_nodes.
|
2017-06-19 04:08:58 +00:00
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
Args:
|
|
|
|
root: Root node for scan
|
|
|
|
"""
|
2017-06-19 04:08:58 +00:00
|
|
|
for node in root.subnodes:
|
|
|
|
if 'compatible' in node.props:
|
|
|
|
status = node.props.get('status')
|
2017-06-19 04:09:01 +00:00
|
|
|
if (not self._include_disabled and not status or
|
2017-06-19 04:08:59 +00:00
|
|
|
status.value != 'disabled'):
|
2017-06-19 04:08:58 +00:00
|
|
|
self._valid_nodes.append(node)
|
|
|
|
|
|
|
|
# recurse to handle any subnodes
|
2017-06-19 04:08:59 +00:00
|
|
|
self.scan_node(node)
|
2017-06-19 04:08:58 +00:00
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
def scan_tree(self):
|
2017-06-19 04:08:58 +00:00
|
|
|
"""Scan the device tree for useful information
|
|
|
|
|
|
|
|
This fills in the following properties:
|
|
|
|
_valid_nodes: A list of nodes we wish to consider include in the
|
|
|
|
platform data
|
|
|
|
"""
|
|
|
|
self._valid_nodes = []
|
2017-06-19 04:08:59 +00:00
|
|
|
return self.scan_node(self._fdt.GetRoot())
|
2017-06-19 04:08:58 +00:00
|
|
|
|
2017-08-29 20:15:50 +00:00
|
|
|
@staticmethod
|
|
|
|
def get_num_cells(node):
|
|
|
|
"""Get the number of cells in addresses and sizes for this node
|
|
|
|
|
|
|
|
Args:
|
|
|
|
node: Node to check
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Tuple:
|
|
|
|
Number of address cells for this node
|
|
|
|
Number of size cells for this node
|
|
|
|
"""
|
|
|
|
parent = node.parent
|
|
|
|
na, ns = 2, 2
|
|
|
|
if parent:
|
|
|
|
na_prop = parent.props.get('#address-cells')
|
|
|
|
ns_prop = parent.props.get('#size-cells')
|
|
|
|
if na_prop:
|
|
|
|
na = fdt_util.fdt32_to_cpu(na_prop.value)
|
|
|
|
if ns_prop:
|
|
|
|
ns = fdt_util.fdt32_to_cpu(ns_prop.value)
|
|
|
|
return na, ns
|
|
|
|
|
|
|
|
    def scan_reg_sizes(self):
        """Scan for 64-bit 'reg' properties and update the values

        This finds 'reg' properties with 64-bit data and converts the value to
        an array of 64-values. This allows it to be output in a way that the
        C code can read.

        Raises:
            ValueError: if a 'reg' property is not integer-typed, or its
                length is not a multiple of (#address-cells + #size-cells)
        """
        for node in self._valid_nodes:
            reg = node.props.get('reg')
            if not reg:
                continue
            # na/ns: number of 32-bit cells per address / size value
            na, ns = self.get_num_cells(node)
            total = na + ns

            if reg.type != fdt.TYPE_INT:
                raise ValueError("Node '%s' reg property is not an int" %
                                 node.name)
            if len(reg.value) % total:
                raise ValueError("Node '%s' reg property has %d cells "
                                 'which is not a multiple of na + ns = %d + %d)' %
                                 (node.name, len(reg.value), na, ns))
            reg.na = na
            reg.ns = ns
            if na != 1 or ns != 1:
                # Collapse each (address, size) group of cells into a pair
                # of 64-bit values so the C code sees a uniform layout
                reg.type = fdt.TYPE_INT64
                i = 0
                new_value = []
                val = reg.value
                if not isinstance(val, list):
                    val = [val]
                while i < len(val):
                    addr = fdt_util.fdt_cells_to_cpu(val[i:], reg.na)
                    i += na
                    size = fdt_util.fdt_cells_to_cpu(val[i:], reg.ns)
                    i += ns
                    new_value += [addr, size]
                reg.value = new_value
|
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
def scan_structs(self):
|
2017-06-19 04:08:58 +00:00
|
|
|
"""Scan the device tree building up the C structures we will use.
|
|
|
|
|
|
|
|
Build a dict keyed by C struct name containing a dict of Prop
|
|
|
|
object for each struct field (keyed by property name). Where the
|
|
|
|
same struct appears multiple times, try to use the 'widest'
|
|
|
|
property, i.e. the one with a type which can express all others.
|
|
|
|
|
|
|
|
Once the widest property is determined, all other properties are
|
|
|
|
updated to match that width.
|
|
|
|
"""
|
|
|
|
structs = {}
|
|
|
|
for node in self._valid_nodes:
|
2020-07-03 11:07:17 +00:00
|
|
|
node_name, _ = self.get_normalized_compat_name(node)
|
2017-06-19 04:08:58 +00:00
|
|
|
fields = {}
|
|
|
|
|
|
|
|
# Get a list of all the valid properties in this node.
|
|
|
|
for name, prop in node.props.items():
|
|
|
|
if name not in PROP_IGNORE_LIST and name[0] != '#':
|
|
|
|
fields[name] = copy.deepcopy(prop)
|
|
|
|
|
|
|
|
# If we've seen this node_name before, update the existing struct.
|
|
|
|
if node_name in structs:
|
|
|
|
struct = structs[node_name]
|
|
|
|
for name, prop in fields.items():
|
|
|
|
oldprop = struct.get(name)
|
|
|
|
if oldprop:
|
|
|
|
oldprop.Widen(prop)
|
|
|
|
else:
|
|
|
|
struct[name] = prop
|
|
|
|
|
|
|
|
# Otherwise store this as a new struct.
|
|
|
|
else:
|
|
|
|
structs[node_name] = fields
|
|
|
|
|
|
|
|
upto = 0
|
|
|
|
for node in self._valid_nodes:
|
2020-07-03 11:07:17 +00:00
|
|
|
node_name, _ = self.get_normalized_compat_name(node)
|
2017-06-19 04:08:58 +00:00
|
|
|
struct = structs[node_name]
|
|
|
|
for name, prop in node.props.items():
|
|
|
|
if name not in PROP_IGNORE_LIST and name[0] != '#':
|
|
|
|
prop.Widen(struct[name])
|
|
|
|
upto += 1
|
|
|
|
|
|
|
|
return structs
|
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
    def scan_phandles(self):
        """Figure out what phandles each node uses

        We need to be careful when outputing nodes that use phandles since
        they must come after the declaration of the phandles in the C file.
        Otherwise we get a compiler error since the phandle struct is not yet
        declared.

        This function adds to each node a list of phandle nodes that the node
        depends on. This allows us to output things in the right order.
        """
        for node in self._valid_nodes:
            node.phandles = set()
            for pname, prop in node.props.items():
                if pname in PROP_IGNORE_LIST or pname[0] == '#':
                    continue
                info = self.get_phandle_argc(prop, node.name)
                if info:
                    # Process the list as pairs of (phandle, id)
                    pos = 0
                    for args in info.args:
                        # Each entry is one phandle cell followed by 'args'
                        # argument cells; record only the target node
                        phandle_cell = prop.value[pos]
                        phandle = fdt_util.fdt32_to_cpu(phandle_cell)
                        target_node = self._fdt.phandle_to_node[phandle]
                        node.phandles.add(target_node)
                        pos += 1 + args
|
2017-06-19 04:08:58 +00:00
|
|
|
|
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
    def generate_structs(self, structs):
        """Generate struct definitions for the platform data

        This writes out the body of a header file consisting of structure
        definitions for node in self._valid_nodes. See the documentation in
        doc/driver-model/of-plat.rst for more information.

        Args:
            structs: Dict of structs to output, keyed by C struct name, as
                returned by scan_structs()
        """
        self.out_header()
        self.out('#include <stdbool.h>\n')
        self.out('#include <linux/libfdt.h>\n')

        # Output the struct definition
        for name in sorted(structs):
            self.out('struct %s%s {\n' % (STRUCT_PREFIX, name))
            for pname in sorted(structs[name]):
                prop = structs[name][pname]
                # NOTE(review): structs[name] (a dict) is passed as the
                # node_name argument, which is only used in error messages;
                # 'name' was probably intended - confirm before changing
                info = self.get_phandle_argc(prop, structs[name])
                if info:
                    # For phandles, include a reference to the target
                    struct_name = 'struct phandle_%d_arg' % info.max_args
                    self.out('\t%s%s[%d]' % (tab_to(2, struct_name),
                                             conv_name_to_c(prop.name),
                                             len(info.args)))
                else:
                    ptype = TYPE_NAMES[prop.type]
                    self.out('\t%s%s' % (tab_to(2, ptype),
                                         conv_name_to_c(prop.name)))
                    if isinstance(prop.value, list):
                        self.out('[%d]' % len(prop.value))
                self.out(';\n')
            self.out('};\n')
|
2017-06-19 04:08:58 +00:00
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
    def output_node(self, node):
        """Output the C code for a node

        This emits the static platform-data struct for the node followed by
        its U_BOOT_DEVICE() declaration, buffering the text and then writing
        it to the output file.

        Args:
            node: node to output
        """
        struct_name, _ = self.get_normalized_compat_name(node)
        var_name = conv_name_to_c(node.name)
        self.buf('static struct %s%s %s%s = {\n' %
                 (STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name))
        for pname in sorted(node.props):
            prop = node.props[pname]
            if pname in PROP_IGNORE_LIST or pname[0] == '#':
                continue
            member_name = conv_name_to_c(prop.name)
            self.buf('\t%s= ' % tab_to(3, '.' + member_name))

            # Special handling for lists
            if isinstance(prop.value, list):
                self.buf('{')
                vals = []
                # For phandles, output a reference to the platform data
                # of the target node.
                info = self.get_phandle_argc(prop, node.name)
                if info:
                    # Process the list as pairs of (phandle, id)
                    pos = 0
                    item = 0
                    for args in info.args:
                        phandle_cell = prop.value[pos]
                        phandle = fdt_util.fdt32_to_cpu(phandle_cell)
                        target_node = self._fdt.phandle_to_node[phandle]
                        name = conv_name_to_c(target_node.name)
                        arg_values = []
                        for i in range(args):
                            arg_values.append(str(fdt_util.fdt32_to_cpu(prop.value[pos + 1 + i])))
                        pos += 1 + args
                        # node member is filled with NULL as the real value
                        # will be updated at run-time during dm_init_and_scan()
                        # by dm_populate_phandle_data()
                        vals.append('\t{NULL, {%s}}' % (', '.join(arg_values)))
                        var_node = '%s%s.%s[%d].node' % \
                            (VAL_PREFIX, var_name, member_name, item)
                        # Save the link information to be used to define
                        # dm_populate_phandle_data()
                        self._links.append({'var_node': var_node, 'dev_name': name})
                        item += 1
                    for val in vals:
                        self.buf('\n\t\t%s,' % val)
                else:
                    for val in prop.value:
                        vals.append(get_value(prop.type, val))

                    # Put 8 values per line to avoid very long lines.
                    for i in range(0, len(vals), 8):
                        if i:
                            self.buf(',\n\t\t')
                        self.buf(', '.join(vals[i:i + 8]))
                self.buf('}')
            else:
                self.buf(get_value(prop.type, prop.value))
            self.buf(',\n')
        self.buf('};\n')

        # Add a device declaration
        self.buf('U_BOOT_DEVICE(%s) = {\n' % var_name)
        self.buf('\t.name\t\t= "%s",\n' % struct_name)
        self.buf('\t.platdata\t= &%s%s,\n' % (VAL_PREFIX, var_name))
        self.buf('\t.platdata_size\t= sizeof(%s%s),\n' % (VAL_PREFIX, var_name))
        self.buf('};\n')
        self.buf('\n')

        self.out(''.join(self.get_buf()))
|
2017-06-19 04:08:58 +00:00
|
|
|
|
2017-06-19 04:08:59 +00:00
|
|
|
    def generate_tables(self):
        """Generate device definitions for the platform data

        This writes out C platform data initialisation data and
        U_BOOT_DEVICE() declarations for each valid node. Where a node has
        multiple compatible strings, a #define is used to make them equivalent.

        See the documentation in doc/driver-model/of-plat.rst for more
        information.
        """
        self.out_header()
        self.out('#include <common.h>\n')
        self.out('#include <dm.h>\n')
        self.out('#include <dt-structs.h>\n')
        self.out('\n')
        nodes_to_output = list(self._valid_nodes)

        # Keep outputing nodes until there is none left
        while nodes_to_output:
            node = nodes_to_output[0]
            # Output all the node's dependencies first
            for req_node in node.phandles:
                if req_node in nodes_to_output:
                    self.output_node(req_node)
                    nodes_to_output.remove(req_node)
            self.output_node(node)
            nodes_to_output.remove(node)

        # Define dm_populate_phandle_data() which will add the linking between
        # nodes using DM_GET_DEVICE
        # dtv_dmc_at_xxx.clocks[0].node = DM_GET_DEVICE(clock_controller_at_xxx)
        self.buf('void dm_populate_phandle_data(void) {\n')
        for l in self._links:
            self.buf('\t%s = DM_GET_DEVICE(%s);\n' %
                     (l['var_node'], l['dev_name']))
        self.buf('}\n')

        self.out(''.join(self.get_buf()))
|
2017-06-19 04:09:03 +00:00
|
|
|
|
2020-07-28 22:06:23 +00:00
|
|
|
def run_steps(args, dtb_file, include_disabled, output, warning_disabled=False,
              drivers_additional=None):
    """Run all the steps of the dtoc tool

    Args:
        args: List of non-option arguments provided to the program
        dtb_file: Filename of dtb file to process
        include_disabled: True to include disabled nodes
        output: Name of output file
        warning_disabled: True to disable warnings about missing drivers
        drivers_additional: List of additional driver files to scan, or None

    Raises:
        ValueError: if no command is provided, or a command is unknown
    """
    if not args:
        raise ValueError('Please specify a command: struct, platdata')

    # Use None (not []) as the default to avoid the shared mutable-default
    # pitfall; normalise it to a fresh list here
    plat = DtbPlatdata(dtb_file, include_disabled, warning_disabled,
                       drivers_additional or [])
    plat.scan_drivers()
    plat.scan_dtb()
    plat.scan_tree()
    plat.scan_reg_sizes()
    plat.setup_output(output)
    structs = plat.scan_structs()
    plat.scan_phandles()

    for cmd in args[0].split(','):
        if cmd == 'struct':
            plat.generate_structs(structs)
        elif cmd == 'platdata':
            plat.generate_tables()
        else:
            raise ValueError("Unknown command '%s': (use: struct, platdata)" %
                             cmd)
|