# SPDX-License-Identifier: GPL-2.0+
#
# Copyright (c) 2016 Google, Inc
#

import glob
import os
import shlex
import shutil
import sys
import tempfile
import urllib.request

from patman import command
from patman import tout

# Output directory (generally this is temporary)
outdir = None

# True to keep the output directory around after exiting
preserve_outdir = False

# Path to the Chrome OS chroot, if we know it
chroot_path = None

# Search paths to use for filename(), used to find files
search_paths = []

# Search paths to use for tools executed by run()
tool_search_paths = []

# Tools and the packages that contain them, on Debian
packages = {
    'lz4': 'liblz4-tool',
    }

# List of paths to use when looking for an input file
indir = []

def prepare_output_dir(dirname, preserve=False):
    """Select an output directory, ensuring it exists.

    This either creates a temporary directory or checks that the one supplied
    by the user is valid. For a temporary directory, it makes a note to
    remove it later if required.

    Args:
        dirname: a string, name of the output directory to use to store
                intermediate and output files. If None, a temporary directory
                is created.
        preserve: a Boolean. If outdir above is None and preserve is False, the
                created temporary directory will be destroyed on exit.

    Raises:
        ValueError: If the output directory cannot be created.
    """
    global outdir, preserve_outdir

    preserve_outdir = dirname or preserve
    if dirname:
        outdir = dirname
        if not os.path.isdir(outdir):
            try:
                os.makedirs(outdir)
            except OSError as err:
                raise ValueError(
                    f"Cannot make output directory '{outdir}': '{err.strerror}'")
        tout.debug("Using output directory '%s'" % outdir)
    else:
        outdir = tempfile.mkdtemp(prefix='binman.')
        tout.debug("Using temporary directory '%s'" % outdir)
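
# Example (illustrative sketch, not executed here): the typical lifecycle is
# to prepare an output directory once, write intermediate files into it and
# tidy up at the end, e.g.:
#
#     prepare_output_dir(None)                  # creates a 'binman.*' tempdir
#     fname = get_output_filename('image.bin')
#     write_file(fname, get_bytes(0, 16))
#     finalise_output_dir()                     # deleted, since not preserved
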
def _remove_output_dir():
    """Remove the output directory and reset outdir to None"""
    global outdir

    shutil.rmtree(outdir)
    tout.debug("Deleted temporary directory '%s'" % outdir)
    outdir = None

def finalise_output_dir():
    """Tidy up: delete output directory if temporary and not preserved."""
    global outdir, preserve_outdir

    if outdir and not preserve_outdir:
        _remove_output_dir()
        outdir = None

def get_output_filename(fname):
    """Return a filename within the output directory.

    Args:
        fname: Filename to use for new file

    Returns:
        The full path of the filename, within the output directory
    """
    return os.path.join(outdir, fname)

def get_output_dir():
    """Return the current output directory

    Returns:
        str: The output directory
    """
    return outdir

def _finalise_for_test():
    """Remove the output directory (for use by tests)"""
    global outdir

    if outdir:
        _remove_output_dir()
        outdir = None

def set_input_dirs(dirname):
    """Add a list of input directories, where input files are kept.

    Args:
        dirname: a list of paths to input directories to use for obtaining
                files needed by binman to place in the image.
    """
    global indir

    indir = dirname
    tout.debug("Using input directories %s" % indir)

def get_input_filename(fname, allow_missing=False):
    """Return a filename for use as input.

    Args:
        fname: Filename to use for new file
        allow_missing: True if the filename can be missing

    Returns:
        fname, if there are no input directories (or fname is absolute);
        full path of the filename, within the input directory;
        None, if file is missing and allow_missing is True

    Raises:
        ValueError if file is missing and allow_missing is False
    """
    if not indir or fname[:1] == '/':
        return fname
    for dirname in indir:
        pathname = os.path.join(dirname, fname)
        if os.path.exists(pathname):
            return pathname

    if allow_missing:
        return None
    raise ValueError("Filename '%s' not found in input path (%s) (cwd='%s')" %
                     (fname, ','.join(indir), os.getcwd()))
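
# Example (illustrative, hypothetical directory names): callers normally
# register the input directories once and then resolve files by leaf name:
#
#     set_input_dirs(['board/myboard', 'build/myboard'])
#     path = get_input_filename('u-boot.bin')             # raises if missing
#     path = get_input_filename('extra.bin', allow_missing=True)   # or None
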
def get_input_filename_glob(pattern):
    """Return a list of filenames for use as input.

    Args:
        pattern: Filename pattern to search for

    Returns:
        A list of matching files in all input directories
    """
    if not indir:
        return glob.glob(pattern)
    files = []
    for dirname in indir:
        pathname = os.path.join(dirname, pattern)
        files += glob.glob(pathname)
    return sorted(files)

def align(pos, align):
    """Round a position up to the given power-of-two alignment boundary"""
    if align:
        mask = align - 1
        pos = (pos + mask) & ~mask
    return pos

def not_power_of_two(num):
    """Return a truthy value if num is non-zero and not a power of two"""
    return num and (num & (num - 1))
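
# Example (illustrative): align() rounds an offset up to the next multiple of
# a power-of-two boundary, and not_power_of_two() is truthy for invalid
# alignment values:
#
#     align(0x123, 0x100)        # -> 0x200
#     align(0x200, 0x100)        # -> 0x200 (already aligned)
#     not_power_of_two(0x60)     # -> truthy (0x60 is not a power of two)
#     not_power_of_two(0x40)     # -> 0 (falsy: 0x40 is a power of two)
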
def set_tool_paths(toolpaths):
    """Set the path to search for tools

    Args:
        toolpaths: List of paths to search for tools executed by run()
    """
    global tool_search_paths

    tool_search_paths = toolpaths

def path_has_file(path_spec, fname):
    """Check if a given filename is in the PATH

    Args:
        path_spec: Value of PATH variable to check
        fname: Filename to check

    Returns:
        True if found, False if not
    """
    for dir in path_spec.split(':'):
        if os.path.exists(os.path.join(dir, fname)):
            return True
    return False
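
# Example (illustrative, hypothetical directory): extra tool directories are
# put ahead of PATH by get_env_with_path(), and path_has_file() checks a
# PATH-style string:
#
#     set_tool_paths(['/opt/custom/bin'])
#     path_has_file(os.environ['PATH'], 'ls')    # -> True on most systems
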
def get_host_compile_tool(env, name):
    """Get the host-specific version for a compile tool

    This checks the environment variables that specify which version of
    the tool should be used (e.g. ${HOSTCC}).

    The following table lists the host-specific versions of the tools
    this function resolves to:

    Compile Tool  | Host version
    --------------+----------------
    as            | ${HOSTAS}
    ld            | ${HOSTLD}
    cc            | ${HOSTCC}
    cpp           | ${HOSTCPP}
    c++           | ${HOSTCXX}
    ar            | ${HOSTAR}
    nm            | ${HOSTNM}
    ldr           | ${HOSTLDR}
    strip         | ${HOSTSTRIP}
    objcopy       | ${HOSTOBJCOPY}
    objdump       | ${HOSTOBJDUMP}
    dtc           | ${HOSTDTC}

    Args:
        env: Environment dict to check for the HOST* variables
        name: Command name to run

    Returns:
        host_name: Exact command name to run instead
        extra_args: List of extra arguments to pass
    """
    host_name = None
    extra_args = []
    if name in ('as', 'ld', 'cc', 'cpp', 'ar', 'nm', 'ldr', 'strip',
                'objcopy', 'objdump', 'dtc'):
        host_name, *extra_args = env.get('HOST' + name.upper(), '').split(' ')
    elif name == 'c++':
        host_name, *extra_args = env.get('HOSTCXX', '').split(' ')

    if host_name:
        return host_name, extra_args
    return name, []
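
# Example (illustrative): with HOSTCC set in the environment, the host tool is
# taken from it; otherwise the plain name is returned unchanged:
#
#     get_host_compile_tool({'HOSTCC': 'clang'}, 'cc')   # -> ('clang', [])
#     get_host_compile_tool({}, 'cc')                    # -> ('cc', [])
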
def get_target_compile_tool(name, cross_compile=None):
    """Get the target-specific version for a compile tool

    This first checks the environment variables that specify which
    version of the tool should be used (e.g. ${CC}). If those aren't
    specified, it checks the CROSS_COMPILE variable as a prefix for the
    tool with some substitutions (e.g. "${CROSS_COMPILE}gcc" for cc).

    The following table lists the target-specific versions of the tools
    this function resolves to:

    Compile Tool  | First choice   | Second choice
    --------------+----------------+----------------------------
    as            | ${AS}          | ${CROSS_COMPILE}as
    ld            | ${LD}          | ${CROSS_COMPILE}ld.bfd
                  |                |   or ${CROSS_COMPILE}ld
    cc            | ${CC}          | ${CROSS_COMPILE}gcc
    cpp           | ${CPP}         | ${CROSS_COMPILE}gcc -E
    c++           | ${CXX}         | ${CROSS_COMPILE}g++
    ar            | ${AR}          | ${CROSS_COMPILE}ar
    nm            | ${NM}          | ${CROSS_COMPILE}nm
    ldr           | ${LDR}         | ${CROSS_COMPILE}ldr
    strip         | ${STRIP}       | ${CROSS_COMPILE}strip
    objcopy       | ${OBJCOPY}     | ${CROSS_COMPILE}objcopy
    objdump       | ${OBJDUMP}     | ${CROSS_COMPILE}objdump
    dtc           | ${DTC}         | (no CROSS_COMPILE version)

    Args:
        name: Command name to run
        cross_compile: Cross-compile prefix to use; if None, the
            CROSS_COMPILE environment variable is used

    Returns:
        target_name: Exact command name to run instead
        extra_args: List of extra arguments to pass
    """
    env = dict(os.environ)

    target_name = None
    extra_args = []
    if name in ('as', 'ld', 'cc', 'cpp', 'ar', 'nm', 'ldr', 'strip',
                'objcopy', 'objdump', 'dtc'):
        target_name, *extra_args = env.get(name.upper(), '').split(' ')
    elif name == 'c++':
        target_name, *extra_args = env.get('CXX', '').split(' ')

    if target_name:
        return target_name, extra_args

    if cross_compile is None:
        cross_compile = env.get('CROSS_COMPILE', '')

    if name in ('as', 'ar', 'nm', 'ldr', 'strip', 'objcopy', 'objdump'):
        target_name = cross_compile + name
    elif name == 'ld':
        try:
            if run(cross_compile + 'ld.bfd', '-v'):
                target_name = cross_compile + 'ld.bfd'
        except:
            target_name = cross_compile + 'ld'
    elif name == 'cc':
        target_name = cross_compile + 'gcc'
    elif name == 'cpp':
        target_name = cross_compile + 'gcc'
        extra_args = ['-E']
    elif name == 'c++':
        target_name = cross_compile + 'g++'
    else:
        target_name = name
    return target_name, extra_args
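
# Example (illustrative): with a cross-compile prefix and no CC/CPP set in the
# environment, the names resolve to the prefixed tools:
#
#     get_target_compile_tool('cc', 'aarch64-linux-gnu-')
#         # -> ('aarch64-linux-gnu-gcc', [])
#     get_target_compile_tool('cpp', 'aarch64-linux-gnu-')
#         # -> ('aarch64-linux-gnu-gcc', ['-E'])
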
def get_env_with_path():
    """Get an updated environment with the PATH variable set correctly

    If there are any search paths set, these need to come first in the PATH so
    that these override any other version of the tools.

    Returns:
        dict: New environment with PATH updated, or None if there are no
            search paths
    """
    if tool_search_paths:
        env = dict(os.environ)
        env['PATH'] = ':'.join(tool_search_paths) + ':' + env['PATH']
        return env

def run_result(name, *args, **kwargs):
    """Run a tool with some arguments

    This runs a 'tool', which is a program used by binman to process files and
    perhaps produce some output. Tools can be located on the PATH or in a
    search path.

    Args:
        name: Command name to run
        args: Arguments to the tool
        for_host: True to resolve the command to the version for the host
        for_target: False to run the command as-is, without resolving it
            to the version for the compile target
        raise_on_error: Raise an error if the command fails (True by default)

    Returns:
        CommandResult object
    """
    try:
        binary = kwargs.get('binary')
        for_host = kwargs.get('for_host', False)
        for_target = kwargs.get('for_target', not for_host)
        raise_on_error = kwargs.get('raise_on_error', True)
        env = get_env_with_path()
        if for_target:
            name, extra_args = get_target_compile_tool(name)
            args = tuple(extra_args) + args
        elif for_host:
            name, extra_args = get_host_compile_tool(env, name)
            args = tuple(extra_args) + args
        name = os.path.expanduser(name)  # Expand paths containing ~
        all_args = (name,) + args
        result = command.run_pipe([all_args], capture=True, capture_stderr=True,
                                  env=env, raise_on_error=False, binary=binary)
        if result.return_code:
            if raise_on_error:
                raise ValueError("Error %d running '%s': %s" %
                                 (result.return_code, ' '.join(all_args),
                                  result.stderr or result.stdout))
        return result
    except ValueError:
        if env and not path_has_file(env['PATH'], name):
            msg = "Please install tool '%s'" % name
            package = packages.get(name)
            if package:
                msg += " (e.g. from package '%s')" % package
            raise ValueError(msg)
        raise
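
# Example (illustrative, 'sometool' is a hypothetical command name):
# run_result() returns a CommandResult, so callers can inspect the return
# code instead of getting an exception:
#
#     result = run_result('sometool', '--version', for_target=False,
#                         raise_on_error=False)
#     if result.return_code:
#         print("sometool is not available:", result.stderr)
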
def tool_find(name):
    """Search the current path for a tool

    This uses both PATH and any value from set_tool_paths() to search for a
    tool

    Args:
        name (str): Name of tool to locate

    Returns:
        str: Full path to tool if found, else None
    """
    name = os.path.expanduser(name)  # Expand paths containing ~
    paths = []
    pathvar = os.environ.get('PATH')
    if pathvar:
        paths = pathvar.split(':')
    if tool_search_paths:
        paths += tool_search_paths
    for path in paths:
        fname = os.path.join(path, name)
        if os.path.isfile(fname) and os.access(fname, os.X_OK):
            return fname
    return None

def run(name, *args, **kwargs):
    """Run a tool with some arguments

    This runs a 'tool', which is a program used by binman to process files and
    perhaps produce some output. Tools can be located on the PATH or in a
    search path.

    Args:
        name: Command name to run
        args: Arguments to the tool
        for_host: True to resolve the command to the version for the host
        for_target: False to run the command as-is, without resolving it
            to the version for the compile target

    Returns:
        stdout output from the tool, if it produced a result
    """
    result = run_result(name, *args, **kwargs)
    if result is not None:
        return result.stdout
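
# Example (illustrative, hypothetical tool name): run() is the convenience
# wrapper that returns just the tool's stdout:
#
#     version_text = run('sometool', '--version', for_target=False)
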
def filename(fname):
    """Resolve a file path to an absolute path.

    If fname starts with ##/ and chroot is available, ##/ gets replaced with
    the chroot path. If chroot is not available, this file name can not be
    resolved and `None' is returned.

    If fname is not prepended with the above prefix, and is not an existing
    file, the actual file name is retrieved from the passed in string and the
    search_paths directories (if any) are searched for the file. If found, the
    path to the found file is returned; otherwise the path is returned
    unchanged.

    Args:
        fname: a string, the path to resolve.

    Returns:
        Path to the file, the unchanged path if it was not found in the
        search paths, or None if a ##/ prefix could not be resolved.
    """
    if fname.startswith('##/'):
        if chroot_path:
            fname = os.path.join(chroot_path, fname[3:])
        else:
            return None

    # Search for a pathname that exists, and return it if found
    if fname and not os.path.exists(fname):
        for path in search_paths:
            pathname = os.path.join(path, os.path.basename(fname))
            if os.path.exists(pathname):
                return pathname

    # If not found, just return the standard, unchanged path
    return fname
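
# Example (illustrative, hypothetical paths): with chroot_path set, a '##/'
# prefix is mapped into the chroot; other names are looked up in search_paths:
#
#     chroot_path = '/path/to/chroot'
#     filename('##/usr/share/firmware/fw.bin')
#         # -> '/path/to/chroot/usr/share/firmware/fw.bin'
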
def read_file(fname, binary=True):
    """Read and return the contents of a file.

    Args:
        fname: path to filename to read, where ## signifies the chroot.
        binary: True to read the file in binary mode

    Returns:
        data read from file, as bytes (or a str if binary is False).
    """
    with open(filename(fname), binary and 'rb' or 'r') as fd:
        data = fd.read()
    #self._out.Info("Read file '%s' size %d (%#0x)" %
    #               (fname, len(data), len(data)))
    return data

def write_file(fname, data, binary=True):
    """Write data into a file.

    Args:
        fname: path to filename to write
        data: data to write to file, as bytes (or a str if binary is False)
        binary: True to write the file in binary mode
    """
    #self._out.Info("Write file '%s' size %d (%#0x)" %
    #               (fname, len(data), len(data)))
    with open(filename(fname), binary and 'wb' or 'w') as fd:
        fd.write(data)
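
# Example (illustrative): read_file()/write_file() default to binary mode, so
# a round trip preserves bytes exactly:
#
#     write_file('/tmp/test.bin', bytes([0x55, 0xaa]))
#     data = read_file('/tmp/test.bin')          # -> b'\x55\xaa'
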
def get_bytes(byte, size):
    """Get a string of bytes of a given size

    Args:
        byte: Numeric byte value to use
        size: Size of bytes/string to return

    Returns:
        A bytes type with 'byte' repeated 'size' times
    """
    return bytes([byte]) * size

def to_bytes(string):
    """Convert a str type into a bytes type

    Args:
        string: string to convert

    Returns:
        A bytes type
    """
    return string.encode('utf-8')

def to_string(bval):
    """Convert a bytes type into a str type

    Args:
        bval: bytes value to convert

    Returns:
        A str type (the bytes decoded as UTF-8)
    """
    return bval.decode('utf-8')
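
# Example (illustrative): these helpers keep the bytes/str conversions in one
# place:
#
#     get_bytes(0xff, 4)        # -> b'\xff\xff\xff\xff'
#     to_bytes('u-boot')        # -> b'u-boot'
#     to_string(b'u-boot')      # -> 'u-boot'
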
def to_hex(val):
    """Convert an integer value (or None) to a string

    Returns:
        hex value, or 'None' if the value is None
    """
    return 'None' if val is None else '%#x' % val

def to_hex_size(val):
    """Return the size of an object in hex

    Returns:
        hex value of size, or 'None' if the value is None
    """
    return 'None' if val is None else '%#x' % len(val)
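
# Example (illustrative): to_hex()/to_hex_size() are mainly used for debug and
# error messages, where a value may legitimately be None:
#
#     to_hex(0x1234)            # -> '0x1234'
#     to_hex(None)              # -> 'None'
#     to_hex_size(b'abcd')      # -> '0x4'
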
def print_full_help(fname):
    """Print the full help message for a tool using an appropriate pager.

    Args:
        fname: Path to a file containing the full help message
    """
    pager = shlex.split(os.getenv('PAGER', ''))
    if not pager:
        lesspath = shutil.which('less')
        pager = [lesspath] if lesspath else None
    if not pager:
        pager = ['more']
    command.run(*pager, fname)

def download(url, tmpdir_pattern='.patman'):
    """Download a file to a temporary directory

    Args:
        url (str): URL to download
        tmpdir_pattern (str): pattern to use for the temporary directory

    Returns:
        Tuple:
            Full path to the downloaded archive file in that directory,
                or None if there was an error while downloading
            Temporary directory name
    """
    print('- downloading: %s' % url)
    leaf = url.split('/')[-1]
    tmpdir = tempfile.mkdtemp(tmpdir_pattern)
    response = urllib.request.urlopen(url)
    fname = os.path.join(tmpdir, leaf)
    fd = open(fname, 'wb')
    meta = response.info()
    size = int(meta.get('Content-Length'))
    done = 0
    block_size = 1 << 16
    status = ''

    # Read the file in chunks and show progress as we go
    while True:
        buffer = response.read(block_size)
        if not buffer:
            print(chr(8) * (len(status) + 1), '\r', end=' ')
            break

        done += len(buffer)
        fd.write(buffer)
        status = r'%10d MiB [%3d%%]' % (done // 1024 // 1024,
                                        done * 100 // size)
        status = status + chr(8) * (len(status) + 1)
        print(status, end=' ')
        sys.stdout.flush()
    print('\r', end='')
    sys.stdout.flush()
    fd.close()
    if done != size:
        print('Error, failed to download')
        os.remove(fname)
        fname = None
    return fname, tmpdir
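
# Example (illustrative, hypothetical URL): the caller is responsible for
# removing the temporary directory once the downloaded file has been used:
#
#     fname, tmpdir = download('https://example.com/toolchain.tar.xz')
#     if fname:
#         pass  # unpack or copy fname as needed
#     shutil.rmtree(tmpdir)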