# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2012 The Chromium OS Authors.
#

import re
import glob
from html.parser import HTMLParser
import os
import sys
import tempfile
import urllib.request, urllib.error, urllib.parse

from buildman import bsettings
from patman import command
from patman import terminal
from patman import tools

(PRIORITY_FULL_PREFIX, PRIORITY_PREFIX_GCC, PRIORITY_PREFIX_GCC_PATH,
    PRIORITY_CALC) = list(range(4))

(VAR_CROSS_COMPILE, VAR_PATH, VAR_ARCH, VAR_MAKE_ARGS) = range(4)

# Simple class to collect links from a page
class MyHTMLParser(HTMLParser):
    def __init__(self, arch):
        """Create a new parser

        After the parser runs, self.links will be set to a list of the links
        to .xz archives found in the page, and self.arch_link will be set to
        the one for the given architecture (or None if not found).

        Args:
            arch: Architecture to search for
        """
        HTMLParser.__init__(self)
        self.arch_link = None
        self.links = []
        self.re_arch = re.compile('[-_]%s-' % arch)

    def handle_starttag(self, tag, attrs):
        if tag == 'a':
            for tag, value in attrs:
                if tag == 'href':
                    if value and value.endswith('.xz'):
                        self.links.append(value)
                        if self.re_arch.search(value):
                            self.arch_link = value
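
# Example (illustrative sketch): LocateArchUrl() below feeds a kernel.org
# directory listing into this parser and reads back the collected links.
# The HTML snippet here is made up for illustration.
#
#   parser = MyHTMLParser('aarch64')
#   parser.feed('<a href="x86_64-gcc-11.1.0-nolibc-aarch64-linux.tar.xz">')
#   # parser.links == ['x86_64-gcc-11.1.0-nolibc-aarch64-linux.tar.xz']
#   # parser.arch_link == 'x86_64-gcc-11.1.0-nolibc-aarch64-linux.tar.xz'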


class Toolchain:
    """A single toolchain

    Public members:
        gcc: Full path to C compiler
        path: Directory path containing C compiler
        cross: Cross compile string, e.g. 'arm-linux-'
        arch: Architecture of toolchain as determined from the first
                component of the filename. E.g. arm-linux-gcc becomes arm
        priority: Toolchain priority (0=highest, 20=lowest)
        override_toolchain: Toolchain to use for sandbox, overriding the normal
                one
    """
    def __init__(self, fname, test, verbose=False, priority=PRIORITY_CALC,
                 arch=None, override_toolchain=None):
        """Create a new toolchain object.

        Args:
            fname: Filename of the gcc component
            test: True to run the toolchain to test it
            verbose: True to print out the information
            priority: Priority to use for this toolchain, or PRIORITY_CALC to
                calculate it
            arch: Architecture of the toolchain, or None to work it out from
                the filename
            override_toolchain: Toolchain to use for sandbox, overriding the
                normal one
        """
        self.gcc = fname
        self.path = os.path.dirname(fname)
        self.override_toolchain = override_toolchain

        # Find the CROSS_COMPILE prefix to use for U-Boot. For example,
        # 'arm-linux-gnueabihf-gcc' turns into 'arm-linux-gnueabihf-'.
        basename = os.path.basename(fname)
        pos = basename.rfind('-')
        self.cross = basename[:pos + 1] if pos != -1 else ''

        # The architecture is the first part of the name
        pos = self.cross.find('-')
        if arch:
            self.arch = arch
        else:
            self.arch = self.cross[:pos] if pos != -1 else 'sandbox'
        if self.arch == 'sandbox' and override_toolchain:
            self.gcc = override_toolchain

        env = self.MakeEnvironment(False)

        # As a basic sanity check, run the C compiler with --version
        cmd = [fname, '--version']
        if priority == PRIORITY_CALC:
            self.priority = self.GetPriority(fname)
        else:
            self.priority = priority
        if test:
            result = command.RunPipe([cmd], capture=True, env=env,
                                     raise_on_error=False)
            self.ok = result.return_code == 0
            if verbose:
                print('Tool chain test: ', end=' ')
                if self.ok:
                    print("OK, arch='%s', priority %d" % (self.arch,
                                                          self.priority))
                else:
                    print('BAD')
                    print('Command: ', cmd)
                    print(result.stdout)
                    print(result.stderr)
        else:
            self.ok = True
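
    # Example (illustrative sketch; the path is made up and bsettings.Setup()
    # is assumed to have been called, since the constructor reads the
    # settings file via MakeEnvironment()/GetWrapper()):
    #   tc = Toolchain('/opt/gcc/bin/arm-linux-gnueabihf-gcc', test=False)
    #   # tc.cross == 'arm-linux-gnueabihf-'
    #   # tc.arch  == 'arm'
    #   # tc.path  == '/opt/gcc/bin'
    #   # tc.ok    == True (no compiler test was run since test=False)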

    def GetPriority(self, fname):
        """Return the priority of the toolchain.

        Toolchains are ranked according to their suitability by their
        filename prefix.

        Args:
            fname: Filename of toolchain
        Returns:
            Priority of toolchain, PRIORITY_CALC=highest, 20=lowest.
        """
        priority_list = ['-elf', '-unknown-linux-gnu', '-linux',
            '-none-linux-gnueabi', '-none-linux-gnueabihf', '-uclinux',
            '-none-eabi', '-gentoo-linux-gnu', '-linux-gnueabi',
            '-linux-gnueabihf', '-le-linux', '-uclinux']
        for prio in range(len(priority_list)):
            if priority_list[prio] in fname:
                return PRIORITY_CALC + prio
        return PRIORITY_CALC + prio
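
    # Example (illustrative sketch): with PRIORITY_CALC == 3, a name such as
    # 'arm-unknown-linux-gnu-gcc' matches '-unknown-linux-gnu' (index 1) and
    # so gets priority 3 + 1 == 4, while 'aarch64-elf-gcc' matches '-elf'
    # (index 0) and gets priority 3. Lower numbers win when Toolchains.Add()
    # compares candidates for the same architecture.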

    def GetWrapper(self, show_warning=True):
        """Get toolchain wrapper from the setting file.
        """
        value = ''
        for name, value in bsettings.GetItems('toolchain-wrapper'):
            if not value:
                print("Warning: Wrapper not found")
        if value:
            value = value + ' '

        return value

    def GetEnvArgs(self, which):
        """Get an environment variable or argument value based on the toolchain

        Args:
            which: VAR_... value to get

        Returns:
            Value of that environment variable or arguments
        """
        wrapper = self.GetWrapper()
        if which == VAR_CROSS_COMPILE:
            return wrapper + os.path.join(self.path, self.cross)
        elif which == VAR_PATH:
            return self.path
        elif which == VAR_ARCH:
            return self.arch
        elif which == VAR_MAKE_ARGS:
            args = self.MakeArgs()
            if args:
                return ' '.join(args)
            return ''
        else:
            raise ValueError('Unknown arg to GetEnvArgs (%d)' % which)
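
    # Example (illustrative sketch; continues the hypothetical toolchain at
    # /opt/gcc/bin used above, with no wrapper configured):
    #   tc.GetEnvArgs(VAR_CROSS_COMPILE)
    #   # -> '/opt/gcc/bin/arm-linux-gnueabihf-'
    #   tc.GetEnvArgs(VAR_ARCH)
    #   # -> 'arm'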

    def MakeEnvironment(self, full_path):
        """Returns an environment for using the toolchain.

        This takes the current environment and adds CROSS_COMPILE so that
        the tool chain will operate correctly. This also disables localized
        output and possibly unicode encoded output of all build tools by
        adding LC_ALL=C.

        Note that os.environb is used to obtain the environment, since in some
        cases the environment may contain non-ASCII characters and we see
        errors like:

          UnicodeEncodeError: 'utf-8' codec can't encode characters in position
          569-570: surrogates not allowed

        Args:
            full_path: Return the full path in CROSS_COMPILE and don't set
                PATH
        Returns:
            Dict containing the (bytes) environment to use. This is based on
            the current environment, with changes as needed to CROSS_COMPILE,
            PATH and LC_ALL.
        """
        env = dict(os.environb)
        wrapper = self.GetWrapper()

        if self.override_toolchain:
            # We'll use MakeArgs() to provide this
            pass
        elif full_path:
            env[b'CROSS_COMPILE'] = tools.ToBytes(
                wrapper + os.path.join(self.path, self.cross))
        else:
            env[b'CROSS_COMPILE'] = tools.ToBytes(wrapper + self.cross)
            env[b'PATH'] = tools.ToBytes(self.path) + b':' + env[b'PATH']

        env[b'LC_ALL'] = b'C'

        return env
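
    # Example (illustrative sketch, same hypothetical toolchain as above):
    #   env = tc.MakeEnvironment(full_path=False)
    #   # env[b'CROSS_COMPILE'] == b'arm-linux-gnueabihf-'
    #   # env[b'PATH'] starts with b'/opt/gcc/bin:'
    #   # env[b'LC_ALL'] == b'C'
    # With full_path=True, CROSS_COMPILE instead holds the full prefix
    # b'/opt/gcc/bin/arm-linux-gnueabihf-' and PATH is left unchanged.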

    def MakeArgs(self):
        """Create the 'make' arguments for a toolchain

        This is only used when the toolchain is being overridden. Since the
        U-Boot Makefile sets CC and HOSTCC explicitly we cannot rely on the
        environment (and MakeEnvironment()) to override these values. This
        function returns the arguments to accomplish this.

        Returns:
            List of arguments to pass to 'make'
        """
        if self.override_toolchain:
            return ['HOSTCC=%s' % self.override_toolchain,
                    'CC=%s' % self.override_toolchain]
        return []


class Toolchains:
    """Manage a list of toolchains for building U-Boot

    We select one toolchain for each architecture type

    Public members:
        toolchains: Dict of Toolchain objects, keyed by architecture name
        prefixes: Dict of prefixes to check, keyed by architecture. This can
            be a full path and toolchain prefix, for example
            {'x86': 'opt/i386-linux/bin/i386-linux-'}, or the name of
            something on the search path, for example
            {'arm': 'arm-linux-gnueabihf-'}. Wildcards are not supported.
        paths: List of paths to check for toolchains (may contain wildcards)
    """

    def __init__(self, override_toolchain=None):
        self.toolchains = {}
        self.prefixes = {}
        self.paths = []
        self.override_toolchain = override_toolchain
        self._make_flags = dict(bsettings.GetItems('make-flags'))

    def GetPathList(self, show_warning=True):
        """Get a list of available toolchain paths

        Args:
            show_warning: True to show a warning if there are no tool chains.

        Returns:
            List of strings, each a path to a toolchain mentioned in the
            [toolchain] section of the settings file.
        """
        toolchains = bsettings.GetItems('toolchain')
        if show_warning and not toolchains:
            print(("Warning: No tool chains. Please run 'buildman "
                   "--fetch-arch all' to download all available toolchains, or "
                   "add a [toolchain] section to your buildman config file "
                   "%s. See README for details" %
                   bsettings.config_fname))

        paths = []
        for name, value in toolchains:
            if '*' in value:
                paths += glob.glob(value)
            else:
                paths.append(value)
        return paths
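
    # Example (illustrative sketch of a [toolchain] section in the buildman
    # settings file; the key names and paths are made up):
    #
    #   [toolchain]
    #   main: /usr
    #   download: /home/me/.buildman-toolchains/*/*
    #
    # Plain values are used as-is; values containing '*' are expanded with
    # glob.glob(), as in the code above.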

    def GetSettings(self, show_warning=True):
        """Get toolchain settings from the settings file.

        Args:
            show_warning: True to show a warning if there are no tool chains.
        """
        self.prefixes = bsettings.GetItems('toolchain-prefix')
        self.paths += self.GetPathList(show_warning)
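
    # Example (illustrative sketch of how a caller typically drives this
    # class; error handling omitted):
    #   bsettings.Setup()
    #   toolchains = Toolchains()
    #   toolchains.GetSettings()
    #   toolchains.Scan(verbose=False)
    #   tc = toolchains.Select('arm')   # raises ValueError if none found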

    def Add(self, fname, test=True, verbose=False, priority=PRIORITY_CALC,
            arch=None):
        """Add a toolchain to our list

        We select the given toolchain as our preferred one for its
        architecture if it is a higher priority than the others.

        Args:
            fname: Filename of toolchain's gcc driver
            test: True to run the toolchain to test it
            verbose: True to print out progress information
            priority: Priority to use for this toolchain
            arch: Toolchain architecture, or None if not known
        """
        toolchain = Toolchain(fname, test, verbose, priority, arch,
                              self.override_toolchain)
        add_it = toolchain.ok
        if toolchain.arch in self.toolchains:
            add_it = (toolchain.priority <
                      self.toolchains[toolchain.arch].priority)
        if add_it:
            self.toolchains[toolchain.arch] = toolchain
        elif verbose:
            print(("Toolchain '%s' at priority %d will be ignored because "
                   "another toolchain for arch '%s' has priority %d" %
                   (toolchain.gcc, toolchain.priority, toolchain.arch,
                    self.toolchains[toolchain.arch].priority)))

    def ScanPath(self, path, verbose):
        """Scan a path for a valid toolchain

        Args:
            path: Path to scan
            verbose: True to print out progress information
        Returns:
            List of filenames of C compilers found (empty if none)
        """
        fnames = []
        for subdir in ['.', 'bin', 'usr/bin']:
            dirname = os.path.join(path, subdir)
            if verbose: print(" - looking in '%s'" % dirname)
            for fname in glob.glob(dirname + '/*gcc'):
                if verbose: print(" - found '%s'" % fname)
                fnames.append(fname)
        return fnames

    def ScanPathEnv(self, fname):
        """Scan the PATH environment variable for a given filename.

        Args:
            fname: Filename to scan for
        Returns:
            List of matching pathnames, or [] if none
        """
        pathname_list = []
        for path in os.environ["PATH"].split(os.pathsep):
            path = path.strip('"')
            pathname = os.path.join(path, fname)
            if os.path.exists(pathname):
                pathname_list.append(pathname)
        return pathname_list

    def Scan(self, verbose):
        """Scan for available toolchains and select the best for each arch.

        We look for all the toolchains we can find, figure out the
        architecture for each, and whether it works. Then we select the
        highest priority toolchain for each arch.

        Args:
            verbose: True to print out progress information
        """
        if verbose: print('Scanning for tool chains')
        for name, value in self.prefixes:
            if verbose: print(" - scanning prefix '%s'" % value)
            if os.path.exists(value):
                self.Add(value, True, verbose, PRIORITY_FULL_PREFIX, name)
                continue
            fname = value + 'gcc'
            if os.path.exists(fname):
                self.Add(fname, True, verbose, PRIORITY_PREFIX_GCC, name)
                continue
            fname_list = self.ScanPathEnv(fname)
            for f in fname_list:
                self.Add(f, True, verbose, PRIORITY_PREFIX_GCC_PATH, name)
            if not fname_list:
                raise ValueError("No tool chain found for prefix '%s'" %
                                 value)
        for path in self.paths:
            if verbose: print(" - scanning path '%s'" % path)
            fnames = self.ScanPath(path, verbose)
            for fname in fnames:
                self.Add(fname, True, verbose)
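
    # Example (illustrative sketch of a [toolchain-prefix] settings section,
    # matching what the prefix scan above expects; the path is made up):
    #
    #   [toolchain-prefix]
    #   arm: /opt/arm/bin/arm-linux-gnueabihf-
    #
    # Scan() first checks whether the value itself exists, then whether
    # value + 'gcc' exists, and finally searches $PATH for value + 'gcc'.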

    def List(self):
        """List out the selected toolchains for each architecture"""
        col = terminal.Color()
        print(col.Color(col.BLUE, 'List of available toolchains (%d):' %
                        len(self.toolchains)))
        if len(self.toolchains):
            for key, value in sorted(self.toolchains.items()):
                print('%-10s: %s' % (key, value.gcc))
        else:
            print('None')

    def Select(self, arch):
        """Returns the toolchain for a given architecture

        Args:
            arch: Name of architecture (e.g. 'arm', 'ppc_8xx')

        Returns:
            Toolchain object to use

        Raises:
            ValueError: If no toolchain is found for the architecture
        """
        for tag, value in bsettings.GetItems('toolchain-alias'):
            if arch == tag:
                for alias in value.split():
                    if alias in self.toolchains:
                        return self.toolchains[alias]

        if not arch in self.toolchains:
            raise ValueError("No tool chain found for arch '%s'" % arch)
        return self.toolchains[arch]
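
    # Example (illustrative sketch of a [toolchain-alias] settings section;
    # the alias values are illustrative):
    #
    #   [toolchain-alias]
    #   x86: i386 x86_64
    #
    # With this in place, Select('x86') returns the toolchain registered
    # under 'i386' if one was found during Scan(), else the one under
    # 'x86_64'.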

    def ResolveReferences(self, var_dict, args):
        """Resolve variable references in a string

        This converts ${blah} within the string to the value of blah.
        This function works recursively.

        Args:
            var_dict: Dictionary containing variables and their values
            args: String containing make arguments
        Returns:
            Resolved string

        >>> bsettings.Setup()
        >>> tcs = Toolchains()
        >>> tcs.Add('fred', False)
        >>> var_dict = {'oblique' : 'OBLIQUE', 'first' : 'fi${second}rst', \
                        'second' : '2nd'}
        >>> tcs.ResolveReferences(var_dict, 'this=${oblique}_set')
        'this=OBLIQUE_set'
        >>> tcs.ResolveReferences(var_dict, 'this=${oblique}_set${first}nd')
        'this=OBLIQUE_setfi2ndrstnd'
        """
        re_var = re.compile(r'(\$\{[-_a-z0-9A-Z]{1,}\})')

        while True:
            m = re_var.search(args)
            if not m:
                break
            lookup = m.group(0)[2:-1]
            value = var_dict.get(lookup, '')
            args = args[:m.start(0)] + value + args[m.end(0):]
        return args

    def GetMakeArguments(self, board):
        """Returns 'make' arguments for a given board

        The flags are in a section called 'make-flags'. Flags are named
        after the target they represent, for example snapper9260=TESTING=1
        will pass TESTING=1 to make when building the snapper9260 board.

        References to other boards can be added in the string also. For
        example:

        [make-flags]
        at91-boards=ENABLE_AT91_TEST=1
        snapper9260=${at91-boards} BUILD_TAG=442
        snapper9g45=${at91-boards} BUILD_TAG=443

        This will return 'ENABLE_AT91_TEST=1 BUILD_TAG=442' for snapper9260
        and 'ENABLE_AT91_TEST=1 BUILD_TAG=443' for snapper9g45.

        A special 'target' variable is set to the board target.

        Args:
            board: Board object for the board to check.
        Returns:
            List of 'make' flags for that board, or [] if none
        """
        self._make_flags['target'] = board.target
        arg_str = self.ResolveReferences(self._make_flags,
                                         self._make_flags.get(board.target, ''))
        args = re.findall(r"(?:\".*?\"|\S)+", arg_str)
        i = 0
        while i < len(args):
            args[i] = args[i].replace('"', '')
            if not args[i]:
                del args[i]
            else:
                i += 1
        return args

    def LocateArchUrl(self, fetch_arch):
        """Find a toolchain available online

        Look in standard places for available toolchains. At present the
        only standard place is at kernel.org.

        Args:
            fetch_arch: Architecture to look for, or 'list' for all
        Returns:
            If fetch_arch is 'list', a tuple:
                Machine architecture (e.g. x86_64)
                List of toolchains
            else
                URL containing this toolchain, if available, else None
        """
        arch = command.OutputOneLine('uname', '-m')
        if arch == 'aarch64':
            arch = 'arm64'
        base = 'https://www.kernel.org/pub/tools/crosstool/files/bin'
        versions = ['11.1.0', '9.2.0', '7.3.0', '6.4.0', '4.9.4']
        links = []
        for version in versions:
            url = '%s/%s/%s/' % (base, arch, version)
            print('Checking: %s' % url)
            response = urllib.request.urlopen(url)
            html = tools.ToString(response.read())
            parser = MyHTMLParser(fetch_arch)
            parser.feed(html)
            if fetch_arch == 'list':
                links += parser.links
            elif parser.arch_link:
                return url + parser.arch_link
        if fetch_arch == 'list':
            return arch, links
        return None
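
    # Example (illustrative sketch): on an x86_64 host looking for an
    # 'aarch64' toolchain, the first URL checked is
    #   https://www.kernel.org/pub/tools/crosstool/files/bin/x86_64/11.1.0/
    # and the return value is that URL plus the matching .xz link found by
    # MyHTMLParser, e.g. a name of the form
    # x86_64-gcc-11.1.0-nolibc-aarch64-linux.tar.xz (exact names vary by
    # release).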

    def Download(self, url):
        """Download a file to a temporary directory

        Args:
            url: URL to download
        Returns:
            Tuple:
                Temporary directory name
                Full path to the downloaded archive file in that directory,
                    or None if there was an error while downloading
        """
        print('Downloading: %s' % url)
        leaf = url.split('/')[-1]
        tmpdir = tempfile.mkdtemp('.buildman')
        response = urllib.request.urlopen(url)
        fname = os.path.join(tmpdir, leaf)
        fd = open(fname, 'wb')
        meta = response.info()
        size = int(meta.get('Content-Length'))
        done = 0
        block_size = 1 << 16
        status = ''

        # Read the file in chunks and show progress as we go
        while True:
            buffer = response.read(block_size)
            if not buffer:
                print(chr(8) * (len(status) + 1), '\r', end=' ')
                break

            done += len(buffer)
            fd.write(buffer)
            status = r'%10d MiB [%3d%%]' % (done // 1024 // 1024,
                                            done * 100 // size)
            status = status + chr(8) * (len(status) + 1)
            print(status, end=' ')
            sys.stdout.flush()
        fd.close()
        if done != size:
            print('Error, failed to download')
            os.remove(fname)
            fname = None
        return tmpdir, fname

    def Unpack(self, fname, dest):
        """Unpack a tar file

        Args:
            fname: Filename to unpack
            dest: Destination directory
        Returns:
            Directory name of the first entry in the archive, without the
            trailing /
        """
        stdout = command.Output('tar', 'xvfJ', fname, '-C', dest)
        dirs = stdout.splitlines()[1].split('/')[:2]
        return '/'.join(dirs)
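
    # Example (illustrative sketch): kernel.org crosstool tarballs typically
    # unpack into a two-level directory such as
    # 'gcc-11.1.0-nolibc/aarch64-linux/...', so for a 'tar xvfJ' listing whose
    # second line reads
    #   gcc-11.1.0-nolibc/aarch64-linux/
    # this returns 'gcc-11.1.0-nolibc/aarch64-linux'. The exact layout depends
    # on the archive, which is why only the first two path components are
    # used.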

    def TestSettingsHasPath(self, path):
        """Check if buildman will find this toolchain

        Args:
            path: Path to check
        Returns:
            True if the path is in settings, False if not
        """
        paths = self.GetPathList(False)
        return path in paths

    def ListArchs(self):
        """List architectures with available toolchains to download"""
        host_arch, archives = self.LocateArchUrl('list')
        # Some tarball names separate the target architecture with a dash
        # (e.g. x86_64-gcc-8.1.0-nolibc-aarch64-linux.tar.xz) and others with
        # an underscore (e.g. x86_64-gcc-7.3.0-nolibc_aarch64-linux.tar.xz),
        # so accept either here. This looser match also picks up the bogus
        # 'architectures' 2.0 and 64, which are filtered out below.
        re_arch = re.compile('[-a-z0-9.]*[-_]([^-]*)-.*')
        arch_set = set()
        for archive in archives:
            # Remove the host architecture from the start
            arch = re_arch.match(archive[len(host_arch):])
            if arch:
                if arch.group(1) != '2.0' and arch.group(1) != '64':
                    arch_set.add(arch.group(1))
        return sorted(arch_set)

    def FetchAndInstall(self, arch):
        """Fetch and install a new toolchain

        Args:
            arch: Architecture to fetch, or 'list' to list
        """
        # First get the URL for this architecture
        col = terminal.Color()
        print(col.Color(col.BLUE, "Downloading toolchain for arch '%s'" % arch))
        url = self.LocateArchUrl(arch)
        if not url:
            print(("Cannot find toolchain for arch '%s' - use 'list' to list" %
                   arch))
            return 2
        home = os.environ['HOME']
        dest = os.path.join(home, '.buildman-toolchains')
        if not os.path.exists(dest):
            os.mkdir(dest)

        # Download the tar file for this toolchain and unpack it
        tmpdir, tarfile = self.Download(url)
        if not tarfile:
            return 1
        print(col.Color(col.GREEN, 'Unpacking to: %s' % dest), end=' ')
        sys.stdout.flush()
        path = self.Unpack(tarfile, dest)
        os.remove(tarfile)
        os.rmdir(tmpdir)
        print()

        # Check that the toolchain works
        print(col.Color(col.GREEN, 'Testing'))
        dirpath = os.path.join(dest, path)
        compiler_fname_list = self.ScanPath(dirpath, True)
        if not compiler_fname_list:
            print('Could not locate C compiler - fetch failed.')
            return 1
        if len(compiler_fname_list) != 1:
            print(col.Color(col.RED, 'Warning, ambiguous toolchains: %s' %
                            ', '.join(compiler_fname_list)))
        toolchain = Toolchain(compiler_fname_list[0], True, True)

        # Make sure that it will be found by buildman
        if not self.TestSettingsHasPath(dirpath):
            print(("Adding 'download' to config file '%s'" %
                   bsettings.config_fname))
            bsettings.SetItem('toolchain', 'download', '%s/*/*' % dest)
        return 0