2019-10-31 13:42:57 +00:00
|
|
|
#!/usr/bin/env python3
|
2018-05-06 21:58:06 +00:00
|
|
|
# SPDX-License-Identifier: GPL-2.0+
|
2015-05-20 02:36:07 +00:00
|
|
|
#
|
|
|
|
# Author: Masahiro Yamada <yamada.masahiro@socionext.com>
|
|
|
|
#
|
|
|
|
|
|
|
|
"""
|
|
|
|
Move config options from headers to defconfig files.
|
|
|
|
|
2021-07-22 03:35:51 +00:00
|
|
|
See doc/develop/moveconfig.rst for documentation.
|
2015-05-20 02:36:07 +00:00
|
|
|
"""
|
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
from argparse import ArgumentParser
|
2020-02-12 19:46:44 +00:00
|
|
|
import asteval
|
2017-06-02 01:39:03 +00:00
|
|
|
import collections
|
2021-12-18 21:54:35 +00:00
|
|
|
from contextlib import ExitStack
|
2016-07-25 10:15:24 +00:00
|
|
|
import copy
|
2016-07-25 10:15:25 +00:00
|
|
|
import difflib
|
2021-12-18 15:09:45 +00:00
|
|
|
import doctest
|
2016-05-19 06:52:07 +00:00
|
|
|
import filecmp
|
2015-05-20 02:36:07 +00:00
|
|
|
import fnmatch
|
2016-10-19 05:39:54 +00:00
|
|
|
import glob
|
2015-05-20 02:36:07 +00:00
|
|
|
import multiprocessing
|
|
|
|
import os
|
2019-10-31 13:42:57 +00:00
|
|
|
import queue
|
2015-05-20 02:36:07 +00:00
|
|
|
import re
|
|
|
|
import shutil
|
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import tempfile
|
2017-06-02 01:39:02 +00:00
|
|
|
import threading
|
2015-05-20 02:36:07 +00:00
|
|
|
import time
|
2021-12-18 15:09:45 +00:00
|
|
|
import unittest
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2020-04-18 00:09:02 +00:00
|
|
|
from buildman import bsettings
|
|
|
|
from buildman import kconfiglib
|
|
|
|
from buildman import toolchain
|
2017-06-16 03:39:33 +00:00
|
|
|
|
2015-05-20 02:36:07 +00:00
|
|
|
# Helper script that prints the name of the GNU Make command on this system
SHOW_GNU_MAKE = 'scripts/show-gnu-make'
# Polling interval in seconds while waiting for parallel build slots
SLEEP_TIME=0.03

# States of a build slot while it processes one defconfig
STATE_IDLE = 0
STATE_DEFCONFIG = 1
STATE_AUTOCONF = 2
STATE_SAVEDEFCONFIG = 3

# Actions that can be taken for a CONFIG option found in a defconfig
ACTION_MOVE = 0
ACTION_NO_ENTRY = 1
ACTION_NO_ENTRY_WARN = 2
ACTION_NO_CHANGE = 3

# ANSI SGR color codes, used together with color_text()
COLOR_BLACK = '0;30'
COLOR_RED = '0;31'
COLOR_GREEN = '0;32'
COLOR_BROWN = '0;33'
COLOR_BLUE = '0;34'
COLOR_PURPLE = '0;35'
COLOR_CYAN = '0;36'
COLOR_LIGHT_GRAY = '0;37'
COLOR_DARK_GRAY = '1;30'
COLOR_LIGHT_RED = '1;31'
COLOR_LIGHT_GREEN = '1;32'
COLOR_YELLOW = '1;33'
COLOR_LIGHT_BLUE = '1;34'
COLOR_LIGHT_PURPLE = '1;35'
COLOR_LIGHT_CYAN = '1;36'
COLOR_WHITE = '1;37'

# Path, relative to the build directory, of the generated auto.conf file
AUTO_CONF_PATH = 'include/config/auto.conf'
# Default filename for the CONFIG database
CONFIG_DATABASE = 'moveconfig.db'

# Length of the 'CONFIG_' prefix, for stripping it from option names
CONFIG_LEN = len('CONFIG_')

# Linux-style SZ_* size macros, made available as symbols when expanding
# arithmetic expressions found in config values (see try_expand())
SIZES = {
    'SZ_1':    0x00000001, 'SZ_2':    0x00000002,
    'SZ_4':    0x00000004, 'SZ_8':    0x00000008,
    'SZ_16':   0x00000010, 'SZ_32':   0x00000020,
    'SZ_64':   0x00000040, 'SZ_128':  0x00000080,
    'SZ_256':  0x00000100, 'SZ_512':  0x00000200,
    'SZ_1K':   0x00000400, 'SZ_2K':   0x00000800,
    'SZ_4K':   0x00001000, 'SZ_8K':   0x00002000,
    'SZ_16K':  0x00004000, 'SZ_32K':  0x00008000,
    'SZ_64K':  0x00010000, 'SZ_128K': 0x00020000,
    'SZ_256K': 0x00040000, 'SZ_512K': 0x00080000,
    'SZ_1M':   0x00100000, 'SZ_2M':   0x00200000,
    'SZ_4M':   0x00400000, 'SZ_8M':   0x00800000,
    'SZ_16M':  0x01000000, 'SZ_32M':  0x02000000,
    'SZ_64M':  0x04000000, 'SZ_128M': 0x08000000,
    'SZ_256M': 0x10000000, 'SZ_512M': 0x20000000,
    'SZ_1G':   0x40000000, 'SZ_2G':   0x80000000,
    'SZ_4G':   0x100000000
}

# Matches a defconfig filename, capturing the board name before '_defconfig'
RE_REMOVE_DEFCONFIG = re.compile(r'(.*)_defconfig')
|
|
|
|
|
2015-05-20 02:36:07 +00:00
|
|
|
### helper functions ###
|
|
|
|
def check_top_directory():
    """Terminate unless the current directory is the source-tree root.

    The top of the U-Boot source tree is identified by the presence of
    both the README and the Licenses entries.
    """
    markers = ('README', 'Licenses')
    if not all(os.path.exists(entry) for entry in markers):
        sys.exit('Please run at the top of source directory.')
|
|
|
|
|
tools: moveconfig: exit with error message for not clean directory
When the source tree is not clean, this tool raises an exception
with a message like follows:
Traceback (most recent call last):
File "tools/moveconfig.py", line 939, in <module>
main()
File "tools/moveconfig.py", line 934, in main
move_config(config_attrs, options)
File "tools/moveconfig.py", line 808, in move_config
while not slots.available():
File "tools/moveconfig.py", line 733, in available
if slot.poll():
File "tools/moveconfig.py", line 645, in poll
self.parser.update_dotconfig(self.defconfig)
File "tools/moveconfig.py", line 503, in update_dotconfig
with open(autoconf_path) as f:
IOError: [Errno 2] No such file or directory: '/tmp/tmpDtzCgl/include/autoconf.mk'
This does not explain what is wrong. Show an appropriate error
message "source tree is not clean, please run 'make mrproper'"
in such a situation.
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Joe Hershberger <joe.hershberger@ni.com>
2016-05-19 06:51:54 +00:00
|
|
|
def check_clean_directory():
    """Terminate if stale build artifacts remain in the source tree.

    A leftover .config or include/config directory means a previous
    configuration has not been cleaned away.
    """
    leftovers = [entry for entry in ('.config', 'include/config')
                 if os.path.exists(entry)]
    if leftovers:
        sys.exit("source tree is not clean, please run 'make mrproper'")
|
|
|
|
|
2015-05-20 02:36:07 +00:00
|
|
|
def get_make_cmd():
    """Get the command name of GNU Make.

    U-Boot needs GNU Make for building, but the command name is not
    necessarily "make" (for example, "gmake" on FreeBSD). This runs the
    scripts/show-gnu-make helper to discover the right name.

    Returns:
        The most appropriate make command name on this system.
    """
    with subprocess.Popen([SHOW_GNU_MAKE], stdout=subprocess.PIPE) as proc:
        stdout, _ = proc.communicate()
        if proc.returncode:
            sys.exit('GNU Make not found')
    return stdout.rstrip()
|
|
|
|
|
2017-06-02 01:38:58 +00:00
|
|
|
def get_matched_defconfig(line):
    """Find the defconfig files that match a pattern.

    Args:
        line (str): Path or filename to match, e.g. 'configs/snow_defconfig'
            or 'k2*_defconfig'. If no directory is provided, 'configs/' is
            prepended.

    Returns:
        list of str: a list of matching defconfig files
    """
    pattern = line if os.path.dirname(line) else os.path.join('configs', line)
    # try the pattern both as given and with '_defconfig' appended
    return glob.glob(pattern) + glob.glob(pattern + '_defconfig')
|
|
|
|
|
2016-10-19 05:39:54 +00:00
|
|
|
def get_matched_defconfigs(defconfigs_file):
    """Get all the defconfig files that match the patterns in a file.

    Args:
        defconfigs_file (str): File containing a list of defconfigs to
            process, or '-' to read the list from stdin

    Returns:
        list of str: A list of paths to defconfig files, with no duplicates
    """
    found = []
    with ExitStack() as stack:
        if defconfigs_file == '-':
            src = sys.stdin
            defconfigs_file = 'stdin'
        else:
            src = stack.enter_context(open(defconfigs_file, encoding='utf-8'))
        for lineno, raw in enumerate(src, 1):
            pattern = raw.strip()
            if not pattern:
                continue  # skip blank lines silently
            # keep only the first word so 'git log' output also works
            pattern = pattern.split(' ')[0]
            matched = get_matched_defconfig(pattern)
            if not matched:
                print(f"warning: {defconfigs_file}:{lineno}: no defconfig matched '{pattern}'",
                      file=sys.stderr)
            found += matched

    # use set() to drop multiple matching
    return [path[len('configs') + 1:] for path in set(found)]
|
2016-10-19 05:39:54 +00:00
|
|
|
|
2016-07-25 10:15:28 +00:00
|
|
|
def get_all_defconfigs():
    """Get all the defconfig files under the configs/ directory.

    Returns:
        list of str: List of paths to defconfig files, relative to configs/
    """
    found = []
    prefix_len = len('configs') + 1
    for dirpath, _, filenames in os.walk('configs'):
        rel_dir = dirpath[prefix_len:]
        found.extend(os.path.join(rel_dir, name)
                     for name in fnmatch.filter(filenames, '*_defconfig'))
    return found
|
|
|
|
|
2015-05-20 02:36:07 +00:00
|
|
|
def color_text(color_enabled, color, string):
    """Return the string wrapped in ANSI color escapes, if enabled.

    Each line is colored individually and empty lines are left untouched,
    so that a line feed is never surrounded by the escape sequence
    (which could print additional whitespace or line feeds).

    Args:
        color_enabled (bool): whether to apply color at all
        color (str): SGR color code, e.g. '0;31'
        string (str): text to colorize (may span multiple lines)
    """
    if not color_enabled:
        return string
    colored = []
    for segment in string.split('\n'):
        colored.append(f'\033[{color}m{segment}\033[0m' if segment else '')
    return '\n'.join(colored)
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2021-12-18 21:54:35 +00:00
|
|
|
def show_diff(alines, blines, file_path, color_enabled):
    """Print a unified diff between two lists of lines.

    Removed lines are shown in red and added lines in green (when color
    is enabled); context and header lines are printed unchanged.

    Args:
        alines (list of str): A list of lines (before)
        blines (list of str): A list of lines (after)
        file_path (str): Path to the file
        color_enabled (bool): Display the diff in color
    """
    for line in difflib.unified_diff(alines, blines,
                                     fromfile=os.path.join('a', file_path),
                                     tofile=os.path.join('b', file_path)):
        # '--'/'++' prefixes belong to the diff header, not to changes
        is_removal = line.startswith('-') and not line.startswith('--')
        is_addition = line.startswith('+') and not line.startswith('++')
        if is_removal:
            print(color_text(color_enabled, COLOR_RED, line))
        elif is_addition:
            print(color_text(color_enabled, COLOR_GREEN, line))
        else:
            print(line)
|
2016-07-25 10:15:25 +00:00
|
|
|
|
2021-12-18 21:54:35 +00:00
|
|
|
def extend_matched_lines(lines, matched, pre_patterns, post_patterns,
                         extend_pre, extend_post):
    """Extend matched lines if desired patterns are found before/after already
    matched lines.

    For each contiguous run of matched line numbers, the line immediately
    before the run and the line immediately after it are examined. If the
    preceding line matches one of pre_patterns AND the following line matches
    one of post_patterns, those surrounding lines are appended to the matched
    list as requested by extend_pre/extend_post. Callers typically invoke
    this repeatedly until the matched list stops growing.

    Args:
        lines (list of str): list of lines handled.
        matched (list of int): list of line numbers that have been already
            matched (will be updated by this function)
        pre_patterns (list of re.Pattern): list of regular expression that
            should be matched as preamble
        post_patterns (list of re.Pattern): list of regular expression that
            should be matched as postamble
        extend_pre (bool): Add the line number of matched preamble to the
            matched list
        extend_post (bool): Add the line number of matched postamble to the
            matched list
    """
    if not matched:
        # nothing to extend; also avoids an IndexError on matched[0] below
        return

    extended_matched = []

    j = matched[0]

    for i in matched:
        if i == 0 or i < j:
            # either no line exists above this one, or we are still inside
            # a run whose boundaries were already handled
            continue
        j = i
        # advance past the contiguous run of matched lines
        while j in matched:
            j += 1
        if j >= len(lines):
            break

        for pat in pre_patterns:
            if pat.search(lines[i - 1]):
                break
        else:
            # preamble not matched
            continue

        for pat in post_patterns:
            if pat.search(lines[j]):
                break
        else:
            # postamble not matched
            continue

        if extend_pre:
            extended_matched.append(i - 1)
        if extend_post:
            extended_matched.append(j)

    matched += extended_matched
    matched.sort()
|
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
def confirm(args, prompt):
    """Ask the user to confirm something

    If args.yes is set, the question is skipped and True is returned
    immediately; otherwise the user is prompted until they answer 'y'
    or 'n'.

    Args:
        args (Namespace): program arguments
        prompt (str): question to ask the user

    Returns:
        bool: True to confirm, False to cancel/stop
    """
    if args.yes:
        return True
    while True:
        choice = input(f'{prompt} [y/n]: ').lower()
        print(choice)
        if choice in ('y', 'n'):
            return choice == 'y'
|
|
|
|
|
2021-12-18 21:54:33 +00:00
|
|
|
def write_file(fname, data):
    """Write data to a file

    Args:
        fname (str): Filename to write to
        data (list of str): Lines to write (with or without trailing newline);
            or str to write
    """
    with open(fname, 'w', encoding='utf-8') as out:
        if isinstance(data, list):
            # normalize each line to exactly one trailing newline
            out.write(''.join(line.rstrip('\n') + '\n' for line in data))
        else:
            out.write(data)
|
|
|
|
|
2021-12-18 21:54:34 +00:00
|
|
|
def read_file(fname, as_lines=True, skip_unicode=False):
    """Read a file and return the contents

    Args:
        fname (str): Filename to read from
        as_lines (bool): Return file contents as a list of lines
        skip_unicode (bool): True to report unicode errors and continue

    Returns:
        iter of str: List of lines from the file with newline removed; str if
            as_lines is False with newlines intact; or None if a unicode error
            occurred

    Raises:
        UnicodeDecodeError: Unicode error occurred when reading and
            skip_unicode is False
    """
    with open(fname, encoding='utf-8') as inf:
        try:
            if as_lines:
                return [line.rstrip('\n') for line in inf.readlines()]
            return inf.read()
        except UnicodeDecodeError as exc:
            if not skip_unicode:
                raise
            # report the failure but let the caller continue with other files
            # (original message had a stray quote: "file %s':")
            print(f"Failed on file '{fname}': {exc}")
            return None
|
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
def cleanup_empty_blocks(header_path, args):
    """Clean up empty conditional blocks

    Removes '#if...'/'#endif' pairs with nothing between them, which are
    typically left behind after config lines have been deleted. Files
    that cannot be decoded as UTF-8 are skipped.

    Args:
        header_path (str): path to the cleaned file.
        args (Namespace): program arguments
    """
    empty_block = re.compile(r'^\s*#\s*if.*$\n^\s*#\s*endif.*$\n*', flags=re.M)
    data = read_file(header_path, as_lines=False, skip_unicode=True)
    if data is None:
        return

    new_data = empty_block.sub('\n', data)

    show_diff(data.splitlines(True), new_data.splitlines(True), header_path,
              args.color)

    if args.dry_run:
        return

    # only rewrite the file when something actually changed
    if new_data != data:
        write_file(header_path, new_data)
|
2019-01-30 07:23:16 +00:00
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
def cleanup_one_header(header_path, patterns, args):
    """Clean regex-matched lines away from a file.

    After deleting the matched lines, conditional directives and blank
    lines that are left dangling around them are deleted too, by
    repeatedly growing the matched set until it reaches a fixed point.
    Files that cannot be decoded as UTF-8 are skipped.

    Args:
        header_path: path to the cleaned file.
        patterns: list of regex patterns. Any lines matching to these
          patterns are deleted.
        args (Namespace): program arguments
    """
    lines = read_file(header_path, skip_unicode=True)
    if lines is None:
        return

    matched = []
    for i, line in enumerate(lines):
        # a matched line ending in '\' continues onto this line, so this
        # line is part of the same (deleted) definition
        if i - 1 in matched and lines[i - 1].endswith('\\'):
            matched.append(i)
            continue
        for pattern in patterns:
            if pattern.search(line):
                matched.append(i)
                break

    if not matched:
        return

    # remove empty #ifdef ... #endif, successive blank lines
    pattern_if = re.compile(r'#\s*if(def|ndef)?\b') # #if, #ifdef, #ifndef
    pattern_elif = re.compile(r'#\s*el(if|se)\b') # #elif, #else
    pattern_endif = re.compile(r'#\s*endif\b') # #endif
    pattern_blank = re.compile(r'^\s*$') # empty line

    # iterate until no further lines get pulled into the matched set
    while True:
        old_matched = copy.copy(matched)
        extend_matched_lines(lines, matched, [pattern_if],
                             [pattern_endif], True, True)
        extend_matched_lines(lines, matched, [pattern_elif],
                             [pattern_elif, pattern_endif], True, False)
        extend_matched_lines(lines, matched, [pattern_if, pattern_elif],
                             [pattern_blank], False, True)
        extend_matched_lines(lines, matched, [pattern_blank],
                             [pattern_elif, pattern_endif], True, False)
        extend_matched_lines(lines, matched, [pattern_blank],
                             [pattern_blank], True, False)
        if matched == old_matched:
            break

    tolines = copy.copy(lines)

    # pop from the end so earlier indices stay valid
    for i in reversed(matched):
        tolines.pop(i)

    show_diff(lines, tolines, header_path, args.color)

    if args.dry_run:
        return

    write_file(header_path, tolines)
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
def cleanup_headers(configs, args):
    """Delete config defines from board headers.

    Walks the include/, arch/ and board/ trees and removes '#define' /
    '#undef' lines for the given CONFIG options, then cleans up any
    conditional blocks left empty. Asks for confirmation first (unless
    args.yes is set).

    Args:
        configs: A list of CONFIGs to remove.
        args (Namespace): program arguments
    """
    if not confirm(args, 'Clean up headers?'):
        return

    patterns = []
    for config in configs:
        patterns.append(re.compile(r'#\s*define\s+%s\b' % config))
        patterns.append(re.compile(r'#\s*undef\s+%s\b' % config))

    # 'search_dir' rather than 'dir', which would shadow the builtin
    for search_dir in 'include', 'arch', 'board':
        for (dirpath, _, filenames) in os.walk(search_dir):
            # generated headers are rebuilt anyway; do not touch them
            if dirpath == os.path.join('include', 'generated'):
                continue
            for filename in filenames:
                # skip editor backups, devicetree sources and binary blobs
                if not filename.endswith(('~', '.dts', '.dtsi', '.bin',
                                          '.elf', '.aml', '.dat')):
                    header_path = os.path.join(dirpath, filename)
                    # This file contains UTF-16 data and no CONFIG symbols
                    if header_path == 'include/video_font_data.h':
                        continue
                    cleanup_one_header(header_path, patterns, args)
                    cleanup_empty_blocks(header_path, args)
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
def cleanup_one_extra_option(defconfig_path, configs, args):
    """Delete config defines in CONFIG_SYS_EXTRA_OPTIONS in one defconfig file.

    Args:
        defconfig_path: path to the cleaned defconfig file.
        configs: A list of CONFIGs to remove (without the CONFIG_ prefix).
        args (Namespace): program arguments
    """

    start = 'CONFIG_SYS_EXTRA_OPTIONS="'
    end = '"'

    lines = read_file(defconfig_path)

    # locate the CONFIG_SYS_EXTRA_OPTIONS line; note that 'i' and 'line'
    # deliberately leak out of this loop and are used below
    for i, line in enumerate(lines):
        if line.startswith(start) and line.endswith(end):
            break
    else:
        # CONFIG_SYS_EXTRA_OPTIONS was not found in this defconfig
        return

    # tokens are comma-separated NAME or NAME=VALUE entries inside the quotes
    old_tokens = line[len(start):-len(end)].split(',')
    new_tokens = []

    # keep only tokens whose name (the part before any '=') is not removed
    for token in old_tokens:
        pos = token.find('=')
        if not (token[:pos] if pos >= 0 else token) in configs:
            new_tokens.append(token)

    if new_tokens == old_tokens:
        return

    tolines = copy.copy(lines)

    if new_tokens:
        tolines[i] = start + ','.join(new_tokens) + end
    else:
        # every option was removed; drop the whole line
        tolines.pop(i)

    show_diff(lines, tolines, defconfig_path, args.color)

    if args.dry_run:
        return

    write_file(defconfig_path, tolines)
|
2016-07-25 10:15:29 +00:00
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
def cleanup_extra_options(configs, args):
    """Delete config defines in CONFIG_SYS_EXTRA_OPTIONS in defconfig files.

    Args:
        configs: A list of CONFIGs to remove.
        args (Namespace): program arguments
    """
    if not confirm(args, 'Clean up CONFIG_SYS_EXTRA_OPTIONS?'):
        return

    # tokens inside CONFIG_SYS_EXTRA_OPTIONS do not carry the CONFIG_ prefix
    stripped = [config[len('CONFIG_'):] for config in configs]

    for defconfig in get_all_defconfigs():
        cleanup_one_extra_option(os.path.join('configs', defconfig), stripped,
                                 args)
|
2016-07-25 10:15:29 +00:00
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
def cleanup_whitelist(configs, args):
    """Delete config whitelist entries

    Args:
        configs: A list of CONFIGs to remove.
        args (Namespace): program arguments
    """
    if not confirm(args, 'Clean up whitelist entries?'):
        return

    whitelist_path = os.path.join('scripts', 'config_whitelist.txt')
    kept = [entry for entry in read_file(whitelist_path)
            if entry.strip() not in configs]
    write_file(whitelist_path, kept)
|
2017-05-02 09:30:47 +00:00
|
|
|
|
2017-05-02 09:30:48 +00:00
|
|
|
def find_matching(patterns, line):
    """Check whether a line matches any of the given compiled patterns.

    Args:
        patterns (list of re.Pattern): patterns to try
        line (str): line to search

    Returns:
        bool: True if any pattern matches, False otherwise
    """
    return any(pat.search(line) for pat in patterns)
|
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
def cleanup_readme(configs, args):
    """Delete config description in README

    Each entry in the README's CONFIG list starts with an indented
    CONFIG_* name; the entry's descriptive text continues until the next
    indented CONFIG line. Matching entries are removed together with
    their description.

    Args:
        configs: A list of CONFIGs to remove.
        args (Namespace): program arguments
    """
    if not confirm(args, 'Clean up README?'):
        return

    patterns = []
    for config in configs:
        patterns.append(re.compile(r'^\s+%s' % config))

    lines = read_file('README')

    # 'found' is True while we are inside an entry being deleted
    found = False
    newlines = []
    for line in lines:
        if not found:
            found = find_matching(patterns, line)
            if found:
                # this is the first line of an entry to delete
                continue

        # the next indented CONFIG line ends the deleted entry
        if found and re.search(r'^\s+CONFIG', line):
            found = False

        if not found:
            newlines.append(line)

    write_file('README', newlines)
|
2017-05-02 09:30:48 +00:00
|
|
|
|
2019-05-15 13:15:52 +00:00
|
|
|
def try_expand(line):
    """If value looks like an expression, try expanding it

    The value part of a CONFIG=VALUE line is evaluated with asteval (with
    the SZ_* size macros available as symbols) and replaced by its hex
    value. If the line has no '=', the value does not look like an
    expression, or evaluation fails, the line is returned unchanged.

    Args:
        line (str): config line, typically of the form CONFIG_FOO=value

    Returns:
        str: the line with the value expanded, or the original line
    """
    if line.find('=') == -1:
        return line

    try:
        aeval = asteval.Interpreter(usersyms=SIZES, minimal=True)
        cfg, val = re.split("=", line)
        val = val.strip('\"')
        # Note: [-+*/] lists the four operators literally; the previous
        # [*+-/] contained the character range '+'..'/' and so also
        # matched ',' and '.' by accident.
        if re.search(r'[-+*/]|<<|SZ_+|\(([^\)]+)\)', val):
            newval = hex(aeval(val))
            print(f'\tExpanded expression {val} to {newval}')
            return cfg + '=' + newval
    except Exception:
        # not a bare 'except:', so KeyboardInterrupt/SystemExit propagate
        print(f'\tFailed to expand expression in {line}')

    return line
|
|
|
|
|
2017-05-02 09:30:47 +00:00
|
|
|
|
2015-05-20 02:36:07 +00:00
|
|
|
### classes ###
|
2016-05-19 06:51:55 +00:00
|
|
|
class Progress:
    """Progress Indicator

    Tracks how many defconfig files have been processed out of a total
    and can display that status, overwriting a single console line.
    """

    def __init__(self, total):
        """Create a new progress indicator.

        Args:
            total: A number of defconfig files to process.
        """
        self.total = total
        self.current = 0

    def inc(self):
        """Increment the number of processed defconfig files."""
        self.current = self.current + 1

    def show(self):
        """Display the progress (the '\\r' keeps it on one line)."""
        print(' %d defconfigs out of %d\r' % (self.current, self.total), end=' ')
        sys.stdout.flush()
|
|
|
|
|
2017-06-16 03:39:33 +00:00
|
|
|
|
|
|
|
class KconfigScanner:

    """Kconfig scanner.

    Thin wrapper that loads the whole Kconfig tree once and exposes the
    resulting kconfiglib.Kconfig object as self.conf.
    """

    def __init__(self):
        """Scan all the Kconfig files and create a Config object."""
        # Define environment variables referenced from Kconfig
        os.environ['srctree'] = os.getcwd()
        os.environ['UBOOTVERSION'] = 'dummy'
        os.environ['KCONFIG_OBJDIR'] = ''
        # parses the Kconfig files found under the current directory
        self.conf = kconfiglib.Kconfig()
|
2017-06-16 03:39:33 +00:00
|
|
|
|
|
|
|
|
2015-05-20 02:36:07 +00:00
|
|
|
class KconfigParser:
|
|
|
|
|
|
|
|
"""A parser of .config and include/autoconf.mk."""
|
|
|
|
|
|
|
|
re_arch = re.compile(r'CONFIG_SYS_ARCH="(.*)"')
|
|
|
|
re_cpu = re.compile(r'CONFIG_SYS_CPU="(.*)"')
|
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
    def __init__(self, configs, args, build_dir):
        """Create a new parser.

        Args:
            configs: A list of CONFIGs to move.
            args (Namespace): program arguments
            build_dir: Build directory.
        """
        self.configs = configs
        self.args = args
        # paths to the configuration outputs inside the build directory
        self.dotconfig = os.path.join(build_dir, '.config')
        self.autoconf = os.path.join(build_dir, 'include', 'autoconf.mk')
        # SPL builds generate their own autoconf.mk under spl/
        self.spl_autoconf = os.path.join(build_dir, 'spl', 'include',
                                         'autoconf.mk')
        self.config_autoconf = os.path.join(build_dir, AUTO_CONF_PATH)
        self.defconfig = os.path.join(build_dir, 'defconfig')
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2017-07-10 20:47:47 +00:00
|
|
|
def get_arch(self):
|
|
|
|
"""Parse .config file and return the architecture.
|
2015-05-20 02:36:07 +00:00
|
|
|
|
|
|
|
Returns:
|
2017-07-10 20:47:47 +00:00
|
|
|
Architecture name (e.g. 'arm').
|
2015-05-20 02:36:07 +00:00
|
|
|
"""
|
|
|
|
arch = ''
|
|
|
|
cpu = ''
|
2021-12-18 21:54:34 +00:00
|
|
|
for line in read_file(self.dotconfig):
|
2015-05-20 02:36:07 +00:00
|
|
|
m = self.re_arch.match(line)
|
|
|
|
if m:
|
|
|
|
arch = m.group(1)
|
|
|
|
continue
|
|
|
|
m = self.re_cpu.match(line)
|
|
|
|
if m:
|
|
|
|
cpu = m.group(1)
|
|
|
|
|
2016-05-19 06:51:53 +00:00
|
|
|
if not arch:
|
|
|
|
return None
|
2015-05-20 02:36:07 +00:00
|
|
|
|
|
|
|
# fix-up for aarch64
|
|
|
|
if arch == 'arm' and cpu == 'armv8':
|
|
|
|
arch = 'aarch64'
|
|
|
|
|
2017-07-10 20:47:47 +00:00
|
|
|
return arch
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2016-05-19 06:51:57 +00:00
|
|
|
def parse_one_config(self, config, dotconfig_lines, autoconf_lines):
|
2015-05-20 02:36:07 +00:00
|
|
|
"""Parse .config, defconfig, include/autoconf.mk for one config.
|
|
|
|
|
|
|
|
This function looks for the config options in the lines from
|
|
|
|
defconfig, .config, and include/autoconf.mk in order to decide
|
|
|
|
which action should be taken for this defconfig.
|
|
|
|
|
2021-12-18 21:54:35 +00:00
|
|
|
Args:
|
2016-05-19 06:51:57 +00:00
|
|
|
config: CONFIG name to parse.
|
tools: moveconfig: do not rely on type and default value given by users
Commit 96464badc794 ("moveconfig: Always run savedefconfig on the
moved config") changed the work flow of this tool a lot from the
original intention when this tool was designed first.
Since then, before running this tool, users must edit the Kconfig to
add the menu entries for the configs they are moving. It means users
had already specified the type and the default value for each CONFIG
via its Kconfig entry. Nevertheless, users are still required to
dictate the same type and the default value in the input file. This
is tedious to use. So, my idea here is to deprecate the latter.
Before moving forward with it, there is one issue worth mentioning;
since the savedefconfig re-sync was introduced, this tool has not
been able to move bool options with "default y". Joe sent a patch
to solve this problem about a year ago, but it was not applied for
some reasons. Now, he came back with an updated patch, so this
problem will be fixed soon.
For other use cases, I see no reason to require redundant dictation
in the input file. Instead, the tool can know the types and default
values by parsing the .config file.
This commit changes the tool to use the CONFIG names, but ignore the
types and default values given by the input file.
This commit also fixes one bug. Prior to this commit, it could not
move an integer-typed CONFIG with value 1.
For example, assume we are moving CONFIG_CONS_INDEX. Please note
this is an integer type option.
Many board headers define this CONFIG as 1.
#define CONFIG_CONS_INDEX 1
It will be converted to
CONFIG_CONS_INDEX=y
and moved to include/autoconf.mk, by the tools/scripts/define2mk.sed.
It will cause "make savedefconfig" to fail due to the type conflict.
This commit takes care of it by detecting the type and converting the
CONFIG value correctly.
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Joe Hershberger <joe.hershberger@ni.com>
2016-05-19 06:51:56 +00:00
|
|
|
dotconfig_lines: lines from the .config file.
|
2015-05-20 02:36:07 +00:00
|
|
|
autoconf_lines: lines from the include/autoconf.mk file.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
A tupple of the action for this defconfig and the line
|
|
|
|
matched for the config.
|
|
|
|
"""
|
|
|
|
not_set = '# %s is not set' % config
|
|
|
|
|
|
|
|
for line in autoconf_lines:
|
|
|
|
line = line.rstrip()
|
|
|
|
if line.startswith(config + '='):
|
tools: moveconfig: do not rely on type and default value given by users
Commit 96464badc794 ("moveconfig: Always run savedefconfig on the
moved config") changed the work flow of this tool a lot from the
original intention when this tool was designed first.
Since then, before running this tool, users must edit the Kconfig to
add the menu entries for the configs they are moving. It means users
had already specified the type and the default value for each CONFIG
via its Kconfig entry. Nevertheless, users are still required to
dictate the same type and the default value in the input file. This
is tedious to use. So, my idea here is to deprecate the latter.
Before moving forward with it, there is one issue worth mentioning;
since the savedefconfig re-sync was introduced, this tool has not
been able to move bool options with "default y". Joe sent a patch
to solve this problem about a year ago, but it was not applied for
some reasons. Now, he came back with an updated patch, so this
problem will be fixed soon.
For other use cases, I see no reason to require redundant dictation
in the input file. Instead, the tool can know the types and default
values by parsing the .config file.
This commit changes the tool to use the CONFIG names, but ignore the
types and default values given by the input file.
This commit also fixes one bug. Prior to this commit, it could not
move an integer-typed CONFIG with value 1.
For example, assume we are moving CONFIG_CONS_INDEX. Please note
this is an integer type option.
Many board headers define this CONFIG as 1.
#define CONFIG_CONS_INDEX 1
It will be converted to
CONFIG_CONS_INDEX=y
and moved to include/autoconf.mk, by the tools/scripts/define2mk.sed.
It will cause "make savedefconfig" to fail due to the type conflict.
This commit takes care of it by detecting the type and converting the
CONFIG value correctly.
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Joe Hershberger <joe.hershberger@ni.com>
2016-05-19 06:51:56 +00:00
|
|
|
new_val = line
|
2015-05-20 02:36:07 +00:00
|
|
|
break
|
|
|
|
else:
|
tools: moveconfig: do not rely on type and default value given by users
Commit 96464badc794 ("moveconfig: Always run savedefconfig on the
moved config") changed the work flow of this tool a lot from the
original intention when this tool was designed first.
Since then, before running this tool, users must edit the Kconfig to
add the menu entries for the configs they are moving. It means users
had already specified the type and the default value for each CONFIG
via its Kconfig entry. Nevertheless, users are still required to
dictate the same type and the default value in the input file. This
is tedious to use. So, my idea here is to deprecate the latter.
Before moving forward with it, there is one issue worth mentioning;
since the savedefconfig re-sync was introduced, this tool has not
been able to move bool options with "default y". Joe sent a patch
to solve this problem about a year ago, but it was not applied for
some reasons. Now, he came back with an updated patch, so this
problem will be fixed soon.
For other use cases, I see no reason to require redundant dictation
in the input file. Instead, the tool can know the types and default
values by parsing the .config file.
This commit changes the tool to use the CONFIG names, but ignore the
types and default values given by the input file.
This commit also fixes one bug. Prior to this commit, it could not
move an integer-typed CONFIG with value 1.
For example, assume we are moving CONFIG_CONS_INDEX. Please note
this is an integer type option.
Many board headers define this CONFIG as 1.
#define CONFIG_CONS_INDEX 1
It will be converted to
CONFIG_CONS_INDEX=y
and moved to include/autoconf.mk, by the tools/scripts/define2mk.sed.
It will cause "make savedefconfig" to fail due to the type conflict.
This commit takes care of it by detecting the type and converting the
CONFIG value correctly.
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Joe Hershberger <joe.hershberger@ni.com>
2016-05-19 06:51:56 +00:00
|
|
|
new_val = not_set
|
|
|
|
|
2019-05-15 13:15:52 +00:00
|
|
|
new_val = try_expand(new_val)
|
|
|
|
|
2016-08-22 13:18:21 +00:00
|
|
|
for line in dotconfig_lines:
|
|
|
|
line = line.rstrip()
|
|
|
|
if line.startswith(config + '=') or line == not_set:
|
|
|
|
old_val = line
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
if new_val == not_set:
|
|
|
|
return (ACTION_NO_ENTRY, config)
|
|
|
|
else:
|
|
|
|
return (ACTION_NO_ENTRY_WARN, config)
|
|
|
|
|
tools: moveconfig: do not rely on type and default value given by users
Commit 96464badc794 ("moveconfig: Always run savedefconfig on the
moved config") changed the work flow of this tool a lot from the
original intention when this tool was designed first.
Since then, before running this tool, users must edit the Kconfig to
add the menu entries for the configs they are moving. It means users
had already specified the type and the default value for each CONFIG
via its Kconfig entry. Nevertheless, users are still required to
dictate the same type and the default value in the input file. This
is tedious to use. So, my idea here is to deprecate the latter.
Before moving forward with it, there is one issue worth mentioning;
since the savedefconfig re-sync was introduced, this tool has not
been able to move bool options with "default y". Joe sent a patch
to solve this problem about a year ago, but it was not applied for
some reasons. Now, he came back with an updated patch, so this
problem will be fixed soon.
For other use cases, I see no reason to require redundant dictation
in the input file. Instead, the tool can know the types and default
values by parsing the .config file.
This commit changes the tool to use the CONFIG names, but ignore the
types and default values given by the input file.
This commit also fixes one bug. Prior to this commit, it could not
move an integer-typed CONFIG with value 1.
For example, assume we are moving CONFIG_CONS_INDEX. Please note
this is an integer type option.
Many board headers define this CONFIG as 1.
#define CONFIG_CONS_INDEX 1
It will be converted to
CONFIG_CONS_INDEX=y
and moved to include/autoconf.mk, by the tools/scripts/define2mk.sed.
It will cause "make savedefconfig" to fail due to the type conflict.
This commit takes care of it by detecting the type and converting the
CONFIG value correctly.
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Joe Hershberger <joe.hershberger@ni.com>
2016-05-19 06:51:56 +00:00
|
|
|
# If this CONFIG is neither bool nor trisate
|
|
|
|
if old_val[-2:] != '=y' and old_val[-2:] != '=m' and old_val != not_set:
|
|
|
|
# tools/scripts/define2mk.sed changes '1' to 'y'.
|
|
|
|
# This is a problem if the CONFIG is int type.
|
|
|
|
# Check the type in Kconfig and handle it correctly.
|
|
|
|
if new_val[-2:] == '=y':
|
|
|
|
new_val = new_val[:-1] + '1'
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2016-06-15 05:33:50 +00:00
|
|
|
return (ACTION_NO_CHANGE if old_val == new_val else ACTION_MOVE,
|
|
|
|
new_val)
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2016-05-19 06:52:02 +00:00
|
|
|
def update_dotconfig(self):
|
2016-05-19 06:51:50 +00:00
|
|
|
"""Parse files for the config options and update the .config.
|
2015-05-20 02:36:07 +00:00
|
|
|
|
tools: moveconfig: do not rely on type and default value given by users
Commit 96464badc794 ("moveconfig: Always run savedefconfig on the
moved config") changed the work flow of this tool a lot from the
original intention when this tool was designed first.
Since then, before running this tool, users must edit the Kconfig to
add the menu entries for the configs they are moving. It means users
had already specified the type and the default value for each CONFIG
via its Kconfig entry. Nevertheless, users are still required to
dictate the same type and the default value in the input file. This
is tedious to use. So, my idea here is to deprecate the latter.
Before moving forward with it, there is one issue worth mentioning;
since the savedefconfig re-sync was introduced, this tool has not
been able to move bool options with "default y". Joe sent a patch
to solve this problem about a year ago, but it was not applied for
some reasons. Now, he came back with an updated patch, so this
problem will be fixed soon.
For other use cases, I see no reason to require redundant dictation
in the input file. Instead, the tool can know the types and default
values by parsing the .config file.
This commit changes the tool to use the CONFIG names, but ignore the
types and default values given by the input file.
This commit also fixes one bug. Prior to this commit, it could not
move an integer-typed CONFIG with value 1.
For example, assume we are moving CONFIG_CONS_INDEX. Please note
this is an integer type option.
Many board headers define this CONFIG as 1.
#define CONFIG_CONS_INDEX 1
It will be converted to
CONFIG_CONS_INDEX=y
and moved to include/autoconf.mk, by the tools/scripts/define2mk.sed.
It will cause "make savedefconfig" to fail due to the type conflict.
This commit takes care of it by detecting the type and converting the
CONFIG value correctly.
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Joe Hershberger <joe.hershberger@ni.com>
2016-05-19 06:51:56 +00:00
|
|
|
This function parses the generated .config and include/autoconf.mk
|
|
|
|
searching the target options.
|
2016-05-19 06:51:50 +00:00
|
|
|
Move the config option(s) to the .config as needed.
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2021-12-18 21:54:35 +00:00
|
|
|
Args:
|
2015-05-20 02:36:07 +00:00
|
|
|
defconfig: defconfig name.
|
2016-05-19 06:52:01 +00:00
|
|
|
|
|
|
|
Returns:
|
2016-05-19 06:52:04 +00:00
|
|
|
Return a tuple of (updated flag, log string).
|
|
|
|
The "updated flag" is True if the .config was updated, False
|
|
|
|
otherwise. The "log string" shows what happend to the .config.
|
2015-05-20 02:36:07 +00:00
|
|
|
"""
|
|
|
|
|
|
|
|
results = []
|
2016-05-19 06:52:04 +00:00
|
|
|
updated = False
|
2016-08-22 13:18:21 +00:00
|
|
|
suspicious = False
|
2016-08-22 13:18:22 +00:00
|
|
|
rm_files = [self.config_autoconf, self.autoconf]
|
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
if self.args.spl:
|
2016-08-22 13:18:22 +00:00
|
|
|
if os.path.exists(self.spl_autoconf):
|
|
|
|
autoconf_path = self.spl_autoconf
|
|
|
|
rm_files.append(self.spl_autoconf)
|
|
|
|
else:
|
|
|
|
for f in rm_files:
|
|
|
|
os.remove(f)
|
|
|
|
return (updated, suspicious,
|
2021-12-18 21:54:31 +00:00
|
|
|
color_text(self.args.color, COLOR_BROWN,
|
2016-08-22 13:18:22 +00:00
|
|
|
"SPL is not enabled. Skipped.") + '\n')
|
|
|
|
else:
|
|
|
|
autoconf_path = self.autoconf
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2021-12-18 21:54:34 +00:00
|
|
|
dotconfig_lines = read_file(self.dotconfig)
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2021-12-18 21:54:34 +00:00
|
|
|
autoconf_lines = read_file(autoconf_path)
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2016-05-19 06:51:57 +00:00
|
|
|
for config in self.configs:
|
|
|
|
result = self.parse_one_config(config, dotconfig_lines,
|
2015-05-19 18:21:17 +00:00
|
|
|
autoconf_lines)
|
2015-05-20 02:36:07 +00:00
|
|
|
results.append(result)
|
|
|
|
|
|
|
|
log = ''
|
|
|
|
|
|
|
|
for (action, value) in results:
|
|
|
|
if action == ACTION_MOVE:
|
|
|
|
actlog = "Move '%s'" % value
|
|
|
|
log_color = COLOR_LIGHT_GREEN
|
tools: moveconfig: do not rely on type and default value given by users
Commit 96464badc794 ("moveconfig: Always run savedefconfig on the
moved config") changed the work flow of this tool a lot from the
original intention when this tool was designed first.
Since then, before running this tool, users must edit the Kconfig to
add the menu entries for the configs they are moving. It means users
had already specified the type and the default value for each CONFIG
via its Kconfig entry. Nevertheless, users are still required to
dictate the same type and the default value in the input file. This
is tedious to use. So, my idea here is to deprecate the latter.
Before moving forward with it, there is one issue worth mentioning;
since the savedefconfig re-sync was introduced, this tool has not
been able to move bool options with "default y". Joe sent a patch
to solve this problem about a year ago, but it was not applied for
some reasons. Now, he came back with an updated patch, so this
problem will be fixed soon.
For other use cases, I see no reason to require redundant dictation
in the input file. Instead, the tool can know the types and default
values by parsing the .config file.
This commit changes the tool to use the CONFIG names, but ignore the
types and default values given by the input file.
This commit also fixes one bug. Prior to this commit, it could not
move an integer-typed CONFIG with value 1.
For example, assume we are moving CONFIG_CONS_INDEX. Please note
this is an integer type option.
Many board headers define this CONFIG as 1.
#define CONFIG_CONS_INDEX 1
It will be converted to
CONFIG_CONS_INDEX=y
and moved to include/autoconf.mk, by the tools/scripts/define2mk.sed.
It will cause "make savedefconfig" to fail due to the type conflict.
This commit takes care of it by detecting the type and converting the
CONFIG value correctly.
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Joe Hershberger <joe.hershberger@ni.com>
2016-05-19 06:51:56 +00:00
|
|
|
elif action == ACTION_NO_ENTRY:
|
2021-12-18 21:54:30 +00:00
|
|
|
actlog = '%s is not defined in Kconfig. Do nothing.' % value
|
2015-05-20 02:36:07 +00:00
|
|
|
log_color = COLOR_LIGHT_BLUE
|
2016-08-22 13:18:21 +00:00
|
|
|
elif action == ACTION_NO_ENTRY_WARN:
|
2021-12-18 21:54:30 +00:00
|
|
|
actlog = '%s is not defined in Kconfig (suspicious). Do nothing.' % value
|
2016-08-22 13:18:21 +00:00
|
|
|
log_color = COLOR_YELLOW
|
|
|
|
suspicious = True
|
tools: moveconfig: do not rely on type and default value given by users
Commit 96464badc794 ("moveconfig: Always run savedefconfig on the
moved config") changed the work flow of this tool a lot from the
original intention when this tool was designed first.
Since then, before running this tool, users must edit the Kconfig to
add the menu entries for the configs they are moving. It means users
had already specified the type and the default value for each CONFIG
via its Kconfig entry. Nevertheless, users are still required to
dictate the same type and the default value in the input file. This
is tedious to use. So, my idea here is to deprecate the latter.
Before moving forward with it, there is one issue worth mentioning;
since the savedefconfig re-sync was introduced, this tool has not
been able to move bool options with "default y". Joe sent a patch
to solve this problem about a year ago, but it was not applied for
some reasons. Now, he came back with an updated patch, so this
problem will be fixed soon.
For other use cases, I see no reason to require redundant dictation
in the input file. Instead, the tool can know the types and default
values by parsing the .config file.
This commit changes the tool to use the CONFIG names, but ignore the
types and default values given by the input file.
This commit also fixes one bug. Prior to this commit, it could not
move an integer-typed CONFIG with value 1.
For example, assume we are moving CONFIG_CONS_INDEX. Please note
this is an integer type option.
Many board headers define this CONFIG as 1.
#define CONFIG_CONS_INDEX 1
It will be converted to
CONFIG_CONS_INDEX=y
and moved to include/autoconf.mk, by the tools/scripts/define2mk.sed.
It will cause "make savedefconfig" to fail due to the type conflict.
This commit takes care of it by detecting the type and converting the
CONFIG value correctly.
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Joe Hershberger <joe.hershberger@ni.com>
2016-05-19 06:51:56 +00:00
|
|
|
elif action == ACTION_NO_CHANGE:
|
|
|
|
actlog = "'%s' is the same as the define in Kconfig. Do nothing." \
|
|
|
|
% value
|
2015-05-20 02:36:07 +00:00
|
|
|
log_color = COLOR_LIGHT_PURPLE
|
|
|
|
else:
|
2021-12-18 21:54:30 +00:00
|
|
|
sys.exit('Internal Error. This should not happen.')
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
log += color_text(self.args.color, log_color, actlog) + '\n'
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2021-12-18 21:54:35 +00:00
|
|
|
with open(self.dotconfig, 'a', encoding='utf-8') as out:
|
2016-05-19 06:51:49 +00:00
|
|
|
for (action, value) in results:
|
|
|
|
if action == ACTION_MOVE:
|
2021-12-18 21:54:35 +00:00
|
|
|
out.write(value + '\n')
|
2016-05-19 06:52:04 +00:00
|
|
|
updated = True
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2016-05-19 06:52:06 +00:00
|
|
|
self.results = results
|
2016-08-22 13:18:22 +00:00
|
|
|
for f in rm_files:
|
|
|
|
os.remove(f)
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2016-08-22 13:18:21 +00:00
|
|
|
return (updated, suspicious, log)
|
2016-05-19 06:52:01 +00:00
|
|
|
|
2016-05-19 06:52:06 +00:00
|
|
|
def check_defconfig(self):
|
|
|
|
"""Check the defconfig after savedefconfig
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Return additional log if moved CONFIGs were removed again by
|
|
|
|
'make savedefconfig'.
|
|
|
|
"""
|
|
|
|
|
|
|
|
log = ''
|
|
|
|
|
2021-12-18 21:54:34 +00:00
|
|
|
defconfig_lines = read_file(self.defconfig)
|
2016-05-19 06:52:06 +00:00
|
|
|
|
|
|
|
for (action, value) in self.results:
|
|
|
|
if action != ACTION_MOVE:
|
|
|
|
continue
|
2022-01-29 15:22:08 +00:00
|
|
|
if not value in defconfig_lines:
|
2021-12-18 21:54:31 +00:00
|
|
|
log += color_text(self.args.color, COLOR_YELLOW,
|
2016-05-19 06:52:06 +00:00
|
|
|
"'%s' was removed by savedefconfig.\n" %
|
|
|
|
value)
|
|
|
|
|
|
|
|
return log
|
|
|
|
|
2017-06-02 01:39:02 +00:00
|
|
|
|
|
|
|
class DatabaseThread(threading.Thread):
    """This thread processes results from Slot threads.

    It collects the data in the master config directory. There is only one
    result thread, and this helps to serialise the build output.
    """
    def __init__(self, config_db, db_queue):
        """Set up a new result thread

        Args:
            config_db: dict to fill with results, keyed by defconfig name,
                with each value being a dict of CONFIG names to values
            db_queue: queue.Queue from which (defconfig, configs) items
                are read
        """
        threading.Thread.__init__(self)
        self.config_db = config_db
        self.db_queue = db_queue

    def run(self):
        """Called to start up the result thread.

        We collect the next result job and pass it on to the build.
        """
        while True:
            defconfig, configs = self.db_queue.get()
            self.config_db[defconfig] = configs
            self.db_queue.task_done()
2015-05-20 02:36:07 +00:00
|
|
|
class Slot:
|
|
|
|
|
|
|
|
"""A slot to store a subprocess.
|
|
|
|
|
|
|
|
Each instance of this class handles one subprocess.
|
|
|
|
This class is useful to control multiple threads
|
|
|
|
for faster processing.
|
|
|
|
"""
|
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
    def __init__(self, toolchains, configs, args, progress, devnull,
                 make_cmd, reference_src_dir, db_queue):
        """Create a new process slot.

        Args:
          toolchains: Toolchains object containing toolchains.
          configs: A list of CONFIGs to move.
          args: Program arguments
          progress: A progress indicator.
          devnull: A file object of '/dev/null'.
          make_cmd: command name of GNU Make.
          reference_src_dir: Determine the true starting config state from this
            source tree.
          db_queue: output queue to write config info for the database
        """
        self.toolchains = toolchains
        self.args = args
        self.progress = progress
        # Private scratch directory for this slot's out-of-tree build
        self.build_dir = tempfile.mkdtemp()
        self.devnull = devnull
        # 'O=<dir>' directs make output into the scratch directory
        self.make_cmd = (make_cmd, 'O=' + self.build_dir)
        self.reference_src_dir = reference_src_dir
        self.db_queue = db_queue
        self.parser = KconfigParser(configs, args, self.build_dir)
        # Current position in the per-defconfig state machine (see poll())
        self.state = STATE_IDLE
        # Defconfig names recorded for the final summary
        self.failed_boards = set()
        self.suspicious_boards = set()
|
2015-05-20 02:36:07 +00:00
|
|
|
|
|
|
|
def __del__(self):
|
|
|
|
"""Delete the working directory
|
|
|
|
|
|
|
|
This function makes sure the temporary directory is cleaned away
|
|
|
|
even if Python suddenly dies due to error. It should be done in here
|
2016-06-10 19:53:29 +00:00
|
|
|
because it is guaranteed the destructor is always invoked when the
|
2015-05-20 02:36:07 +00:00
|
|
|
instance of the class gets unreferenced.
|
|
|
|
|
|
|
|
If the subprocess is still running, wait until it finishes.
|
|
|
|
"""
|
|
|
|
if self.state != STATE_IDLE:
|
|
|
|
while self.ps.poll() == None:
|
|
|
|
pass
|
|
|
|
shutil.rmtree(self.build_dir)
|
|
|
|
|
2016-05-19 06:51:55 +00:00
|
|
|
def add(self, defconfig):
|
2015-05-20 02:36:07 +00:00
|
|
|
"""Assign a new subprocess for defconfig and add it to the slot.
|
|
|
|
|
|
|
|
If the slot is vacant, create a new subprocess for processing the
|
|
|
|
given defconfig and add it to the slot. Just returns False if
|
|
|
|
the slot is occupied (i.e. the current subprocess is still running).
|
|
|
|
|
2021-12-18 21:54:35 +00:00
|
|
|
Args:
|
2015-05-20 02:36:07 +00:00
|
|
|
defconfig: defconfig name.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Return True on success or False on failure
|
|
|
|
"""
|
|
|
|
if self.state != STATE_IDLE:
|
|
|
|
return False
|
2016-06-08 02:47:37 +00:00
|
|
|
|
2015-05-20 02:36:07 +00:00
|
|
|
self.defconfig = defconfig
|
2016-05-19 06:52:02 +00:00
|
|
|
self.log = ''
|
2016-06-15 05:33:52 +00:00
|
|
|
self.current_src_dir = self.reference_src_dir
|
2016-06-08 02:47:37 +00:00
|
|
|
self.do_defconfig()
|
2015-05-20 02:36:07 +00:00
|
|
|
return True
|
|
|
|
|
|
|
|
def poll(self):
|
|
|
|
"""Check the status of the subprocess and handle it as needed.
|
|
|
|
|
|
|
|
Returns True if the slot is vacant (i.e. in idle state).
|
|
|
|
If the configuration is successfully finished, assign a new
|
|
|
|
subprocess to build include/autoconf.mk.
|
|
|
|
If include/autoconf.mk is generated, invoke the parser to
|
2016-05-19 06:52:04 +00:00
|
|
|
parse the .config and the include/autoconf.mk, moving
|
|
|
|
config options to the .config as needed.
|
|
|
|
If the .config was updated, run "make savedefconfig" to sync
|
|
|
|
it, update the original defconfig, and then set the slot back
|
|
|
|
to the idle state.
|
2015-05-20 02:36:07 +00:00
|
|
|
|
|
|
|
Returns:
|
|
|
|
Return True if the subprocess is terminated, False otherwise
|
|
|
|
"""
|
|
|
|
if self.state == STATE_IDLE:
|
|
|
|
return True
|
|
|
|
|
|
|
|
if self.ps.poll() == None:
|
|
|
|
return False
|
|
|
|
|
|
|
|
if self.ps.poll() != 0:
|
2016-06-08 02:47:37 +00:00
|
|
|
self.handle_error()
|
|
|
|
elif self.state == STATE_DEFCONFIG:
|
2016-06-15 05:33:52 +00:00
|
|
|
if self.reference_src_dir and not self.current_src_dir:
|
2016-06-10 19:53:32 +00:00
|
|
|
self.do_savedefconfig()
|
|
|
|
else:
|
|
|
|
self.do_autoconf()
|
2016-06-08 02:47:37 +00:00
|
|
|
elif self.state == STATE_AUTOCONF:
|
2016-06-15 05:33:52 +00:00
|
|
|
if self.current_src_dir:
|
|
|
|
self.current_src_dir = None
|
2016-06-10 19:53:32 +00:00
|
|
|
self.do_defconfig()
|
2021-12-18 21:54:31 +00:00
|
|
|
elif self.args.build_db:
|
2017-06-02 01:39:02 +00:00
|
|
|
self.do_build_db()
|
2016-06-10 19:53:32 +00:00
|
|
|
else:
|
|
|
|
self.do_savedefconfig()
|
2016-06-08 02:47:37 +00:00
|
|
|
elif self.state == STATE_SAVEDEFCONFIG:
|
|
|
|
self.update_defconfig()
|
|
|
|
else:
|
2021-12-18 21:54:30 +00:00
|
|
|
sys.exit('Internal Error. This should not happen.')
|
2015-05-20 02:36:07 +00:00
|
|
|
|
2016-06-08 02:47:37 +00:00
|
|
|
return True if self.state == STATE_IDLE else False
|
2015-05-19 18:21:17 +00:00
|
|
|
|
2016-06-08 02:47:37 +00:00
|
|
|
def handle_error(self):
|
|
|
|
"""Handle error cases."""
|
2016-05-19 06:52:08 +00:00
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
self.log += color_text(self.args.color, COLOR_LIGHT_RED,
|
2021-12-18 21:54:30 +00:00
|
|
|
'Failed to process.\n')
|
2021-12-18 21:54:31 +00:00
|
|
|
if self.args.verbose:
|
|
|
|
self.log += color_text(self.args.color, COLOR_LIGHT_CYAN,
|
2020-02-12 19:46:45 +00:00
|
|
|
self.ps.stderr.read().decode())
|
2016-06-08 02:47:37 +00:00
|
|
|
self.finish(False)
|
2015-05-19 18:21:17 +00:00
|
|
|
|
2016-06-08 02:47:37 +00:00
|
|
|
def do_defconfig(self):
|
|
|
|
"""Run 'make <board>_defconfig' to create the .config file."""
|
2016-05-19 06:52:07 +00:00
|
|
|
|
2016-06-08 02:47:37 +00:00
|
|
|
cmd = list(self.make_cmd)
|
|
|
|
cmd.append(self.defconfig)
|
|
|
|
self.ps = subprocess.Popen(cmd, stdout=self.devnull,
|
2016-06-15 05:33:52 +00:00
|
|
|
stderr=subprocess.PIPE,
|
|
|
|
cwd=self.current_src_dir)
|
2016-06-08 02:47:37 +00:00
|
|
|
self.state = STATE_DEFCONFIG
|
2016-05-19 06:52:07 +00:00
|
|
|
|
2016-06-08 02:47:37 +00:00
|
|
|
    def do_autoconf(self):
        """Run 'make AUTO_CONF_PATH'."""

        arch = self.parser.get_arch()
        try:
            toolchain = self.toolchains.Select(arch)
        except ValueError:
            # No toolchain available for this architecture: skip the board.
            self.log += color_text(self.args.color, COLOR_YELLOW,
                    "Tool chain for '%s' is missing. Do nothing.\n" % arch)
            self.finish(False)
            return
        env = toolchain.MakeEnvironment(False)

        cmd = list(self.make_cmd)
        # presumably set to tolerate duplicate Kconfig entries while
        # options are mid-move -- TODO confirm against Kconfig machinery
        cmd.append('KCONFIG_IGNORE_DUPLICATES=1')
        cmd.append(AUTO_CONF_PATH)
        self.ps = subprocess.Popen(cmd, stdout=self.devnull, env=env,
                                   stderr=subprocess.PIPE,
                                   cwd=self.current_src_dir)
        self.state = STATE_AUTOCONF
|
2016-06-08 02:47:37 +00:00
|
|
|
|
2017-06-02 01:39:02 +00:00
|
|
|
def do_build_db(self):
|
|
|
|
"""Add the board to the database"""
|
|
|
|
configs = {}
|
2021-12-18 21:54:34 +00:00
|
|
|
for line in read_file(os.path.join(self.build_dir, AUTO_CONF_PATH)):
|
|
|
|
if line.startswith('CONFIG'):
|
|
|
|
config, value = line.split('=', 1)
|
|
|
|
configs[config] = value.rstrip()
|
2017-06-02 01:39:02 +00:00
|
|
|
self.db_queue.put([self.defconfig, configs])
|
|
|
|
self.finish(True)
|
|
|
|
|
2016-06-08 02:47:37 +00:00
|
|
|
def do_savedefconfig(self):
|
|
|
|
"""Update the .config and run 'make savedefconfig'."""
|
|
|
|
|
2016-08-22 13:18:21 +00:00
|
|
|
(updated, suspicious, log) = self.parser.update_dotconfig()
|
|
|
|
if suspicious:
|
|
|
|
self.suspicious_boards.add(self.defconfig)
|
2016-06-08 02:47:37 +00:00
|
|
|
self.log += log
|
|
|
|
|
2021-12-18 21:54:31 +00:00
|
|
|
if not self.args.force_sync and not updated:
|
2016-06-08 02:47:37 +00:00
|
|
|
self.finish(True)
|
|
|
|
return
|
|
|
|
if updated:
|
2021-12-18 21:54:31 +00:00
|
|
|
self.log += color_text(self.args.color, COLOR_LIGHT_GREEN,
|
2021-12-18 21:54:30 +00:00
|
|
|
'Syncing by savedefconfig...\n')
|
2016-06-08 02:47:37 +00:00
|
|
|
else:
|
2021-12-18 21:54:30 +00:00
|
|
|
self.log += 'Syncing by savedefconfig (forced by option)...\n'
|
2016-06-08 02:47:37 +00:00
|
|
|
|
|
|
|
cmd = list(self.make_cmd)
|
|
|
|
cmd.append('savedefconfig')
|
|
|
|
self.ps = subprocess.Popen(cmd, stdout=self.devnull,
|
|
|
|
stderr=subprocess.PIPE)
|
|
|
|
self.state = STATE_SAVEDEFCONFIG
|
|
|
|
|
|
|
|
    def update_defconfig(self):
        """Update the input defconfig and go back to the idle state.

        Compares the freshly generated defconfig against the one in the
        source tree and moves it into place when it differs (unless this
        is a dry run).
        """
        log = self.parser.check_defconfig()
        if log:
            self.suspicious_boards.add(self.defconfig)
            self.log += log
        orig_defconfig = os.path.join('configs', self.defconfig)
        new_defconfig = os.path.join(self.build_dir, 'defconfig')
        # filecmp.cmp() returns True when identical, so negate for 'updated'
        updated = not filecmp.cmp(orig_defconfig, new_defconfig)

        if updated:
            self.log += color_text(self.args.color, COLOR_LIGHT_BLUE,
                                   'defconfig was updated.\n')

        # Only write back into the tree when something changed and the user
        # did not ask for a dry run
        if not self.args.dry_run and updated:
            shutil.move(new_defconfig, orig_defconfig)
        self.finish(True)
2016-05-19 06:52:03 +00:00
|
|
|
    def finish(self, success):
        """Display log along with progress and go to the idle state.

        May terminate the whole program via sys.exit() when the slot failed
        and --exit-on-error was given.

        Args:
          success: Should be True when the defconfig was processed
          successfully, or False when it fails.
        """
        # output at least 30 characters to hide the "* defconfigs out of *".
        log = self.defconfig.ljust(30) + '\n'

        log += '\n'.join([ ' ' + s for s in self.log.split('\n') ])
        # Some threads are running in parallel.
        # Print log atomically to not mix up logs from different threads.
        print(log, file=(sys.stdout if success else sys.stderr))

        if not success:
            if self.args.exit_on_error:
                sys.exit('Exit on error.')
            # If --exit-on-error flag is not set, skip this board and continue.
            # Record the failed board.
            self.failed_boards.add(self.defconfig)

        self.progress.inc()
        self.progress.show()
        # Slot becomes available for the next defconfig
        self.state = STATE_IDLE
2015-05-20 02:36:07 +00:00
|
|
|
def get_failed_boards(self):
|
2016-08-22 13:18:20 +00:00
|
|
|
"""Returns a set of failed boards (defconfigs) in this slot.
|
2015-05-20 02:36:07 +00:00
|
|
|
"""
|
|
|
|
return self.failed_boards
|
|
|
|
|
2016-06-15 05:33:54 +00:00
|
|
|
def get_suspicious_boards(self):
|
2016-08-22 13:18:20 +00:00
|
|
|
"""Returns a set of boards (defconfigs) with possible misconversion.
|
2016-06-15 05:33:54 +00:00
|
|
|
"""
|
2016-08-22 13:18:21 +00:00
|
|
|
return self.suspicious_boards - self.failed_boards
|
2016-06-15 05:33:54 +00:00
|
|
|
|
2015-05-20 02:36:07 +00:00
|
|
|
class Slots:

    """Controller of the array of subprocess slots."""

    def __init__(self, toolchains, configs, args, progress,
                 reference_src_dir, db_queue):
        """Create a new slots controller.

        Args:
          toolchains: Toolchains object containing toolchains.
          configs: A list of CONFIGs to move.
          args: Program arguments
          progress: A progress indicator.
          reference_src_dir: Determine the true starting config state from this
                             source tree.
          db_queue: output queue to write config info for the database
        """
        self.args = args
        self.slots = []
        devnull = subprocess.DEVNULL
        make_cmd = get_make_cmd()
        # One Slot per parallel job; each drives its own 'make' subprocess
        for i in range(args.jobs):
            self.slots.append(Slot(toolchains, configs, args, progress,
                                   devnull, make_cmd, reference_src_dir,
                                   db_queue))

    def add(self, defconfig):
        """Add a new subprocess if a vacant slot is found.

        Args:
          defconfig: defconfig name to be put into.

        Returns:
          Return True on success or False on failure
        """
        # First slot that accepts the defconfig wins
        for slot in self.slots:
            if slot.add(defconfig):
                return True
        return False

    def available(self):
        """Check if there is a vacant slot.

        Returns:
          Return True if at lease one vacant slot is found, False otherwise.
        """
        for slot in self.slots:
            if slot.poll():
                return True
        return False

    def empty(self):
        """Check if all slots are vacant.

        Returns:
          Return True if all the slots are vacant, False otherwise.
        """
        # poll() every slot (no early exit) so each one advances its state
        ret = True
        for slot in self.slots:
            if not slot.poll():
                ret = False
        return ret

    def show_failed_boards(self):
        """Display all of the failed boards (defconfigs)."""
        boards = set()
        output_file = 'moveconfig.failed'

        # Merge per-slot failure sets
        for slot in self.slots:
            boards |= slot.get_failed_boards()

        if boards:
            boards = '\n'.join(boards) + '\n'
            msg = 'The following boards were not processed due to error:\n'
            msg += boards
            msg += '(the list has been saved in %s)\n' % output_file
            print(color_text(self.args.color, COLOR_LIGHT_RED,
                             msg), file=sys.stderr)

            write_file(output_file, boards)

    def show_suspicious_boards(self):
        """Display all boards (defconfigs) with possible misconversion."""
        boards = set()
        output_file = 'moveconfig.suspicious'

        # Merge per-slot suspicious sets
        for slot in self.slots:
            boards |= slot.get_suspicious_boards()

        if boards:
            boards = '\n'.join(boards) + '\n'
            msg = 'The following boards might have been converted incorrectly.\n'
            msg += 'It is highly recommended to check them manually:\n'
            msg += boards
            msg += '(the list has been saved in %s)\n' % output_file
            print(color_text(self.args.color, COLOR_YELLOW,
                             msg), file=sys.stderr)

            write_file(output_file, boards)
2016-06-15 05:33:51 +00:00
|
|
|
class ReferenceSource:

    """Reference source against which original configs should be parsed."""

    def __init__(self, commit):
        """Create a reference source directory based on a specified commit.

        Clones the current working tree into a fresh temporary directory
        and checks out the requested commit there.

        Args:
          commit: commit to git-clone
        """
        self.src_dir = tempfile.mkdtemp()
        print('Cloning git repo to a separate work directory...')
        subprocess.check_output(['git', 'clone', os.getcwd(), '.'],
                                cwd=self.src_dir)
        print("Checkout '%s' to build the original autoconf.mk." % \
            subprocess.check_output(['git', 'rev-parse', '--short', commit]).strip())
        subprocess.check_output(['git', 'checkout', commit],
                                stderr=subprocess.STDOUT, cwd=self.src_dir)

    def __del__(self):
        """Delete the reference source directory

        This function makes sure the temporary directory is cleaned away
        even if Python suddenly dies due to error. It should be done in here
        because it is guaranteed the destructor is always invoked when the
        instance of the class gets unreferenced.
        """
        shutil.rmtree(self.src_dir)

    def get_dir(self):
        """Return the absolute path to the reference source directory."""

        return self.src_dir
2021-12-18 21:54:31 +00:00
|
|
|
def move_config(toolchains, configs, args, db_queue):
    """Move config options to defconfig files.

    Top-level driver: prints what will be done, optionally sets up a
    reference source tree, then processes every selected defconfig through
    the pool of Slot subprocesses.

    Args:
      toolchains: Toolchains object containing toolchains.
      configs: A list of CONFIGs to move.
      args: Program arguments
      db_queue: output queue to write config info for the database
    """
    if len(configs) == 0:
        if args.force_sync:
            print('No CONFIG is specified. You are probably syncing defconfigs.', end=' ')
        elif args.build_db:
            print('Building %s database' % CONFIG_DATABASE)
        else:
            print('Neither CONFIG nor --force-sync is specified. Nothing will happen.', end=' ')
    else:
        print('Move ' + ', '.join(configs), end=' ')
    print('(jobs: %d)\n' % args.jobs)

    # With --git-ref, parse original configs from a pristine checkout
    if args.git_ref:
        reference_src = ReferenceSource(args.git_ref)
        reference_src_dir = reference_src.get_dir()
    else:
        reference_src_dir = None

    if args.defconfigs:
        defconfigs = get_matched_defconfigs(args.defconfigs)
    else:
        defconfigs = get_all_defconfigs()

    progress = Progress(len(defconfigs))
    slots = Slots(toolchains, configs, args, progress, reference_src_dir,
                  db_queue)

    # Main loop to process defconfig files:
    #  Add a new subprocess into a vacant slot.
    #  Sleep if there is no available slot.
    for defconfig in defconfigs:
        while not slots.add(defconfig):
            while not slots.available():
                # No available slot: sleep for a while
                time.sleep(SLEEP_TIME)

    # wait until all the subprocesses finish
    while not slots.empty():
        time.sleep(SLEEP_TIME)

    print('')
    slots.show_failed_boards()
    slots.show_suspicious_boards()
2017-06-16 03:39:33 +00:00
|
|
|
def find_kconfig_rules(kconf, config, imply_config):
    """Check whether a config has a 'select' or 'imply' keyword

    Args:
        kconf: Kconfiglib.Kconfig object
        config: Name of config to check (without CONFIG_ prefix)
        imply_config: Implying config (without CONFIG_ prefix) which may or
            may not have an 'imply' for 'config')

    Returns:
        Symbol object for 'config' if found, else None
    """
    sym = kconf.syms.get(imply_config)
    if sym is None:
        return None
    # Scan both 'select' and 'imply' targets of the implying symbol
    hits = any(target.name == config
               for target, _cond in sym.selects + sym.implies)
    return sym if hits else None
|
|
|
def check_imply_rule(kconf, config, imply_config):
    """Check if we can add an 'imply' option

    This finds imply_config in the Kconfig and looks to see if it is possible
    to add an 'imply' for 'config' to that part of the Kconfig.

    Args:
        kconf: Kconfiglib.Kconfig object
        config: Name of config to check (without CONFIG_ prefix)
        imply_config: Implying config (without CONFIG_ prefix) which may or
            may not have an 'imply' for 'config')

    Returns:
        tuple:
            filename of Kconfig file containing imply_config, or None if none
            line number within the Kconfig file, or 0 if none
            message indicating the result
    """
    sym = kconf.syms.get(imply_config)
    if not sym:
        # Bug fix: the early-exit paths used to return a bare string, but the
        # caller unpacks a 3-tuple (fname, linenum, message) — keep the
        # contract consistent on every path
        return None, 0, 'cannot find sym'
    nodes = sym.nodes
    if len(nodes) != 1:
        return None, 0, '%d locations' % len(nodes)
    node = nodes[0]
    fname, linenum = node.filename, node.linenr
    cwd = os.getcwd()
    if cwd and fname.startswith(cwd):
        # Report paths relative to the source tree
        fname = fname[len(cwd) + 1:]
    file_line = ' at %s:%d' % (fname, linenum)
    data = read_file(fname)
    # Sanity-check that the recorded line really is the symbol definition
    if data[linenum - 1] != 'config %s' % imply_config:
        return None, 0, 'bad sym format %s%s' % (data[linenum], file_line)
    return fname, linenum, 'adding%s' % file_line
|
|
|
def add_imply_rule(config, fname, linenum):
    """Add a new 'imply' option to a Kconfig

    Inserts '\timply <config>' just before the symbol's 'help' section (or at
    the first blank line), then rewrites the file.

    Args:
        config: config option to add an imply for (without CONFIG_ prefix)
        fname: Kconfig filename to update
        linenum: Line number to place the 'imply' before

    Returns:
        Message indicating the result
    """
    file_line = ' at %s:%d' % (fname, linenum)
    data = read_file(fname)
    linenum -= 1

    for offset, line in enumerate(data[linenum:]):
        if line.strip().startswith('help') or not line:
            data.insert(linenum + offset, '\timply %s' % config)
            write_file(fname, data)
            return 'added%s' % file_line

    # Bug fix: the failure message previously returned the literal
    # 'could not insert%s' without applying the % substitution
    return 'could not insert%s' % file_line
|
|
|
|
# Bit flags controlling which implying configs do_imply_config() may consider
(IMPLY_MIN_2, IMPLY_TARGET, IMPLY_CMD, IMPLY_NON_ARCH_BOARD) = (
    1, 2, 4, 8)

# Map of command-line flag name -> [bit value, help text] for the -I option
IMPLY_FLAGS = {
    'min2': [IMPLY_MIN_2, 'Show options which imply >2 boards (normally >5)'],
    'target': [IMPLY_TARGET, 'Allow CONFIG_TARGET_... options to imply'],
    'cmd': [IMPLY_CMD, 'Allow CONFIG_CMD_... to imply'],
    'non-arch-board': [
        IMPLY_NON_ARCH_BOARD,
        'Allow Kconfig options outside arch/ and /board/ to imply'],
}
2021-12-18 15:09:43 +00:00
|
|
|
|
|
|
|
def read_database():
    """Read in the config database

    Returns:
        tuple:
            set of all config options seen (each a str)
            set of all defconfigs seen (each a str)
            dict of configs for each defconfig:
                key: defconfig name, e.g. "MPC8548CDS_legacy_defconfig"
                value: dict:
                    key: CONFIG option
                    value: Value of option
            dict of defconfigs for each config:
                key: CONFIG option
                value: set of boards using that option

    """
    configs = {}

    # key is defconfig name, value is dict of (CONFIG_xxx, value)
    config_db = {}

    # Set of all config options we have seen
    all_configs = set()

    # Set of all defconfigs we have seen
    all_defconfigs = set()

    defconfig_db = collections.defaultdict(set)
    # NOTE(review): assumes the database file always starts with a defconfig
    # name line — a leading blank or indented line would hit 'defconfig'
    # before assignment. Format is produced by this tool, so presumably safe;
    # confirm if the file can be hand-edited.
    for line in read_file(CONFIG_DATABASE):
        line = line.rstrip()
        if not line:  # Separator between defconfigs
            config_db[defconfig] = configs
            all_defconfigs.add(defconfig)
            configs = {}
        elif line[0] == ' ':  # CONFIG line
            config, value = line.strip().split('=', 1)
            configs[config] = value
            defconfig_db[config].add(defconfig)
            all_configs.add(config)
        else:  # New defconfig
            defconfig = line

    return all_configs, all_defconfigs, config_db, defconfig_db
|
|
|
|
2017-06-16 03:39:33 +00:00
|
|
|
def do_imply_config(config_list, add_imply, imply_flags, skip_added,
                    check_kconfig=True, find_superset=False):
    """Find CONFIG options which imply those in the list

    Some CONFIG options can be implied by others and this can help to reduce
    the size of the defconfig files. For example, CONFIG_X86 implies
    CONFIG_CMD_IRQ, so we can put 'imply CMD_IRQ' under 'config X86' and
    all x86 boards will have that option, avoiding adding CONFIG_CMD_IRQ to
    each of the x86 defconfig files.

    This function uses the moveconfig database to find such options. It
    displays a list of things that could possibly imply those in the list.
    The algorithm ignores any that start with CONFIG_TARGET since these
    typically refer to only a few defconfigs (often one). It also does not
    display a config with less than 5 defconfigs.

    The algorithm works using sets. For each target config in config_list:
        - Get the set 'defconfigs' which use that target config
        - For each config (from a list of all configs):
            - Get the set 'imply_defconfig' of defconfigs which use that config
            -
            - If imply_defconfigs contains anything not in defconfigs then
              this config does not imply the target config

    Params:
        config_list: List of CONFIG options to check (each a string)
        add_imply: Automatically add an 'imply' for each config.
        imply_flags: Flags which control which implying configs are allowed
           (IMPLY_...)
        skip_added: Don't show options which already have an imply added.
        check_kconfig: Check if implied symbols already have an 'imply' or
            'select' for the target config, and show this information if so.
        find_superset: True to look for configs which are a superset of those
            already found. So for example if CONFIG_EXYNOS5 implies an option,
            but CONFIG_EXYNOS covers a larger set of defconfigs and also
            implies that option, this will drop the former in favour of the
            latter. In practice this option has not proved very used.

    Note the terminoloy:
        config - a CONFIG_XXX options (a string, e.g. 'CONFIG_CMD_EEPROM')
        defconfig - a defconfig file (a string, e.g. 'configs/snow_defconfig')
    """
    kconf = KconfigScanner().conf if check_kconfig else None
    if add_imply and add_imply != 'all':
        add_imply = add_imply.split(',')

    all_configs, all_defconfigs, config_db, defconfig_db = read_database()

    # Work through each target config option in turn, independently
    for config in config_list:
        defconfigs = defconfig_db.get(config)
        if not defconfigs:
            print('%s not found in any defconfig' % config)
            continue

        # Get the set of defconfigs without this one (since a config cannot
        # imply itself)
        non_defconfigs = all_defconfigs - defconfigs
        num_defconfigs = len(defconfigs)
        print('%s found in %d/%d defconfigs' % (config, num_defconfigs,
                                                len(all_configs)))

        # This will hold the results: key=config, value=defconfigs containing it
        imply_configs = {}
        rest_configs = all_configs - set([config])

        # Look at every possible config, except the target one
        for imply_config in rest_configs:
            if 'ERRATUM' in imply_config:
                continue
            if not imply_flags & IMPLY_CMD:
                if 'CONFIG_CMD' in imply_config:
                    continue
            if not imply_flags & IMPLY_TARGET:
                if 'CONFIG_TARGET' in imply_config:
                    continue

            # Find set of defconfigs that have this config
            imply_defconfig = defconfig_db[imply_config]

            # Get the intersection of this with defconfigs containing the
            # target config
            common_defconfigs = imply_defconfig & defconfigs

            # Get the set of defconfigs containing this config which DO NOT
            # also contain the taret config. If this set is non-empty it means
            # that this config affects other defconfigs as well as (possibly)
            # the ones affected by the target config. This means it implies
            # things we don't want to imply.
            not_common_defconfigs = imply_defconfig & non_defconfigs
            if not_common_defconfigs:
                continue

            # If there are common defconfigs, imply_config may be useful
            if common_defconfigs:
                skip = False
                if find_superset:
                    for prev in list(imply_configs.keys()):
                        prev_count = len(imply_configs[prev])
                        count = len(common_defconfigs)
                        if (prev_count > count and
                            (imply_configs[prev] & common_defconfigs ==
                            common_defconfigs)):
                            # skip imply_config because prev is a superset
                            skip = True
                            break
                        elif count > prev_count:
                            # delete prev because imply_config is a superset
                            del imply_configs[prev]
                if not skip:
                    imply_configs[imply_config] = common_defconfigs

        # Now we have a dict imply_configs of configs which imply each config
        # The value of each dict item is the set of defconfigs containing that
        # config. Rank them so that we print the configs that imply the largest
        # number of defconfigs first.
        ranked_iconfigs = sorted(imply_configs,
                            key=lambda k: len(imply_configs[k]), reverse=True)
        kconfig_info = ''
        cwd = os.getcwd()
        add_list = collections.defaultdict(list)
        for iconfig in ranked_iconfigs:
            num_common = len(imply_configs[iconfig])

            # Don't bother if there are less than 5 defconfigs affected.
            if num_common < (2 if imply_flags & IMPLY_MIN_2 else 5):
                continue
            missing = defconfigs - imply_configs[iconfig]
            missing_str = ', '.join(missing) if missing else 'all'
            # NOTE(review): missing_str is immediately blanked below, so the
            # missing-board list is computed but never shown — presumably
            # deliberate (output was too noisy), but worth confirming
            missing_str = ''
            show = True
            if kconf:
                sym = find_kconfig_rules(kconf, config[CONFIG_LEN:],
                                         iconfig[CONFIG_LEN:])
                kconfig_info = ''
                if sym:
                    # An imply/select already exists: report its location
                    nodes = sym.nodes
                    if len(nodes) == 1:
                        fname, linenum = nodes[0].filename, nodes[0].linenr
                        if cwd and fname.startswith(cwd):
                            fname = fname[len(cwd) + 1:]
                        kconfig_info = '%s:%d' % (fname, linenum)
                        if skip_added:
                            show = False
                else:
                    sym = kconf.syms.get(iconfig[CONFIG_LEN:])
                    fname = ''
                    if sym:
                        nodes = sym.nodes
                        if len(nodes) == 1:
                            fname, linenum = nodes[0].filename, nodes[0].linenr
                            if cwd and fname.startswith(cwd):
                                fname = fname[len(cwd) + 1:]
                    in_arch_board = not sym or (fname.startswith('arch') or
                                                fname.startswith('board'))
                    if (not in_arch_board and
                        not imply_flags & IMPLY_NON_ARCH_BOARD):
                        continue

                if add_imply and (add_imply == 'all' or
                                  iconfig in add_imply):
                    fname, linenum, kconfig_info = (check_imply_rule(kconf,
                        config[CONFIG_LEN:], iconfig[CONFIG_LEN:]))
                    if fname:
                        add_list[fname].append(linenum)

            if show and kconfig_info != 'skip':
                print('%5d : %-30s%-25s %s' % (num_common, iconfig.ljust(30),
                                               kconfig_info, missing_str))

        # Having collected a list of things to add, now we add them. We process
        # each file from the largest line number to the smallest so that
        # earlier additions do not affect our line numbers. E.g. if we added an
        # imply at line 20 it would change the position of each line after
        # that.
        for fname, linenums in add_list.items():
            for linenum in sorted(linenums, reverse=True):
                add_imply_rule(config[CONFIG_LEN:], fname, linenum)
2022-02-08 18:49:46 +00:00
|
|
|
def defconfig_matches(configs, re_match):
    """Check if any CONFIG option matches a regex

    The match must be complete, i.e. from the start to end of the CONFIG option.

    Args:
        configs (dict): Dict of CONFIG options:
            key: CONFIG option
            value: Value of option
        re_match (re.Pattern): Match to check

    Returns:
        bool: True if any CONFIG matches the regex
    """
    # fullmatch() anchors the pattern at both ends of the option name
    return any(re_match.fullmatch(option) for option in configs)
2021-12-18 15:09:46 +00:00
|
|
|
def do_find_config(config_list):
    """Find boards with a given combination of CONFIGs

    Prints the list of matching boards (defconfig names with the
    '_defconfig' suffix stripped), or an error for options still on the
    ad-hoc whitelist.

    Params:
        config_list: List of CONFIG options to check (each a regex consisting
            of a config option, with or without a CONFIG_ prefix. If an option
            is preceded by a tilde (~) then it must be false, otherwise it must
            be true)
    """
    all_configs, all_defconfigs, config_db, defconfig_db = read_database()

    # Get the whitelist
    adhoc_configs = set(read_file('scripts/config_whitelist.txt'))

    # Start with all defconfigs
    out = all_defconfigs

    # Work through each config in turn
    adhoc = []
    for item in config_list:
        # Get the real config name and whether we want this config or not
        cfg = item
        want = True
        if cfg[0] == '~':
            want = False
            cfg = cfg[1:]

        # Whitelisted (ad-hoc) options are not in Kconfig, so cannot be
        # searched; collect them for an error report
        if cfg in adhoc_configs:
            adhoc.append(cfg)
            continue

        # Search everything that is still in the running. If it has a config
        # that we want, or doesn't have one that we don't, add it into the
        # running for the next stage
        in_list = out
        out = set()
        re_match = re.compile(cfg)
        for defc in in_list:
            has_cfg = defconfig_matches(config_db[defc], re_match)
            if has_cfg == want:
                out.add(defc)
    if adhoc:
        # Cleanup: was an f-string with no placeholders combined with
        # %-formatting; plain %-formatting produces identical output
        print('Error: Not in Kconfig: %s' % ' '.join(adhoc))
    else:
        print(f'{len(out)} matches')
        print(' '.join(item.split('_defconfig')[0] for item in out))
|
|
|
def prefix_config(cfg):
    """Prefix a config with CONFIG_ if needed

    This handles ~ operator, which indicates that the CONFIG should be disabled

    >>> prefix_config('FRED')
    'CONFIG_FRED'
    >>> prefix_config('CONFIG_FRED')
    'CONFIG_FRED'
    >>> prefix_config('~FRED')
    '~CONFIG_FRED'
    >>> prefix_config('~CONFIG_FRED')
    '~CONFIG_FRED'
    >>> prefix_config('A123')
    'CONFIG_A123'
    """
    # Peel off a leading '~' (negation) before normalising the name
    op = ''
    if cfg[0] == '~':
        op, cfg = '~', cfg[1:]
    return op + (cfg if cfg.startswith('CONFIG_') else 'CONFIG_' + cfg)
2015-05-20 02:36:07 +00:00
|
|
|
def main():
    """Entry point for the moveconfig tool.

    Parses the command line and dispatches to the requested operation:
    running the unit tests/doctests (-t), 'imply' analysis (-i), finding
    boards by config combination (-f), the main move/sync of the given
    CONFIG options (including header/whitelist/README cleanup and an
    optional git commit with -C), and writing out the CONFIG database
    built during the run (-b).

    Returns:
        1 if the doctests fail when run with -t, otherwise None
        (implicitly), which sys.exit() treats as success.
    """
    try:
        cpu_count = multiprocessing.cpu_count()
    except NotImplementedError:
        # Platform cannot report a CPU count; fall back to a single job
        cpu_count = 1

    epilog = '''Move config options from headers to defconfig files. See
doc/develop/moveconfig.rst for documentation.'''

    parser = ArgumentParser(epilog=epilog)
    # Add arguments here
    parser.add_argument('-a', '--add-imply', type=str, default='',
                        help='comma-separated list of CONFIG options to add '
                        "an 'imply' statement to for the CONFIG in -i")
    parser.add_argument('-A', '--skip-added', action='store_true', default=False,
                        help="don't show options which are already marked as "
                        'implying others')
    parser.add_argument('-b', '--build-db', action='store_true', default=False,
                        help='build a CONFIG database')
    parser.add_argument('-c', '--color', action='store_true', default=False,
                        help='display the log in color')
    parser.add_argument('-C', '--commit', action='store_true', default=False,
                        help='Create a git commit for the operation')
    parser.add_argument('-d', '--defconfigs', type=str,
                        help='a file containing a list of defconfigs to move, '
                        "one per line (for example 'snow_defconfig') "
                        "or '-' to read from stdin")
    parser.add_argument('-e', '--exit-on-error', action='store_true',
                        default=False,
                        help='exit immediately on any error')
    parser.add_argument('-f', '--find', action='store_true', default=False,
                        help='Find boards with a given config combination')
    parser.add_argument('-H', '--headers-only', dest='cleanup_headers_only',
                        action='store_true', default=False,
                        help='only cleanup the headers')
    parser.add_argument('-i', '--imply', action='store_true', default=False,
                        help='find options which imply others')
    parser.add_argument('-I', '--imply-flags', type=str, default='',
                        # Fixed typo: the original help string left the
                        # parenthesis unclosed ("('help' for help")
                        help="control the -i option ('help' for help)")
    parser.add_argument('-j', '--jobs', type=int, default=cpu_count,
                        help='the number of jobs to run simultaneously')
    parser.add_argument('-n', '--dry-run', action='store_true', default=False,
                        help='perform a trial run (show log with no changes)')
    parser.add_argument('-r', '--git-ref', type=str,
                        help='the git ref to clone for building the autoconf.mk')
    parser.add_argument('-s', '--force-sync', action='store_true', default=False,
                        help='force sync by savedefconfig')
    parser.add_argument('-S', '--spl', action='store_true', default=False,
                        help='parse config options defined for SPL build')
    parser.add_argument('-t', '--test', action='store_true', default=False,
                        help='run unit tests')
    parser.add_argument('-y', '--yes', action='store_true', default=False,
                        help="respond 'yes' to any prompts")
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='show any build errors as boards are built')
    parser.add_argument('configs', nargs='*')

    args = parser.parse_args()
    configs = args.configs

    if args.test:
        # Strip our own arguments so unittest.main() does not see them
        sys.argv = [sys.argv[0]]
        fail, _ = doctest.testmod()
        if fail:
            return 1
        unittest.main()

    if not any((len(configs), args.force_sync, args.build_db, args.imply,
                args.find)):
        parser.print_usage()
        sys.exit(1)

    # prefix the option name with CONFIG_ if missing
    configs = [prefix_config(cfg) for cfg in configs]

    check_top_directory()

    if args.imply:
        imply_flags = 0
        if args.imply_flags == 'all':
            imply_flags = -1

        elif args.imply_flags:
            for flag in args.imply_flags.split(','):
                bad = flag not in IMPLY_FLAGS
                if bad:
                    print("Invalid flag '%s'" % flag)
                if flag == 'help' or bad:
                    print("Imply flags: (separate with ',')")
                    for name, info in IMPLY_FLAGS.items():
                        print(' %-15s: %s' % (name, info[1]))
                    parser.print_usage()
                    sys.exit(1)
                imply_flags |= IMPLY_FLAGS[flag][0]

        do_imply_config(configs, args.add_imply, imply_flags, args.skip_added)
        return

    if args.find:
        do_find_config(configs)
        return

    # Collect CONFIG settings from each defconfig build on a worker thread
    config_db = {}
    db_queue = queue.Queue()
    t = DatabaseThread(config_db, db_queue)
    # Thread.setDaemon() is deprecated since Python 3.10; set the
    # attribute directly instead
    t.daemon = True
    t.start()

    if not args.cleanup_headers_only:
        check_clean_directory()
        bsettings.Setup('')
        toolchains = toolchain.Toolchains()
        toolchains.GetSettings()
        toolchains.Scan(verbose=False)
        move_config(toolchains, configs, args, db_queue)
        # Wait until the database thread has consumed all results
        db_queue.join()

    if configs:
        cleanup_headers(configs, args)
        cleanup_extra_options(configs, args)
        cleanup_whitelist(configs, args)
        cleanup_readme(configs, args)

    if args.commit:
        subprocess.call(['git', 'add', '-u'])
        if configs:
            msg = 'Convert %s %sto Kconfig' % (configs[0],
                    'et al ' if len(configs) > 1 else '')
            msg += ('\n\nThis converts the following to Kconfig:\n   %s\n' %
                    '\n   '.join(configs))
        else:
            msg = 'configs: Resync with savedefconfig'
            msg += '\n\nRsync all defconfig files using moveconfig.py'
        subprocess.call(['git', 'commit', '-s', '-m', msg])

    if args.build_db:
        with open(CONFIG_DATABASE, 'w', encoding='utf-8') as fd:
            # Renamed the loop variable (was 'configs') so it no longer
            # shadows the outer 'configs' list of CONFIG options
            for defconfig, config_dict in config_db.items():
                fd.write('%s\n' % defconfig)
                for config in sorted(config_dict.keys()):
                    fd.write('   %s=%s\n' % (config, config_dict[config]))
                fd.write('\n')
# Run main() when executed as a script; its return value (1 on doctest
# failure, otherwise None) becomes the process exit status.
if __name__ == '__main__':
    sys.exit(main())