mirror of
https://github.com/AsahiLinux/u-boot
synced 2024-11-25 22:20:45 +00:00
moveconfig: Use a function to read files
At present there is quite a bit of ad-hoc code reading from files. The most common case is to read the file as lines. Put it in a function and set the unicode encoding correctly. Avoid writing back to a file when there are obviously no changes as this speeds things up slightly. Signed-off-by: Simon Glass <sjg@chromium.org>
This commit is contained in:
parent
2fd85bd326
commit
37f815cad0
1 changed files with 63 additions and 50 deletions
|
@ -288,6 +288,34 @@ def write_file(fname, data):
|
||||||
else:
|
else:
|
||||||
out.write(data)
|
out.write(data)
|
||||||
|
|
||||||
|
def read_file(fname, as_lines=True, skip_unicode=False):
    """Read a file and return the contents

    Args:
        fname (str): Filename to read from
        as_lines (bool): Return file contents as a list of lines
        skip_unicode (bool): True to report unicode errors and continue

    Returns:
        iter of str: List of lines from the file with newline removed; str if
            as_lines is False with newlines intact; or None if a unicode error
            occurred

    Raises:
        UnicodeDecodeError: Unicode error occurred when reading and
            skip_unicode is False
    """
    # Read as UTF-8 explicitly so behaviour does not depend on the locale
    with open(fname, encoding='utf-8') as inf:
        try:
            if as_lines:
                # Strip only the trailing newline; keep other whitespace
                return [line.rstrip('\n') for line in inf.readlines()]
            else:
                return inf.read()
        except UnicodeDecodeError as e:
            if not skip_unicode:
                # Was the bare name 'raises' (a NameError); re-raise the
                # original UnicodeDecodeError for the caller to handle
                raise
            print("Failed on file '%s': %s" % (fname, e))
            return None
|
||||||
|
|
||||||
def cleanup_empty_blocks(header_path, args):
|
def cleanup_empty_blocks(header_path, args):
|
||||||
"""Clean up empty conditional blocks
|
"""Clean up empty conditional blocks
|
||||||
|
|
||||||
|
@ -296,11 +324,8 @@ def cleanup_empty_blocks(header_path, args):
|
||||||
args: program arguments
|
args: program arguments
|
||||||
"""
|
"""
|
||||||
pattern = re.compile(r'^\s*#\s*if.*$\n^\s*#\s*endif.*$\n*', flags=re.M)
|
pattern = re.compile(r'^\s*#\s*if.*$\n^\s*#\s*endif.*$\n*', flags=re.M)
|
||||||
with open(header_path) as f:
|
data = read_file(header_path, as_lines=False, skip_unicode=True)
|
||||||
try:
|
if data is None:
|
||||||
data = f.read()
|
|
||||||
except UnicodeDecodeError as e:
|
|
||||||
print("Failed on file %s': %s" % (header_path, e))
|
|
||||||
return
|
return
|
||||||
|
|
||||||
new_data = pattern.sub('\n', data)
|
new_data = pattern.sub('\n', data)
|
||||||
|
@ -311,6 +336,7 @@ def cleanup_empty_blocks(header_path, args):
|
||||||
if args.dry_run:
|
if args.dry_run:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if new_data != data:
|
||||||
write_file(header_path, new_data)
|
write_file(header_path, new_data)
|
||||||
|
|
||||||
def cleanup_one_header(header_path, patterns, args):
|
def cleanup_one_header(header_path, patterns, args):
|
||||||
|
@ -322,11 +348,8 @@ def cleanup_one_header(header_path, patterns, args):
|
||||||
patterns are deleted.
|
patterns are deleted.
|
||||||
args: program arguments
|
args: program arguments
|
||||||
"""
|
"""
|
||||||
with open(header_path) as f:
|
lines = read_file(header_path, skip_unicode=True)
|
||||||
try:
|
if lines is None:
|
||||||
lines = f.readlines()
|
|
||||||
except UnicodeDecodeError as e:
|
|
||||||
print("Failed on file %s': %s" % (header_path, e))
|
|
||||||
return
|
return
|
||||||
|
|
||||||
matched = []
|
matched = []
|
||||||
|
@ -416,8 +439,7 @@ def cleanup_one_extra_option(defconfig_path, configs, args):
|
||||||
start = 'CONFIG_SYS_EXTRA_OPTIONS="'
|
start = 'CONFIG_SYS_EXTRA_OPTIONS="'
|
||||||
end = '"\n'
|
end = '"\n'
|
||||||
|
|
||||||
with open(defconfig_path) as f:
|
lines = read_file(defconfig_path)
|
||||||
lines = f.readlines()
|
|
||||||
|
|
||||||
for i, line in enumerate(lines):
|
for i, line in enumerate(lines):
|
||||||
if line.startswith(start) and line.endswith(end):
|
if line.startswith(start) and line.endswith(end):
|
||||||
|
@ -479,8 +501,7 @@ def cleanup_whitelist(configs, args):
|
||||||
if not confirm(args, 'Clean up whitelist entries?'):
|
if not confirm(args, 'Clean up whitelist entries?'):
|
||||||
return
|
return
|
||||||
|
|
||||||
with open(os.path.join('scripts', 'config_whitelist.txt')) as f:
|
lines = read_file(os.path.join('scripts', 'config_whitelist.txt'))
|
||||||
lines = f.readlines()
|
|
||||||
|
|
||||||
lines = [x for x in lines if x.strip() not in configs]
|
lines = [x for x in lines if x.strip() not in configs]
|
||||||
|
|
||||||
|
@ -506,8 +527,7 @@ def cleanup_readme(configs, args):
|
||||||
for config in configs:
|
for config in configs:
|
||||||
patterns.append(re.compile(r'^\s+%s' % config))
|
patterns.append(re.compile(r'^\s+%s' % config))
|
||||||
|
|
||||||
with open('README') as f:
|
lines = read_file('README')
|
||||||
lines = f.readlines()
|
|
||||||
|
|
||||||
found = False
|
found = False
|
||||||
newlines = []
|
newlines = []
|
||||||
|
@ -615,7 +635,7 @@ class KconfigParser:
|
||||||
"""
|
"""
|
||||||
arch = ''
|
arch = ''
|
||||||
cpu = ''
|
cpu = ''
|
||||||
for line in open(self.dotconfig):
|
for line in read_file(self.dotconfig):
|
||||||
m = self.re_arch.match(line)
|
m = self.re_arch.match(line)
|
||||||
if m:
|
if m:
|
||||||
arch = m.group(1)
|
arch = m.group(1)
|
||||||
|
@ -717,11 +737,9 @@ class KconfigParser:
|
||||||
else:
|
else:
|
||||||
autoconf_path = self.autoconf
|
autoconf_path = self.autoconf
|
||||||
|
|
||||||
with open(self.dotconfig) as f:
|
dotconfig_lines = read_file(self.dotconfig)
|
||||||
dotconfig_lines = f.readlines()
|
|
||||||
|
|
||||||
with open(autoconf_path) as f:
|
autoconf_lines = read_file(autoconf_path)
|
||||||
autoconf_lines = f.readlines()
|
|
||||||
|
|
||||||
for config in self.configs:
|
for config in self.configs:
|
||||||
result = self.parse_one_config(config, dotconfig_lines,
|
result = self.parse_one_config(config, dotconfig_lines,
|
||||||
|
@ -775,8 +793,7 @@ class KconfigParser:
|
||||||
|
|
||||||
log = ''
|
log = ''
|
||||||
|
|
||||||
with open(self.defconfig) as f:
|
defconfig_lines = read_file(self.defconfig)
|
||||||
defconfig_lines = f.readlines()
|
|
||||||
|
|
||||||
for (action, value) in self.results:
|
for (action, value) in self.results:
|
||||||
if action != ACTION_MOVE:
|
if action != ACTION_MOVE:
|
||||||
|
@ -978,8 +995,7 @@ class Slot:
|
||||||
def do_build_db(self):
|
def do_build_db(self):
|
||||||
"""Add the board to the database"""
|
"""Add the board to the database"""
|
||||||
configs = {}
|
configs = {}
|
||||||
with open(os.path.join(self.build_dir, AUTO_CONF_PATH)) as fd:
|
for line in read_file(os.path.join(self.build_dir, AUTO_CONF_PATH)):
|
||||||
for line in fd.readlines():
|
|
||||||
if line.startswith('CONFIG'):
|
if line.startswith('CONFIG'):
|
||||||
config, value = line.split('=', 1)
|
config, value = line.split('=', 1)
|
||||||
configs[config] = value.rstrip()
|
configs[config] = value.rstrip()
|
||||||
|
@ -1297,8 +1313,7 @@ def check_imply_rule(kconf, config, imply_config):
|
||||||
if cwd and fname.startswith(cwd):
|
if cwd and fname.startswith(cwd):
|
||||||
fname = fname[len(cwd) + 1:]
|
fname = fname[len(cwd) + 1:]
|
||||||
file_line = ' at %s:%d' % (fname, linenum)
|
file_line = ' at %s:%d' % (fname, linenum)
|
||||||
with open(fname) as fd:
|
data = read_file(fname)
|
||||||
data = fd.read().splitlines()
|
|
||||||
if data[linenum - 1] != 'config %s' % imply_config:
|
if data[linenum - 1] != 'config %s' % imply_config:
|
||||||
return None, 0, 'bad sym format %s%s' % (data[linenum], file_line)
|
return None, 0, 'bad sym format %s%s' % (data[linenum], file_line)
|
||||||
return fname, linenum, 'adding%s' % file_line
|
return fname, linenum, 'adding%s' % file_line
|
||||||
|
@ -1315,7 +1330,7 @@ def add_imply_rule(config, fname, linenum):
|
||||||
Message indicating the result
|
Message indicating the result
|
||||||
"""
|
"""
|
||||||
file_line = ' at %s:%d' % (fname, linenum)
|
file_line = ' at %s:%d' % (fname, linenum)
|
||||||
data = open(fname).read().splitlines()
|
data = read_file(fname)
|
||||||
linenum -= 1
|
linenum -= 1
|
||||||
|
|
||||||
for offset, line in enumerate(data[linenum:]):
|
for offset, line in enumerate(data[linenum:]):
|
||||||
|
@ -1368,8 +1383,7 @@ def read_database():
|
||||||
all_defconfigs = set()
|
all_defconfigs = set()
|
||||||
|
|
||||||
defconfig_db = collections.defaultdict(set)
|
defconfig_db = collections.defaultdict(set)
|
||||||
with open(CONFIG_DATABASE) as fd:
|
for line in read_file(CONFIG_DATABASE):
|
||||||
for line in fd.readlines():
|
|
||||||
line = line.rstrip()
|
line = line.rstrip()
|
||||||
if not line: # Separator between defconfigs
|
if not line: # Separator between defconfigs
|
||||||
config_db[defconfig] = configs
|
config_db[defconfig] = configs
|
||||||
|
@ -1578,8 +1592,7 @@ def do_find_config(config_list):
|
||||||
all_configs, all_defconfigs, config_db, defconfig_db = read_database()
|
all_configs, all_defconfigs, config_db, defconfig_db = read_database()
|
||||||
|
|
||||||
# Get the whitelist
|
# Get the whitelist
|
||||||
with open('scripts/config_whitelist.txt') as inf:
|
adhoc_configs = set(read_file('scripts/config_whitelist.txt'))
|
||||||
adhoc_configs = set(inf.read().splitlines())
|
|
||||||
|
|
||||||
# Start with all defconfigs
|
# Start with all defconfigs
|
||||||
out = all_defconfigs
|
out = all_defconfigs
|
||||||
|
|
Loading…
Reference in a new issue