#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Run me like this: ./create_manpage_completions.py /usr/share/man/man{1,8}/* > man_completions.fish

"""
<OWNER> = Siteshwar Vashisht
<YEAR> = 2012

Copyright (c) 2012, Siteshwar Vashisht
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""

import string, sys, re, os.path, bz2, gzip, traceback, getopt, errno, codecs
from deroff import Deroffer

lzma_available = True
try:
    try:
        import lzma
    except ImportError:
        from backports import lzma
except ImportError:
    lzma_available = False

# Whether we're Python 3
IS_PY3 = sys.version_info[0] >= 3

# This gets set to the name of the command that we are currently executing
CMDNAME = ""

# Information used to track which of our parsers were successful
PARSER_INFO = {}

# built_command writes into this global variable, yuck
built_command_output = []

# Diagnostic output
diagnostic_output = []
diagnostic_indent = 0

# Three diagnostic verbosity levels
VERY_VERBOSE, BRIEF_VERBOSE, NOT_VERBOSE = 2, 1, 0

# Pick some reasonable default values for settings
global VERBOSITY, WRITE_TO_STDOUT, DEROFF_ONLY
VERBOSITY, WRITE_TO_STDOUT, DEROFF_ONLY = NOT_VERBOSE, False, False

def add_diagnostic(dgn, msg_verbosity=VERY_VERBOSE):
    # Add a diagnostic message, if msg_verbosity <= VERBOSITY
    if msg_verbosity <= VERBOSITY:
        diagnostic_output.append(' '*diagnostic_indent + dgn)

def flush_diagnostics(where):
    if diagnostic_output:
        output_str = '\n'.join(diagnostic_output) + '\n'
        where.write(output_str)
        diagnostic_output[:] = []

# Make sure we don't output the same completion multiple times, which can happen
# For example, xsubpp.1.gz and xsubpp5.10.1.gz
# This maps commands to lists of completions
already_output_completions = {}

def compile_and_search(regex, input):
    options_section_regex = re.compile(regex, re.DOTALL)
    options_section_matched = re.search(options_section_regex, input)
    return options_section_matched

def unquote_double_quotes(data):
    if len(data) < 2:
        return data
    if data[0] == '"' and data[len(data)-1] == '"':
        data = data[1:len(data)-1]
    return data

def unquote_single_quotes(data):
    if len(data) < 2:
        return data
    if data[0] == '`' and data[len(data)-1] == '\'':
        data = data[1:len(data)-1]
    return data

# Make a string of characters that are deemed safe in fish without needing to be escaped
# Note that space is not included
g_fish_safe_chars = frozenset(string.ascii_letters + string.digits + '_+-|/:=@~')

def fish_escape_single_quote(str):
    # Escape a string if necessary so that it can be put in single quotes
    # If it has no non-safe chars, there's nothing to do
    if g_fish_safe_chars.issuperset(str):
        return str

    str = str.replace('\\', '\\\\') # Replace one backslash with two
    str = str.replace("'", "\\'") # Replace one single quote with a backslash-single-quote
    return "'" + str + "'"
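
# Illustrative behavior (examples assumed, not taken from a real man page):
#   fish_escape_single_quote('ignore-case')  ->  ignore-case       (only safe chars, returned as-is)
#   fish_escape_single_quote("it's a test")  ->  'it\'s a test'    (quoted, embedded quote escaped)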

# Make a string Unicode by attempting to decode it as latin-1, or UTF8. See #658
def lossy_unicode(s):
    # All strings are unicode in Python 3
    if IS_PY3 or isinstance(s, unicode): return s
    try:
        return s.decode('latin-1')
    except UnicodeDecodeError:
        pass
    try:
        return s.decode('utf-8')
    except UnicodeDecodeError:
        pass
    return s.decode('latin-1', 'ignore')

def output_complete_command(cmdname, args, description, output_list):
    comps = ['complete -c', cmdname]
    comps.extend(args)
    if description:
        comps.append('--description')
        comps.append(description)
    output_list.append(lossy_unicode(' ').join([lossy_unicode(c) for c in comps]))
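
# For a hypothetical option "-i, --ignore-case" on a command named grep, the line
# appended to output_list looks roughly like this (example only, not from a real page):
#   complete -c grep -s i -l ignore-case --description 'Ignore case distinctions.'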

def built_command(options, description):
    # print "Options are: ", options
    man_optionlist = re.split(" |,|\"|=|[|]", options)
    fish_options = []
    for option in man_optionlist:
        option = option.strip()

        # Skip some problematic cases
        if option in ['-', '--']: continue

        if option.startswith('--'):
            # New style long option (--recursive)
            fish_options.append('-l ' + fish_escape_single_quote(option[2:]))
        elif option.startswith('-') and len(option) == 2:
            # New style short option (-r)
            fish_options.append('-s ' + fish_escape_single_quote(option[1:]))
        elif option.startswith('-') and len(option) > 2:
            # Old style long option (-recursive)
            fish_options.append('-o ' + fish_escape_single_quote(option[1:]))

    # Determine which options are new (not already in existing_options)
    # Then add those to the existing options
    existing_options = already_output_completions.setdefault(CMDNAME, set())
    fish_options = [opt for opt in fish_options if opt not in existing_options]
    existing_options.update(fish_options)

    # Maybe it's all for naught
    if not fish_options: return

    # Here's what we'll use to truncate if necessary
    max_description_width = 63
    if IS_PY3:
        truncation_suffix = '… [See Man Page]'
    else:
        ELLIPSIS_CODE_POINT = 0x2026
        truncation_suffix = unichr(ELLIPSIS_CODE_POINT) + unicode(' [See Man Page]')

    # Try to include as many whole sentences as will fit
    # Clean up some probably bogus escapes in the process
    clean_desc = description.replace("\\'", "'").replace("\\.", ".")
    sentences = clean_desc.split('.')

    # Clean up "sentences" that are just whitespace
    # But don't let it be empty
    sentences = [x for x in sentences if x.strip()]
    if not sentences: sentences = ['']

    udot = lossy_unicode('.')
    uspace = lossy_unicode(' ')

    truncated_description = lossy_unicode(sentences[0]) + udot
    for line in sentences[1:]:
        if not line: continue
        proposed_description = lossy_unicode(truncated_description) + uspace + lossy_unicode(line) + udot
        if len(proposed_description) <= max_description_width:
            # It fits
            truncated_description = proposed_description
        else:
            # No fit
            break

    # If the first sentence does not fit, truncate if necessary
    if len(truncated_description) > max_description_width:
        prefix_len = max_description_width - len(truncation_suffix)
        truncated_description = truncated_description[:prefix_len] + truncation_suffix

    # Escape some more things
    truncated_description = fish_escape_single_quote(truncated_description)
    escaped_cmd = fish_escape_single_quote(CMDNAME)

    output_complete_command(escaped_cmd, fish_options, truncated_description, built_command_output)
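
# Rough illustration of the flow above, with a hypothetical option string
# (not taken from any real man page):
#   built_command('-f, --force', 'Force the operation. Never prompt.')
# splits the option string into ['-f', '--force'], producing the fish arguments
# "-s f" and "-l force", keeps whole sentences of the description while they fit
# within 63 characters, and appends the finished 'complete' line to built_command_output.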

def remove_groff_formatting(data):
    data = data.replace("\\fI","")
    data = data.replace("\\fP","")
    data = data.replace("\\f1","")
    data = data.replace("\\fB","")
    data = data.replace("\\fR","")
    data = data.replace("\\e","")
    data = re.sub(".PD( \d+)","",data)
    data = data.replace(".BI","")
    data = data.replace(".BR","")
    data = data.replace("0.5i","")
    data = data.replace(".rb","")
    data = data.replace("\\^","")
    data = data.replace("{ ","")
    data = data.replace(" }","")
    data = data.replace("\ ","")
    data = data.replace("\-","-")
    data = data.replace("\&","")
    data = data.replace(".B","")
    data = data.replace("\-","-")
    data = data.replace(".I","")
    data = data.replace("\f","")
    return data
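
# Illustrative example (assumed groff input, not from a real page): given the text
#   \fB\-\-color\fR=\fIWHEN\fP
# this function returns
#   --color=WHEN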

class ManParser:
    def is_my_type(self, manpage):
        return False

    def parse_man_page(self, manpage):
        return False

    def name(self):
        return "no-name"

class Type1ManParser(ManParser):
    def is_my_type(self, manpage):
        # print manpage
        options_section_matched = compile_and_search("\.SH \"OPTIONS\"(.*?)", manpage)

        if options_section_matched == None:
            return False
        else:
            return True
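
    # A Type1 options section roughly follows this groff shape (illustrative sketch,
    # not from a real page): each option sits in a .PP / .RS 4 / .RE group.
    #   .SH "OPTIONS"
    #   .PP
    #   \-\-verbose
    #   .RS 4
    #   Print more output\&.
    #   .RE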

    def parse_man_page(self, manpage):
        options_section_regex = re.compile("\.SH \"OPTIONS\"(.*?)(\.SH|\Z)", re.DOTALL)
        options_section_matched = re.search(options_section_regex, manpage)

        options_section = options_section_matched.group(0)
        # print options_section
        options_parts_regex = re.compile("\.PP(.*?)\.RE", re.DOTALL)
        options_matched = re.search(options_parts_regex, options_section)
        # print options_matched
        add_diagnostic('Command is ' + CMDNAME)

        if options_matched == None:
            add_diagnostic('Unable to find options')
            if self.fallback(options_section):
                return True
            elif self.fallback2(options_section):
                return True
            return False

        while options_matched != None:
            data = options_matched.group(1)
            last_dotpp_index = data.rfind(".PP")
            if last_dotpp_index != -1:
                data = data[last_dotpp_index+3:]

            data = remove_groff_formatting(data)
            data = data.split(".RS 4")
            if len(data) > 1: # and len(data[1]) <= 300
                optionName = data[0].strip()

                if optionName.find("-") == -1:
                    add_diagnostic(optionName + " doesn't contain -")
                else:
                    optionName = unquote_double_quotes(optionName)
                    optionName = unquote_single_quotes(optionName)
                    optionDescription = data[1].strip().replace("\n"," ")
                    built_command(optionName, optionDescription)

            else:
                add_diagnostic('Unable to split option from description')
                return False

            options_section = options_section[options_matched.end()-3:]
            options_matched = re.search(options_parts_regex, options_section)

        # All option blocks were consumed; report success like the other parsers do
        return True

    def fallback(self, options_section):
        add_diagnostic('Falling Back')
        options_parts_regex = re.compile("\.TP( \d+)?(.*?)\.TP", re.DOTALL)
        options_matched = re.search(options_parts_regex, options_section)
        if options_matched == None:
            add_diagnostic('Still not found')
            return False
        while options_matched != None:
            data = options_matched.group(2)
            data = remove_groff_formatting(data)
            data = data.strip()
            data = data.split("\n",1)
            if len(data) > 1 and len(data[1].strip()) > 0: # and len(data[1]) < 400
                optionName = data[0].strip()
                if optionName.find("-") == -1:
                    add_diagnostic(optionName + " doesn't contain -")
                else:
                    optionName = unquote_double_quotes(optionName)
                    optionName = unquote_single_quotes(optionName)
                    optionDescription = data[1].strip().replace("\n"," ")
                    built_command(optionName, optionDescription)
            else:
                add_diagnostic('Unable to split option from description')
                return False

            options_section = options_section[options_matched.end()-3:]
            options_matched = re.search(options_parts_regex, options_section)
        return True

    def fallback2(self, options_section):
        add_diagnostic('Falling Back2')
        ix_remover_regex = re.compile("\.IX.*")
        trailing_num_regex = re.compile('\\d+$')
        options_parts_regex = re.compile("\.IP (.*?)\.IP", re.DOTALL)

        options_section = re.sub(ix_remover_regex, "", options_section)
        options_matched = re.search(options_parts_regex, options_section)
        if options_matched == None:
            add_diagnostic('Still not found2')
            return False
        while options_matched != None:
            data = options_matched.group(1)

            data = remove_groff_formatting(data)
            data = data.strip()
            data = data.split("\n",1)
            if len(data) > 1 and len(data[1].strip()) > 0: # and len(data[1]) < 400
                optionName = re.sub(trailing_num_regex, "", data[0].strip())

                if '-' not in optionName:
                    add_diagnostic(optionName + " doesn't contain -")
                else:
                    optionName = optionName.strip()
                    optionName = unquote_double_quotes(optionName)
                    optionName = unquote_single_quotes(optionName)
                    optionDescription = data[1].strip().replace("\n"," ")
                    built_command(optionName, optionDescription)
            else:
                add_diagnostic('Unable to split option from description')
                return False

            options_section = options_section[options_matched.end()-3:]
            options_matched = re.search(options_parts_regex, options_section)
        return True

    def name(self):
        return "Type1"

class Type2ManParser(ManParser):
    def is_my_type(self, manpage):
        options_section_matched = compile_and_search("\.SH OPTIONS(.*?)", manpage)

        if options_section_matched == None:
            return False
        else:
            return True
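
    # Type2 pages use an unquoted .SH OPTIONS heading with .TP or .IP entries,
    # roughly like this (illustrative sketch, not from a real page):
    #   .SH OPTIONS
    #   .TP
    #   \fB\-v\fR, \fB\-\-verbose\fR
    #   Explain what is being done.
    #   .TP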

    def parse_man_page(self, manpage):
        options_section_regex = re.compile("\.SH OPTIONS(.*?)(\.SH|\Z)", re.DOTALL)
        options_section_matched = re.search(options_section_regex, manpage)

        options_section = options_section_matched.group(1)

        options_parts_regex = re.compile("\.[I|T]P( \d+(\.\d)?i?)?(.*?)\.[I|T]P", re.DOTALL)
        options_matched = re.search(options_parts_regex, options_section)
        add_diagnostic('Command is ' + CMDNAME)

        if options_matched == None:
            add_diagnostic(self.name() + ': Unable to find options')
            return False

        while options_matched != None:
            data = options_matched.group(3)

            data = remove_groff_formatting(data)
            data = data.strip()
            data = data.split("\n",1)
            if len(data) > 1 and len(data[1].strip()) > 0: # and len(data[1]) < 400
                optionName = data[0].strip()
                if '-' not in optionName:
                    add_diagnostic(optionName + " doesn't contain -")
                else:
                    optionName = unquote_double_quotes(optionName)
                    optionName = unquote_single_quotes(optionName)
                    optionDescription = data[1].strip().replace("\n"," ")
                    built_command(optionName, optionDescription)
            else:
                add_diagnostic('Unable to split option from description')

            options_section = options_section[options_matched.end()-3:]
            options_matched = re.search(options_parts_regex, options_section)

        # Options were found and consumed; report success
        return True

    def name(self):
        return "Type2"

class Type3ManParser(ManParser):
    def is_my_type(self, manpage):
        options_section_matched = compile_and_search("\.SH DESCRIPTION(.*?)", manpage)

        if options_section_matched == None:
            return False
        else:
            return True

    def parse_man_page(self, manpage):
        options_section_regex = re.compile("\.SH DESCRIPTION(.*?)(\.SH|\Z)", re.DOTALL)
        options_section_matched = re.search(options_section_regex, manpage)

        options_section = options_section_matched.group(1)
        options_parts_regex = re.compile("\.TP(.*?)\.TP", re.DOTALL)
        options_matched = re.search(options_parts_regex, options_section)
        add_diagnostic('Command is ' + CMDNAME)

        if options_matched == None:
            add_diagnostic('Unable to find options section')
            return False

        while options_matched != None:
            data = options_matched.group(1)

            data = remove_groff_formatting(data)
            data = data.strip()
            data = data.split("\n",1)

            if len(data) > 1: # and len(data[1]) < 400
                optionName = data[0].strip()
                if optionName.find("-") == -1:
                    add_diagnostic(optionName + " doesn't contain -")
                else:
                    optionName = unquote_double_quotes(optionName)
                    optionName = unquote_single_quotes(optionName)
                    optionDescription = data[1].strip().replace("\n"," ")
                    built_command(optionName, optionDescription)

            else:
                add_diagnostic('Unable to split option from description')
                return False

            options_section = options_section[options_matched.end()-3:]
            options_matched = re.search(options_parts_regex, options_section)

        # Options were found and consumed; report success
        return True

    def name(self):
        return "Type3"

class Type4ManParser(ManParser):
    def is_my_type(self, manpage):
        options_section_matched = compile_and_search("\.SH FUNCTION LETTERS(.*?)", manpage)

        if options_section_matched == None:
            return False
        else:
            return True

    def parse_man_page(self, manpage):
        options_section_regex = re.compile("\.SH FUNCTION LETTERS(.*?)(\.SH|\Z)", re.DOTALL)
        options_section_matched = re.search(options_section_regex, manpage)

        options_section = options_section_matched.group(1)
        options_parts_regex = re.compile("\.TP(.*?)\.TP", re.DOTALL)
        options_matched = re.search(options_parts_regex, options_section)
        add_diagnostic('Command is ' + CMDNAME)

        if options_matched == None:
            add_diagnostic('Unable to find options section')
            return False

        while options_matched != None:
            data = options_matched.group(1)

            data = remove_groff_formatting(data)
            data = data.strip()
            data = data.split("\n",1)

            if len(data) > 1: # and len(data[1]) < 400
                optionName = data[0].strip()
                if optionName.find("-") == -1:
                    add_diagnostic(optionName + " doesn't contain - ")
                else:
                    optionName = unquote_double_quotes(optionName)
                    optionName = unquote_single_quotes(optionName)
                    optionDescription = data[1].strip().replace("\n"," ")
                    built_command(optionName, optionDescription)

            else:
                add_diagnostic('Unable to split option from description')
                return False

            options_section = options_section[options_matched.end()-3:]
            options_matched = re.search(options_parts_regex, options_section)

        return True

    def name(self):
        return "Type4"

class TypeDarwinManParser(ManParser):
    def is_my_type(self, manpage):
        options_section_matched = compile_and_search("\.S[hH] DESCRIPTION", manpage)
        return options_section_matched != None
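
    # Darwin pages are written with mdoc macros; each option is introduced by ".It Fl",
    # roughly like this (illustrative sketch, not from a real page):
    #   .Sh DESCRIPTION
    #   .Bl -tag -width indent
    #   .It Fl v
    #   Verbose mode.
    #   .El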

    def trim_groff(self, line):
        # Remove initial period
        if line.startswith('.'):
            line = line[1:]
        # Skip leading groff crud
        while re.match('[A-Z][a-z]\s', line):
            line = line[3:]

        # If the line ends with a space and then a period or comma, then erase the space
        # This hack handles lines of the form '.Ar projectname .'
        if line.endswith(' ,') or line.endswith(' .'):
            line = line[:-2] + line[-1]
        return line

    def count_argument_dashes(self, line):
        # Determine how many dashes the option has: after the leading ".It ",
        # count the run of "Fl " (flag) macros, each of which stands for one dash
        result = 0
        if line.startswith('.'):
            line = line[4:]
            while line.startswith('Fl '):
                result = result + 1
                line = line[3:]
        return result
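
    # Illustrative examples (assumed inputs, not from a real page):
    #   count_argument_dashes('.It Fl v')        -> 1   (i.e. -v)
    #   count_argument_dashes('.It Fl Fl force') -> 2   (i.e. --force)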

    # Replace some groff escapes. There's a lot we don't bother to handle.
    def groff_replace_escapes(self, line):
        line = line.replace('.Nm', CMDNAME)
        line = line.replace('\\ ', ' ')
        line = line.replace('\& ', '')
        line = line.replace(r'.\"', '')
        return line

    def is_option(self, line):
        return line.startswith('.It Fl')

    def parse_man_page(self, manpage):
        got_something = False
        lines = manpage.splitlines()
        # Discard lines until we get to ".Sh DESCRIPTION"
        while lines and not (lines[0].startswith('.Sh DESCRIPTION') or lines[0].startswith('.SH DESCRIPTION')):
            lines.pop(0)

        while lines:
            # Pop until we get to the next option
            while lines and not self.is_option(lines[0]):
                lines.pop(0)

            if not lines:
                continue

            # Get the line and clean it up
            line = lines.pop(0)

            # Try to guess how many dashes this argument has
            dash_count = self.count_argument_dashes(line)

            line = self.groff_replace_escapes(line)
            line = self.trim_groff(line)
            line = line.strip()
            if not line: continue

            # Extract the name
            name = line.split(None, 2)[0]

            # Extract the description
            desc_lines = []
            while lines and not self.is_option(lines[0]):
                line = lossy_unicode(lines.pop(0).strip())
                if line.startswith('.'):
                    line = self.groff_replace_escapes(line)
                    line = self.trim_groff(line).strip()
                if line:
                    desc_lines.append(line)
            desc = ' '.join(desc_lines)

            if name == '-':
                # Skip double -- arguments
                continue
            elif len(name) > 1:
                # Output the command
                built_command(('-' * dash_count) + name, desc)
                got_something = True
            elif len(name) == 1:
                built_command('-' + name, desc)
                got_something = True

        return got_something

    def name(self):
        return "Darwin man parser"

class TypeDeroffManParser(ManParser):
    def is_my_type(self, manpage):
        return True # We're optimists

    def is_option(self, line):
        return line.startswith('-')

    def could_be_description(self, line):
        return len(line) > 0 and not line.startswith('-')

    def parse_man_page(self, manpage):
        d = Deroffer()
        d.deroff(manpage)
        output = d.get_output()
        lines = output.split('\n')

        got_something = False

        # Discard lines until we get to DESCRIPTION or OPTIONS
        while lines and not (lines[0].startswith('DESCRIPTION') or lines[0].startswith('OPTIONS') or lines[0].startswith('COMMAND OPTIONS')):
            lines.pop(0)

        # Look for BUGS and stop there
        for idx in range(len(lines)):
            line = lines[idx]
            if line.startswith('BUGS'):
                # Drop remaining elements
                lines[idx:] = []
                break

        while lines:
            # Pop until we get to the next option
            while lines and not self.is_option(lines[0]):
                line = lines.pop(0)

            if not lines:
                continue

            options = lines.pop(0)

            # Pop until we get to either an empty line or a line starting with -
            description = ''
            while lines and self.could_be_description(lines[0]):
                if description: description += ' '
                description += lines.pop(0)

            built_command(options, description)
            got_something = True

        return got_something

    def name(self):
        return "Deroffing man parser"

# Return whether the file at the given path is overwritable
# Raises IOError if it cannot be opened
def file_is_overwritable(path):
    result = False
    file = open(path, 'r')
    for line in file:
        # Skip leading empty lines
        line = line.strip()
        if not line:
            continue

        # We look in the initial run of lines that start with #
        if not line.startswith('#'):
            break

        # See if this contains the magic word
        if 'Autogenerated' in line:
            result = True
            break

    file.close()
    return result
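
# A generated completion file begins with a comment header like the following, which is
# the "magic word" this check looks for (the path and parser name here are illustrative):
#   # grep
#   # Autogenerated from man page /usr/share/man/man1/grep.1.gz
#   # using Type2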

# Remove any and all autogenerated completions in the given directory
def cleanup_autogenerated_completions_in_directory(dir):
    try:
        for filename in os.listdir(dir):
            # Skip non .fish files
            if not filename.endswith('.fish'): continue
            path = os.path.join(dir, filename)
            try:
                if file_is_overwritable(path):
                    os.unlink(path)
            except IOError:
                pass
            except OSError:
                pass
    except OSError as err:
        return False

# Delete the file if it is autogenerated
def cleanup_autogenerated_file(path):
    try:
        if file_is_overwritable(path):
            os.remove(path)
    except (OSError, IOError):
        pass

def parse_manpage_at_path(manpage_path, output_directory):
    filename = os.path.basename(manpage_path)

    # Clear diagnostics
    global diagnostic_indent
    diagnostic_output[:] = []
    diagnostic_indent = 0

    # Set up some diagnostics
    add_diagnostic('Considering ' + manpage_path)
    diagnostic_indent += 1

    if manpage_path.endswith('.gz'):
        fd = gzip.open(manpage_path, 'r')
        manpage = fd.read()
        if IS_PY3: manpage = manpage.decode('latin-1')
    elif manpage_path.endswith('.bz2'):
        fd = bz2.BZ2File(manpage_path, 'r')
        manpage = fd.read()
        if IS_PY3: manpage = manpage.decode('latin-1')
    elif manpage_path.endswith('.xz') or manpage_path.endswith('.lzma'):
        if not lzma_available:
            return
        fd = lzma.LZMAFile(str(manpage_path), 'r')
        manpage = fd.read()
        if IS_PY3: manpage = manpage.decode('latin-1')
    else:
        if IS_PY3:
            fd = open(manpage_path, 'r', encoding='latin-1')
        else:
            fd = open(manpage_path, 'r')
        manpage = fd.read()
    fd.close()

    manpage = str(manpage)

    # Get the "base" command, e.g. gcc.1.gz -> gcc
    cmd_base = CMDNAME.split('.', 1)[0]
    ignoredcommands = ["cc", "g++", "gcc", "c++", "cpp", "emacs", "gprof", "wget", "ld", "awk"]
    if cmd_base in ignoredcommands:
        return

    # Ignore perl's gazillion man pages
    ignored_prefixes = ['perl', 'zsh']
    for prefix in ignored_prefixes:
        if cmd_base.startswith(prefix):
            return

    # Ignore the millions of links to BUILTIN(1)
    if manpage.find('BUILTIN 1') != -1:
        return

    # Clear the output list
    built_command_output[:] = []

    if DEROFF_ONLY:
        parsers = [TypeDeroffManParser()]
    else:
        parsers = [Type1ManParser(), Type2ManParser(), Type4ManParser(), Type3ManParser(), TypeDarwinManParser(), TypeDeroffManParser()]
    parsersToTry = [p for p in parsers if p.is_my_type(manpage)]

    success = False
    if not parsersToTry:
        add_diagnostic(manpage_path + ": Not supported")
    else:
        for parser in parsersToTry:
            parser_name = parser.name()
            add_diagnostic('Trying parser ' + parser_name)
            diagnostic_indent += 1
            success = parser.parse_man_page(manpage)
            diagnostic_indent -= 1
            # Make sure empty files aren't reported as success
            if not built_command_output:
                success = False
            if success:
                PARSER_INFO.setdefault(parser_name, []).append(CMDNAME)
                break

    if success:
        if WRITE_TO_STDOUT:
            output_file = sys.stdout
        else:
            fullpath = os.path.join(output_directory, CMDNAME + '.fish')
            try:
                output_file = codecs.open(fullpath, "w", encoding="utf-8")
            except IOError as err:
                add_diagnostic("Unable to open file '%s': error(%d): %s" % (fullpath, err.errno, err.strerror))
                return False

        built_command_output.insert(0, "# " + CMDNAME)

        # Output the magic word Autogenerated so we can tell if we can overwrite this
        built_command_output.insert(1, "# Autogenerated from man page " + manpage_path)
        built_command_output.insert(2, "# using " + parser_name)
        for line in built_command_output:
            output_file.write(line)
            output_file.write('\n')
        output_file.write('\n')
        add_diagnostic(manpage_path + ' parsed successfully')
        if output_file != sys.stdout:
            output_file.close()
    else:
        parser_names = ', '.join(p.name() for p in parsersToTry)
        #add_diagnostic('%s contains no options or is unparsable' % manpage_path, BRIEF_VERBOSE)
        add_diagnostic('%s contains no options or is unparsable (tried parser %s)' % (manpage_path, parser_names), BRIEF_VERBOSE)
        # Make sure we delete any old completion
        if not WRITE_TO_STDOUT:
            fullpath = os.path.join(output_directory, CMDNAME + '.fish')
            try:
                os.remove(fullpath)
            except (OSError, IOError):
                # Ignore failure
                pass

    return success

def parse_and_output_man_pages(paths, output_directory, show_progress):
    global diagnostic_indent, CMDNAME
    paths.sort()
    total_count = len(paths)
    successful_count, index = 0, 0
    padding_len = len(str(total_count))
    last_progress_string_length = 0
    if show_progress and not WRITE_TO_STDOUT:
        print("Parsing man pages and writing completions to {0}".format(output_directory))

    man_page_suffixes = set([os.path.splitext(m)[1][1:] for m in paths])
    lzma_xz_occurs = "xz" in man_page_suffixes or "lzma" in man_page_suffixes
    if lzma_xz_occurs and not lzma_available:
        add_diagnostic('At least one man page is compressed with lzma or xz, but the "lzma" module is not available.'
                       ' Any man page compressed with either will be skipped.',
                       NOT_VERBOSE)
        flush_diagnostics(sys.stderr)

    for manpage_path in paths:
        index += 1

        # Get the "base" command, e.g. gcc.1.gz -> gcc
        man_file_name = os.path.basename(manpage_path)
        CMDNAME = man_file_name.split('.', 1)[0]
        output_file_name = CMDNAME + '.fish'

        # Show progress if we're doing that
        if show_progress:
            progress_str = ' {0} / {1} : {2}'.format((str(index).rjust(padding_len)), total_count, man_file_name)
            # Pad on the right with spaces so we overwrite whatever we wrote last time
            padded_progress_str = progress_str.ljust(last_progress_string_length)
            last_progress_string_length = len(progress_str)
            sys.stdout.write("\r{0}\r".format(padded_progress_str))
            sys.stdout.flush()

        # Maybe we want to skip this item
        skip = False
        if not WRITE_TO_STDOUT:
            # Compute the path that we would write to
            output_path = os.path.join(output_directory, output_file_name)

        # Now skip if requested
        if skip:
            continue

        try:
            if parse_manpage_at_path(manpage_path, output_directory):
                successful_count += 1
        except IOError:
            diagnostic_indent = 0
            add_diagnostic('Cannot open ' + manpage_path)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            add_diagnostic('Error parsing %s: %s' % (manpage_path, sys.exc_info()[0]), BRIEF_VERBOSE)
            flush_diagnostics(sys.stderr)
            traceback.print_exc(file=sys.stderr)
        flush_diagnostics(sys.stderr)
    print("") # Newline after loop
    add_diagnostic("Successfully parsed %d / %d pages" % (successful_count, total_count), BRIEF_VERBOSE)
    flush_diagnostics(sys.stderr)

def get_paths_from_manpath():
    # Return all the paths to man(1), man(6) and man(8) files in the manpath
    import subprocess, os
    proc = subprocess.Popen(['manpath'], stdout=subprocess.PIPE)
    manpath, err_data = proc.communicate()
    parent_paths = manpath.decode().strip().split(':')
    if not parent_paths:
        sys.stderr.write("Unable to get the manpath (tried manpath)\n")
        sys.exit(-1)
    result = []
    for parent_path in parent_paths:
        for section in ['man1', 'man6', 'man8']:
            directory_path = os.path.join(parent_path, section)
            try:
                names = os.listdir(directory_path)
            except OSError as e:
                names = []
            names.sort()
            for name in names:
                result.append(os.path.join(directory_path, name))
    return result
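
# Note: manpath prints a colon-separated list of directories, e.g.
#   /usr/local/share/man:/usr/share/man
# (paths illustrative); each of those gets its man1/man6/man8 subdirectories scanned.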

def usage(script_name):
    print("Usage: {0} [-v, --verbose] [-s, --stdout] [-d, --directory] [-p, --progress] files...".format(script_name))
    print("""Command options are:
     -h, --help\t\tShow this help message
     -v, --verbose [0, 1, 2]\tShow debugging output to stderr. Larger is more verbose.
     -s, --stdout\tWrite all completions to stdout (trumps the --directory option)
     -d, --directory [dir]\tWrite all completions to the given directory, instead of to $XDG_DATA_HOME/fish/generated_completions
     -c, --cleanup-in [dir]\tRemove autogenerated completions from the given directory
     -m, --manpath\tProcess all man1, man6 and man8 files available in the manpath (as determined by manpath)
     -p, --progress\tShow progress
     -z\t\t\tUse only the deroffing parser
    """)

if __name__ == "__main__":
    script_name = sys.argv[0]
    try:
        opts, file_paths = getopt.gnu_getopt(sys.argv[1:], 'v:sd:hmpc:z', ['verbose=', 'stdout', 'directory=', 'cleanup-in=', 'help', 'manpath', 'progress'])
    except getopt.GetoptError as err:
        print(err.msg) # will print something like "option -a not recognized"
        usage(script_name)
        sys.exit(2)

    # Directories within which we will clean up autogenerated completions
    # This script originally wrote completions into ~/.config/fish/completions
    # Now it writes them into a separate directory
    cleanup_directories = []

    use_manpath, show_progress, custom_dir = False, False, False
    output_directory = ''
    for opt, value in opts:
        if opt in ('-v', '--verbose'):
            VERBOSITY = int(value)
        elif opt in ('-s', '--stdout'):
            WRITE_TO_STDOUT = True
        elif opt in ('-d', '--directory'):
            output_directory = value
        elif opt in ('-h', '--help'):
            usage(script_name)
            sys.exit(0)
        elif opt in ('-m', '--manpath'):
            use_manpath = True
        elif opt in ('-p', '--progress'):
            show_progress = True
        elif opt in ('-c', '--cleanup-in'):
            cleanup_directories.append(value)
        elif opt in ('-z',):
            DEROFF_ONLY = True
        else:
            assert False, "unhandled option"

    if use_manpath:
        # Fetch all man1, man6 and man8 files from the manpath
        file_paths.extend(get_paths_from_manpath())

    if cleanup_directories:
        for cleanup_dir in cleanup_directories:
            cleanup_autogenerated_completions_in_directory(cleanup_dir)

    if not file_paths:
        print("No paths specified")
        sys.exit(0)

    if not WRITE_TO_STDOUT and not output_directory:
        # Default to $XDG_DATA_HOME/fish/generated_completions/
        xdg_data_home = os.getenv('XDG_DATA_HOME', '~/.local/share')
        output_directory = os.path.expanduser(xdg_data_home + '/fish/generated_completions/')
        # Create it if it doesn't exist
        try:
            os.makedirs(output_directory)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

    parse_and_output_man_pages(file_paths, output_directory, show_progress)