Revert "doc: update Kernel documentation build system"

Unfortunately, we now see a number of now-fatal warnings about
duplicate labels, and it is often unclear how best to rewrite the
documents in question so as not to duplicate these otherwise logical
headings.

This reverts commit 10a1df3cd4.

Signed-off-by: Tom Rini <trini@konsulko.com>
Author: Tom Rini <trini@konsulko.com>
Date:   2021-01-23 12:51:37 -05:00
Parent: 757cec3a03
Commit: e716c90229

15 changed files with 158 additions and 1478 deletions


@ -133,8 +133,8 @@ B4420 has:
B4860QDS Default Settings
-------------------------
B4860QDS Switch Settings
^^^^^^^^^^^^^^^^^^^^^^^^
Switch Settings
^^^^^^^^^^^^^^^
.. code-block:: none
@ -166,8 +166,8 @@ NOR boot::
B4420QDS Default Settings
-------------------------
B4420QDS Switch Settings
^^^^^^^^^^^^^^^^^^^^^^^^
Switch Settings
^^^^^^^^^^^^^^^
.. code-block:: none

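For context: the sync being reverted had prefixed the two otherwise identical "Switch Settings" headings with the board name because sphinx.ext.autosectionlabel (enabled in the conf.py hunk below) turns every section title into a cross-reference label, and identically titled sections then produce the duplicate-label warnings mentioned in the commit message. A minimal sketch of the settings involved, with values as they appear in that hunk:

    # Sketch only -- the relevant fragment of the (pre-revert) Sphinx conf.py.
    extensions = ['sphinx.ext.autosectionlabel']

    # Prefix generated labels with the document name, so identical headings
    # in *different* documents do not collide ...
    autosectionlabel_prefix_document = True
    # ... and only label the first two heading levels.  Identical headings
    # within the same document can still yield duplicate labels, which the
    # -W option in the doc Makefile then turns into fatal errors.
    autosectionlabel_maxdepth = 2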

@ -16,8 +16,6 @@ import sys
import os
import sphinx
from subprocess import check_output
# Get Sphinx version
major, minor, patch = sphinx.version_info[:3]
@ -33,98 +31,39 @@ from load_config import loadConfig
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.3'
latex_engine = 'xelatex'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['kerneldoc', 'rstFlatTable', 'kernel_include',
'kfigure', 'sphinx.ext.ifconfig', 'automarkup',
'maintainers_include', 'sphinx.ext.autosectionlabel',
'kernel_abi', 'kernel_feat']
extensions = ['kerneldoc', 'rstFlatTable', 'kernel_include', 'kfigure']
#
# cdomain is badly broken in Sphinx 3+. Leaving it out generates *most*
# of the docs correctly, but not all. Scream bloody murder but allow
# the process to proceed; hopefully somebody will fix this properly soon.
# of the docs correctly, but not all.
#
if major >= 3:
sys.stderr.write('''WARNING: The kernel documentation build process
support for Sphinx v3.0 and above is brand new. Be prepared for
possible issues in the generated output.
''')
if (major > 3) or (minor > 0 or patch >= 2):
sys.stderr.write('''The build process with Sphinx 3+ is broken.
You will have to remove -W in doc/Makefile.
''')
# Sphinx c function parser is more pedantic with regards to type
# checking. Due to that, having macros at c:function cause problems.
# Those needed to be scaped by using c_id_attributes[] array
# Those needed to be escaped by using c_id_attributes[] array
c_id_attributes = [
# GCC Compiler types not parsed by Sphinx:
"__restrict__",
# include/linux/compiler_types.h:
"__iomem",
"__kernel",
"noinstr",
"notrace",
"__percpu",
"__rcu",
"__user",
# include/linux/compiler_attributes.h:
"__alias",
"__aligned",
"__aligned_largest",
"__always_inline",
"__assume_aligned",
"__cold",
"__attribute_const__",
"__copy",
"__pure",
"__designated_init",
"__visible",
"__printf",
"__scanf",
"__gnu_inline",
"__malloc",
"__mode",
"__no_caller_saved_registers",
"__noclone",
"__nonstring",
"__noreturn",
"__packed",
"__pure",
"__section",
"__always_unused",
# include/linux/compiler.h
"__maybe_unused",
"__used",
"__weak",
"noinline",
# include/efi.h
"EFIAPI",
# include/efi_loader.h
"__efi_runtime",
# include/linux/memblock.h:
"__init_memblock",
"__meminit",
# include/linux/init.h:
"__init",
"__ref",
# include/linux/linkage.h:
"asmlinkage",
]
else:
extensions.append('cdomain')
if major == 1 and minor < 7:
sys.stderr.write('WARNING: Sphinx 1.7 or greater will be required as of '
'the v2021.04 release\n')
# Ensure that autosectionlabel will produce unique names
autosectionlabel_prefix_document = True
autosectionlabel_maxdepth = 2
# The name of the math extension changed on Sphinx 1.4
if (major == 1 and minor > 3) or (major > 1):
@ -147,9 +86,9 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
project = 'The Linux Kernel'
copyright = 'The kernel development community'
author = 'The kernel development community'
project = 'Das U-Boot'
copyright = 'The U-Boot development community'
author = 'The U-Boot development community'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@ -260,7 +199,7 @@ except ImportError:
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
html_logo = '../tools/logos/u-boot_logo.svg'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
@ -290,7 +229,7 @@ html_context = {
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = False
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
@ -340,7 +279,7 @@ html_use_smartypants = False
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'TheLinuxKerneldoc'
htmlhelp_basename = 'TheUBootdoc'
# -- Options for LaTeX output ---------------------------------------------
@ -349,7 +288,7 @@ latex_elements = {
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '11pt',
'pointsize': '8pt',
# Latex figure (float) alignment
#'figure_align': 'htbp',
@ -362,24 +301,13 @@ latex_elements = {
'preamble': '''
% Use some font with UTF-8 support with XeLaTeX
\\usepackage{fontspec}
\\setsansfont{DejaVu Sans}
\\setromanfont{DejaVu Serif}
\\setsansfont{DejaVu Serif}
\\setromanfont{DejaVu Sans}
\\setmonofont{DejaVu Sans Mono}
'''
}
# At least one book (translations) may have Asian characters
# with are only displayed if xeCJK is used
cjk_cmd = check_output(['fc-list', '--format="%{family[0]}\n"']).decode('utf-8', 'ignore')
if cjk_cmd.find("Noto Sans CJK SC") >= 0:
print ("enabling CJK for LaTeX builder")
latex_elements['preamble'] += '''
% This is needed for translations
\\usepackage{xeCJK}
\\setCJKmainfont{Noto Sans CJK SC}
'''
# Fix reference escape troubles with Sphinx 1.4.x
if major == 1 and minor > 3:
latex_elements['preamble'] += '\\renewcommand*{\\DUrole}[2]{ #2 }\n'
@ -470,23 +398,10 @@ if major == 1 and minor < 6:
# author, documentclass [howto, manual, or own class]).
# Sorted in alphabetical order
latex_documents = [
('index', 'u-boot-hacker-manual.tex', 'U-Boot Hacker Manual',
'The U-Boot development community', 'manual'),
]
# Add all other index files from Documentation/ subdirectories
for fn in os.listdir('.'):
doc = os.path.join(fn, "index")
if os.path.exists(doc + ".rst"):
has = False
for l in latex_documents:
if l[0] == doc:
has = True
break
if not has:
latex_documents.append((doc, fn + '.tex',
'Linux %s Documentation' % fn.capitalize(),
'The kernel development community',
'manual'))
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
@ -513,7 +428,7 @@ for fn in os.listdir('.'):
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'thelinuxkernel', 'The Linux Kernel Documentation',
(master_doc, 'dasuboot', 'The U-Boot Documentation',
[author], 1)
]
@ -527,8 +442,8 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'TheLinuxKernel', 'The Linux Kernel Documentation',
author, 'TheLinuxKernel', 'One line description of project.',
(master_doc, 'DasUBoot', 'The U-Boot Documentation',
author, 'DasUBoot', 'One line description of project.',
'Miscellaneous'),
]
@ -620,13 +535,13 @@ epub_exclude_files = ['search.html']
# Grouping the document tree into PDF files. List of tuples
# (source start file, target name, title, author, options).
#
# See the Sphinx chapter of https://ralsina.me/static/manual.pdf
# See the Sphinx chapter of http://ralsina.me/static/manual.pdf
#
# FIXME: Do not add the index file here; the result will be too big. Adding
# multiple PDF files here actually tries to get the cross-referencing right
# *between* PDF files.
pdf_documents = [
('kernel-documentation', u'Kernel', u'Kernel', u'J. Random Bozo'),
('uboot-documentation', u'U-Boot', u'U-Boot', u'J. Random Bozo'),
]
# kernel-doc extension configuration for running Sphinx directly (e.g. by Read

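Both the synced and the restored conf.py key their behaviour off the Sphinx version probed at import time; the hunks above differ mainly in how much Sphinx 3 special-casing hangs off that probe. A condensed, standalone sketch of the gating as it can be read from the hunk (warning texts shortened, Sphinx 3 details such as c_id_attributes omitted):

    import sys
    import sphinx

    # Version probe from near the top of conf.py.
    major, minor, patch = sphinx.version_info[:3]

    extensions = ['kerneldoc', 'rstFlatTable', 'kernel_include', 'kfigure']

    if major >= 3:
        # Sphinx 3 reworked the C domain; the legacy cdomain extension cannot
        # be used there, so the build only warns about possible issues.
        sys.stderr.write('WARNING: Sphinx 3+ support for these docs is new\n')
    else:
        extensions.append('cdomain')

    if major == 1 and minor < 7:
        sys.stderr.write('WARNING: Sphinx 1.7 or greater will be required as '
                         'of the v2021.04 release\n')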

@ -1,290 +0,0 @@
# SPDX-License-Identifier: GPL-2.0
# Copyright 2019 Jonathan Corbet <corbet@lwn.net>
#
# Apply kernel-specific tweaks after the initial document processing
# has been done.
#
from docutils import nodes
import sphinx
from sphinx import addnodes
if sphinx.version_info[0] < 2 or \
sphinx.version_info[0] == 2 and sphinx.version_info[1] < 1:
from sphinx.environment import NoUri
else:
from sphinx.errors import NoUri
import re
from itertools import chain
#
# Python 2 lacks re.ASCII...
#
try:
ascii_p3 = re.ASCII
except AttributeError:
ascii_p3 = 0
#
# Regex nastiness. Of course.
# Try to identify "function()" that's not already marked up some
# other way. Sphinx doesn't like a lot of stuff right after a
# :c:func: block (i.e. ":c:func:`mmap()`s" flakes out), so the last
# bit tries to restrict matches to things that won't create trouble.
#
RE_function = re.compile(r'\b(([a-zA-Z_]\w+)\(\))', flags=ascii_p3)
#
# Sphinx 2 uses the same :c:type role for struct, union, enum and typedef
#
RE_generic_type = re.compile(r'\b(struct|union|enum|typedef)\s+([a-zA-Z_]\w+)',
flags=ascii_p3)
#
# Sphinx 3 uses a different C role for each one of struct, union, enum and
# typedef
#
RE_struct = re.compile(r'\b(struct)\s+([a-zA-Z_]\w+)', flags=ascii_p3)
RE_union = re.compile(r'\b(union)\s+([a-zA-Z_]\w+)', flags=ascii_p3)
RE_enum = re.compile(r'\b(enum)\s+([a-zA-Z_]\w+)', flags=ascii_p3)
RE_typedef = re.compile(r'\b(typedef)\s+([a-zA-Z_]\w+)', flags=ascii_p3)
#
# Detects a reference to a documentation page of the form Documentation/... with
# an optional extension
#
RE_doc = re.compile(r'\bDocumentation(/[\w\-_/]+)(\.\w+)*')
RE_namespace = re.compile(r'^\s*..\s*c:namespace::\s*(\S+)\s*$')
#
# Reserved C words that we should skip when cross-referencing
#
Skipnames = [ 'for', 'if', 'register', 'sizeof', 'struct', 'unsigned' ]
#
# Many places in the docs refer to common system calls. It is
# pointless to try to cross-reference them and, as has been known
# to happen, somebody defining a function by these names can lead
# to the creation of incorrect and confusing cross references. So
# just don't even try with these names.
#
Skipfuncs = [ 'open', 'close', 'read', 'write', 'fcntl', 'mmap',
'select', 'poll', 'fork', 'execve', 'clone', 'ioctl',
'socket' ]
c_namespace = ''
def markup_refs(docname, app, node):
t = node.astext()
done = 0
repl = [ ]
#
# Associate each regex with the function that will markup its matches
#
markup_func_sphinx2 = {RE_doc: markup_doc_ref,
RE_function: markup_c_ref,
RE_generic_type: markup_c_ref}
markup_func_sphinx3 = {RE_doc: markup_doc_ref,
RE_function: markup_func_ref_sphinx3,
RE_struct: markup_c_ref,
RE_union: markup_c_ref,
RE_enum: markup_c_ref,
RE_typedef: markup_c_ref}
if sphinx.version_info[0] >= 3:
markup_func = markup_func_sphinx3
else:
markup_func = markup_func_sphinx2
match_iterators = [regex.finditer(t) for regex in markup_func]
#
# Sort all references by the starting position in text
#
sorted_matches = sorted(chain(*match_iterators), key=lambda m: m.start())
for m in sorted_matches:
#
# Include any text prior to match as a normal text node.
#
if m.start() > done:
repl.append(nodes.Text(t[done:m.start()]))
#
# Call the function associated with the regex that matched this text and
# append its return to the text
#
repl.append(markup_func[m.re](docname, app, m))
done = m.end()
if done < len(t):
repl.append(nodes.Text(t[done:]))
return repl
#
# In sphinx3 we can cross-reference to C macro and function, each one with its
# own C role, but both match the same regex, so we try both.
#
def markup_func_ref_sphinx3(docname, app, match):
class_str = ['c-func', 'c-macro']
reftype_str = ['function', 'macro']
cdom = app.env.domains['c']
#
# Go through the dance of getting an xref out of the C domain
#
base_target = match.group(2)
target_text = nodes.Text(match.group(0))
xref = None
possible_targets = [base_target]
# Check if this document has a namespace, and if so, try
# cross-referencing inside it first.
if c_namespace:
possible_targets.insert(0, c_namespace + "." + base_target)
if base_target not in Skipnames:
for target in possible_targets:
if target not in Skipfuncs:
for class_s, reftype_s in zip(class_str, reftype_str):
lit_text = nodes.literal(classes=['xref', 'c', class_s])
lit_text += target_text
pxref = addnodes.pending_xref('', refdomain = 'c',
reftype = reftype_s,
reftarget = target, modname = None,
classname = None)
#
# XXX The Latex builder will throw NoUri exceptions here,
# work around that by ignoring them.
#
try:
xref = cdom.resolve_xref(app.env, docname, app.builder,
reftype_s, target, pxref,
lit_text)
except NoUri:
xref = None
if xref:
return xref
return target_text
def markup_c_ref(docname, app, match):
class_str = {# Sphinx 2 only
RE_function: 'c-func',
RE_generic_type: 'c-type',
# Sphinx 3+ only
RE_struct: 'c-struct',
RE_union: 'c-union',
RE_enum: 'c-enum',
RE_typedef: 'c-type',
}
reftype_str = {# Sphinx 2 only
RE_function: 'function',
RE_generic_type: 'type',
# Sphinx 3+ only
RE_struct: 'struct',
RE_union: 'union',
RE_enum: 'enum',
RE_typedef: 'type',
}
cdom = app.env.domains['c']
#
# Go through the dance of getting an xref out of the C domain
#
base_target = match.group(2)
target_text = nodes.Text(match.group(0))
xref = None
possible_targets = [base_target]
# Check if this document has a namespace, and if so, try
# cross-referencing inside it first.
if c_namespace:
possible_targets.insert(0, c_namespace + "." + base_target)
if base_target not in Skipnames:
for target in possible_targets:
if not (match.re == RE_function and target in Skipfuncs):
lit_text = nodes.literal(classes=['xref', 'c', class_str[match.re]])
lit_text += target_text
pxref = addnodes.pending_xref('', refdomain = 'c',
reftype = reftype_str[match.re],
reftarget = target, modname = None,
classname = None)
#
# XXX The Latex builder will throw NoUri exceptions here,
# work around that by ignoring them.
#
try:
xref = cdom.resolve_xref(app.env, docname, app.builder,
reftype_str[match.re], target, pxref,
lit_text)
except NoUri:
xref = None
if xref:
return xref
return target_text
#
# Try to replace a documentation reference of the form Documentation/... with a
# cross reference to that page
#
def markup_doc_ref(docname, app, match):
stddom = app.env.domains['std']
#
# Go through the dance of getting an xref out of the std domain
#
target = match.group(1)
xref = None
pxref = addnodes.pending_xref('', refdomain = 'std', reftype = 'doc',
reftarget = target, modname = None,
classname = None, refexplicit = False)
#
# XXX The Latex builder will throw NoUri exceptions here,
# work around that by ignoring them.
#
try:
xref = stddom.resolve_xref(app.env, docname, app.builder, 'doc',
target, pxref, None)
except NoUri:
xref = None
#
# Return the xref if we got it; otherwise just return the plain text.
#
if xref:
return xref
else:
return nodes.Text(match.group(0))
def get_c_namespace(app, docname):
source = app.env.doc2path(docname)
with open(source) as f:
for l in f:
match = RE_namespace.search(l)
if match:
return match.group(1)
return ''
def auto_markup(app, doctree, name):
global c_namespace
c_namespace = get_c_namespace(app, name)
#
# This loop could eventually be improved on. Someday maybe we
# want a proper tree traversal with a lot of awareness of which
# kinds of nodes to prune. But this works well for now.
#
# The nodes.literal test catches ``literal text``, its purpose is to
# avoid adding cross-references to functions that have been explicitly
# marked with cc:func:.
#
for para in doctree.traverse(nodes.paragraph):
for node in para.traverse(nodes.Text):
if not isinstance(node.parent, nodes.literal):
node.parent.replace(node, markup_refs(name, app, node))
def setup(app):
app.connect('doctree-resolved', auto_markup)
return {
'parallel_read_safe': True,
'parallel_write_safe': True,
}

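The automarkup extension, deleted wholesale above, works in two stages: a set of regexes finds likely cross-reference candidates in every paragraph Text node, and the C/std Sphinx domains are then asked to resolve each candidate into a pending_xref, with nodes.literal parents skipped so already-marked-up text is left alone. The regex stage can be exercised on its own; a small standalone sketch using the same function pattern (the sample text is made up):

    import re

    # The pattern automarkup used to spot "function()" style references.
    RE_function = re.compile(r'\b(([a-zA-Z_]\w+)\(\))', flags=re.ASCII)

    text = "Call mmap() before munmap(); open() is handled via Skipfuncs."
    print([m.group(2) for m in RE_function.finditer(text)])
    # ['mmap', 'munmap', 'open'] -- 'open' is then dropped because it is in
    # the Skipfuncs list, and text under a nodes.literal is never scanned.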

@ -40,94 +40,14 @@ from sphinx import addnodes
from sphinx.domains.c import c_funcptr_sig_re, c_sig_re
from sphinx.domains.c import CObject as Base_CObject
from sphinx.domains.c import CDomain as Base_CDomain
from itertools import chain
import re
__version__ = '1.1'
__version__ = '1.0'
# Get Sphinx version
major, minor, patch = sphinx.version_info[:3]
# Namespace to be prepended to the full name
namespace = None
#
# Handle trivial newer c domain tags that are part of Sphinx 3.1 c domain tags
# - Store the namespace if ".. c:namespace::" tag is found
#
RE_namespace = re.compile(r'^\s*..\s*c:namespace::\s*(\S+)\s*$')
def markup_namespace(match):
global namespace
namespace = match.group(1)
return ""
#
# Handle c:macro for function-style declaration
#
RE_macro = re.compile(r'^\s*..\s*c:macro::\s*(\S+)\s+(\S.*)\s*$')
def markup_macro(match):
return ".. c:function:: " + match.group(1) + ' ' + match.group(2)
#
# Handle newer c domain tags that are evaluated as .. c:type: for
# backward-compatibility with Sphinx < 3.0
#
RE_ctype = re.compile(r'^\s*..\s*c:(struct|union|enum|enumerator|alias)::\s*(.*)$')
def markup_ctype(match):
return ".. c:type:: " + match.group(2)
#
# Handle newer c domain tags that are evaluated as :c:type: for
# backward-compatibility with Sphinx < 3.0
#
RE_ctype_refs = re.compile(r':c:(var|struct|union|enum|enumerator)::`([^\`]+)`')
def markup_ctype_refs(match):
return ":c:type:`" + match.group(2) + '`'
#
# Simply convert :c:expr: and :c:texpr: into a literal block.
#
RE_expr = re.compile(r':c:(expr|texpr):`([^\`]+)`')
def markup_c_expr(match):
return '\ ``' + match.group(2) + '``\ '
#
# Parse Sphinx 3.x C markups, replacing them by backward-compatible ones
#
def c_markups(app, docname, source):
result = ""
markup_func = {
RE_namespace: markup_namespace,
RE_expr: markup_c_expr,
RE_macro: markup_macro,
RE_ctype: markup_ctype,
RE_ctype_refs: markup_ctype_refs,
}
lines = iter(source[0].splitlines(True))
for n in lines:
match_iterators = [regex.finditer(n) for regex in markup_func]
matches = sorted(chain(*match_iterators), key=lambda m: m.start())
for m in matches:
n = n[:m.start()] + markup_func[m.re](m) + n[m.end():]
result = result + n
source[0] = result
#
# Now implements support for the cdomain namespacing logic
#
def setup(app):
# Handle easy Sphinx 3.1+ simple new tags: :c:expr and .. c:namespace::
app.connect('source-read', c_markups)
if (major == 1 and minor < 8):
app.override_domain(CDomain)
else:
@ -155,8 +75,6 @@ class CObject(Base_CObject):
function-like macro, the name of the macro is returned. Otherwise
``False`` is returned. """
global namespace
if not self.objtype == 'function':
return False
@ -189,16 +107,11 @@ class CObject(Base_CObject):
param += nodes.emphasis(argname, argname)
paramlist += param
if namespace:
fullname = namespace + "." + fullname
return fullname
def handle_signature(self, sig, signode):
"""Transform a C signature into RST nodes."""
global namespace
fullname = self.handle_func_like_macro(sig, signode)
if not fullname:
fullname = super(CObject, self).handle_signature(sig, signode)
@ -209,10 +122,6 @@ class CObject(Base_CObject):
else:
# FIXME: handle :name: value of other declaration types?
pass
else:
if namespace:
fullname = namespace + "." + fullname
return fullname
def add_target_and_index(self, name, sig, signode):

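The cdomain hunk above strips out the Sphinx 3 compatibility layer, whose source-read hook rewrote newer C-domain markup into forms that older Sphinx releases accept. A standalone sketch of one such rewrite, the :c:expr:/:c:texpr: to literal conversion, simplified to a single re.sub instead of the original per-match splice (the struct name is just an example):

    import re

    # Pattern used by the removed markup_c_expr() helper.
    RE_expr = re.compile(r':c:(expr|texpr):`([^\`]+)`')

    line = "Reserve :c:expr:`sizeof(struct udevice)` bytes for the device."
    print(RE_expr.sub(lambda m: '``' + m.group(2) + '``', line))
    # Reserve ``sizeof(struct udevice)`` bytes for the device.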

@ -1,194 +0,0 @@
# -*- coding: utf-8; mode: python -*-
# coding=utf-8
# SPDX-License-Identifier: GPL-2.0
#
u"""
kernel-abi
~~~~~~~~~~
Implementation of the ``kernel-abi`` reST-directive.
:copyright: Copyright (C) 2016 Markus Heiser
:copyright: Copyright (C) 2016-2020 Mauro Carvalho Chehab
:maintained-by: Mauro Carvalho Chehab <mchehab+huawei@kernel.org>
:license: GPL Version 2, June 1991 see Linux/COPYING for details.
The ``kernel-abi`` (:py:class:`KernelCmd`) directive calls the
scripts/get_abi.pl script to parse the Kernel ABI files.
Overview of directive's argument and options.
.. code-block:: rst
.. kernel-abi:: <ABI directory location>
:debug:
The argument ``<ABI directory location>`` is required. It contains the
location of the ABI files to be parsed.
``debug``
Inserts a code-block with the *raw* reST. Sometimes it is helpful to see
what reST is generated.
"""
import codecs
import os
import subprocess
import sys
import re
import kernellog
from os import path
from docutils import nodes, statemachine
from docutils.statemachine import ViewList
from docutils.parsers.rst import directives, Directive
from docutils.utils.error_reporting import ErrorString
#
# AutodocReporter is only good up to Sphinx 1.7
#
import sphinx
Use_SSI = sphinx.__version__[:3] >= '1.7'
if Use_SSI:
from sphinx.util.docutils import switch_source_input
else:
from sphinx.ext.autodoc import AutodocReporter
__version__ = '1.0'
def setup(app):
app.add_directive("kernel-abi", KernelCmd)
return dict(
version = __version__
, parallel_read_safe = True
, parallel_write_safe = True
)
class KernelCmd(Directive):
u"""KernelABI (``kernel-abi``) directive"""
required_arguments = 1
optional_arguments = 2
has_content = False
final_argument_whitespace = True
option_spec = {
"debug" : directives.flag,
"rst" : directives.unchanged
}
def run(self):
doc = self.state.document
if not doc.settings.file_insertion_enabled:
raise self.warning("docutils: file insertion disabled")
env = doc.settings.env
cwd = path.dirname(doc.current_source)
cmd = "get_abi.pl rest --enable-lineno --dir "
cmd += self.arguments[0]
if 'rst' in self.options:
cmd += " --rst-source"
srctree = path.abspath(os.environ["srctree"])
fname = cmd
# extend PATH with $(srctree)/scripts
path_env = os.pathsep.join([
srctree + os.sep + "scripts",
os.environ["PATH"]
])
shell_env = os.environ.copy()
shell_env["PATH"] = path_env
shell_env["srctree"] = srctree
lines = self.runCmd(cmd, shell=True, cwd=cwd, env=shell_env)
nodeList = self.nestedParse(lines, self.arguments[0])
return nodeList
def runCmd(self, cmd, **kwargs):
u"""Run command ``cmd`` and return it's stdout as unicode."""
try:
proc = subprocess.Popen(
cmd
, stdout = subprocess.PIPE
, stderr = subprocess.PIPE
, **kwargs
)
out, err = proc.communicate()
out, err = codecs.decode(out, 'utf-8'), codecs.decode(err, 'utf-8')
if proc.returncode != 0:
raise self.severe(
u"command '%s' failed with return code %d"
% (cmd, proc.returncode)
)
except OSError as exc:
raise self.severe(u"problems with '%s' directive: %s."
% (self.name, ErrorString(exc)))
return out
def nestedParse(self, lines, fname):
content = ViewList()
node = nodes.section()
if "debug" in self.options:
code_block = "\n\n.. code-block:: rst\n :linenos:\n"
for l in lines.split("\n"):
code_block += "\n " + l
lines = code_block + "\n\n"
line_regex = re.compile("^#define LINENO (\S+)\#([0-9]+)$")
ln = 0
n = 0
f = fname
for line in lines.split("\n"):
n = n + 1
match = line_regex.search(line)
if match:
new_f = match.group(1)
# Sphinx parser is lazy: it stops parsing contents in the
# middle, if it is too big. So, handle it per input file
if new_f != f and content:
self.do_parse(content, node)
content = ViewList()
f = new_f
# sphinx counts lines from 0
ln = int(match.group(2)) - 1
else:
content.append(line, f, ln)
kernellog.info(self.state.document.settings.env.app, "%s: parsed %i lines" % (fname, n))
if content:
self.do_parse(content, node)
return node.children
def do_parse(self, content, node):
if Use_SSI:
with switch_source_input(self.state, content):
self.state.nested_parse(content, 0, node, match_titles=1)
else:
buf = self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter
self.state.memo.title_styles = []
self.state.memo.section_level = 0
self.state.memo.reporter = AutodocReporter(content, self.state.memo.reporter)
try:
self.state.nested_parse(content, 0, node, match_titles=1)
finally:
self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter = buf

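The kernel_abi extension, also deleted above, shells out to the get_abi.pl script with $(srctree)/scripts prepended to PATH, then re-feeds the generated reST to Sphinx using the LINENO markers to keep file and line attribution intact. A minimal sketch of the environment setup only (the srctree fallback and the ABI directory are placeholders):

    import os
    from os import path

    # In the real directive, srctree always comes from the environment
    # exported by the documentation build; "." is just a fallback here.
    srctree = path.abspath(os.environ.get("srctree", "."))

    shell_env = os.environ.copy()
    shell_env["PATH"] = os.pathsep.join([path.join(srctree, "scripts"),
                                         os.environ["PATH"]])
    shell_env["srctree"] = srctree

    # The directive argument names the ABI directory to parse, e.g.:
    cmd = "get_abi.pl rest --enable-lineno --dir " + "Documentation/ABI"
    # runCmd() then hands cmd to subprocess.Popen(..., shell=True,
    # cwd=<document directory>, env=shell_env).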

@ -1,169 +0,0 @@
# coding=utf-8
# SPDX-License-Identifier: GPL-2.0
#
u"""
kernel-feat
~~~~~~~~~~~
Implementation of the ``kernel-feat`` reST-directive.
:copyright: Copyright (C) 2016 Markus Heiser
:copyright: Copyright (C) 2016-2019 Mauro Carvalho Chehab
:maintained-by: Mauro Carvalho Chehab <mchehab+samsung@kernel.org>
:license: GPL Version 2, June 1991 see Linux/COPYING for details.
The ``kernel-feat`` (:py:class:`KernelFeat`) directive calls the
scripts/get_feat.pl script to parse the Kernel ABI files.
Overview of directive's argument and options.
.. code-block:: rst
.. kernel-feat:: <ABI directory location>
:debug:
The argument ``<ABI directory location>`` is required. It contains the
location of the ABI files to be parsed.
``debug``
Inserts a code-block with the *raw* reST. Sometimes it is helpful to see
what reST is generated.
"""
import codecs
import os
import subprocess
import sys
from os import path
from docutils import nodes, statemachine
from docutils.statemachine import ViewList
from docutils.parsers.rst import directives, Directive
from docutils.utils.error_reporting import ErrorString
#
# AutodocReporter is only good up to Sphinx 1.7
#
import sphinx
Use_SSI = sphinx.__version__[:3] >= '1.7'
if Use_SSI:
from sphinx.util.docutils import switch_source_input
else:
from sphinx.ext.autodoc import AutodocReporter
__version__ = '1.0'
def setup(app):
app.add_directive("kernel-feat", KernelFeat)
return dict(
version = __version__
, parallel_read_safe = True
, parallel_write_safe = True
)
class KernelFeat(Directive):
u"""KernelFeat (``kernel-feat``) directive"""
required_arguments = 1
optional_arguments = 2
has_content = False
final_argument_whitespace = True
option_spec = {
"debug" : directives.flag
}
def warn(self, message, **replace):
replace["fname"] = self.state.document.current_source
replace["line_no"] = replace.get("line_no", self.lineno)
message = ("%(fname)s:%(line_no)s: [kernel-feat WARN] : " + message) % replace
self.state.document.settings.env.app.warn(message, prefix="")
def run(self):
doc = self.state.document
if not doc.settings.file_insertion_enabled:
raise self.warning("docutils: file insertion disabled")
env = doc.settings.env
cwd = path.dirname(doc.current_source)
cmd = "get_feat.pl rest --dir "
cmd += self.arguments[0]
if len(self.arguments) > 1:
cmd += " --arch " + self.arguments[1]
srctree = path.abspath(os.environ["srctree"])
fname = cmd
# extend PATH with $(srctree)/scripts
path_env = os.pathsep.join([
srctree + os.sep + "scripts",
os.environ["PATH"]
])
shell_env = os.environ.copy()
shell_env["PATH"] = path_env
shell_env["srctree"] = srctree
lines = self.runCmd(cmd, shell=True, cwd=cwd, env=shell_env)
nodeList = self.nestedParse(lines, fname)
return nodeList
def runCmd(self, cmd, **kwargs):
u"""Run command ``cmd`` and return it's stdout as unicode."""
try:
proc = subprocess.Popen(
cmd
, stdout = subprocess.PIPE
, stderr = subprocess.PIPE
, **kwargs
)
out, err = proc.communicate()
out, err = codecs.decode(out, 'utf-8'), codecs.decode(err, 'utf-8')
if proc.returncode != 0:
raise self.severe(
u"command '%s' failed with return code %d"
% (cmd, proc.returncode)
)
except OSError as exc:
raise self.severe(u"problems with '%s' directive: %s."
% (self.name, ErrorString(exc)))
return out
def nestedParse(self, lines, fname):
content = ViewList()
node = nodes.section()
if "debug" in self.options:
code_block = "\n\n.. code-block:: rst\n :linenos:\n"
for l in lines.split("\n"):
code_block += "\n " + l
lines = code_block + "\n\n"
for c, l in enumerate(lines.split("\n")):
content.append(l, fname, c)
buf = self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter
if Use_SSI:
with switch_source_input(self.state, content):
self.state.nested_parse(content, 0, node, match_titles=1)
else:
self.state.memo.title_styles = []
self.state.memo.section_level = 0
self.state.memo.reporter = AutodocReporter(content, self.state.memo.reporter)
try:
self.state.nested_parse(content, 0, node, match_titles=1)
finally:
self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter = buf
return node.children


@ -62,7 +62,6 @@ class KernelDocDirective(Directive):
'export': directives.unchanged,
'internal': directives.unchanged,
'identifiers': directives.unchanged,
'no-identifiers': directives.unchanged,
'functions': directives.unchanged,
}
has_content = False
@ -71,11 +70,6 @@ class KernelDocDirective(Directive):
env = self.state.document.settings.env
cmd = [env.config.kerneldoc_bin, '-rst', '-enable-lineno']
# Pass the version string to kernel-doc, as it needs to use a different
# dialect, depending what the C domain supports for each specific
# Sphinx versions
cmd += ['-sphinx-version', sphinx.__version__]
filename = env.config.kerneldoc_srctree + '/' + self.arguments[0]
export_file_patterns = []
@ -105,12 +99,6 @@ class KernelDocDirective(Directive):
else:
cmd += ['-no-doc-sections']
if 'no-identifiers' in self.options:
no_identifiers = self.options.get('no-identifiers').split()
if no_identifiers:
for i in no_identifiers:
cmd += ['-nosymbol', i]
for pattern in export_file_patterns:
for f in glob.glob(env.config.kerneldoc_srctree + '/' + pattern):
env.note_dependency(os.path.abspath(f))
@ -148,8 +136,7 @@ class KernelDocDirective(Directive):
lineoffset = int(match.group(1)) - 1
# we must eat our comments since the upset the markup
else:
doc = env.srcdir + "/" + env.docname + ":" + str(self.lineno)
result.append(line, doc + ": " + filename, lineoffset)
result.append(line, filename, lineoffset)
lineoffset += 1
node = nodes.section()

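The kerneldoc hunk above removes the -sphinx-version and -nosymbol plumbing, so after the revert the directive assembles a plainer kernel-doc command line. A sketch of that invocation with placeholder values (the real ones come from the kerneldoc_bin and kerneldoc_srctree conf.py settings and the directive argument):

    # Placeholders standing in for conf.py settings and the directive argument.
    kerneldoc_bin = './scripts/kernel-doc'
    kerneldoc_srctree = '.'
    argument = 'include/dm/device.h'

    cmd = [kerneldoc_bin, '-rst', '-enable-lineno']
    # ... -export / -internal / -function NAME options are appended here,
    # depending on the directive options ...
    cmd += [kerneldoc_srctree + '/' + argument]
    print(' '.join(cmd))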

@ -25,8 +25,4 @@ def verbose(app, message):
else:
app.verbose(message)
def info(app, message):
if UseLogging:
logger.info(message)
else:
app.info(message)


@ -29,7 +29,7 @@ u"""
Used tools:
* ``dot(1)``: Graphviz (https://www.graphviz.org). If Graphviz is not
* ``dot(1)``: Graphviz (http://www.graphviz.org). If Graphviz is not
available, the DOT language is inserted as literal-block.
* SVG to PDF: To generate PDF, you need at least one of this tools:
@ -41,7 +41,7 @@ u"""
* generate PDF from SVG / used by PDF (LaTeX) builder
* generate SVG (html-builder) and PDF (latex-builder) from DOT files.
DOT: see https://www.graphviz.org/content/dot-language
DOT: see http://www.graphviz.org/content/dot-language
"""
@ -182,7 +182,7 @@ def setupTools(app):
kernellog.verbose(app, "use dot(1) from: " + dot_cmd)
else:
kernellog.warn(app, "dot(1) not found, for better output quality install "
"graphviz from https://www.graphviz.org")
"graphviz from http://www.graphviz.org")
if convert_cmd:
kernellog.verbose(app, "use convert(1) from: " + convert_cmd)
else:


@ -21,29 +21,6 @@ def loadConfig(namespace):
and os.path.normpath(namespace["__file__"]) != os.path.normpath(config_file) ):
config_file = os.path.abspath(config_file)
# Let's avoid one conf.py file just due to latex_documents
start = config_file.find('Documentation/')
if start >= 0:
start = config_file.find('/', start + 1)
end = config_file.rfind('/')
if start >= 0 and end > 0:
dir = config_file[start + 1:end]
print("source directory: %s" % dir)
new_latex_docs = []
latex_documents = namespace['latex_documents']
for l in latex_documents:
if l[0].find(dir + '/') == 0:
has = True
fn = l[0][len(dir) + 1:]
new_latex_docs.append((fn, l[1], l[2], l[3], l[4]))
break
namespace['latex_documents'] = new_latex_docs
# If there is an extra conf.py file, load it
if os.path.isfile(config_file):
sys.stdout.write("load additional sphinx-config: %s\n" % config_file)
config = namespace.copy()
@ -52,6 +29,4 @@ def loadConfig(namespace):
del config['__file__']
namespace.update(config)
else:
config = namespace.copy()
config['tags'].add("subproject")
namespace.update(config)
sys.stderr.write("WARNING: additional sphinx-config not found: %s\n" % config_file)

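The chunk removed from load_config.py above used the path of a per-directory conf.py to pick a single matching latex_documents entry, so each Documentation/ subdirectory could be built into its own PDF without a dedicated conf.py. The path slicing at its core, as a standalone sketch (the path is hypothetical):

    config_file = "Documentation/translations/it_IT/conf.py"   # hypothetical

    start = config_file.find('Documentation/')
    if start >= 0:
        start = config_file.find('/', start + 1)
    end = config_file.rfind('/')
    if start >= 0 and end > 0:
        print(config_file[start + 1:end])    # -> translations/it_IT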

@ -1,197 +0,0 @@
#!/usr/bin/env python
# SPDX-License-Identifier: GPL-2.0
# -*- coding: utf-8; mode: python -*-
# pylint: disable=R0903, C0330, R0914, R0912, E0401
u"""
maintainers-include
~~~~~~~~~~~~~~~~~~~
Implementation of the ``maintainers-include`` reST-directive.
:copyright: Copyright (C) 2019 Kees Cook <keescook@chromium.org>
:license: GPL Version 2, June 1991 see linux/COPYING for details.
The ``maintainers-include`` reST-directive performs extensive parsing
specific to the Linux kernel's standard "MAINTAINERS" file, in an
effort to avoid needing to heavily mark up the original plain text.
"""
import sys
import re
import os.path
from docutils import statemachine
from docutils.utils.error_reporting import ErrorString
from docutils.parsers.rst import Directive
from docutils.parsers.rst.directives.misc import Include
__version__ = '1.0'
def setup(app):
app.add_directive("maintainers-include", MaintainersInclude)
return dict(
version = __version__,
parallel_read_safe = True,
parallel_write_safe = True
)
class MaintainersInclude(Include):
u"""MaintainersInclude (``maintainers-include``) directive"""
required_arguments = 0
def parse_maintainers(self, path):
"""Parse all the MAINTAINERS lines into ReST for human-readability"""
result = list()
result.append(".. _maintainers:")
result.append("")
# Poor man's state machine.
descriptions = False
maintainers = False
subsystems = False
# Field letter to field name mapping.
field_letter = None
fields = dict()
prev = None
field_prev = ""
field_content = ""
for line in open(path):
if sys.version_info.major == 2:
line = unicode(line, 'utf-8')
# Have we reached the end of the preformatted Descriptions text?
if descriptions and line.startswith('Maintainers'):
descriptions = False
# Ensure a blank line following the last "|"-prefixed line.
result.append("")
# Start subsystem processing? This is to skip processing the text
# between the Maintainers heading and the first subsystem name.
if maintainers and not subsystems:
if re.search('^[A-Z0-9]', line):
subsystems = True
# Drop needless input whitespace.
line = line.rstrip()
# Linkify all non-wildcard refs to ReST files in Documentation/.
pat = '(Documentation/([^\s\?\*]*)\.rst)'
m = re.search(pat, line)
if m:
# maintainers.rst is in a subdirectory, so include "../".
line = re.sub(pat, ':doc:`%s <../%s>`' % (m.group(2), m.group(2)), line)
# Check state machine for output rendering behavior.
output = None
if descriptions:
# Escape the escapes in preformatted text.
output = "| %s" % (line.replace("\\", "\\\\"))
# Look for and record field letter to field name mappings:
# R: Designated *reviewer*: FullName <address@domain>
m = re.search("\s(\S):\s", line)
if m:
field_letter = m.group(1)
if field_letter and not field_letter in fields:
m = re.search("\*([^\*]+)\*", line)
if m:
fields[field_letter] = m.group(1)
elif subsystems:
# Skip empty lines: subsystem parser adds them as needed.
if len(line) == 0:
continue
# Subsystem fields are batched into "field_content"
if line[1] != ':':
# Render a subsystem entry as:
# SUBSYSTEM NAME
# ~~~~~~~~~~~~~~
# Flush pending field content.
output = field_content + "\n\n"
field_content = ""
# Collapse whitespace in subsystem name.
heading = re.sub("\s+", " ", line)
output = output + "%s\n%s" % (heading, "~" * len(heading))
field_prev = ""
else:
# Render a subsystem field as:
# :Field: entry
# entry...
field, details = line.split(':', 1)
details = details.strip()
# Mark paths (and regexes) as literal text for improved
# readability and to escape any escapes.
if field in ['F', 'N', 'X', 'K']:
# But only if not already marked :)
if not ':doc:' in details:
details = '``%s``' % (details)
# Comma separate email field continuations.
if field == field_prev and field_prev in ['M', 'R', 'L']:
field_content = field_content + ","
# Do not repeat field names, so that field entries
# will be collapsed together.
if field != field_prev:
output = field_content + "\n"
field_content = ":%s:" % (fields.get(field, field))
field_content = field_content + "\n\t%s" % (details)
field_prev = field
else:
output = line
# Re-split on any added newlines in any above parsing.
if output != None:
for separated in output.split('\n'):
result.append(separated)
# Update the state machine when we find heading separators.
if line.startswith('----------'):
if prev.startswith('Descriptions'):
descriptions = True
if prev.startswith('Maintainers'):
maintainers = True
# Retain previous line for state machine transitions.
prev = line
# Flush pending field contents.
if field_content != "":
for separated in field_content.split('\n'):
result.append(separated)
output = "\n".join(result)
# For debugging the pre-rendered results...
#print(output, file=open("/tmp/MAINTAINERS.rst", "w"))
self.state_machine.insert_input(
statemachine.string2lines(output), path)
def run(self):
"""Include the MAINTAINERS file as part of this reST file."""
if not self.state.document.settings.file_insertion_enabled:
raise self.warning('"%s" directive disabled.' % self.name)
# Walk up source path directories to find Documentation/../
path = self.state_machine.document.attributes['source']
path = os.path.realpath(path)
tail = path
while tail != "Documentation" and tail != "":
(path, tail) = os.path.split(path)
# Append "MAINTAINERS"
path = os.path.join(path, "MAINTAINERS")
try:
self.state.document.settings.record_dependencies.add(path)
lines = self.parse_maintainers(path)
except IOError as error:
raise self.severe('Problems with "%s" directive path:\n%s.' %
(self.name, ErrorString(error)))
return []

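The maintainers-include extension, deleted above, converts the plain-text MAINTAINERS file into reST with a small line-by-line state machine; one piece records the one-letter field legend from the Descriptions section so that later subsystem entries can be rendered as :Field: lines. A standalone sketch of just that mapping step, using the legend line quoted in the script's own comment:

    import re

    fields = {}
    legend = " R: Designated *reviewer*: FullName <address@domain>"

    m = re.search(r"\s(\S):\s", legend)
    if m:
        letter = m.group(1)                       # 'R'
        name = re.search(r"\*([^\*]+)\*", legend)
        if name:
            fields[letter] = name.group(1)        # 'reviewer'

    print(fields)                         # {'R': 'reviewer'}
    print(":%s:" % fields.get("R", "R"))  # ':reviewer:' -- the reST field label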

@ -1,33 +0,0 @@
#!/bin/sh
# SPDX-License-Identifier: GPL-2.0+
#
# Figure out if we should follow a specific parallelism from the make
# environment (as exported by scripts/jobserver-exec), or fall back to
# the "auto" parallelism when "-jN" is not specified at the top-level
# "make" invocation.
sphinx="$1"
shift || true
parallel="$PARALLELISM"
if [ -z "$parallel" ] ; then
# If no parallelism is specified at the top-level make, then
# fall back to the expected "-jauto" mode that the "htmldocs"
# target has had.
auto=$(perl -e 'open IN,"'"$sphinx"' --version 2>&1 |";
while (<IN>) {
if (m/([\d\.]+)/) {
print "auto" if ($1 >= "1.7")
}
}
close IN')
if [ -n "$auto" ] ; then
parallel="$auto"
fi
fi
# Only if some parallelism has been determined do we add the -jN option.
if [ -n "$parallel" ] ; then
parallel="-j$parallel"
fi
exec "$sphinx" $parallel "$@"


@ -1,4 +1,4 @@
#!/usr/bin/env perl
#!/usr/bin/perl
use strict;
use Text::Tabs;
use Getopt::Long;
@ -110,7 +110,7 @@ while (<IN>) {
) {
my $s = $1;
$structs{$s} = "struct $s\\ ";
$structs{$s} = "struct :c:type:`$s`\\ ";
next;
}
}
@ -393,7 +393,7 @@ Report bugs to Mauro Carvalho Chehab <mchehab@kernel.org>
Copyright (c) 2016 by Mauro Carvalho Chehab <mchehab+samsung@kernel.org>.
License GPLv2: GNU GPL version 2 <https://gnu.org/licenses/gpl.html>.
License GPLv2: GNU GPL version 2 <http://gnu.org/licenses/gpl.html>.
This is free software: you are free to change and redistribute it.
There is NO WARRANTY, to the extent permitted by law.


@ -1,4 +1,3 @@
docutils
Sphinx==2.4.4
docutils==0.12
Sphinx==1.4.9
sphinx_rtd_theme
six


@ -56,13 +56,6 @@ Output format selection (mutually exclusive):
-rst Output reStructuredText format.
-none Do not output documentation, only warnings.
Output format selection modifier (affects only ReST output):
-sphinx-version Use the ReST C domain dialect compatible with an
specific Sphinx Version.
If not specified, kernel-doc will auto-detect using
the sphinx-build version found on PATH.
Output selection (mutually exclusive):
-export Only output documentation for symbols that have been
exported using EXPORT_SYMBOL() or EXPORT_SYMBOL_GPL()
@ -73,8 +66,9 @@ Output selection (mutually exclusive):
-function NAME Only output documentation for the given function(s)
or DOC: section title(s). All other functions and DOC:
sections are ignored. May be specified multiple times.
-nosymbol NAME Exclude the specified symbols from the output
documentation. May be specified multiple times.
-nofunction NAME Do NOT output documentation for the given function(s);
only output documentation for the other functions and
DOC: sections. May be specified multiple times.
Output selection modifiers:
-no-doc-sections Do not output DOC: sections.
@ -87,7 +81,6 @@ Output selection modifiers:
Other parameters:
-v Verbose output, more warnings and other information.
-h Print this help.
-Werror Treat warnings as errors.
EOF
print $message;
@ -220,9 +213,7 @@ my $type_constant = '\b``([^\`]+)``\b';
my $type_constant2 = '\%([-_\w]+)';
my $type_func = '(\w+)\(\)';
my $type_param = '\@(\w*((\.\w+)|(->\w+))*(\.\.\.)?)';
my $type_param_ref = '([\!]?)\@(\w*((\.\w+)|(->\w+))*(\.\.\.)?)';
my $type_fp_param = '\@(\w+)\(\)'; # Special RST handling for func ptr params
my $type_fp_param2 = '\@(\w+->\S+)\(\)'; # Special RST handling for structs with func ptr params
my $type_env = '(\$\w+)';
my $type_enum = '\&(enum\s*([_\w]+))';
my $type_struct = '\&(struct\s*([_\w]+))';
@ -245,7 +236,6 @@ my @highlights_man = (
[$type_typedef, "\\\\fI\$1\\\\fP"],
[$type_union, "\\\\fI\$1\\\\fP"],
[$type_param, "\\\\fI\$1\\\\fP"],
[$type_param_ref, "\\\\fI\$1\$2\\\\fP"],
[$type_member, "\\\\fI\$1\$2\$3\\\\fP"],
[$type_fallback, "\\\\fI\$1\\\\fP"]
);
@ -259,15 +249,14 @@ my @highlights_rst = (
[$type_member_func, "\\:c\\:type\\:`\$1\$2\$3\\\\(\\\\) <\$1>`"],
[$type_member, "\\:c\\:type\\:`\$1\$2\$3 <\$1>`"],
[$type_fp_param, "**\$1\\\\(\\\\)**"],
[$type_fp_param2, "**\$1\\\\(\\\\)**"],
[$type_func, "\$1()"],
[$type_func, "\\:c\\:func\\:`\$1()`"],
[$type_enum, "\\:c\\:type\\:`\$1 <\$2>`"],
[$type_struct, "\\:c\\:type\\:`\$1 <\$2>`"],
[$type_typedef, "\\:c\\:type\\:`\$1 <\$2>`"],
[$type_union, "\\:c\\:type\\:`\$1 <\$2>`"],
# in rst this can refer to any type
[$type_fallback, "\\:c\\:type\\:`\$1`"],
[$type_param_ref, "**\$1\$2**"]
[$type_param, "**\$1**"]
);
my $blankline_rst = "\n";
@ -277,12 +266,9 @@ if ($#ARGV == -1) {
}
my $kernelversion;
my ($sphinx_major, $sphinx_minor, $sphinx_patch);
my $dohighlight = "";
my $verbose = 0;
my $Werror = 0;
my $output_mode = "rst";
my $output_preformatted = 0;
my $no_doc_sections = 0;
@ -294,11 +280,12 @@ my $modulename = "Kernel API";
use constant {
OUTPUT_ALL => 0, # output all symbols and doc sections
OUTPUT_INCLUDE => 1, # output only specified symbols
OUTPUT_EXPORTED => 2, # output exported symbols
OUTPUT_INTERNAL => 3, # output non-exported symbols
OUTPUT_EXCLUDE => 2, # output everything except specified symbols
OUTPUT_EXPORTED => 3, # output exported symbols
OUTPUT_INTERNAL => 4, # output non-exported symbols
};
my $output_selection = OUTPUT_ALL;
my $show_not_found = 0; # No longer used
my $show_not_found = 0;
my @export_file_list;
@ -320,7 +307,6 @@ my $man_date = ('January', 'February', 'March', 'April', 'May', 'June',
# CAVEAT EMPTOR! Some of the others I localised may not want to be, which
# could cause "use of undefined value" or other bugs.
my ($function, %function_table, %parametertypes, $declaration_purpose);
my %nosymbol_table = ();
my $declaration_start_line;
my ($type, $declaration_name, $return_type);
my ($newsection, $newcontents, $prototype, $brcount, %source_map);
@ -329,21 +315,9 @@ if (defined($ENV{'KBUILD_VERBOSE'})) {
$verbose = "$ENV{'KBUILD_VERBOSE'}";
}
if (defined($ENV{'KDOC_WERROR'})) {
$Werror = "$ENV{'KDOC_WERROR'}";
}
if (defined($ENV{'KCFLAGS'})) {
my $kcflags = "$ENV{'KCFLAGS'}";
if ($kcflags =~ /Werror/) {
$Werror = 1;
}
}
# Generated docbook code is inserted in a template at a point where
# docbook v3.1 requires a non-zero sequence of RefEntry's; see:
# https://www.oasis-open.org/docbook/documentation/reference/html/refentry.html
# http://www.oasis-open.org/docbook/documentation/reference/html/refentry.html
# We keep track of number of generated entries and generate a dummy
# if needs be to ensure the expanded template can be postprocessed
# into html.
@ -357,10 +331,9 @@ use constant {
STATE_NAME => 1, # looking for function name
STATE_BODY_MAYBE => 2, # body - or maybe more description
STATE_BODY => 3, # the body of the comment
STATE_BODY_WITH_BLANK_LINE => 4, # the body, which has a blank line
STATE_PROTO => 5, # scanning prototype
STATE_DOCBLOCK => 6, # documentation block
STATE_INLINE => 7, # gathering doc outside main block
STATE_PROTO => 4, # scanning prototype
STATE_DOCBLOCK => 5, # documentation block
STATE_INLINE => 6, # gathering documentation outside main block
};
my $state;
my $in_doc_sect;
@ -440,9 +413,10 @@ while ($ARGV[0] =~ m/^--?(.*)/) {
$output_selection = OUTPUT_INCLUDE;
$function = shift @ARGV;
$function_table{$function} = 1;
} elsif ($cmd eq "nosymbol") { # Exclude specific symbols
my $symbol = shift @ARGV;
$nosymbol_table{$symbol} = 1;
} elsif ($cmd eq "nofunction") { # output all except specific functions
$output_selection = OUTPUT_EXCLUDE;
$function = shift @ARGV;
$function_table{$function} = 1;
} elsif ($cmd eq "export") { # only exported symbols
$output_selection = OUTPUT_EXPORTED;
%function_table = ();
@ -454,8 +428,6 @@ while ($ARGV[0] =~ m/^--?(.*)/) {
push(@export_file_list, $file);
} elsif ($cmd eq "v") {
$verbose = 1;
} elsif ($cmd eq "Werror") {
$Werror = 1;
} elsif (($cmd eq "h") || ($cmd eq "help")) {
usage();
} elsif ($cmd eq 'no-doc-sections') {
@ -463,24 +435,7 @@ while ($ARGV[0] =~ m/^--?(.*)/) {
} elsif ($cmd eq 'enable-lineno') {
$enable_lineno = 1;
} elsif ($cmd eq 'show-not-found') {
$show_not_found = 1; # A no-op but don't fail
} elsif ($cmd eq "sphinx-version") {
my $ver_string = shift @ARGV;
if ($ver_string =~ m/^(\d+)(\.\d+)?(\.\d+)?/) {
$sphinx_major = $1;
if (defined($2)) {
$sphinx_minor = substr($2,1);
} else {
$sphinx_minor = 0;
}
if (defined($3)) {
$sphinx_patch = substr($3,1)
} else {
$sphinx_patch = 0;
}
} else {
die "Sphinx version should either major.minor or major.minor.patch format\n";
}
$show_not_found = 1;
} else {
# Unknown argument
usage();
@ -489,51 +444,6 @@ while ($ARGV[0] =~ m/^--?(.*)/) {
# continue execution near EOF;
# The C domain dialect changed on Sphinx 3. So, we need to check the
# version in order to produce the right tags.
sub findprog($)
{
foreach(split(/:/, $ENV{PATH})) {
return "$_/$_[0]" if(-x "$_/$_[0]");
}
}
sub get_sphinx_version()
{
my $ver;
my $cmd = "sphinx-build";
if (!findprog($cmd)) {
my $cmd = "sphinx-build3";
if (!findprog($cmd)) {
$sphinx_major = 1;
$sphinx_minor = 2;
$sphinx_patch = 0;
printf STDERR "Warning: Sphinx version not found. Using default (Sphinx version %d.%d.%d)\n",
$sphinx_major, $sphinx_minor, $sphinx_patch;
return;
}
}
open IN, "$cmd --version 2>&1 |";
while (<IN>) {
if (m/^\s*sphinx-build\s+([\d]+)\.([\d\.]+)(\+\/[\da-f]+)?$/) {
$sphinx_major = $1;
$sphinx_minor = $2;
$sphinx_patch = $3;
last;
}
# Sphinx 1.2.x uses a different format
if (m/^\s*Sphinx.*\s+([\d]+)\.([\d\.]+)$/) {
$sphinx_major = $1;
$sphinx_minor = $2;
$sphinx_patch = $3;
last;
}
}
close IN;
}
# get kernel version from env
sub get_kernel_version() {
my $version = 'unknown kernel version';
@ -600,11 +510,11 @@ sub dump_doc_section {
return;
}
return if (defined($nosymbol_table{$name}));
if (($output_selection == OUTPUT_ALL) ||
(($output_selection == OUTPUT_INCLUDE) &&
defined($function_table{$name})))
($output_selection == OUTPUT_INCLUDE &&
defined($function_table{$name})) ||
($output_selection == OUTPUT_EXCLUDE &&
!defined($function_table{$name})))
{
dump_section($file, $name, $contents);
output_blockhead({'sectionlist' => \@sectionlist,
@ -687,10 +597,10 @@ sub output_function_man(%) {
$type = $args{'parametertypes'}{$parameter};
if ($type =~ m/([^\(]*\(\*)\s*\)\s*\(([^\)]*)\)/) {
# pointer-to-function
print ".BI \"" . $parenth . $1 . "\" " . " \") (" . $2 . ")" . $post . "\"\n";
print ".BI \"" . $parenth . $1 . "\" " . $parameter . " \") (" . $2 . ")" . $post . "\"\n";
} else {
$type =~ s/([^\*])$/$1 /;
print ".BI \"" . $parenth . $type . "\" " . " \"" . $post . "\"\n";
print ".BI \"" . $parenth . $type . "\" " . $parameter . " \"" . $post . "\"\n";
}
$count++;
$parenth = "";
@ -830,8 +740,6 @@ sub output_blockhead_rst(%) {
my ($parameter, $section);
foreach $section (@{$args{'sectionlist'}}) {
next if (defined($nosymbol_table{$section}));
if ($output_selection != OUTPUT_INCLUDE) {
print "**$section**\n\n";
}
@ -917,9 +825,7 @@ sub output_function_rst(%) {
my ($parameter, $section);
my $oldprefix = $lineprefix;
my $start = "";
my $is_macro = 0;
if ($sphinx_major < 3) {
if ($args{'typedef'}) {
print ".. c:type:: ". $args{'function'} . "\n\n";
print_lineno($declaration_start_line);
@ -927,28 +833,9 @@ sub output_function_rst(%) {
$lineprefix = "";
output_highlight_rst($args{'purpose'});
$start = "\n\n**Syntax**\n\n ``";
$is_macro = 1;
} else {
print ".. c:function:: ";
}
} else {
if ($args{'typedef'} || $args{'functiontype'} eq "") {
$is_macro = 1;
print ".. c:macro:: ". $args{'function'} . "\n\n";
} else {
print ".. c:function:: ";
}
if ($args{'typedef'}) {
print_lineno($declaration_start_line);
print " **Typedef**: ";
$lineprefix = "";
output_highlight_rst($args{'purpose'});
$start = "\n\n**Syntax**\n\n ``";
} else {
print "``" if ($is_macro);
}
}
if ($args{'functiontype'} ne "") {
$start .= $args{'functiontype'} . " " . $args{'function'} . " (";
} else {
@ -966,17 +853,15 @@ sub output_function_rst(%) {
if ($type =~ m/([^\(]*\(\*)\s*\)\s*\(([^\)]*)\)/) {
# pointer-to-function
print $1 . $parameter . ") (" . $2 . ")";
print $1 . $parameter . ") (" . $2;
} else {
print $type;
print $type . " " . $parameter;
}
}
if ($is_macro) {
print ")``\n\n";
if ($args{'typedef'}) {
print ");``\n\n";
} else {
print ")\n\n";
}
if (!$args{'typedef'}) {
print_lineno($declaration_start_line);
$lineprefix = " ";
output_highlight_rst($args{'purpose'});
@ -991,7 +876,7 @@ sub output_function_rst(%) {
$type = $args{'parametertypes'}{$parameter};
if ($type ne "") {
print "``$type``\n";
print "``$type $parameter``\n";
} else {
print "``$parameter``\n";
}
@ -1032,14 +917,9 @@ sub output_enum_rst(%) {
my ($parameter);
my $oldprefix = $lineprefix;
my $count;
if ($sphinx_major < 3) {
my $name = "enum " . $args{'enum'};
print "\n\n.. c:type:: " . $name . "\n\n";
} else {
my $name = $args{'enum'};
print "\n\n.. c:enum:: " . $name . "\n\n";
}
print_lineno($declaration_start_line);
$lineprefix = " ";
output_highlight_rst($args{'purpose'});
@ -1065,13 +945,8 @@ sub output_typedef_rst(%) {
my %args = %{$_[0]};
my ($parameter);
my $oldprefix = $lineprefix;
my $name;
my $name = "typedef " . $args{'typedef'};
if ($sphinx_major < 3) {
$name = "typedef " . $args{'typedef'};
} else {
$name = $args{'typedef'};
}
print "\n\n.. c:type:: " . $name . "\n\n";
print_lineno($declaration_start_line);
$lineprefix = " ";
@ -1086,18 +961,9 @@ sub output_struct_rst(%) {
my %args = %{$_[0]};
my ($parameter);
my $oldprefix = $lineprefix;
if ($sphinx_major < 3) {
my $name = $args{'type'} . " " . $args{'struct'};
print "\n\n.. c:type:: " . $name . "\n\n";
} else {
my $name = $args{'struct'};
if ($args{'type'} eq 'union') {
print "\n\n.. c:union:: " . $name . "\n\n";
} else {
print "\n\n.. c:struct:: " . $name . "\n\n";
}
}
print_lineno($declaration_start_line);
$lineprefix = " ";
output_highlight_rst($args{'purpose'});
@ -1156,14 +1022,12 @@ sub output_declaration {
my $name = shift;
my $functype = shift;
my $func = "output_${functype}_$output_mode";
return if (defined($nosymbol_table{$name}));
if (($output_selection == OUTPUT_ALL) ||
(($output_selection == OUTPUT_INCLUDE ||
$output_selection == OUTPUT_EXPORTED) &&
defined($function_table{$name})) ||
($output_selection == OUTPUT_INTERNAL &&
(($output_selection == OUTPUT_EXCLUDE ||
$output_selection == OUTPUT_INTERNAL) &&
!($functype eq "function" && defined($function_table{$name}))))
{
&$func(@_);
@ -1198,7 +1062,7 @@ sub dump_struct($$) {
my $x = shift;
my $file = shift;
if ($x =~ /(struct|union)\s+(\w+)\s*\{(.*)\}(\s*(__packed|__aligned|____cacheline_aligned_in_smp|____cacheline_aligned|__attribute__\s*\(\([a-z0-9,_\s\(\)]*\)\)))*/) {
if ($x =~ /(struct|union)\s+(\w+)\s*\{(.*)\}(\s*(__packed|__aligned|__attribute__\s*\(\([a-z0-9,_\s\(\)]*\)\)))*/) {
my $decl_type = $1;
$declaration_name = $2;
my $members = $3;
@ -1209,15 +1073,11 @@ sub dump_struct($$) {
# strip comments:
$members =~ s/\/\*.*?\*\///gos;
# strip attributes
$members =~ s/\s*__attribute__\s*\(\([a-z0-9,_\*\s\(\)]*\)\)/ /gi;
$members =~ s/\s*__aligned\s*\([^;]*\)/ /gos;
$members =~ s/\s*__packed\s*/ /gos;
$members =~ s/\s*CRYPTO_MINALIGN_ATTR/ /gos;
$members =~ s/\s*____cacheline_aligned_in_smp/ /gos;
$members =~ s/\s*____cacheline_aligned/ /gos;
$members =~ s/\s*__attribute__\s*\(\([a-z0-9,_\*\s\(\)]*\)\)//gi;
$members =~ s/\s*__aligned\s*\([^;]*\)//gos;
$members =~ s/\s*__packed\s*//gos;
$members =~ s/\s*CRYPTO_MINALIGN_ATTR//gos;
# replace DECLARE_BITMAP
$members =~ s/__ETHTOOL_DECLARE_LINK_MODE_MASK\s*\(([^\)]+)\)/DECLARE_BITMAP($1, __ETHTOOL_LINK_MODE_MASK_NBITS)/gos;
$members =~ s/DECLARE_BITMAP\s*\(([^,)]+),\s*([^,)]+)\)/unsigned long $1\[BITS_TO_LONGS($2)\]/gos;
# replace DECLARE_HASHTABLE
$members =~ s/DECLARE_HASHTABLE\s*\(([^,)]+),\s*([^,)]+)\)/unsigned long $1\[1 << (($2) - 1)\]/gos;
@ -1344,8 +1204,6 @@ sub show_warnings($$) {
my $functype = shift;
my $name = shift;
return 0 if (defined($nosymbol_table{$name}));
return 1 if ($output_selection == OUTPUT_ALL);
if ($output_selection == OUTPUT_EXPORTED) {
@ -1369,28 +1227,27 @@ sub show_warnings($$) {
return 0;
}
}
if ($output_selection == OUTPUT_EXCLUDE) {
if (!defined($function_table{$name})) {
return 1;
} else {
return 0;
}
}
die("Please add the new output type at show_warnings()");
}
sub dump_enum($$) {
my $x = shift;
my $file = shift;
my $members;
$x =~ s@/\*.*?\*/@@gos; # strip comments.
# strip #define macros inside enums
$x =~ s@#\s*((define|ifdef)\s+|endif)[^;]*;@@gos;
if ($x =~ /typedef\s+enum\s*\{(.*)\}\s*(\w*)\s*;/) {
$declaration_name = $2;
$members = $1;
} elsif ($x =~ /enum\s+(\w*)\s*\{(.*)\}/) {
if ($x =~ /enum\s+(\w+)\s*\{(.*)\}/) {
$declaration_name = $1;
$members = $2;
}
if ($members) {
my $members = $2;
my %_members;
$members =~ s/\s+$//;
@ -1425,31 +1282,27 @@ sub dump_enum($$) {
'sections' => \%sections,
'purpose' => $declaration_purpose
});
} else {
}
else {
print STDERR "${file}:$.: error: Cannot parse enum!\n";
++$errors;
}
}
my $typedef_type = qr { ((?:\s+[\w\*]+\b){1,8})\s* }x;
my $typedef_ident = qr { \*?\s*(\w\S+)\s* }x;
my $typedef_args = qr { \s*\((.*)\); }x;
my $typedef1 = qr { typedef$typedef_type\($typedef_ident\)$typedef_args }x;
my $typedef2 = qr { typedef$typedef_type$typedef_ident$typedef_args }x;
sub dump_typedef($$) {
my $x = shift;
my $file = shift;
$x =~ s@/\*.*?\*/@@gos; # strip comments.
# Parse function typedef prototypes
if ($x =~ $typedef1 || $x =~ $typedef2) {
# Parse function prototypes
if ($x =~ /typedef\s+(\w+)\s*\(\*\s*(\w\S+)\s*\)\s*\((.*)\);/ ||
$x =~ /typedef\s+(\w+)\s*(\w\S+)\s*\s*\((.*)\);/) {
# Function typedefs
$return_type = $1;
$declaration_name = $2;
my $args = $3;
$return_type =~ s/^\s+//;
create_parameterlist($args, ',', $file, $declaration_name);
@ -1516,27 +1369,25 @@ sub create_parameterlist($$$$) {
foreach my $arg (split($splitter, $args)) {
# strip comments
$arg =~ s/\/\*.*\*\///;
$arg =~ s/\s*EFIAPI\s*/ /g;
# strip leading/trailing spaces
$arg =~ s/^\s*//;
$arg =~ s/\s*$//;
$arg =~ s/\s+/ /;
if ($arg =~ /^#/) {
# Treat preprocessor directive as a typeless variable just to fill
# corresponding data structures "correctly". Catch it later in
# output_* subs.
push_parameter($arg, "", "", $file);
push_parameter($arg, "", $file);
} elsif ($arg =~ m/\(.+\)\s*\(/) {
# pointer-to-function
$arg =~ tr/#/,/;
$arg =~ m/[^\(]+\(\s*\*?\s*([\w\.]*)\s*\)/;
$arg =~ m/[^\(]+\([\w\s]*\*?\s*([\w\.]*)\s*\)/;
$param = $1;
$type = $arg;
$type =~ s/([^\(]+\(\*?)\s*$param/$1/;
save_struct_actual($param);
push_parameter($param, $type, $arg, $file, $declaration_name);
push_parameter($param, $type, $file, $declaration_name);
} elsif ($arg) {
$arg =~ s/\s*:\s*/:/g;
$arg =~ s/\s*\[/\[/g;
@ -1561,28 +1412,26 @@ sub create_parameterlist($$$$) {
foreach $param (@args) {
if ($param =~ m/^(\*+)\s*(.*)/) {
save_struct_actual($2);
push_parameter($2, "$type $1", $arg, $file, $declaration_name);
push_parameter($2, "$type $1", $file, $declaration_name);
}
elsif ($param =~ m/(.*?):(\d+)/) {
if ($type ne "") { # skip unnamed bit-fields
save_struct_actual($1);
push_parameter($1, "$type:$2", $arg, $file, $declaration_name)
push_parameter($1, "$type:$2", $file, $declaration_name)
}
}
else {
save_struct_actual($param);
push_parameter($param, $type, $arg, $file, $declaration_name);
push_parameter($param, $type, $file, $declaration_name);
}
}
}
}
}
sub push_parameter($$$$$) {
sub push_parameter($$$$) {
my $param = shift;
my $type = shift;
my $org_arg = shift;
my $file = shift;
my $declaration_name = shift;
@ -1600,10 +1449,6 @@ sub push_parameter($$$$$) {
# handles unnamed variable parameters
$param = "...";
}
elsif ($param =~ /\w\.\.\.$/) {
# for named variable parameters of the form `x...`, remove the dots
$param =~ s/\.\.\.$//;
}
if (!defined $parameterdescs{$param} || $parameterdescs{$param} eq "") {
$parameterdescs{$param} = "variable arguments";
}
@ -1646,8 +1491,8 @@ sub push_parameter($$$$$) {
# "[blah" in a parameter string;
###$param =~ s/\s*//g;
push @parameterlist, $param;
$org_arg =~ s/\s\s+/ /g;
$parametertypes{$param} = $org_arg;
$type =~ s/\s\s+/ /g;
$parametertypes{$param} = $type;
}
sub check_sections($$$$$) {
@ -1721,8 +1566,6 @@ sub dump_function($$) {
my $file = shift;
my $noret = 0;
print_lineno($new_start_line);
$prototype =~ s/^static +//;
$prototype =~ s/^extern +//;
$prototype =~ s/^asmlinkage +//;
@ -1737,7 +1580,6 @@ sub dump_function($$) {
$prototype =~ s/__must_check +//;
$prototype =~ s/__weak +//;
$prototype =~ s/__sched +//;
$prototype =~ s/__printf\s*\(\s*\d*\s*,\s*\d*\s*\) +//;
my $define = $prototype =~ s/^#\s*define\s+//; #ak added
$prototype =~ s/__attribute__\s*\(\(
(?:
@ -1810,23 +1652,6 @@ sub dump_function($$) {
check_return_section($file, $declaration_name, $return_type);
}
# The function parser can be called with a typedef parameter.
# Handle it.
if ($return_type =~ /typedef/) {
output_declaration($declaration_name,
'function',
{'function' => $declaration_name,
'typedef' => 1,
'module' => $modulename,
'functiontype' => $return_type,
'parameterlist' => \@parameterlist,
'parameterdescs' => \%parameterdescs,
'parametertypes' => \%parametertypes,
'sectionlist' => \@sectionlist,
'sections' => \%sections,
'purpose' => $declaration_purpose
});
} else {
output_declaration($declaration_name,
'function',
{'function' => $declaration_name,
@ -1839,7 +1664,6 @@ sub dump_function($$) {
'sections' => \%sections,
'purpose' => $declaration_purpose
});
}
}
sub reset_state {
@ -1934,11 +1758,6 @@ sub process_proto_function($$) {
$prototype =~ s@/\*.*?\*/@@gos; # strip comments.
$prototype =~ s@[\r\n]+@ @gos; # strip newlines/cr's.
$prototype =~ s@^\s+@@gos; # strip leading spaces
# Handle prototypes for function pointers like:
# int (*pcs_config)(struct foo)
$prototype =~ s@^(\S+\s+)\(\s*\*(\S+)\)@$1$2@gos;
if ($prototype =~ /SYSCALL_DEFINE/) {
syscall_munge();
}
@ -2017,7 +1836,6 @@ sub process_export_file($) {
while (<IN>) {
if (/$export_symbol/) {
next if (defined($nosymbol_table{$2}));
$function_table{$2} = 1;
}
}
@ -2049,7 +1867,7 @@ sub process_name($$) {
if (/$doc_block/o) {
$state = STATE_DOCBLOCK;
$contents = "";
$new_start_line = $.;
$new_start_line = $. + 1;
if ( $1 eq "" ) {
$section = $section_intro;
@ -2117,25 +1935,6 @@ sub process_name($$) {
sub process_body($$) {
my $file = shift;
# Until all named variable macro parameters are
# documented using the bare name (`x`) rather than with
# dots (`x...`), strip the dots:
if ($section =~ /\w\.\.\.$/) {
$section =~ s/\.\.\.$//;
if ($verbose) {
print STDERR "${file}:$.: warning: Variable macro arguments should be documented without dots\n";
++$warnings;
}
}
if ($state == STATE_BODY_WITH_BLANK_LINE && /^\s*\*\s?\S/) {
dump_section($file, $section, $contents);
$section = $section_default;
$new_start_line = $.;
$contents = "";
}
if (/$doc_sect/i) { # case insensitive for supported section names
$newsection = $1;
$newcontents = $2;
@ -2188,23 +1987,19 @@ sub process_body($$) {
$prototype = "";
$state = STATE_PROTO;
$brcount = 0;
$new_start_line = $. + 1;
} elsif (/$doc_content/) {
# miguel-style comment kludge, look for blank lines after
# @parameter line to signify start of description
if ($1 eq "") {
if ($section eq $section_context) {
if ($section =~ m/^@/ || $section eq $section_context) {
dump_section($file, $section, $contents);
$section = $section_default;
$contents = "";
$new_start_line = $.;
$state = STATE_BODY;
} else {
if ($section ne $section_default) {
$state = STATE_BODY_WITH_BLANK_LINE;
} else {
$state = STATE_BODY;
}
$contents .= "\n";
}
$state = STATE_BODY;
} elsif ($state == STATE_BODY_MAYBE) {
# Continued declaration purpose
chomp($declaration_purpose);
@ -2336,7 +2131,7 @@ sub process_file($) {
$file = map_filename($orig_file);
if (!open(IN_FILE,"<$file")) {
if (!open(IN,"<$file")) {
print STDERR "Error: Cannot open file $file\n";
++$errors;
return;
@ -2345,9 +2140,9 @@ sub process_file($) {
$. = 1;
$section_counter = 0;
while (<IN_FILE>) {
while (<IN>) {
while (s/\\\s*$//) {
$_ .= <IN_FILE>;
$_ .= <IN>;
}
# Replace tabs by spaces
while ($_ =~ s/\t+/' ' x (length($&) * 8 - length($`) % 8)/e) {};
@ -2356,8 +2151,7 @@ sub process_file($) {
process_normal();
} elsif ($state == STATE_NAME) {
process_name($file, $_);
} elsif ($state == STATE_BODY || $state == STATE_BODY_MAYBE ||
$state == STATE_BODY_WITH_BLANK_LINE) {
} elsif ($state == STATE_BODY || $state == STATE_BODY_MAYBE) {
process_body($file, $_);
} elsif ($state == STATE_INLINE) { # scanning for inline parameters
process_inline($file, $_);
@ -2369,24 +2163,17 @@ sub process_file($) {
}
# Make sure we got something interesting.
if ($initial_section_counter == $section_counter && $
output_mode ne "none") {
if ($output_selection == OUTPUT_INCLUDE) {
print STDERR "${file}:1: warning: '$_' not found\n"
for keys %function_table;
}
else {
if ($initial_section_counter == $section_counter) {
if ($output_mode ne "none") {
print STDERR "${file}:1: warning: no structured comments found\n";
}
if (($output_selection == OUTPUT_INCLUDE) && ($show_not_found == 1)) {
print STDERR " Was looking for '$_'.\n" for keys %function_table;
}
}
close IN_FILE;
}
if ($output_mode eq "rst") {
get_sphinx_version() if (!$sphinx_major);
}
$kernelversion = get_kernel_version();
# generate a sequence of code that will splice in highlighting information
@ -2433,9 +2220,4 @@ if ($verbose && $warnings) {
print STDERR "$warnings warnings\n";
}
if ($Werror && $warnings) {
print STDERR "$warnings warnings as Errors\n";
exit($warnings);
} else {
exit($output_mode eq "none" ? 0 : $errors)
}
exit($output_mode eq "none" ? 0 : $errors);