Revert "doc: update Kernel documentation build system"
Unfortunately, this change surfaces a number of now-fatal warnings about
duplicate labels, and it is often unclear how best to rewrite the affected
documents so that these otherwise logical headings are not duplicated.
This reverts commit 10a1df3cd4.
Signed-off-by: Tom Rini <trini@konsulko.com>
This commit is contained in:
@@ -1,290 +0,0 @@
|
||||
# SPDX-License-Identifier: GPL-2.0
#
# Copyright 2019 Jonathan Corbet <corbet@lwn.net>
#
# Apply kernel-specific tweaks after the initial document processing
# has been done.
#
from docutils import nodes
import sphinx
from sphinx import addnodes
if sphinx.version_info[0] < 2 or \
   sphinx.version_info[0] == 2 and sphinx.version_info[1] < 1:
    from sphinx.environment import NoUri
else:
    from sphinx.errors import NoUri
import re
from itertools import chain

#
# Python 2 lacks re.ASCII...
#
try:
    ascii_p3 = re.ASCII
except AttributeError:
    ascii_p3 = 0

#
# Regex nastiness.  Of course.
# Try to identify "function()" that's not already marked up some
# other way.  Sphinx doesn't like a lot of stuff right after a
# :c:func: block (i.e. ":c:func:`mmap()`s" flakes out), so the last
# bit tries to restrict matches to things that won't create trouble.
#
RE_function = re.compile(r'\b(([a-zA-Z_]\w+)\(\))', flags=ascii_p3)

#
# Sphinx 2 uses the same :c:type role for struct, union, enum and typedef
#
RE_generic_type = re.compile(r'\b(struct|union|enum|typedef)\s+([a-zA-Z_]\w+)',
                             flags=ascii_p3)

#
# Sphinx 3 uses a different C role for each one of struct, union, enum and
# typedef
#
RE_struct = re.compile(r'\b(struct)\s+([a-zA-Z_]\w+)', flags=ascii_p3)
RE_union = re.compile(r'\b(union)\s+([a-zA-Z_]\w+)', flags=ascii_p3)
RE_enum = re.compile(r'\b(enum)\s+([a-zA-Z_]\w+)', flags=ascii_p3)
RE_typedef = re.compile(r'\b(typedef)\s+([a-zA-Z_]\w+)', flags=ascii_p3)

#
# Detects a reference to a documentation page of the form Documentation/... with
# an optional extension
#
RE_doc = re.compile(r'\bDocumentation(/[\w\-_/]+)(\.\w+)*')

#
# Detects a ".. c:namespace::" declaration.  The leading dots are escaped:
# the previous pattern used unescaped "..", which matches ANY two characters,
# so a line such as "xy c:namespace:: foo" was wrongly treated as a
# namespace declaration.
#
RE_namespace = re.compile(r'^\s*\.\.\s*c:namespace::\s*(\S+)\s*$')

#
# Reserved C words that we should skip when cross-referencing
#
Skipnames = [ 'for', 'if', 'register', 'sizeof', 'struct', 'unsigned' ]


#
# Many places in the docs refer to common system calls.  It is
# pointless to try to cross-reference them and, as has been known
# to happen, somebody defining a function by these names can lead
# to the creation of incorrect and confusing cross references.  So
# just don't even try with these names.
#
Skipfuncs = [ 'open', 'close', 'read', 'write', 'fcntl', 'mmap',
              'select', 'poll', 'fork', 'execve', 'clone', 'ioctl',
              'socket' ]

# Namespace declared by the document currently being processed ('' = none).
c_namespace = ''
|
||||
|
||||
def markup_refs(docname, app, node):
    """Split the text of *node* into a list of nodes, replacing every
    recognized reference (function(), C type, Documentation/ path) with
    the markup produced by its handler; plain stretches stay Text nodes."""
    text = node.astext()

    #
    # Map each regex to the function that marks up its matches; the set
    # of recognized patterns depends on the Sphinx major version.
    #
    if sphinx.version_info[0] >= 3:
        handlers = {RE_doc: markup_doc_ref,
                    RE_function: markup_func_ref_sphinx3,
                    RE_struct: markup_c_ref,
                    RE_union: markup_c_ref,
                    RE_enum: markup_c_ref,
                    RE_typedef: markup_c_ref}
    else:
        handlers = {RE_doc: markup_doc_ref,
                    RE_function: markup_c_ref,
                    RE_generic_type: markup_c_ref}

    #
    # Collect every match from every regex, ordered by starting position.
    #
    ordered = sorted(chain.from_iterable(rx.finditer(text) for rx in handlers),
                     key=lambda m: m.start())

    result = []
    consumed = 0
    for m in ordered:
        # Pass through any plain text that precedes this match.
        if m.start() > consumed:
            result.append(nodes.Text(text[consumed:m.start()]))
        # Hand the match to the handler registered for its regex.
        result.append(handlers[m.re](docname, app, m))
        consumed = m.end()
    if consumed < len(text):
        result.append(nodes.Text(text[consumed:]))
    return result
|
||||
|
||||
#
# In sphinx3 we can cross-reference to C macro and function, each one with its
# own C role, but both match the same regex, so we try both.
#
def markup_func_ref_sphinx3(docname, app, match):
    """Mark up a "function()" reference for Sphinx >= 3, trying both the
    'function' and 'macro' C roles; falls back to plain text when no
    cross-reference resolves."""
    cdom = app.env.domains['c']
    #
    # Go through the dance of getting an xref out of the C domain
    #
    name = match.group(2)
    plain = nodes.Text(match.group(0))

    if name in Skipnames:
        return plain

    # If this document declared a namespace, try the namespaced name first.
    candidates = [name]
    if c_namespace:
        candidates.insert(0, c_namespace + "." + name)

    for candidate in candidates:
        if candidate in Skipfuncs:
            continue
        for css_class, role in (('c-func', 'function'), ('c-macro', 'macro')):
            literal = nodes.literal(classes=['xref', 'c', css_class])
            literal += plain
            pxref = addnodes.pending_xref('', refdomain = 'c',
                                          reftype = role,
                                          reftarget = candidate, modname = None,
                                          classname = None)
            #
            # XXX The Latex builder will throw NoUri exceptions here,
            # work around that by ignoring them.
            #
            try:
                xref = cdom.resolve_xref(app.env, docname, app.builder,
                                         role, candidate, pxref, literal)
            except NoUri:
                xref = None

            if xref:
                return xref

    return plain
|
||||
|
||||
def markup_c_ref(docname, app, match):
    """Mark up a C construct reference (function, struct, union, enum or
    typedef).  The regex that produced *match* selects both the CSS class
    of the literal node and the C-domain role used for resolution; falls
    back to plain text when no cross-reference resolves."""
    class_map = {# Sphinx 2 only
                 RE_function: 'c-func',
                 RE_generic_type: 'c-type',
                 # Sphinx 3+ only
                 RE_struct: 'c-struct',
                 RE_union: 'c-union',
                 RE_enum: 'c-enum',
                 RE_typedef: 'c-type',
                 }
    role_map = {# Sphinx 2 only
                RE_function: 'function',
                RE_generic_type: 'type',
                # Sphinx 3+ only
                RE_struct: 'struct',
                RE_union: 'union',
                RE_enum: 'enum',
                RE_typedef: 'type',
                }
    css_class = class_map[match.re]
    role = role_map[match.re]

    cdom = app.env.domains['c']
    #
    # Go through the dance of getting an xref out of the C domain
    #
    name = match.group(2)
    plain = nodes.Text(match.group(0))

    if name in Skipnames:
        return plain

    # If this document declared a namespace, try the namespaced name first.
    candidates = [name]
    if c_namespace:
        candidates.insert(0, c_namespace + "." + name)

    for candidate in candidates:
        # Well-known system-call names are never cross-referenced.
        if match.re == RE_function and candidate in Skipfuncs:
            continue
        literal = nodes.literal(classes=['xref', 'c', css_class])
        literal += plain
        pxref = addnodes.pending_xref('', refdomain = 'c',
                                      reftype = role,
                                      reftarget = candidate, modname = None,
                                      classname = None)
        #
        # XXX The Latex builder will throw NoUri exceptions here,
        # work around that by ignoring them.
        #
        try:
            xref = cdom.resolve_xref(app.env, docname, app.builder,
                                     role, candidate, pxref, literal)
        except NoUri:
            xref = None

        if xref:
            return xref

    return plain
|
||||
|
||||
#
# Try to replace a documentation reference of the form Documentation/... with a
# cross reference to that page
#
def markup_doc_ref(docname, app, match):
    """Turn a Documentation/... reference into a :doc: cross reference,
    falling back to the matched text when resolution fails."""
    stddom = app.env.domains['std']
    #
    # Go through the dance of getting an xref out of the std domain
    #
    page = match.group(1)
    pxref = addnodes.pending_xref('', refdomain = 'std', reftype = 'doc',
                                  reftarget = page, modname = None,
                                  classname = None, refexplicit = False)
    #
    # XXX The Latex builder will throw NoUri exceptions here,
    # work around that by ignoring them.
    #
    try:
        xref = stddom.resolve_xref(app.env, docname, app.builder, 'doc',
                                   page, pxref, None)
    except NoUri:
        xref = None
    #
    # Return the xref if we got it; otherwise just return the plain text.
    #
    return xref if xref else nodes.Text(match.group(0))
|
||||
|
||||
def get_c_namespace(app, docname):
    """Return the C namespace declared in *docname* via ".. c:namespace::",
    or the empty string when the document declares none."""
    source = app.env.doc2path(docname)
    with open(source) as src:
        for line in src:
            found = RE_namespace.search(line)
            if found:
                return found.group(1)
    return ''
|
||||
|
||||
def auto_markup(app, doctree, name):
    """'doctree-resolved' hook: add automatic cross references throughout
    the resolved *doctree* for document *name*."""
    global c_namespace
    c_namespace = get_c_namespace(app, name)
    #
    # This loop could eventually be improved on.  Someday maybe we
    # want a proper tree traversal with a lot of awareness of which
    # kinds of nodes to prune.  But this works well for now.
    #
    # The nodes.literal test catches ``literal text``, its purpose is to
    # avoid adding cross-references to functions that have been explicitly
    # marked with cc:func:.
    #
    for para in doctree.traverse(nodes.paragraph):
        for txt in para.traverse(nodes.Text):
            if not isinstance(txt.parent, nodes.literal):
                txt.parent.replace(txt, markup_refs(name, app, txt))
|
||||
|
||||
def setup(app):
    """Register the auto-markup pass with Sphinx."""
    app.connect('doctree-resolved', auto_markup)
    return {'parallel_read_safe': True,
            'parallel_write_safe': True}
|
||||
@@ -40,94 +40,14 @@ from sphinx import addnodes
|
||||
from sphinx.domains.c import c_funcptr_sig_re, c_sig_re
|
||||
from sphinx.domains.c import CObject as Base_CObject
|
||||
from sphinx.domains.c import CDomain as Base_CDomain
|
||||
from itertools import chain
|
||||
import re
|
||||
|
||||
__version__ = '1.1'
|
||||
__version__ = '1.0'
|
||||
|
||||
# Get Sphinx version
|
||||
major, minor, patch = sphinx.version_info[:3]
|
||||
|
||||
# Namespace to be prepended to the full name
|
||||
namespace = None
|
||||
|
||||
#
|
||||
# Handle trivial newer c domain tags that are part of Sphinx 3.1 c domain tags
|
||||
# - Store the namespace if ".. c:namespace::" tag is found
|
||||
#
|
||||
RE_namespace = re.compile(r'^\s*..\s*c:namespace::\s*(\S+)\s*$')
|
||||
|
||||
def markup_namespace(match):
    """Remember the value of a ".. c:namespace::" line; the line itself
    is dropped from the output (an empty replacement is returned)."""
    global namespace
    namespace = match.group(1)
    return ""
|
||||
|
||||
#
# Handle c:macro for function-style declaration
#
RE_macro = re.compile(r'^\s*..\s*c:macro::\s*(\S+)\s+(\S.*)\s*$')

def markup_macro(match):
    """Rewrite a function-style ".. c:macro:: NAME args" line as an
    equivalent ".. c:function::" directive."""
    return ".. c:function:: %s %s" % (match.group(1), match.group(2))
|
||||
|
||||
#
# Handle newer c domain tags that are evaluated as .. c:type: for
# backward-compatibility with Sphinx < 3.0
#
RE_ctype = re.compile(r'^\s*..\s*c:(struct|union|enum|enumerator|alias)::\s*(.*)$')

def markup_ctype(match):
    """Downgrade a Sphinx 3 c:struct/union/enum/... directive to c:type."""
    return ".. c:type:: %s" % match.group(2)
|
||||
|
||||
#
# Handle newer c domain tags that are evaluated as :c:type: for
# backward-compatibility with Sphinx < 3.0
#
RE_ctype_refs = re.compile(r':c:(var|struct|union|enum|enumerator)::`([^\`]+)`')

def markup_ctype_refs(match):
    """Downgrade a Sphinx 3 :c:var/struct/...: role reference to :c:type:."""
    return ":c:type:`%s`" % match.group(2)
|
||||
|
||||
#
# Simply convert :c:expr: and :c:texpr: into a literal block.
#
RE_expr = re.compile(r':c:(expr|texpr):`([^\`]+)`')

def markup_c_expr(match):
    """Replace a :c:expr:/:c:texpr: role with plain ``literal`` markup.

    The surrounding backslash-space pairs escape the whitespace so reST
    does not merge the literal with adjacent text.
    """
    # '\\ ' (properly escaped): the original spelled this '\ ', an invalid
    # string escape that yielded the same bytes only by accident and raises
    # a SyntaxWarning on modern Python.
    return '\\ ``' + match.group(2) + '``\\ '
|
||||
|
||||
#
# Parse Sphinx 3.x C markups, replacing them by backward-compatible ones
#
def c_markups(app, docname, source):
    """'source-read' hook: rewrite Sphinx 3.x C-domain markup in *source*
    into constructs older Sphinx versions understand.

    *source* is a one-element list holding the document text; it is
    modified in place.
    """
    #
    # Associate each regex with the function that rewrites its matches.
    #
    handlers = {
        RE_namespace: markup_namespace,
        RE_expr: markup_c_expr,
        RE_macro: markup_macro,
        RE_ctype: markup_ctype,
        RE_ctype_refs: markup_ctype_refs,
    }

    rewritten = ""
    for line in source[0].splitlines(True):
        # All matches on this line, ordered by starting position.
        # NOTE(review): matches are located before any replacement is made,
        # so offsets assume at most one rewrite per line — TODO confirm.
        found = sorted(chain.from_iterable(rx.finditer(line)
                                           for rx in handlers),
                       key=lambda m: m.start())
        for m in found:
            line = line[:m.start()] + handlers[m.re](m) + line[m.end():]

        rewritten += line

    source[0] = rewritten
|
||||
|
||||
#
|
||||
# Now implements support for the cdomain namespacing logic
|
||||
#
|
||||
|
||||
def setup(app):
|
||||
|
||||
# Handle easy Sphinx 3.1+ simple new tags: :c:expr and .. c:namespace::
|
||||
app.connect('source-read', c_markups)
|
||||
|
||||
if (major == 1 and minor < 8):
|
||||
app.override_domain(CDomain)
|
||||
else:
|
||||
@@ -155,8 +75,6 @@ class CObject(Base_CObject):
|
||||
function-like macro, the name of the macro is returned. Otherwise
|
||||
``False`` is returned. """
|
||||
|
||||
global namespace
|
||||
|
||||
if not self.objtype == 'function':
|
||||
return False
|
||||
|
||||
@@ -189,16 +107,11 @@ class CObject(Base_CObject):
|
||||
param += nodes.emphasis(argname, argname)
|
||||
paramlist += param
|
||||
|
||||
if namespace:
|
||||
fullname = namespace + "." + fullname
|
||||
|
||||
return fullname
|
||||
|
||||
def handle_signature(self, sig, signode):
|
||||
"""Transform a C signature into RST nodes."""
|
||||
|
||||
global namespace
|
||||
|
||||
fullname = self.handle_func_like_macro(sig, signode)
|
||||
if not fullname:
|
||||
fullname = super(CObject, self).handle_signature(sig, signode)
|
||||
@@ -209,10 +122,6 @@ class CObject(Base_CObject):
|
||||
else:
|
||||
# FIXME: handle :name: value of other declaration types?
|
||||
pass
|
||||
else:
|
||||
if namespace:
|
||||
fullname = namespace + "." + fullname
|
||||
|
||||
return fullname
|
||||
|
||||
def add_target_and_index(self, name, sig, signode):
|
||||
|
||||
@@ -1,194 +0,0 @@
|
||||
# -*- coding: utf-8; mode: python -*-
|
||||
# coding=utf-8
|
||||
# SPDX-License-Identifier: GPL-2.0
|
||||
#
|
||||
u"""
|
||||
kernel-abi
|
||||
~~~~~~~~~~
|
||||
|
||||
Implementation of the ``kernel-abi`` reST-directive.
|
||||
|
||||
:copyright: Copyright (C) 2016 Markus Heiser
|
||||
:copyright: Copyright (C) 2016-2020 Mauro Carvalho Chehab
|
||||
:maintained-by: Mauro Carvalho Chehab <mchehab+huawei@kernel.org>
|
||||
:license: GPL Version 2, June 1991 see Linux/COPYING for details.
|
||||
|
||||
The ``kernel-abi`` (:py:class:`KernelCmd`) directive calls the
|
||||
scripts/get_abi.pl script to parse the Kernel ABI files.
|
||||
|
||||
Overview of directive's argument and options.
|
||||
|
||||
.. code-block:: rst
|
||||
|
||||
.. kernel-abi:: <ABI directory location>
|
||||
:debug:
|
||||
|
||||
The argument ``<ABI directory location>`` is required. It contains the
|
||||
location of the ABI files to be parsed.
|
||||
|
||||
``debug``
|
||||
Inserts a code-block with the *raw* reST. Sometimes it is helpful to see
|
||||
what reST is generated.
|
||||
|
||||
"""
|
||||
|
||||
import codecs
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import re
|
||||
import kernellog
|
||||
|
||||
from os import path
|
||||
|
||||
from docutils import nodes, statemachine
|
||||
from docutils.statemachine import ViewList
|
||||
from docutils.parsers.rst import directives, Directive
|
||||
from docutils.utils.error_reporting import ErrorString
|
||||
|
||||
#
|
||||
# AutodocReporter is only good up to Sphinx 1.7
|
||||
#
|
||||
import sphinx
|
||||
|
||||
Use_SSI = sphinx.__version__[:3] >= '1.7'
|
||||
if Use_SSI:
|
||||
from sphinx.util.docutils import switch_source_input
|
||||
else:
|
||||
from sphinx.ext.autodoc import AutodocReporter
|
||||
|
||||
__version__ = '1.0'
|
||||
|
||||
def setup(app):
    """Register the ``kernel-abi`` directive with Sphinx."""
    app.add_directive("kernel-abi", KernelCmd)
    return dict(
        version = __version__,
        parallel_read_safe = True,
        parallel_write_safe = True
    )
|
||||
|
||||
class KernelCmd(Directive):

    u"""KernelABI (``kernel-abi``) directive.

    Runs scripts/get_abi.pl on the directory given as argument and
    nested-parses the reST it emits into the current document.
    """

    required_arguments = 1
    optional_arguments = 2
    has_content = False
    final_argument_whitespace = True

    option_spec = {
        "debug" : directives.flag,
        "rst" : directives.unchanged
    }

    def run(self):
        """Build the get_abi.pl command line, run it and parse its output."""
        doc = self.state.document
        if not doc.settings.file_insertion_enabled:
            raise self.warning("docutils: file insertion disabled")

        cwd = path.dirname(doc.current_source)
        cmd = "get_abi.pl rest --enable-lineno --dir "
        cmd += self.arguments[0]

        if 'rst' in self.options:
            cmd += " --rst-source"

        srctree = path.abspath(os.environ["srctree"])

        # extend PATH with $(srctree)/scripts
        path_env = os.pathsep.join([
            srctree + os.sep + "scripts",
            os.environ["PATH"]
        ])
        shell_env = os.environ.copy()
        shell_env["PATH"] = path_env
        shell_env["srctree"] = srctree

        lines = self.runCmd(cmd, shell=True, cwd=cwd, env=shell_env)
        return self.nestedParse(lines, self.arguments[0])

    def runCmd(self, cmd, **kwargs):
        u"""Run command ``cmd`` and return it's stdout as unicode."""

        try:
            proc = subprocess.Popen(
                cmd
                , stdout = subprocess.PIPE
                , stderr = subprocess.PIPE
                , **kwargs
            )
            out, err = proc.communicate()

            out, err = codecs.decode(out, 'utf-8'), codecs.decode(err, 'utf-8')

            if proc.returncode != 0:
                raise self.severe(
                    u"command '%s' failed with return code %d"
                    % (cmd, proc.returncode)
                )
        except OSError as exc:
            raise self.severe(u"problems with '%s' directive: %s."
                              % (self.name, ErrorString(exc)))
        return out

    def nestedParse(self, lines, fname):
        """Parse the generated reST, mapping output lines back to their ABI
        source files via "#define LINENO <file>#<line>" markers."""
        content = ViewList()
        node = nodes.section()

        if "debug" in self.options:
            code_block = "\n\n.. code-block:: rst\n :linenos:\n"
            for l in lines.split("\n"):
                code_block += "\n " + l
            lines = code_block + "\n\n"

        # Raw string: "\#" and "\S" are regex escapes; the original non-raw
        # string relied on invalid Python escapes that warn on modern Python.
        line_regex = re.compile(r"^#define LINENO (\S+)\#([0-9]+)$")
        ln = 0
        n = 0
        f = fname

        for line in lines.split("\n"):
            n = n + 1
            match = line_regex.search(line)
            if match:
                new_f = match.group(1)

                # Sphinx parser is lazy: it stops parsing contents in the
                # middle, if it is too big. So, handle it per input file
                if new_f != f and content:
                    self.do_parse(content, node)
                    content = ViewList()

                f = new_f

                # sphinx counts lines from 0
                ln = int(match.group(2)) - 1
            else:
                content.append(line, f, ln)

        kernellog.info(self.state.document.settings.env.app, "%s: parsed %i lines" % (fname, n))

        if content:
            self.do_parse(content, node)

        return node.children

    def do_parse(self, content, node):
        """Nested-parse *content* into *node*, compatibly across Sphinx
        versions (switch_source_input vs. the old AutodocReporter dance)."""
        if Use_SSI:
            with switch_source_input(self.state, content):
                self.state.nested_parse(content, 0, node, match_titles=1)
        else:
            # Save and restore the parser state around the nested parse.
            buf = self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter

            self.state.memo.title_styles = []
            self.state.memo.section_level = 0
            self.state.memo.reporter = AutodocReporter(content, self.state.memo.reporter)
            try:
                self.state.nested_parse(content, 0, node, match_titles=1)
            finally:
                self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter = buf
|
||||
@@ -1,169 +0,0 @@
|
||||
# coding=utf-8
|
||||
# SPDX-License-Identifier: GPL-2.0
|
||||
#
|
||||
u"""
|
||||
kernel-feat
|
||||
~~~~~~~~~~~
|
||||
|
||||
Implementation of the ``kernel-feat`` reST-directive.
|
||||
|
||||
:copyright: Copyright (C) 2016 Markus Heiser
|
||||
:copyright: Copyright (C) 2016-2019 Mauro Carvalho Chehab
|
||||
:maintained-by: Mauro Carvalho Chehab <mchehab+samsung@kernel.org>
|
||||
:license: GPL Version 2, June 1991 see Linux/COPYING for details.
|
||||
|
||||
The ``kernel-feat`` (:py:class:`KernelFeat`) directive calls the
|
||||
scripts/get_feat.pl script to parse the Kernel ABI files.
|
||||
|
||||
Overview of directive's argument and options.
|
||||
|
||||
.. code-block:: rst
|
||||
|
||||
.. kernel-feat:: <ABI directory location>
|
||||
:debug:
|
||||
|
||||
The argument ``<ABI directory location>`` is required. It contains the
|
||||
location of the ABI files to be parsed.
|
||||
|
||||
``debug``
|
||||
Inserts a code-block with the *raw* reST. Sometimes it is helpful to see
|
||||
what reST is generated.
|
||||
|
||||
"""
|
||||
|
||||
import codecs
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from os import path
|
||||
|
||||
from docutils import nodes, statemachine
|
||||
from docutils.statemachine import ViewList
|
||||
from docutils.parsers.rst import directives, Directive
|
||||
from docutils.utils.error_reporting import ErrorString
|
||||
|
||||
#
|
||||
# AutodocReporter is only good up to Sphinx 1.7
|
||||
#
|
||||
import sphinx
|
||||
|
||||
Use_SSI = sphinx.__version__[:3] >= '1.7'
|
||||
if Use_SSI:
|
||||
from sphinx.util.docutils import switch_source_input
|
||||
else:
|
||||
from sphinx.ext.autodoc import AutodocReporter
|
||||
|
||||
__version__ = '1.0'
|
||||
|
||||
def setup(app):
    """Register the ``kernel-feat`` directive with Sphinx."""
    app.add_directive("kernel-feat", KernelFeat)
    return dict(
        version = __version__,
        parallel_read_safe = True,
        parallel_write_safe = True
    )
|
||||
|
||||
class KernelFeat(Directive):

    u"""KernelFeat (``kernel-feat``) directive.

    Runs scripts/get_feat.pl on the directory given as argument and
    nested-parses the reST it emits into the current document.
    """

    required_arguments = 1
    optional_arguments = 2
    has_content = False
    final_argument_whitespace = True

    option_spec = {
        "debug" : directives.flag
    }

    def warn(self, message, **replace):
        """Emit a kernel-feat warning prefixed with file name and line."""
        replace["fname"] = self.state.document.current_source
        replace["line_no"] = replace.get("line_no", self.lineno)
        message = ("%(fname)s:%(line_no)s: [kernel-feat WARN] : " + message) % replace
        self.state.document.settings.env.app.warn(message, prefix="")

    def run(self):
        """Build the get_feat.pl command line, run it and parse its output."""
        doc = self.state.document
        if not doc.settings.file_insertion_enabled:
            raise self.warning("docutils: file insertion disabled")

        env = doc.settings.env
        cwd = path.dirname(doc.current_source)

        cmd = "get_feat.pl rest --dir "
        cmd += self.arguments[0]
        if len(self.arguments) > 1:
            cmd += " --arch " + self.arguments[1]

        srctree = path.abspath(os.environ["srctree"])
        fname = cmd

        # extend PATH with $(srctree)/scripts
        path_env = os.pathsep.join([
            srctree + os.sep + "scripts",
            os.environ["PATH"]
        ])
        shell_env = os.environ.copy()
        shell_env["PATH"] = path_env
        shell_env["srctree"] = srctree

        out = self.runCmd(cmd, shell=True, cwd=cwd, env=shell_env)
        return self.nestedParse(out, fname)

    def runCmd(self, cmd, **kwargs):
        u"""Run command ``cmd`` and return it's stdout as unicode."""

        try:
            proc = subprocess.Popen(
                cmd
                , stdout = subprocess.PIPE
                , stderr = subprocess.PIPE
                , **kwargs
            )
            out, err = proc.communicate()

            out, err = codecs.decode(out, 'utf-8'), codecs.decode(err, 'utf-8')

            if proc.returncode != 0:
                raise self.severe(
                    u"command '%s' failed with return code %d"
                    % (cmd, proc.returncode)
                )
        except OSError as exc:
            raise self.severe(u"problems with '%s' directive: %s."
                              % (self.name, ErrorString(exc)))
        return out

    def nestedParse(self, lines, fname):
        """Nested-parse the generated reST, optionally echoing it first as
        a literal code block when :debug: is set."""
        content = ViewList()
        node = nodes.section()

        if "debug" in self.options:
            dump = "\n\n.. code-block:: rst\n :linenos:\n"
            for raw in lines.split("\n"):
                dump += "\n " + raw
            lines = dump + "\n\n"

        for lineno, raw in enumerate(lines.split("\n")):
            content.append(raw, fname, lineno)

        # Save the parser state so the legacy path can restore it.
        buf = self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter

        if Use_SSI:
            with switch_source_input(self.state, content):
                self.state.nested_parse(content, 0, node, match_titles=1)
        else:
            self.state.memo.title_styles = []
            self.state.memo.section_level = 0
            self.state.memo.reporter = AutodocReporter(content, self.state.memo.reporter)
            try:
                self.state.nested_parse(content, 0, node, match_titles=1)
            finally:
                self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter = buf

        return node.children
|
||||
@@ -62,7 +62,6 @@ class KernelDocDirective(Directive):
|
||||
'export': directives.unchanged,
|
||||
'internal': directives.unchanged,
|
||||
'identifiers': directives.unchanged,
|
||||
'no-identifiers': directives.unchanged,
|
||||
'functions': directives.unchanged,
|
||||
}
|
||||
has_content = False
|
||||
@@ -71,11 +70,6 @@ class KernelDocDirective(Directive):
|
||||
env = self.state.document.settings.env
|
||||
cmd = [env.config.kerneldoc_bin, '-rst', '-enable-lineno']
|
||||
|
||||
# Pass the version string to kernel-doc, as it needs to use a different
|
||||
# dialect, depending what the C domain supports for each specific
|
||||
# Sphinx versions
|
||||
cmd += ['-sphinx-version', sphinx.__version__]
|
||||
|
||||
filename = env.config.kerneldoc_srctree + '/' + self.arguments[0]
|
||||
export_file_patterns = []
|
||||
|
||||
@@ -105,12 +99,6 @@ class KernelDocDirective(Directive):
|
||||
else:
|
||||
cmd += ['-no-doc-sections']
|
||||
|
||||
if 'no-identifiers' in self.options:
|
||||
no_identifiers = self.options.get('no-identifiers').split()
|
||||
if no_identifiers:
|
||||
for i in no_identifiers:
|
||||
cmd += ['-nosymbol', i]
|
||||
|
||||
for pattern in export_file_patterns:
|
||||
for f in glob.glob(env.config.kerneldoc_srctree + '/' + pattern):
|
||||
env.note_dependency(os.path.abspath(f))
|
||||
@@ -148,8 +136,7 @@ class KernelDocDirective(Directive):
|
||||
lineoffset = int(match.group(1)) - 1
|
||||
# we must eat our comments since the upset the markup
|
||||
else:
|
||||
doc = env.srcdir + "/" + env.docname + ":" + str(self.lineno)
|
||||
result.append(line, doc + ": " + filename, lineoffset)
|
||||
result.append(line, filename, lineoffset)
|
||||
lineoffset += 1
|
||||
|
||||
node = nodes.section()
|
||||
|
||||
@@ -25,8 +25,4 @@ def verbose(app, message):
|
||||
else:
|
||||
app.verbose(message)
|
||||
|
||||
def info(app, message):
    """Log *message* at info level, via sphinx.util.logging when available
    and the legacy app API otherwise."""
    emit = logger.info if UseLogging else app.info
    emit(message)
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ u"""
|
||||
|
||||
Used tools:
|
||||
|
||||
* ``dot(1)``: Graphviz (https://www.graphviz.org). If Graphviz is not
|
||||
* ``dot(1)``: Graphviz (http://www.graphviz.org). If Graphviz is not
|
||||
available, the DOT language is inserted as literal-block.
|
||||
|
||||
* SVG to PDF: To generate PDF, you need at least one of this tools:
|
||||
@@ -41,7 +41,7 @@ u"""
|
||||
* generate PDF from SVG / used by PDF (LaTeX) builder
|
||||
|
||||
* generate SVG (html-builder) and PDF (latex-builder) from DOT files.
|
||||
DOT: see https://www.graphviz.org/content/dot-language
|
||||
DOT: see http://www.graphviz.org/content/dot-language
|
||||
|
||||
"""
|
||||
|
||||
@@ -182,7 +182,7 @@ def setupTools(app):
|
||||
kernellog.verbose(app, "use dot(1) from: " + dot_cmd)
|
||||
else:
|
||||
kernellog.warn(app, "dot(1) not found, for better output quality install "
|
||||
"graphviz from https://www.graphviz.org")
|
||||
"graphviz from http://www.graphviz.org")
|
||||
if convert_cmd:
|
||||
kernellog.verbose(app, "use convert(1) from: " + convert_cmd)
|
||||
else:
|
||||
|
||||
@@ -21,29 +21,6 @@ def loadConfig(namespace):
|
||||
and os.path.normpath(namespace["__file__"]) != os.path.normpath(config_file) ):
|
||||
config_file = os.path.abspath(config_file)
|
||||
|
||||
# Let's avoid one conf.py file just due to latex_documents
|
||||
start = config_file.find('Documentation/')
|
||||
if start >= 0:
|
||||
start = config_file.find('/', start + 1)
|
||||
|
||||
end = config_file.rfind('/')
|
||||
if start >= 0 and end > 0:
|
||||
dir = config_file[start + 1:end]
|
||||
|
||||
print("source directory: %s" % dir)
|
||||
new_latex_docs = []
|
||||
latex_documents = namespace['latex_documents']
|
||||
|
||||
for l in latex_documents:
|
||||
if l[0].find(dir + '/') == 0:
|
||||
has = True
|
||||
fn = l[0][len(dir) + 1:]
|
||||
new_latex_docs.append((fn, l[1], l[2], l[3], l[4]))
|
||||
break
|
||||
|
||||
namespace['latex_documents'] = new_latex_docs
|
||||
|
||||
# If there is an extra conf.py file, load it
|
||||
if os.path.isfile(config_file):
|
||||
sys.stdout.write("load additional sphinx-config: %s\n" % config_file)
|
||||
config = namespace.copy()
|
||||
@@ -52,6 +29,4 @@ def loadConfig(namespace):
|
||||
del config['__file__']
|
||||
namespace.update(config)
|
||||
else:
|
||||
config = namespace.copy()
|
||||
config['tags'].add("subproject")
|
||||
namespace.update(config)
|
||||
sys.stderr.write("WARNING: additional sphinx-config not found: %s\n" % config_file)
|
||||
|
||||
@@ -1,197 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# SPDX-License-Identifier: GPL-2.0
|
||||
# -*- coding: utf-8; mode: python -*-
|
||||
# pylint: disable=R0903, C0330, R0914, R0912, E0401
|
||||
|
||||
u"""
|
||||
maintainers-include
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Implementation of the ``maintainers-include`` reST-directive.
|
||||
|
||||
:copyright: Copyright (C) 2019 Kees Cook <keescook@chromium.org>
|
||||
:license: GPL Version 2, June 1991 see linux/COPYING for details.
|
||||
|
||||
The ``maintainers-include`` reST-directive performs extensive parsing
|
||||
specific to the Linux kernel's standard "MAINTAINERS" file, in an
|
||||
effort to avoid needing to heavily mark up the original plain text.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import re
|
||||
import os.path
|
||||
|
||||
from docutils import statemachine
|
||||
from docutils.utils.error_reporting import ErrorString
|
||||
from docutils.parsers.rst import Directive
|
||||
from docutils.parsers.rst.directives.misc import Include
|
||||
|
||||
__version__ = '1.0'
|
||||
|
||||
def setup(app):
    """Register the ``maintainers-include`` directive with Sphinx."""
    app.add_directive("maintainers-include", MaintainersInclude)
    return dict(
        version = __version__,
        parallel_read_safe = True,
        parallel_write_safe = True
    )
|
||||
|
||||
class MaintainersInclude(Include):
|
||||
u"""MaintainersInclude (``maintainers-include``) directive"""
|
||||
required_arguments = 0
|
||||
|
||||
def parse_maintainers(self, path):
|
||||
"""Parse all the MAINTAINERS lines into ReST for human-readability"""
|
||||
|
||||
result = list()
|
||||
result.append(".. _maintainers:")
|
||||
result.append("")
|
||||
|
||||
# Poor man's state machine.
|
||||
descriptions = False
|
||||
maintainers = False
|
||||
subsystems = False
|
||||
|
||||
# Field letter to field name mapping.
|
||||
field_letter = None
|
||||
fields = dict()
|
||||
|
||||
prev = None
|
||||
field_prev = ""
|
||||
field_content = ""
|
||||
|
||||
for line in open(path):
|
||||
if sys.version_info.major == 2:
|
||||
line = unicode(line, 'utf-8')
|
||||
# Have we reached the end of the preformatted Descriptions text?
|
||||
if descriptions and line.startswith('Maintainers'):
|
||||
descriptions = False
|
||||
# Ensure a blank line following the last "|"-prefixed line.
|
||||
result.append("")
|
||||
|
||||
# Start subsystem processing? This is to skip processing the text
|
||||
# between the Maintainers heading and the first subsystem name.
|
||||
if maintainers and not subsystems:
|
||||
if re.search('^[A-Z0-9]', line):
|
||||
subsystems = True
|
||||
|
||||
# Drop needless input whitespace.
|
||||
line = line.rstrip()
|
||||
|
||||
# Linkify all non-wildcard refs to ReST files in Documentation/.
|
||||
pat = '(Documentation/([^\s\?\*]*)\.rst)'
|
||||
m = re.search(pat, line)
|
||||
if m:
|
||||
# maintainers.rst is in a subdirectory, so include "../".
|
||||
line = re.sub(pat, ':doc:`%s <../%s>`' % (m.group(2), m.group(2)), line)
|
||||
|
||||
# Check state machine for output rendering behavior.
|
||||
output = None
|
||||
if descriptions:
|
||||
# Escape the escapes in preformatted text.
|
||||
output = "| %s" % (line.replace("\\", "\\\\"))
|
||||
# Look for and record field letter to field name mappings:
|
||||
# R: Designated *reviewer*: FullName <address@domain>
|
||||
m = re.search("\s(\S):\s", line)
|
||||
if m:
|
||||
field_letter = m.group(1)
|
||||
if field_letter and not field_letter in fields:
|
||||
m = re.search("\*([^\*]+)\*", line)
|
||||
if m:
|
||||
fields[field_letter] = m.group(1)
|
||||
elif subsystems:
|
||||
# Skip empty lines: subsystem parser adds them as needed.
|
||||
if len(line) == 0:
|
||||
continue
|
||||
# Subsystem fields are batched into "field_content"
|
||||
if line[1] != ':':
|
||||
# Render a subsystem entry as:
|
||||
# SUBSYSTEM NAME
|
||||
# ~~~~~~~~~~~~~~
|
||||
|
||||
# Flush pending field content.
|
||||
output = field_content + "\n\n"
|
||||
field_content = ""
|
||||
|
||||
# Collapse whitespace in subsystem name.
|
||||
heading = re.sub("\s+", " ", line)
|
||||
output = output + "%s\n%s" % (heading, "~" * len(heading))
|
||||
field_prev = ""
|
||||
else:
|
||||
# Render a subsystem field as:
|
||||
# :Field: entry
|
||||
# entry...
|
||||
field, details = line.split(':', 1)
|
||||
details = details.strip()
|
||||
|
||||
# Mark paths (and regexes) as literal text for improved
|
||||
# readability and to escape any escapes.
|
||||
if field in ['F', 'N', 'X', 'K']:
|
||||
# But only if not already marked :)
|
||||
if not ':doc:' in details:
|
||||
details = '``%s``' % (details)
|
||||
|
||||
# Comma separate email field continuations.
|
||||
if field == field_prev and field_prev in ['M', 'R', 'L']:
|
||||
field_content = field_content + ","
|
||||
|
||||
# Do not repeat field names, so that field entries
|
||||
# will be collapsed together.
|
||||
if field != field_prev:
|
||||
output = field_content + "\n"
|
||||
field_content = ":%s:" % (fields.get(field, field))
|
||||
field_content = field_content + "\n\t%s" % (details)
|
||||
field_prev = field
|
||||
else:
|
||||
output = line
|
||||
|
||||
# Re-split on any added newlines in any above parsing.
|
||||
if output != None:
|
||||
for separated in output.split('\n'):
|
||||
result.append(separated)
|
||||
|
||||
# Update the state machine when we find heading separators.
|
||||
if line.startswith('----------'):
|
||||
if prev.startswith('Descriptions'):
|
||||
descriptions = True
|
||||
if prev.startswith('Maintainers'):
|
||||
maintainers = True
|
||||
|
||||
# Retain previous line for state machine transitions.
|
||||
prev = line
|
||||
|
||||
# Flush pending field contents.
|
||||
if field_content != "":
|
||||
for separated in field_content.split('\n'):
|
||||
result.append(separated)
|
||||
|
||||
output = "\n".join(result)
|
||||
# For debugging the pre-rendered results...
|
||||
#print(output, file=open("/tmp/MAINTAINERS.rst", "w"))
|
||||
|
||||
self.state_machine.insert_input(
|
||||
statemachine.string2lines(output), path)
|
||||
|
||||
def run(self):
|
||||
"""Include the MAINTAINERS file as part of this reST file."""
|
||||
if not self.state.document.settings.file_insertion_enabled:
|
||||
raise self.warning('"%s" directive disabled.' % self.name)
|
||||
|
||||
# Walk up source path directories to find Documentation/../
|
||||
path = self.state_machine.document.attributes['source']
|
||||
path = os.path.realpath(path)
|
||||
tail = path
|
||||
while tail != "Documentation" and tail != "":
|
||||
(path, tail) = os.path.split(path)
|
||||
|
||||
# Append "MAINTAINERS"
|
||||
path = os.path.join(path, "MAINTAINERS")
|
||||
|
||||
try:
|
||||
self.state.document.settings.record_dependencies.add(path)
|
||||
lines = self.parse_maintainers(path)
|
||||
except IOError as error:
|
||||
raise self.severe('Problems with "%s" directive path:\n%s.' %
|
||||
(self.name, ErrorString(error)))
|
||||
|
||||
return []
|
||||
@@ -1,33 +0,0 @@
|
||||
#!/bin/sh
|
||||
# SPDX-License-Identifier: GPL-2.0+
|
||||
#
|
||||
# Figure out if we should follow a specific parallelism from the make
|
||||
# environment (as exported by scripts/jobserver-exec), or fall back to
|
||||
# the "auto" parallelism when "-jN" is not specified at the top-level
|
||||
# "make" invocation.
|
||||
|
||||
sphinx="$1"
|
||||
shift || true
|
||||
|
||||
parallel="$PARALLELISM"
|
||||
if [ -z "$parallel" ] ; then
|
||||
# If no parallelism is specified at the top-level make, then
|
||||
# fall back to the expected "-jauto" mode that the "htmldocs"
|
||||
# target has had.
|
||||
auto=$(perl -e 'open IN,"'"$sphinx"' --version 2>&1 |";
|
||||
while (<IN>) {
|
||||
if (m/([\d\.]+)/) {
|
||||
print "auto" if ($1 >= "1.7")
|
||||
}
|
||||
}
|
||||
close IN')
|
||||
if [ -n "$auto" ] ; then
|
||||
parallel="$auto"
|
||||
fi
|
||||
fi
|
||||
# Only if some parallelism has been determined do we add the -jN option.
|
||||
if [ -n "$parallel" ] ; then
|
||||
parallel="-j$parallel"
|
||||
fi
|
||||
|
||||
exec "$sphinx" $parallel "$@"
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env perl
|
||||
#!/usr/bin/perl
|
||||
use strict;
|
||||
use Text::Tabs;
|
||||
use Getopt::Long;
|
||||
@@ -110,7 +110,7 @@ while (<IN>) {
|
||||
) {
|
||||
my $s = $1;
|
||||
|
||||
$structs{$s} = "struct $s\\ ";
|
||||
$structs{$s} = "struct :c:type:`$s`\\ ";
|
||||
next;
|
||||
}
|
||||
}
|
||||
@@ -393,7 +393,7 @@ Report bugs to Mauro Carvalho Chehab <mchehab@kernel.org>
|
||||
|
||||
Copyright (c) 2016 by Mauro Carvalho Chehab <mchehab+samsung@kernel.org>.
|
||||
|
||||
License GPLv2: GNU GPL version 2 <https://gnu.org/licenses/gpl.html>.
|
||||
License GPLv2: GNU GPL version 2 <http://gnu.org/licenses/gpl.html>.
|
||||
|
||||
This is free software: you are free to change and redistribute it.
|
||||
There is NO WARRANTY, to the extent permitted by law.
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
docutils
|
||||
Sphinx==2.4.4
|
||||
docutils==0.12
|
||||
Sphinx==1.4.9
|
||||
sphinx_rtd_theme
|
||||
six
|
||||
|
||||
Reference in New Issue
Block a user