path: root/doc/tools/coqrst
Diffstat (limited to 'doc/tools/coqrst')
-rw-r--r--  doc/tools/coqrst/__init__.py                              10
-rw-r--r--  doc/tools/coqrst/checkdeps.py                             39
-rw-r--r--  doc/tools/coqrst/coqdoc/__init__.py                       10
-rw-r--r--  doc/tools/coqrst/coqdoc/main.py                           91
-rw-r--r--  doc/tools/coqrst/coqdomain.py                           1201
-rw-r--r--  doc/tools/coqrst/notations/Makefile                       27
-rw-r--r--  doc/tools/coqrst/notations/TacticNotations.g              33
-rw-r--r--  doc/tools/coqrst/notations/TacticNotations.tokens         10
-rw-r--r--  doc/tools/coqrst/notations/TacticNotationsLexer.py        71
-rw-r--r--  doc/tools/coqrst/notations/TacticNotationsLexer.tokens    10
-rw-r--r--  doc/tools/coqrst/notations/TacticNotationsParser.py      595
-rw-r--r--  doc/tools/coqrst/notations/TacticNotationsVisitor.py      58
-rw-r--r--  doc/tools/coqrst/notations/__init__.py                     0
-rwxr-xr-x  doc/tools/coqrst/notations/fontsupport.py                 80
-rw-r--r--  doc/tools/coqrst/notations/html.py                         75
-rw-r--r--  doc/tools/coqrst/notations/parsing.py                      37
-rw-r--r--  doc/tools/coqrst/notations/plain.py                        53
-rw-r--r--  doc/tools/coqrst/notations/regexp.py                       60
-rw-r--r--  doc/tools/coqrst/notations/sphinx.py                      102
-rw-r--r--  doc/tools/coqrst/repl/__init__.py                          0
-rw-r--r--  doc/tools/coqrst/repl/ansicolors.py                       99
-rw-r--r--  doc/tools/coqrst/repl/coqtop.py                          103
22 files changed, 2764 insertions, 0 deletions
diff --git a/doc/tools/coqrst/__init__.py b/doc/tools/coqrst/__init__.py
new file mode 100644
index 00000000..2dda7d92
--- /dev/null
+++ b/doc/tools/coqrst/__init__.py
@@ -0,0 +1,10 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""Utilities for documenting Coq in Sphinx"""
diff --git a/doc/tools/coqrst/checkdeps.py b/doc/tools/coqrst/checkdeps.py
new file mode 100644
index 00000000..11f95c4e
--- /dev/null
+++ b/doc/tools/coqrst/checkdeps.py
@@ -0,0 +1,39 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+from __future__ import print_function
+import sys
+
+def eprint(*args, **kwargs):
+ print(*args, file=sys.stderr, **kwargs)
+
+def missing_dep(dep):
+ eprint('Cannot find %s (needed to build documentation)' % dep)
+ eprint('You can run `pip3 install %s` to install it.' % dep)
+ sys.exit(1)
+
+try:
+ import sphinx_rtd_theme
+except ImportError:
+ missing_dep('sphinx_rtd_theme')
+
+try:
+ import pexpect
+except ImportError:
+ missing_dep('pexpect')
+
+try:
+ import antlr4
+except ImportError:
+ missing_dep('antlr4-python3-runtime')
+
+try:
+ import bs4
+except ImportError:
+ missing_dep('beautifulsoup4')
diff --git a/doc/tools/coqrst/coqdoc/__init__.py b/doc/tools/coqrst/coqdoc/__init__.py
new file mode 100644
index 00000000..a89a548e
--- /dev/null
+++ b/doc/tools/coqrst/coqdoc/__init__.py
@@ -0,0 +1,10 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+from .main import *
diff --git a/doc/tools/coqrst/coqdoc/main.py b/doc/tools/coqrst/coqdoc/main.py
new file mode 100644
index 00000000..57adcb28
--- /dev/null
+++ b/doc/tools/coqrst/coqdoc/main.py
@@ -0,0 +1,91 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""
+Use CoqDoc to highlight Coq snippets
+====================================
+
+Pygments' Coq parser isn't the best; instead, use coqdoc to highlight snippets.
+This only works for full, syntactically valid sentences; on shorter snippets,
+coqdoc swallows parts of the input.
+
+Works by reparsing coqdoc's output into the output that Sphinx expects from a
+lexer.
+"""
+
+import os
+import platform
+from tempfile import mkstemp
+from subprocess import check_output
+
+from bs4 import BeautifulSoup
+from bs4.element import NavigableString
+
+COQDOC_OPTIONS = ['--body-only', '--no-glob', '--no-index', '--no-externals',
+ '-s', '--html', '--stdout', '--utf8']
+
+COQDOC_SYMBOLS = ["->", "<-", "<->", "=>", "<=", ">=", "<>", "~", "/\\", "\\/", "|-", "*", "forall", "exists"]
+COQDOC_HEADER = "".join("(** remove printing {} *)".format(s) for s in COQDOC_SYMBOLS)
+
+def coqdoc(coq_code, coqdoc_bin=None):
+ """Get the output of coqdoc on coq_code."""
+ coqdoc_bin = coqdoc_bin or os.path.join(os.getenv("COQBIN"), "coqdoc")
+ fd, filename = mkstemp(prefix="coqdoc-", suffix=".v")
+ if platform.system().startswith("CYGWIN"):
+ # coqdoc currently doesn't accept cygwin style paths in the form "/cygdrive/c/..."
+ filename = check_output(["cygpath", "-w", filename]).decode("utf-8").strip()
+ try:
+ os.write(fd, COQDOC_HEADER.encode("utf-8"))
+ os.write(fd, coq_code.encode("utf-8"))
+ os.close(fd)
+ return check_output([coqdoc_bin] + COQDOC_OPTIONS + [filename], timeout = 10).decode("utf-8")
+ finally:
+ os.remove(filename)
+
+def is_whitespace_string(elem):
+ return isinstance(elem, NavigableString) and elem.strip() == ""
+
+def strip_soup(soup, pred):
+ """Strip elements maching pred from front and tail of soup."""
+ while soup.contents and pred(soup.contents[-1]):
+ soup.contents.pop()
+
+ skip = 0
+ for elem in soup.contents:
+ if not pred(elem):
+ break
+ skip += 1
+
+ soup.contents[:] = soup.contents[skip:]
+
+def lex(source):
+ """Convert source into a stream of (css_classes, token_string)."""
+ coqdoc_output = coqdoc(source)
+ soup = BeautifulSoup(coqdoc_output, "html.parser")
+ root = soup.find(class_='code')
+ strip_soup(root, is_whitespace_string)
+ for elem in root.children:
+ if isinstance(elem, NavigableString):
+ yield [], elem
+ elif elem.name == "span":
+ cls = "coqdoc-{}".format(elem['title'])
+ yield [cls], elem.string
+ elif elem.name == 'br':
+ pass
+ else:
+ raise ValueError(elem)
+
+def main():
+ """Lex stdin (for testing purposes)"""
+ import sys
+ for classes, text in lex(sys.stdin.read()):
+ print(repr(text) + "\t" + ' '.join(classes))
+
+if __name__ == '__main__':
+ main()
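As an illustration (this snippet is not part of the patch), the ``lex`` function above can be exercised on its own once ``COQBIN`` points at a directory containing ``coqdoc``; a minimal sketch::

    from coqrst.coqdoc import lex

    # coqdoc only handles full, syntactically valid sentences (see the module
    # docstring above), so feed it a complete command.
    for classes, token in lex("Definition one := 1."):
        # classes is a list such as ['coqdoc-keyword']; token is the matching text
        print(classes, repr(token))

This is the same stream of (css_classes, token_string) pairs that coqdomain.py consumes below to build highlighted Sphinx nodes.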
diff --git a/doc/tools/coqrst/coqdomain.py b/doc/tools/coqrst/coqdomain.py
new file mode 100644
index 00000000..edf4e6ec
--- /dev/null
+++ b/doc/tools/coqrst/coqdomain.py
@@ -0,0 +1,1201 @@
+# -*- coding: utf-8 -*-
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""A Coq domain for Sphinx.
+
+Currently geared towards Coq's manual, rather than Coq source files, but one
+could imagine extending it.
+"""
+
+# pylint: disable=too-few-public-methods
+
+import re
+from itertools import chain
+from collections import defaultdict
+
+from docutils import nodes, utils
+from docutils.transforms import Transform
+from docutils.parsers.rst import Directive, directives
+from docutils.parsers.rst.roles import code_role #, set_classes
+from docutils.parsers.rst.directives.admonitions import BaseAdmonition
+
+from sphinx import addnodes
+from sphinx.roles import XRefRole
+from sphinx.errors import ExtensionError
+from sphinx.util.nodes import set_source_info, set_role_source_info, make_refnode
+from sphinx.util.logging import getLogger
+from sphinx.directives import ObjectDescription
+from sphinx.domains import Domain, ObjType, Index
+from sphinx.domains.std import token_xrefs
+from sphinx.ext import mathbase
+
+from . import coqdoc
+from .repl import ansicolors
+from .repl.coqtop import CoqTop
+from .notations.sphinx import sphinxify
+from .notations.plain import stringify_with_ellipses
+
+def parse_notation(notation, source, line, rawtext=None):
+ """Parse notation and wrap it in an inline node"""
+ node = nodes.inline(rawtext or notation, '', *sphinxify(notation), classes=['notation'])
+ node.source, node.line = source, line
+ return node
+
+def highlight_using_coqdoc(sentence):
+ """Lex sentence using coqdoc, and yield inline nodes for each token"""
+ tokens = coqdoc.lex(utils.unescape(sentence, 1))
+ for classes, value in tokens:
+ yield nodes.inline(value, value, classes=classes)
+
+def make_target(objtype, targetid):
+ """Create a target to an object of type objtype and id targetid"""
+ return "coq:{}.{}".format(objtype, targetid)
+
+def make_math_node(latex, docname, nowrap):
+ node = mathbase.displaymath()
+ node['latex'] = latex
+ node['label'] = None # Otherwise equations are numbered
+ node['nowrap'] = nowrap
+ node['docname'] = docname
+ node['number'] = None
+ return node
+
+class CoqObject(ObjectDescription):
+ """A generic Coq object for Sphinx; all Coq objects are subclasses of this.
+
+ The fields and methods to override are listed at the top of this class'
+ implementation. Each object supports the :name: option, which gives an
+ explicit name to link to.
+
+ See the comments and docstrings in CoqObject for more information.
+ """
+
+ # The semantic domain in which this object lives (eg. “tacn”, “cmd”, “thm”…).
+ # It matches exactly one of the roles used for cross-referencing.
+ subdomain = None # type: str
+
+ # The suffix to use in indices for objects of this type (eg. “(tac)”)
+ index_suffix = None # type: str
+
+ # The annotation to add to headers of objects of this type
+ # (eg. “Command”, “Theorem”)
+ annotation = None # type: str
+
+ def _name_from_signature(self, signature): # pylint: disable=no-self-use, unused-argument
+ """Convert a signature into a name to link to.
+
+ ‘Signature’ is Sphinx parlance for an object's header (think “type
+ signature”); for example, the signature of the simplest form of the
+ ``exact`` tactic is ``exact @id``.
+
+ Returns None by default, in which case no name will be automatically
+ generated. This is a convenient way to automatically generate names
+ (link targets) without having to write explicit names everywhere.
+
+ """
+ return None
+
+ def _render_signature(self, signature, signode):
+ """Render a signature, placing resulting nodes into signode."""
+ raise NotImplementedError(self)
+
+ option_spec = {
+ # Explicit object naming
+ 'name': directives.unchanged,
+ # Silence warnings produced by report_undocumented_coq_objects
+ 'undocumented': directives.flag,
+ # noindex omits this object from its index
+ 'noindex': directives.flag
+ }
+
+ def subdomain_data(self):
+ if self.subdomain is None:
+ raise ValueError()
+ return self.env.domaindata['coq']['objects'][self.subdomain]
+
+ def _render_annotation(self, signode):
+ if self.annotation:
+ annot_node = nodes.inline(self.annotation, self.annotation, classes=['sigannot'])
+ signode += addnodes.desc_annotation(self.annotation, '', annot_node)
+ signode += nodes.Text(' ')
+
+ def handle_signature(self, signature, signode):
+ """Prefix signature with the proper annotation, then render it using
+ ``_render_signature`` (for example, add “Command” in front of commands).
+
+ :returns: the name given to the resulting node, if any
+ """
+ self._render_annotation(signode)
+ self._render_signature(signature, signode)
+ name = self._names.get(signature)
+ if name is None:
+ name = self._name_from_signature(signature)
+ # remove trailing ‘.’ found in commands, but not ‘...’ (ellipsis)
+ if name is not None and name.endswith(".") and not name.endswith("..."):
+ name = name[:-1]
+ return name
+
+ def _warn_if_duplicate_name(self, objects, name):
+ """Check that two objects in the same domain don't have the same name."""
+ if name in objects:
+ MSG = 'Duplicate object: {}; other is at {}'
+ msg = MSG.format(name, self.env.doc2path(objects[name][0]))
+ self.state_machine.reporter.warning(msg, line=self.lineno)
+
+ def _record_name(self, name, target_id):
+ """Record a name, mapping it to target_id
+
+ Warns if another object of the same name already exists.
+ """
+ names_in_subdomain = self.subdomain_data()
+ self._warn_if_duplicate_name(names_in_subdomain, name)
+ names_in_subdomain[name] = (self.env.docname, self.objtype, target_id)
+
+ def _target_id(self, name):
+ return make_target(self.objtype, nodes.make_id(name))
+
+ def _add_target(self, signode, name):
+ """Register a link target ‘name’, pointing to signode."""
+ targetid = self._target_id(name)
+ if targetid not in self.state.document.ids:
+ signode['ids'].append(targetid)
+ signode['names'].append(name)
+ signode['first'] = (not self.names)
+ self.state.document.note_explicit_target(signode)
+ self._record_name(name, targetid)
+ return targetid
+
+ def _add_index_entry(self, name, target):
+ """Add `name` (pointing to `target`) to the main index."""
+ assert isinstance(name, str)
+ if not name.startswith("_"):
+ # remove trailing ‘.’ found in commands, but not ‘...’ (ellipsis)
+ trim = name.endswith(".") and not name.endswith("...")
+ index_text = name[:-1] if trim else name
+ if self.index_suffix:
+ index_text += " " + self.index_suffix
+ self.indexnode['entries'].append(('single', index_text, target, '', None))
+
+ def add_target_and_index(self, name, _, signode):
+ """Attach a link target to `signode` and an index entry for `name`.
+ This is only called (from ``ObjectDescription.run``) if ``:noindex:`` isn't specified."""
+ if name:
+ target = self._add_target(signode, name)
+ self._add_index_entry(name, target)
+ return target
+
+ def _warn_if_undocumented(self):
+ document = self.state.document
+ config = document.settings.env.config
+ report = config.report_undocumented_coq_objects
+ if report and not self.content and "undocumented" not in self.options:
+ # This is annoyingly convoluted, but we don't want to raise warnings
+ # or interrupt the generation of the current node. For more details
+ # see https://github.com/sphinx-doc/sphinx/issues/4976.
+ msg = 'No contents in directive {}'.format(self.name)
+ node = document.reporter.info(msg, line=self.lineno)
+ getLogger(__name__).info(node.astext())
+ if report == "warning":
+ raise self.warning(msg)
+
+ def _prepare_names(self):
+ sigs = self.get_signatures()
+ names = self.options.get("name")
+ if names is None:
+ self._names = {}
+ else:
+ names = [n.strip() for n in names.split(";")]
+ if len(names) != len(sigs):
+ ERR = ("Expected {} semicolon-separated names, got {}. " +
+ "Please provide one name per signature line.")
+ raise self.error(ERR.format(len(names), len(sigs)))
+ self._names = dict(zip(sigs, names))
+
+ def run(self):
+ self._warn_if_undocumented()
+ self._prepare_names()
+ return super().run()
+
+class PlainObject(CoqObject):
+ """A base class for objects whose signatures should be rendered literally."""
+ def _render_signature(self, signature, signode):
+ signode += addnodes.desc_name(signature, signature)
+
+class NotationObject(CoqObject):
+ """A base class for objects whose signatures should be rendered as nested boxes.
+
+ Objects that inherit from this class can use the notation grammar (“{+ …}”,
+ “@…”, etc.) in their signature.
+ """
+ def _render_signature(self, signature, signode):
+ position = self.state_machine.get_source_and_line(self.lineno)
+ tacn_node = parse_notation(signature, *position)
+ signode += addnodes.desc_name(signature, '', tacn_node)
+
+class GallinaObject(PlainObject):
+ r"""A theorem.
+
+ Example::
+
+ .. thm:: Bound on the ceiling function
+
+ Let :math:`p` be an integer and :math:`c` a rational constant. Then
+ :math:`p \ge c \rightarrow p \ge \lceil{c}\rceil`.
+ """
+ subdomain = "thm"
+ index_suffix = "(thm)"
+ annotation = "Theorem"
+
+class VernacObject(NotationObject):
+ """A Coq command.
+
+ Example::
+
+ .. cmd:: Infix "@symbol" := @term ({+, @modifier}).
+
+ This command is equivalent to :n:`…`.
+ """
+ subdomain = "cmd"
+ index_suffix = "(cmd)"
+ annotation = "Command"
+
+ def _name_from_signature(self, signature):
+ m = re.match(r"[a-zA-Z ]+", signature)
+ if m:
+ return m.group(0).strip()
+
+class VernacVariantObject(VernacObject):
+ """A variant of a Coq command.
+
+ Example::
+
+ .. cmd:: Axiom @ident : @term.
+
+ This command links :token:`term` to the name :token:`ident` as its specification in
+ the global context. The fact asserted by :token:`term` is thus assumed as a
+ postulate.
+
+ .. cmdv:: Parameter @ident : @term.
+
+ This is equivalent to :n:`Axiom @ident : @term`.
+ """
+ index_suffix = "(cmdv)"
+ annotation = "Variant"
+
+ def _name_from_signature(self, signature):
+ return None
+
+class TacticNotationObject(NotationObject):
+ """A tactic, or a tactic notation.
+
+ Example::
+
+ .. tacn:: do @num @expr
+
+ :token:`expr` is evaluated to ``v`` which must be a tactic value. …
+ """
+ subdomain = "tacn"
+ index_suffix = "(tacn)"
+ annotation = None
+
+class TacticNotationVariantObject(TacticNotationObject):
+ """A variant of a tactic.
+
+ Example::
+
+ .. tacn:: fail
+
+ This is the always-failing tactic: it does not solve any goal. It is
+ useful for defining other tacticals since it can be caught by
+ :tacn:`try`, :tacn:`repeat`, :tacn:`match goal`, or the branching
+ tacticals. …
+
+ .. tacv:: fail @natural
+
+ The number is the failure level. If no level is specified, it
+ defaults to 0. …
+ """
+ index_suffix = "(tacnv)"
+ annotation = "Variant"
+
+class OptionObject(NotationObject):
+ """A Coq option (a setting with non-boolean value, e.g. a string or numeric value).
+
+ Example::
+
+ .. opt:: Hyps Limit @num
+ :name: Hyps Limit
+
+ Controls the maximum number of hypotheses displayed in goals after
+ application of a tactic.
+ """
+ subdomain = "opt"
+ index_suffix = "(opt)"
+ annotation = "Option"
+
+ def _name_from_signature(self, signature):
+ return stringify_with_ellipses(signature)
+
+
+class FlagObject(NotationObject):
+ """A Coq flag (i.e. a boolean setting).
+
+ Example::
+
+ .. flag:: Nonrecursive Elimination Schemes
+
+ Controls whether types declared with the keywords
+ :cmd:`Variant` and :cmd:`Record` get an automatic declaration of
+ induction principles.
+ """
+ subdomain = "flag"
+ index_suffix = "(flag)"
+ annotation = "Flag"
+
+ def _name_from_signature(self, signature):
+ return stringify_with_ellipses(signature)
+
+
+class TableObject(NotationObject):
+ """A Coq table, i.e. a setting that is a set of values.
+
+ Example::
+
+ .. table:: Search Blacklist @string
+ :name: Search Blacklist
+
+ Controls ...
+ """
+ subdomain = "table"
+ index_suffix = "(table)"
+ annotation = "Table"
+
+ def _name_from_signature(self, signature):
+ return stringify_with_ellipses(signature)
+
+class ProductionObject(CoqObject):
+ r"""A grammar production.
+
+ This is useful if you intend to document individual grammar productions.
+ Otherwise, use Sphinx's `production lists
+ <http://www.sphinx-doc.org/en/stable/markup/para.html#directive-productionlist>`_.
+
+ Unlike ``.. productionlist``\ s, this directive accepts notation syntax.
+
+
+ Usage::
+
+ .. prodn:: token += production
+ .. prodn:: token ::= production
+
+ Example::
+
+ .. prodn:: term += let: @pattern := @term in @term
+ .. prodn:: occ_switch ::= { {? + %| - } {* @num } }
+
+ """
+ subdomain = "prodn"
+ #annotation = "Grammar production"
+
+ def _render_signature(self, signature, signode):
+ raise NotImplementedError(self)
+
+ SIG_ERROR = ("Invalid syntax in ``.. prodn::`` directive"
+ + "\nExpected ``name ::= ...`` or ``name += ...``"
+ + " (e.g. ``pattern += constr:(@ident)``)")
+
+ def handle_signature(self, signature, signode):
+ nsplits = 2
+ parts = signature.split(maxsplit=nsplits)
+ if len(parts) != 3:
+ raise ExtensionError(ProductionObject.SIG_ERROR)
+
+ lhs, op, rhs = (part.strip() for part in parts)
+ if op not in ["::=", "+="]:
+ raise ExtensionError(ProductionObject.SIG_ERROR)
+
+ self._render_annotation(signode)
+
+ lhs_op = '{} {} '.format(lhs, op)
+ lhs_node = nodes.literal(lhs_op, lhs_op)
+
+ position = self.state_machine.get_source_and_line(self.lineno)
+ rhs_node = parse_notation(rhs, *position)
+ signode += addnodes.desc_name(signature, '', lhs_node, rhs_node)
+
+ return ('token', lhs) if op == '::=' else None
+
+ def _add_index_entry(self, name, target):
+ pass
+
+ def _target_id(self, name):
+ # Use `name[1]` instead of ``nodes.make_id(name[1])`` to work around
+ # https://github.com/sphinx-doc/sphinx/issues/4983
+ return 'grammar-token-{}'.format(name[1])
+
+ def _record_name(self, name, targetid):
+ env = self.state.document.settings.env
+ objects = env.domaindata['std']['objects']
+ self._warn_if_duplicate_name(objects, name)
+ objects[name] = env.docname, targetid
+
+class ExceptionObject(NotationObject):
+ """An error raised by a Coq command or tactic.
+
+ This commonly appears nested in the ``.. tacn::`` that raises the
+ exception.
+
+ Example::
+
+ .. tacv:: assert @form by @tactic
+
+ This tactic applies :n:`@tactic` to solve the subgoals generated by
+ ``assert``.
+
+ .. exn:: Proof is not complete
+
+ Raised if :n:`@tactic` does not fully solve the goal.
+ """
+ subdomain = "exn"
+ index_suffix = "(err)"
+ annotation = "Error"
+ # Uses “exn” since “err” already is a CSS class added by “writer_aux”.
+
+ # Generate names automatically
+ def _name_from_signature(self, signature):
+ return stringify_with_ellipses(signature)
+
+class WarningObject(NotationObject):
+ """An warning raised by a Coq command or tactic..
+
+ Do not mistake this for ``.. warning::``; this directive is for warning
+ messages produced by Coq.
+
+
+ Example::
+
+ .. warn:: Ambiguous path
+
+ When the coercion :token:`qualid` is added to the inheritance graph, non
+ valid coercion paths are ignored.
+ """
+ subdomain = "warn"
+ index_suffix = "(warn)"
+ annotation = "Warning"
+
+ # Generate names automatically
+ def _name_from_signature(self, signature):
+ return stringify_with_ellipses(signature)
+
+def NotationRole(role, rawtext, text, lineno, inliner, options={}, content=[]):
+ #pylint: disable=unused-argument, dangerous-default-value
+ """Any text using the notation syntax (``@id``, ``{+, …}``, etc.).
+
+ Use this to explain tactic equivalences. For example, you might write
+ this::
+
+ :n:`generalize @term as @ident` is just like :n:`generalize @term`, but
+ it names the introduced hypothesis :token:`ident`.
+
+ Note that this example also uses ``:token:``. That's because ``ident`` is
+ defined in the Coq manual as a grammar production, and ``:token:``
+ creates a link to that. When referring to a placeholder that happens to be
+ a grammar production, ``:token:`…``` is typically preferable to ``:n:`@…```.
+ """
+ notation = utils.unescape(text, 1)
+ position = inliner.reporter.get_source_and_line(lineno)
+ return [nodes.literal(rawtext, '', parse_notation(notation, *position, rawtext=rawtext))], []
+
+def coq_code_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
+ #pylint: disable=dangerous-default-value
+ """Coq code.
+
+ Use this for Gallina and Ltac snippets::
+
+ :g:`apply plus_comm; reflexivity`
+ :g:`Set Printing All.`
+ :g:`forall (x: t), P(x)`
+ """
+ options['language'] = 'Coq'
+ return code_role(role, rawtext, text, lineno, inliner, options, content)
+ ## Too heavy:
+ ## Forked from code_role to use our custom tokenizer; this doesn't work for
+ ## snippets though: for example CoqDoc swallows the parentheses around this:
+ ## “(a: A) (b: B)”
+ # set_classes(options)
+ # classes = ['code', 'coq']
+ # code = utils.unescape(text, 1)
+ # node = nodes.literal(rawtext, '', *highlight_using_coqdoc(code), classes=classes)
+ # return [node], []
+
+CoqCodeRole = coq_code_role
+
+class CoqtopDirective(Directive):
+ r"""A reST directive to describe interactions with Coqtop.
+
+ Usage::
+
+ .. coqtop:: options…
+
+ Coq code to send to coqtop
+
+ Example::
+
+ .. coqtop:: in reset undo
+
+ Print nat.
+ Definition a := 1.
+
+ Here is a list of permissible options:
+
+ - Display options
+
+ - ``all``: Display input and output
+ - ``in``: Display only input
+ - ``out``: Display only output
+ - ``none``: Display neither (useful for setup commands)
+
+ - Behavior options
+
+ - ``reset``: Send a ``Reset Initial`` command before running this block
+ - ``undo``: Send an ``Undo n`` (``n`` = number of sentences) command after
+ running all the commands in this block
+
+ ``coqtop``\ 's state is preserved across consecutive ``.. coqtop::`` blocks
+ of the same document (``coqrst`` creates a single ``coqtop`` process per
+ reST source file). Use the ``reset`` option to reset Coq's state.
+ """
+ has_content = True
+ required_arguments = 0
+ optional_arguments = 1
+ final_argument_whitespace = True
+ option_spec = { 'name': directives.unchanged }
+ directive_name = "coqtop"
+
+ def run(self):
+ # Uses a ‘container’ instead of a ‘literal_block’ to disable
+ # Pygments-based post-processing (we could also set rawsource to '')
+ content = '\n'.join(self.content)
+ args = self.arguments[0].split() if self.arguments else ['in']
+ if 'all' in args:
+ args.extend(['in', 'out'])
+ node = nodes.container(content, coqtop_options = list(set(args)),
+ classes=['coqtop', 'literal-block'])
+ self.add_name(node)
+ return [node]
+
+class CoqdocDirective(Directive):
+ """A reST directive to display Coqtop-formatted source code.
+
+ Usage::
+
+ .. coqdoc::
+
+ Coq code to highlight
+
+ Example::
+
+ .. coqdoc::
+
+ Definition test := 1.
+ """
+ # TODO implement this as a Pygments highlighter?
+ has_content = True
+ required_arguments = 0
+ optional_arguments = 0
+ final_argument_whitespace = True
+ option_spec = { 'name': directives.unchanged }
+ directive_name = "coqdoc"
+
+ def run(self):
+ # Uses a ‘container’ instead of a ‘literal_block’ to disable
+ # Pygments-based post-processing (we could also set rawsource to '')
+ content = '\n'.join(self.content)
+ node = nodes.inline(content, '', *highlight_using_coqdoc(content))
+ wrapper = nodes.container(content, node, classes=['coqdoc', 'literal-block'])
+ self.add_name(wrapper)
+ return [wrapper]
+
+class ExampleDirective(BaseAdmonition):
+ """A reST directive for examples.
+
+ This behaves like a generic admonition; see
+ http://docutils.sourceforge.net/docs/ref/rst/directives.html#generic-admonition
+ for more details.
+
+ Optionally, any text immediately following the ``.. example::`` header is
+ used as the example's title.
+
+ Example::
+
+ .. example:: Adding a hint to a database
+
+ The following adds ``plus_comm`` to the ``plu`` database:
+
+ .. coqdoc::
+
+ Hint Resolve plus_comm : plu.
+ """
+ node_class = nodes.admonition
+ directive_name = "example"
+ optional_arguments = 1
+
+ def run(self):
+ # ‘BaseAdmonition’ checks whether ‘node_class’ is ‘nodes.admonition’,
+ # and uses arguments[0] as the title in that case (in other cases, the
+ # title is unset, and it is instead set in the HTML visitor).
+ assert len(self.arguments) <= 1
+ self.arguments = [": ".join(['Example'] + self.arguments)]
+ self.options['classes'] = ['admonition', 'note']
+ return super().run()
+
+class PreambleDirective(Directive):
+ r"""A reST directive to include a TeX file.
+
+ Mostly useful to let MathJax know about `\def`s and `\newcommand`s. The
+ contents of the TeX file are wrapped in a math environment, as MathJax
+ doesn't process LaTeX definitions otherwise.
+
+ Usage::
+
+ .. preamble:: preamble.tex
+ """
+ has_content = False
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = True
+ option_spec = {}
+ directive_name = "preamble"
+
+ def run(self):
+ document = self.state.document
+ env = document.settings.env
+
+ if not document.settings.file_insertion_enabled:
+ msg = 'File insertion disabled'
+ return [document.reporter.warning(msg, line=self.lineno)]
+
+ rel_fname, abs_fname = env.relfn2path(self.arguments[0])
+ env.note_dependency(rel_fname)
+
+ with open(abs_fname, encoding="utf-8") as ltx:
+ latex = ltx.read()
+
+ node = make_math_node(latex, env.docname, nowrap=False)
+ node['classes'] = ["math-preamble"]
+ set_source_info(self, node)
+ return [node]
+
+class InferenceDirective(Directive):
+ r"""A reST directive to format inference rules.
+
+ This also serves as a small illustration of the way to create new Sphinx
+ directives.
+
+ Usage::
+
+ .. inference:: name
+
+ newline-separated premises
+ --------------------------
+ conclusion
+
+ Example::
+
+ .. inference:: Prod-Prop
+
+ \WTEG{T}{s}
+ s \in \Sort
+ \WTE{\Gamma::(x:T)}{U}{\Prop}
+ -----------------------------
+ \WTEG{\forall~x:T,U}{\Prop}
+ """
+ required_arguments = 1
+ optional_arguments = 0
+ has_content = True
+ final_argument_whitespace = True
+ directive_name = "inference"
+
+ @staticmethod
+ def prepare_latex_operand(op):
+ # TODO: Could use a fancier inference class in LaTeX
+ return '%\n\\hspace{3em}%\n'.join(op.strip().splitlines())
+
+ def prepare_latex(self, content):
+ parts = re.split('^ *----+ *$', content, flags=re.MULTILINE)
+ if len(parts) != 2:
+ raise self.error('Expected two parts in ‘inference’ directive, separated by a rule (----).')
+
+ top, bottom = tuple(InferenceDirective.prepare_latex_operand(p) for p in parts)
+ return "%\n".join(("\\frac{", top, "}{", bottom, "}"))
+
+ def run(self):
+ self.assert_has_content()
+
+ title = self.arguments[0]
+ content = '\n'.join(self.content)
+ latex = self.prepare_latex(content)
+ docname = self.state.document.settings.env.docname
+ math_node = make_math_node(latex, docname, nowrap=False)
+
+ tid = nodes.make_id(title)
+ target = nodes.target('', '', ids=['inference-' + tid])
+ self.state.document.note_explicit_target(target)
+
+ term, desc = nodes.term('', title), nodes.description('', math_node)
+ dli = nodes.definition_list_item('', term, desc)
+ dl = nodes.definition_list(content, target, dli)
+ set_source_info(self, dl)
+ return [dl]
+
+class AnsiColorsParser():
+ """Parse ANSI-colored output from Coqtop into Sphinx nodes."""
+
+ # Coqtop's output crashes ansi.py, because it contains a bunch of extended codes
+ # This class is a fork of the original ansi.py, released under a BSD license in sphinx-contrib
+
+ COLOR_PATTERN = re.compile('\x1b\\[([^m]+)m')
+
+ def __init__(self):
+ self.new_nodes, self.pending_nodes = [], []
+
+ def _finalize_pending_nodes(self):
+ self.new_nodes.extend(self.pending_nodes)
+ self.pending_nodes = []
+
+ def _add_text(self, raw, beg, end):
+ if beg < end:
+ text = raw[beg:end]
+ if self.pending_nodes:
+ self.pending_nodes[-1].append(nodes.Text(text))
+ else:
+ self.new_nodes.append(nodes.inline('', text))
+
+ def colorize_str(self, raw):
+ """Parse raw (an ANSI-colored output string from Coqtop) into Sphinx nodes."""
+ last_end = 0
+ for match in AnsiColorsParser.COLOR_PATTERN.finditer(raw):
+ self._add_text(raw, last_end, match.start())
+ last_end = match.end()
+ classes = ansicolors.parse_ansi(match.group(1))
+ if 'ansi-reset' in classes:
+ self._finalize_pending_nodes()
+ else:
+ node = nodes.inline()
+ self.pending_nodes.append(node)
+ node['classes'].extend(classes)
+ self._add_text(raw, last_end, len(raw))
+ self._finalize_pending_nodes()
+ return self.new_nodes
+
+class CoqtopBlocksTransform(Transform):
+ """Filter handling the actual work for the coqtop directive
+
+ Adds coqtop's responses, colorizes input and output, and merges consecutive
+ coqtop directives for better visual rendition.
+ """
+ default_priority = 10
+
+ @staticmethod
+ def is_coqtop_block(node):
+ return isinstance(node, nodes.Element) and 'coqtop_options' in node
+
+ @staticmethod
+ def split_sentences(source):
+ """Split Coq sentences in source. Could be improved."""
+ return re.split(r"(?<=(?<!\.)\.)\s+", source)
+
+ @staticmethod
+ def parse_options(options):
+ """Parse options according to the description in CoqtopDirective."""
+ opt_undo = 'undo' in options
+ opt_reset = 'reset' in options
+ opt_all, opt_none = 'all' in options, 'none' in options
+ opt_input, opt_output = opt_all or 'in' in options, opt_all or 'out' in options
+
+ unexpected_options = list(set(options) - set(('reset', 'undo', 'all', 'none', 'in', 'out')))
+ if unexpected_options:
+ raise ValueError("Unexpected options for .. coqtop:: {}".format(unexpected_options))
+ elif (opt_input or opt_output) and opt_none:
+ raise ValueError("Inconsistent options for .. coqtop:: ‘none’ with ‘in’, ‘out’, or ‘all’")
+ elif opt_reset and opt_undo:
+ raise ValueError("Inconsistent options for .. coqtop:: ‘undo’ with ‘reset’")
+
+ return opt_undo, opt_reset, opt_input and not opt_none, opt_output and not opt_none
+
+ @staticmethod
+ def block_classes(should_show, contents=None):
+ """Compute classes to add to a node containing contents.
+
+ :param should_show: Whether this node should be displayed"""
+ is_empty = contents is not None and re.match(r"^\s*$", contents)
+ if is_empty or not should_show:
+ return ['coqtop-hidden']
+ else:
+ return []
+
+ @staticmethod
+ def make_rawsource(pairs, opt_input, opt_output):
+ blocks = []
+ for sentence, output in pairs:
+ output = AnsiColorsParser.COLOR_PATTERN.sub("", output).strip()
+ if opt_input:
+ blocks.append(sentence)
+ if output and opt_output:
+ blocks.append(re.sub("^", " ", output, flags=re.MULTILINE) + "\n")
+ return '\n'.join(blocks)
+
+ def add_coqtop_output(self):
+ """Add coqtop's responses to a Sphinx AST
+
+ Finds nodes to process using is_coqtop_block."""
+ with CoqTop(color=True) as repl:
+ for node in self.document.traverse(CoqtopBlocksTransform.is_coqtop_block):
+ options = node['coqtop_options']
+ opt_undo, opt_reset, opt_input, opt_output = self.parse_options(options)
+
+ if opt_reset:
+ repl.sendone("Reset Initial.")
+ pairs = []
+ for sentence in self.split_sentences(node.rawsource):
+ pairs.append((sentence, repl.sendone(sentence)))
+ if opt_undo:
+ repl.sendone("Undo {}.".format(len(pairs)))
+
+ dli = nodes.definition_list_item()
+ for sentence, output in pairs:
+ # Use coqdoc to highlight input
+ in_chunks = highlight_using_coqdoc(sentence)
+ dli += nodes.term(sentence, '', *in_chunks, classes=self.block_classes(opt_input))
+ # Parse ANSI sequences to highlight output
+ out_chunks = AnsiColorsParser().colorize_str(output)
+ dli += nodes.definition(output, *out_chunks, classes=self.block_classes(opt_output, output))
+ node.clear()
+ node.rawsource = self.make_rawsource(pairs, opt_input, opt_output)
+ node['classes'].extend(self.block_classes(opt_input or opt_output))
+ node += nodes.inline('', '', classes=['coqtop-reset'] * opt_reset)
+ node += nodes.definition_list(node.rawsource, dli)
+
+ @staticmethod
+ def merge_coqtop_classes(kept_node, discarded_node):
+ discarded_classes = discarded_node['classes']
+ if not 'coqtop-hidden' in discarded_classes:
+ kept_node['classes'] = [c for c in kept_node['classes']
+ if c != 'coqtop-hidden']
+
+ @staticmethod
+ def merge_consecutive_coqtop_blocks(app, doctree, _):
+ """Merge consecutive divs wrapping lists of Coq sentences; keep ‘dl’s separate."""
+ for node in doctree.traverse(CoqtopBlocksTransform.is_coqtop_block):
+ if node.parent:
+ rawsources, names = [node.rawsource], set(node['names'])
+ for sibling in node.traverse(include_self=False, descend=False,
+ siblings=True, ascend=False):
+ if CoqtopBlocksTransform.is_coqtop_block(sibling):
+ CoqtopBlocksTransform.merge_coqtop_classes(node, sibling)
+ rawsources.append(sibling.rawsource)
+ names.update(sibling['names'])
+ node.extend(sibling.children)
+ node.parent.remove(sibling)
+ sibling.parent = None
+ else:
+ break
+ node.rawsource = "\n\n".join(rawsources)
+ node['names'] = list(names)
+
+ def apply(self):
+ self.add_coqtop_output()
+
+class CoqSubdomainsIndex(Index):
+ """Index subclass to provide subdomain-specific indices.
+
+ Just as in the original manual, we want to have separate indices for each
+ Coq subdomain (tactics, commands, options, etc)"""
+
+ name, localname, shortname, subdomains = None, None, None, [] # Must be overwritten
+
+ def generate(self, docnames=None):
+ content = defaultdict(list)
+ items = chain(*(self.domain.data['objects'][subdomain].items()
+ for subdomain in self.subdomains))
+
+ for itemname, (docname, _, anchor) in sorted(items, key=lambda x: x[0].lower()):
+ if docnames and docname not in docnames:
+ continue
+
+ entries = content[itemname[0].lower()]
+ entries.append([itemname, 0, docname, anchor, '', '', ''])
+
+ collapse = False
+ content = sorted(content.items())
+ return content, collapse
+
+class CoqVernacIndex(CoqSubdomainsIndex):
+ name, localname, shortname, subdomains = "cmdindex", "Command Index", "commands", ["cmd"]
+
+class CoqTacticIndex(CoqSubdomainsIndex):
+ name, localname, shortname, subdomains = "tacindex", "Tactic Index", "tactics", ["tacn"]
+
+class CoqOptionIndex(CoqSubdomainsIndex):
+ name, localname, shortname, subdomains = "optindex", "Flags, Options and Tables Index", "options", ["flag", "opt", "table"]
+
+class CoqGallinaIndex(CoqSubdomainsIndex):
+ name, localname, shortname, subdomains = "thmindex", "Gallina Index", "theorems", ["thm"]
+
+class CoqExceptionIndex(CoqSubdomainsIndex):
+ name, localname, shortname, subdomains = "exnindex", "Errors and Warnings Index", "errors", ["exn", "warn"]
+
+class IndexXRefRole(XRefRole):
+ """A link to one of our domain-specific indices."""
+ lowercase = True
+ innernodeclass = nodes.inline
+ warn_dangling = True
+
+ def process_link(self, env, refnode, has_explicit_title, title, target):
+ if not has_explicit_title:
+ index = CoqDomain.find_index_by_name(target)
+ if index:
+ title = index.localname
+ return title, target
+
+def GrammarProductionRole(typ, rawtext, text, lineno, inliner, options={}, content=[]):
+ """A grammar production not included in a ``productionlist`` directive.
+
+ Useful to informally introduce a production, as part of running text.
+
+ Example::
+
+ :production:`string` indicates a quoted string.
+
+ You're not likely to use this role very commonly; instead, use a
+ `production list
+ <http://www.sphinx-doc.org/en/stable/markup/para.html#directive-productionlist>`_
+ and reference its tokens using ``:token:`…```.
+ """
+ #pylint: disable=dangerous-default-value, unused-argument
+ env = inliner.document.settings.env
+ targetid = 'grammar-token-{}'.format(text)
+ target = nodes.target('', '', ids=[targetid])
+ inliner.document.note_explicit_target(target)
+ code = nodes.literal(rawtext, text, role=typ.lower())
+ node = nodes.inline(rawtext, '', target, code, classes=['inline-grammar-production'])
+ set_role_source_info(inliner, lineno, node)
+ env.domaindata['std']['objects']['token', text] = env.docname, targetid
+ return [node], []
+
+GrammarProductionRole.role_name = "production"
+
+class CoqDomain(Domain):
+ """A domain to document Coq code.
+
+ Sphinx has a notion of “domains”, used to tailor it to a specific language.
+ Domains mostly consist of descriptions of the objects that we wish to
+ describe (for Coq, this includes tactics, tactic notations, options,
+ exceptions, etc.), as well as domain-specific roles and directives.
+
+ Each domain is responsible for tracking its objects, and resolving
+ references to them. In the case of Coq, this leads us to define Coq
+ “subdomains”, which classify objects into categories in which names must be
+ unique. For example, a tactic and a theorem may share a name, but two
+ tactics cannot be named the same.
+ """
+
+ name = 'coq'
+ label = 'Coq'
+
+ object_types = {
+ # ObjType (= directive type) → (Local name, *xref-roles)
+ 'cmd': ObjType('cmd', 'cmd'),
+ 'cmdv': ObjType('cmdv', 'cmd'),
+ 'tacn': ObjType('tacn', 'tacn'),
+ 'tacv': ObjType('tacv', 'tacn'),
+ 'opt': ObjType('opt', 'opt'),
+ 'flag': ObjType('flag', 'flag'),
+ 'table': ObjType('table', 'table'),
+ 'thm': ObjType('thm', 'thm'),
+ 'prodn': ObjType('prodn', 'prodn'),
+ 'exn': ObjType('exn', 'exn'),
+ 'warn': ObjType('warn', 'exn'),
+ 'index': ObjType('index', 'index', searchprio=-1)
+ }
+
+ directives = {
+ # Note that some directives live in the same semantic subdomain; ie
+ # there's one directive per object type, but some object types map to
+ # the same role.
+ 'cmd': VernacObject,
+ 'cmdv': VernacVariantObject,
+ 'tacn': TacticNotationObject,
+ 'tacv': TacticNotationVariantObject,
+ 'opt': OptionObject,
+ 'flag': FlagObject,
+ 'table': TableObject,
+ 'thm': GallinaObject,
+ 'prodn' : ProductionObject,
+ 'exn': ExceptionObject,
+ 'warn': WarningObject,
+ }
+
+ roles = {
+ # Each of these roles lives in a different semantic “subdomain”
+ 'cmd': XRefRole(warn_dangling=True),
+ 'tacn': XRefRole(warn_dangling=True),
+ 'opt': XRefRole(warn_dangling=True),
+ 'flag': XRefRole(warn_dangling=True),
+ 'table': XRefRole(warn_dangling=True),
+ 'thm': XRefRole(warn_dangling=True),
+ 'prodn' : XRefRole(warn_dangling=True),
+ 'exn': XRefRole(warn_dangling=True),
+ 'warn': XRefRole(warn_dangling=True),
+ # This one is special
+ 'index': IndexXRefRole(),
+ # These are used for highlighting
+ 'n': NotationRole,
+ 'g': CoqCodeRole
+ }
+
+ indices = [CoqVernacIndex, CoqTacticIndex, CoqOptionIndex, CoqGallinaIndex, CoqExceptionIndex]
+
+ data_version = 1
+ initial_data = {
+ # Collect everything under a key that we control, since Sphinx adds
+ # others, such as “version”
+ 'objects' : { # subdomain → name → docname, objtype, targetid
+ 'cmd': {},
+ 'tacn': {},
+ 'opt': {},
+ 'flag': {},
+ 'table': {},
+ 'thm': {},
+ 'prodn' : {},
+ 'exn': {},
+ 'warn': {},
+ }
+ }
+
+ @staticmethod
+ def find_index_by_name(targetid):
+ for index in CoqDomain.indices:
+ if index.name == targetid:
+ return index
+
+ def get_objects(self):
+ # Used for searching and object inventories (intersphinx)
+ for _, objects in self.data['objects'].items():
+ for name, (docname, objtype, targetid) in objects.items():
+ yield (name, name, objtype, docname, targetid, self.object_types[objtype].attrs['searchprio'])
+ for index in self.indices:
+ yield (index.name, index.localname, 'index', "coq-" + index.name, '', -1)
+
+ def merge_domaindata(self, docnames, otherdata):
+ DUP = "Duplicate declaration: '{}' also defined in '{}'.\n"
+ for subdomain, their_objects in otherdata['objects'].items():
+ our_objects = self.data['objects'][subdomain]
+ for name, (docname, objtype, targetid) in their_objects.items():
+ if docname in docnames:
+ if name in our_objects:
+ self.env.warn(docname, DUP.format(name, our_objects[name][0]))
+ our_objects[name] = (docname, objtype, targetid)
+
+ def resolve_xref(self, env, fromdocname, builder, role, targetname, node, contnode):
+ # ‘target’ is the name that was written in the document
+ # ‘role’ is where this xref comes from; it's exactly one of our subdomains
+ if role == 'index':
+ index = CoqDomain.find_index_by_name(targetname)
+ if index:
+ return make_refnode(builder, fromdocname, "coq-" + index.name, '', contnode, index.localname)
+ else:
+ resolved = self.data['objects'][role].get(targetname)
+ if resolved:
+ (todocname, _, targetid) = resolved
+ return make_refnode(builder, fromdocname, todocname, targetid, contnode, targetname)
+
+ def clear_doc(self, docname_to_clear):
+ for subdomain_objects in self.data['objects'].values():
+ for name, (docname, _, _) in list(subdomain_objects.items()):
+ if docname == docname_to_clear:
+ del subdomain_objects[name]
+
+def is_coqtop_or_coqdoc_block(node):
+ return (isinstance(node, nodes.Element) and
+ ('coqtop' in node['classes'] or 'coqdoc' in node['classes']))
+
+def simplify_source_code_blocks_for_latex(app, doctree, fromdocname): # pylint: disable=unused-argument
+ """Simplify coqdoc and coqtop blocks.
+
+ In HTML mode, this does nothing; in other formats, such as LaTeX, it
+ replaces coqdoc and coqtop blocks by plain text sources, which will use
+ pygments if available. This prevents the LaTeX builder from getting
+ confused.
+ """
+ is_html = app.builder.tags.has("html")
+ for node in doctree.traverse(is_coqtop_or_coqdoc_block):
+ if is_html:
+ node.rawsource = '' # Prevent pygments from kicking in
+ elif 'coqtop-hidden' in node['classes']:
+ node.parent.remove(node)
+ else:
+ node.replace_self(nodes.literal_block(node.rawsource, node.rawsource, language="Coq"))
+
+COQ_ADDITIONAL_DIRECTIVES = [CoqtopDirective,
+ CoqdocDirective,
+ ExampleDirective,
+ InferenceDirective,
+ PreambleDirective]
+
+COQ_ADDITIONAL_ROLES = [GrammarProductionRole]
+
+def setup(app):
+ """Register the Coq domain"""
+
+ # A few sanity checks:
+ subdomains = set(obj.subdomain for obj in CoqDomain.directives.values())
+ assert subdomains.issuperset(chain(*(idx.subdomains for idx in CoqDomain.indices)))
+ assert subdomains.issubset(CoqDomain.roles.keys())
+
+ # Add domain, directives, and roles
+ app.add_domain(CoqDomain)
+
+ for role in COQ_ADDITIONAL_ROLES:
+ app.add_role(role.role_name, role)
+
+ for directive in COQ_ADDITIONAL_DIRECTIVES:
+ app.add_directive(directive.directive_name, directive)
+
+ app.add_transform(CoqtopBlocksTransform)
+ app.connect('doctree-resolved', simplify_source_code_blocks_for_latex)
+ app.connect('doctree-resolved', CoqtopBlocksTransform.merge_consecutive_coqtop_blocks)
+
+ # Add extra styles
+ app.add_stylesheet("fonts.css")
+ app.add_stylesheet("ansi.css")
+ app.add_stylesheet("coqdoc.css")
+ app.add_javascript("notations.js")
+ app.add_stylesheet("notations.css")
+ app.add_stylesheet("pre-text.css")
+
+ # Tell Sphinx about extra settings
+ app.add_config_value("report_undocumented_coq_objects", None, 'env')
+
+ # ``env_version`` is used by Sphinx to know when to invalidate
+ # coqdomain-specific bits in its caches. It should be incremented when the
+ # contents of ``env.domaindata['coq']`` change. See
+ # `https://github.com/sphinx-doc/sphinx/issues/4460`.
+ meta = { "version": "0.1",
+ "env_version": 2,
+ "parallel_read_safe": True }
+ return meta
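As an illustration (not part of the patch), the domain registered by ``setup`` above is enabled from a Sphinx ``conf.py`` roughly as follows; the paths and values here are assumptions, and the manual's real configuration may differ::

    # conf.py -- sketch only
    import os, sys
    sys.path.append(os.path.abspath("../tools"))   # hypothetical location of the coqrst package

    extensions = ["coqrst.coqdomain"]
    # Config value declared by setup() above; set it to "warning" to flag
    # directives that have no body.
    report_undocumented_coq_objects = None

reST sources can then use the directives and roles defined above, for example::

    .. tacn:: assumption
       :name: assumption

       Solves the goal when its conclusion already occurs among the hypotheses.

Elsewhere in the manual, ``:tacn:`assumption``` then cross-references this entry, and the tactic appears in the "Tactic Index" generated by CoqTacticIndex.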
diff --git a/doc/tools/coqrst/notations/Makefile b/doc/tools/coqrst/notations/Makefile
new file mode 100644
index 00000000..c017aed9
--- /dev/null
+++ b/doc/tools/coqrst/notations/Makefile
@@ -0,0 +1,27 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+# Parsing compact tactic notation syntax in
+
+TEST_INPUT="unfold {+, @qualid|@string at {+, num}}"
+
+python:
+ antlr4 -Dlanguage=Python3 -visitor -no-listener TacticNotations.g
+
+java:
+ antlr4 -Dlanguage=Java TacticNotations.g && javac TacticNotations*.java
+
+test: java
+ grun TacticNotations top -tree <<< "$(TEST_INPUT)"
+
+gui: java
+ grun TacticNotations top -gui <<< "$(TEST_INPUT)"
+
+sample:
+ cd ..; python3 -m coqnotations.driver < ../tests/tactics > ../tests/antlr-notations.html
diff --git a/doc/tools/coqrst/notations/TacticNotations.g b/doc/tools/coqrst/notations/TacticNotations.g
new file mode 100644
index 00000000..a889ebda
--- /dev/null
+++ b/doc/tools/coqrst/notations/TacticNotations.g
@@ -0,0 +1,33 @@
+/************************************************************************/
+/* * The Coq Proof Assistant / The Coq Development Team */
+/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */
+/* <O___,, * (see CREDITS file for the list of authors) */
+/* \VV/ **************************************************************/
+/* // * This file is distributed under the terms of the */
+/* * GNU Lesser General Public License Version 2.1 */
+/* * (see LICENSE file for the text of the license) */
+/************************************************************************/
+grammar TacticNotations;
+
+// Terminals are not visited, so we add non-terminals for each terminal that
+// needs rendering (in particular whitespace (kept in output) vs. WHITESPACE
+// (discarded)).
+
+top: blocks EOF;
+blocks: block ((whitespace)? block)*;
+block: atomic | meta | hole | repeat | curlies;
+repeat: LGROUP (ATOM)? WHITESPACE blocks (WHITESPACE)? RBRACE;
+curlies: LBRACE (whitespace)? blocks (whitespace)? RBRACE;
+whitespace: WHITESPACE;
+meta: METACHAR;
+atomic: ATOM (SUB)?;
+hole: ID (SUB)?;
+
+LGROUP: '{' [+*?];
+LBRACE: '{';
+RBRACE: '}';
+METACHAR: '%' [|(){}];
+ATOM: '@' | '_' | ~[@_{} ]+;
+ID: '@' ('_'? [a-zA-Z0-9])+;
+SUB: '_' '_' [a-zA-Z0-9]+;
+WHITESPACE: ' '+;
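The generated lexer and parser that follow are stock ANTLR 4.7 output for this grammar; the hand-written modules listed in the diffstat (``parsing.py``, ``sphinx.py``, ``html.py``, ``plain.py``) presumably wrap them. A rough sketch of the underlying runtime calls, using only the standard ``antlr4`` Python API with this directory on ``sys.path`` (illustrative only)::

    from antlr4 import InputStream, CommonTokenStream
    from TacticNotationsLexer import TacticNotationsLexer
    from TacticNotationsParser import TacticNotationsParser

    # Parse a notation such as the ones used in the coqdomain.py docstrings
    tokens = CommonTokenStream(TacticNotationsLexer(InputStream("do @num @expr")))
    parser = TacticNotationsParser(tokens)
    tree = parser.top()                       # 'top' is the start rule above
    print(tree.toStringTree(recog=parser))

    # A TacticNotationsVisitor subclass then turns this tree into Sphinx
    # nodes or HTML (cf. sphinx.py and html.py).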
diff --git a/doc/tools/coqrst/notations/TacticNotations.tokens b/doc/tools/coqrst/notations/TacticNotations.tokens
new file mode 100644
index 00000000..88b38f97
--- /dev/null
+++ b/doc/tools/coqrst/notations/TacticNotations.tokens
@@ -0,0 +1,10 @@
+LGROUP=1
+LBRACE=2
+RBRACE=3
+METACHAR=4
+ATOM=5
+ID=6
+SUB=7
+WHITESPACE=8
+'{'=2
+'}'=3
diff --git a/doc/tools/coqrst/notations/TacticNotationsLexer.py b/doc/tools/coqrst/notations/TacticNotationsLexer.py
new file mode 100644
index 00000000..27293e7e
--- /dev/null
+++ b/doc/tools/coqrst/notations/TacticNotationsLexer.py
@@ -0,0 +1,71 @@
+# Generated from TacticNotations.g by ANTLR 4.7
+from antlr4 import *
+from io import StringIO
+from typing.io import TextIO
+import sys
+
+
+def serializedATN():
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\n")
+ buf.write(":\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
+ buf.write("\4\b\t\b\4\t\t\t\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3")
+ buf.write("\5\3\6\3\6\6\6 \n\6\r\6\16\6!\5\6$\n\6\3\7\3\7\5\7(\n")
+ buf.write("\7\3\7\6\7+\n\7\r\7\16\7,\3\b\3\b\3\b\6\b\62\n\b\r\b\16")
+ buf.write("\b\63\3\t\6\t\67\n\t\r\t\16\t8\2\2\n\3\3\5\4\7\5\t\6\13")
+ buf.write("\7\r\b\17\t\21\n\3\2\7\4\2,-AA\4\2*+}\177\4\2BBaa\7\2")
+ buf.write("\"\"BBaa}}\177\177\5\2\62;C\\c|\2?\2\3\3\2\2\2\2\5\3\2")
+ buf.write("\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2")
+ buf.write("\2\17\3\2\2\2\2\21\3\2\2\2\3\23\3\2\2\2\5\26\3\2\2\2\7")
+ buf.write("\30\3\2\2\2\t\32\3\2\2\2\13#\3\2\2\2\r%\3\2\2\2\17.\3")
+ buf.write("\2\2\2\21\66\3\2\2\2\23\24\7}\2\2\24\25\t\2\2\2\25\4\3")
+ buf.write("\2\2\2\26\27\7}\2\2\27\6\3\2\2\2\30\31\7\177\2\2\31\b")
+ buf.write("\3\2\2\2\32\33\7\'\2\2\33\34\t\3\2\2\34\n\3\2\2\2\35$")
+ buf.write("\t\4\2\2\36 \n\5\2\2\37\36\3\2\2\2 !\3\2\2\2!\37\3\2\2")
+ buf.write("\2!\"\3\2\2\2\"$\3\2\2\2#\35\3\2\2\2#\37\3\2\2\2$\f\3")
+ buf.write("\2\2\2%*\7B\2\2&(\7a\2\2\'&\3\2\2\2\'(\3\2\2\2()\3\2\2")
+ buf.write("\2)+\t\6\2\2*\'\3\2\2\2+,\3\2\2\2,*\3\2\2\2,-\3\2\2\2")
+ buf.write("-\16\3\2\2\2./\7a\2\2/\61\7a\2\2\60\62\t\6\2\2\61\60\3")
+ buf.write("\2\2\2\62\63\3\2\2\2\63\61\3\2\2\2\63\64\3\2\2\2\64\20")
+ buf.write("\3\2\2\2\65\67\7\"\2\2\66\65\3\2\2\2\678\3\2\2\28\66\3")
+ buf.write("\2\2\289\3\2\2\29\22\3\2\2\2\t\2!#\',\638\2")
+ return buf.getvalue()
+
+
+class TacticNotationsLexer(Lexer):
+
+ atn = ATNDeserializer().deserialize(serializedATN())
+
+ decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+
+ LGROUP = 1
+ LBRACE = 2
+ RBRACE = 3
+ METACHAR = 4
+ ATOM = 5
+ ID = 6
+ SUB = 7
+ WHITESPACE = 8
+
+ channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
+
+ modeNames = [ "DEFAULT_MODE" ]
+
+ literalNames = [ "<INVALID>",
+ "'{'", "'}'" ]
+
+ symbolicNames = [ "<INVALID>",
+ "LGROUP", "LBRACE", "RBRACE", "METACHAR", "ATOM", "ID", "SUB",
+ "WHITESPACE" ]
+
+ ruleNames = [ "LGROUP", "LBRACE", "RBRACE", "METACHAR", "ATOM", "ID",
+ "SUB", "WHITESPACE" ]
+
+ grammarFileName = "TacticNotations.g"
+
+ def __init__(self, input=None, output:TextIO = sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7")
+ self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
+ self._actions = None
+ self._predicates = None
diff --git a/doc/tools/coqrst/notations/TacticNotationsLexer.tokens b/doc/tools/coqrst/notations/TacticNotationsLexer.tokens
new file mode 100644
index 00000000..88b38f97
--- /dev/null
+++ b/doc/tools/coqrst/notations/TacticNotationsLexer.tokens
@@ -0,0 +1,10 @@
+LGROUP=1
+LBRACE=2
+RBRACE=3
+METACHAR=4
+ATOM=5
+ID=6
+SUB=7
+WHITESPACE=8
+'{'=2
+'}'=3
diff --git a/doc/tools/coqrst/notations/TacticNotationsParser.py b/doc/tools/coqrst/notations/TacticNotationsParser.py
new file mode 100644
index 00000000..645f0789
--- /dev/null
+++ b/doc/tools/coqrst/notations/TacticNotationsParser.py
@@ -0,0 +1,595 @@
+# Generated from TacticNotations.g by ANTLR 4.7
+# encoding: utf-8
+from antlr4 import *
+from io import StringIO
+from typing.io import TextIO
+import sys
+
+def serializedATN():
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\n")
+ buf.write("J\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b")
+ buf.write("\t\b\4\t\t\t\4\n\t\n\3\2\3\2\3\2\3\3\3\3\5\3\32\n\3\3")
+ buf.write("\3\7\3\35\n\3\f\3\16\3 \13\3\3\4\3\4\3\4\3\4\3\4\5\4\'")
+ buf.write("\n\4\3\5\3\5\5\5+\n\5\3\5\3\5\3\5\5\5\60\n\5\3\5\3\5\3")
+ buf.write("\6\3\6\5\6\66\n\6\3\6\3\6\5\6:\n\6\3\6\3\6\3\7\3\7\3\b")
+ buf.write("\3\b\3\t\3\t\5\tD\n\t\3\n\3\n\5\nH\n\n\3\n\2\2\13\2\4")
+ buf.write("\6\b\n\f\16\20\22\2\2\2L\2\24\3\2\2\2\4\27\3\2\2\2\6&")
+ buf.write("\3\2\2\2\b(\3\2\2\2\n\63\3\2\2\2\f=\3\2\2\2\16?\3\2\2")
+ buf.write("\2\20A\3\2\2\2\22E\3\2\2\2\24\25\5\4\3\2\25\26\7\2\2\3")
+ buf.write("\26\3\3\2\2\2\27\36\5\6\4\2\30\32\5\f\7\2\31\30\3\2\2")
+ buf.write("\2\31\32\3\2\2\2\32\33\3\2\2\2\33\35\5\6\4\2\34\31\3\2")
+ buf.write("\2\2\35 \3\2\2\2\36\34\3\2\2\2\36\37\3\2\2\2\37\5\3\2")
+ buf.write("\2\2 \36\3\2\2\2!\'\5\20\t\2\"\'\5\16\b\2#\'\5\22\n\2")
+ buf.write("$\'\5\b\5\2%\'\5\n\6\2&!\3\2\2\2&\"\3\2\2\2&#\3\2\2\2")
+ buf.write("&$\3\2\2\2&%\3\2\2\2\'\7\3\2\2\2(*\7\3\2\2)+\7\7\2\2*")
+ buf.write(")\3\2\2\2*+\3\2\2\2+,\3\2\2\2,-\7\n\2\2-/\5\4\3\2.\60")
+ buf.write("\7\n\2\2/.\3\2\2\2/\60\3\2\2\2\60\61\3\2\2\2\61\62\7\5")
+ buf.write("\2\2\62\t\3\2\2\2\63\65\7\4\2\2\64\66\5\f\7\2\65\64\3")
+ buf.write("\2\2\2\65\66\3\2\2\2\66\67\3\2\2\2\679\5\4\3\28:\5\f\7")
+ buf.write("\298\3\2\2\29:\3\2\2\2:;\3\2\2\2;<\7\5\2\2<\13\3\2\2\2")
+ buf.write("=>\7\n\2\2>\r\3\2\2\2?@\7\6\2\2@\17\3\2\2\2AC\7\7\2\2")
+ buf.write("BD\7\t\2\2CB\3\2\2\2CD\3\2\2\2D\21\3\2\2\2EG\7\b\2\2F")
+ buf.write("H\7\t\2\2GF\3\2\2\2GH\3\2\2\2H\23\3\2\2\2\13\31\36&*/")
+ buf.write("\659CG")
+ return buf.getvalue()
+
+
+class TacticNotationsParser ( Parser ):
+
+ grammarFileName = "TacticNotations.g"
+
+ atn = ATNDeserializer().deserialize(serializedATN())
+
+ decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+
+ sharedContextCache = PredictionContextCache()
+
+ literalNames = [ "<INVALID>", "<INVALID>", "'{'", "'}'" ]
+
+ symbolicNames = [ "<INVALID>", "LGROUP", "LBRACE", "RBRACE", "METACHAR",
+ "ATOM", "ID", "SUB", "WHITESPACE" ]
+
+ RULE_top = 0
+ RULE_blocks = 1
+ RULE_block = 2
+ RULE_repeat = 3
+ RULE_curlies = 4
+ RULE_whitespace = 5
+ RULE_meta = 6
+ RULE_atomic = 7
+ RULE_hole = 8
+
+ ruleNames = [ "top", "blocks", "block", "repeat", "curlies", "whitespace",
+ "meta", "atomic", "hole" ]
+
+ EOF = Token.EOF
+ LGROUP=1
+ LBRACE=2
+ RBRACE=3
+ METACHAR=4
+ ATOM=5
+ ID=6
+ SUB=7
+ WHITESPACE=8
+
+ def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7")
+ self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
+ self._predicates = None
+
+
+
+ class TopContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def blocks(self):
+ return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0)
+
+
+ def EOF(self):
+ return self.getToken(TacticNotationsParser.EOF, 0)
+
+ def getRuleIndex(self):
+ return TacticNotationsParser.RULE_top
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitTop" ):
+ return visitor.visitTop(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def top(self):
+
+ localctx = TacticNotationsParser.TopContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 0, self.RULE_top)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 18
+ self.blocks()
+ self.state = 19
+ self.match(TacticNotationsParser.EOF)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class BlocksContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def block(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(TacticNotationsParser.BlockContext)
+ else:
+ return self.getTypedRuleContext(TacticNotationsParser.BlockContext,i)
+
+
+ def whitespace(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(TacticNotationsParser.WhitespaceContext)
+ else:
+ return self.getTypedRuleContext(TacticNotationsParser.WhitespaceContext,i)
+
+
+ def getRuleIndex(self):
+ return TacticNotationsParser.RULE_blocks
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitBlocks" ):
+ return visitor.visitBlocks(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def blocks(self):
+
+ localctx = TacticNotationsParser.BlocksContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 2, self.RULE_blocks)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 21
+ self.block()
+ self.state = 28
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,1,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 23
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==TacticNotationsParser.WHITESPACE:
+ self.state = 22
+ self.whitespace()
+
+
+ self.state = 25
+ self.block()
+ self.state = 30
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,1,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class BlockContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def atomic(self):
+ return self.getTypedRuleContext(TacticNotationsParser.AtomicContext,0)
+
+
+ def meta(self):
+ return self.getTypedRuleContext(TacticNotationsParser.MetaContext,0)
+
+
+ def hole(self):
+ return self.getTypedRuleContext(TacticNotationsParser.HoleContext,0)
+
+
+ def repeat(self):
+ return self.getTypedRuleContext(TacticNotationsParser.RepeatContext,0)
+
+
+ def curlies(self):
+ return self.getTypedRuleContext(TacticNotationsParser.CurliesContext,0)
+
+
+ def getRuleIndex(self):
+ return TacticNotationsParser.RULE_block
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitBlock" ):
+ return visitor.visitBlock(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def block(self):
+
+ localctx = TacticNotationsParser.BlockContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 4, self.RULE_block)
+ try:
+ self.state = 36
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [TacticNotationsParser.ATOM]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 31
+ self.atomic()
+ pass
+ elif token in [TacticNotationsParser.METACHAR]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 32
+ self.meta()
+ pass
+ elif token in [TacticNotationsParser.ID]:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 33
+ self.hole()
+ pass
+ elif token in [TacticNotationsParser.LGROUP]:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 34
+ self.repeat()
+ pass
+ elif token in [TacticNotationsParser.LBRACE]:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 35
+ self.curlies()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class RepeatContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def LGROUP(self):
+ return self.getToken(TacticNotationsParser.LGROUP, 0)
+
+ def WHITESPACE(self, i:int=None):
+ if i is None:
+ return self.getTokens(TacticNotationsParser.WHITESPACE)
+ else:
+ return self.getToken(TacticNotationsParser.WHITESPACE, i)
+
+ def blocks(self):
+ return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0)
+
+
+ def RBRACE(self):
+ return self.getToken(TacticNotationsParser.RBRACE, 0)
+
+ def ATOM(self):
+ return self.getToken(TacticNotationsParser.ATOM, 0)
+
+ def getRuleIndex(self):
+ return TacticNotationsParser.RULE_repeat
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitRepeat" ):
+ return visitor.visitRepeat(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def repeat(self):
+
+ localctx = TacticNotationsParser.RepeatContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 6, self.RULE_repeat)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 38
+ self.match(TacticNotationsParser.LGROUP)
+ self.state = 40
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==TacticNotationsParser.ATOM:
+ self.state = 39
+ self.match(TacticNotationsParser.ATOM)
+
+
+ self.state = 42
+ self.match(TacticNotationsParser.WHITESPACE)
+ self.state = 43
+ self.blocks()
+ self.state = 45
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==TacticNotationsParser.WHITESPACE:
+ self.state = 44
+ self.match(TacticNotationsParser.WHITESPACE)
+
+
+ self.state = 47
+ self.match(TacticNotationsParser.RBRACE)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class CurliesContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def LBRACE(self):
+ return self.getToken(TacticNotationsParser.LBRACE, 0)
+
+ def blocks(self):
+ return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0)
+
+
+ def RBRACE(self):
+ return self.getToken(TacticNotationsParser.RBRACE, 0)
+
+ def whitespace(self, i:int=None):
+ if i is None:
+ return self.getTypedRuleContexts(TacticNotationsParser.WhitespaceContext)
+ else:
+ return self.getTypedRuleContext(TacticNotationsParser.WhitespaceContext,i)
+
+
+ def getRuleIndex(self):
+ return TacticNotationsParser.RULE_curlies
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitCurlies" ):
+ return visitor.visitCurlies(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def curlies(self):
+
+ localctx = TacticNotationsParser.CurliesContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 8, self.RULE_curlies)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 49
+ self.match(TacticNotationsParser.LBRACE)
+ self.state = 51
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==TacticNotationsParser.WHITESPACE:
+ self.state = 50
+ self.whitespace()
+
+
+ self.state = 53
+ self.blocks()
+ self.state = 55
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==TacticNotationsParser.WHITESPACE:
+ self.state = 54
+ self.whitespace()
+
+
+ self.state = 57
+ self.match(TacticNotationsParser.RBRACE)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class WhitespaceContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def WHITESPACE(self):
+ return self.getToken(TacticNotationsParser.WHITESPACE, 0)
+
+ def getRuleIndex(self):
+ return TacticNotationsParser.RULE_whitespace
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitWhitespace" ):
+ return visitor.visitWhitespace(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def whitespace(self):
+
+ localctx = TacticNotationsParser.WhitespaceContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 10, self.RULE_whitespace)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 59
+ self.match(TacticNotationsParser.WHITESPACE)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class MetaContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def METACHAR(self):
+ return self.getToken(TacticNotationsParser.METACHAR, 0)
+
+ def getRuleIndex(self):
+ return TacticNotationsParser.RULE_meta
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitMeta" ):
+ return visitor.visitMeta(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def meta(self):
+
+ localctx = TacticNotationsParser.MetaContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 12, self.RULE_meta)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 61
+ self.match(TacticNotationsParser.METACHAR)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class AtomicContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def ATOM(self):
+ return self.getToken(TacticNotationsParser.ATOM, 0)
+
+ def SUB(self):
+ return self.getToken(TacticNotationsParser.SUB, 0)
+
+ def getRuleIndex(self):
+ return TacticNotationsParser.RULE_atomic
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitAtomic" ):
+ return visitor.visitAtomic(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def atomic(self):
+
+ localctx = TacticNotationsParser.AtomicContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 14, self.RULE_atomic)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 63
+ self.match(TacticNotationsParser.ATOM)
+ self.state = 65
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==TacticNotationsParser.SUB:
+ self.state = 64
+ self.match(TacticNotationsParser.SUB)
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class HoleContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def ID(self):
+ return self.getToken(TacticNotationsParser.ID, 0)
+
+ def SUB(self):
+ return self.getToken(TacticNotationsParser.SUB, 0)
+
+ def getRuleIndex(self):
+ return TacticNotationsParser.RULE_hole
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitHole" ):
+ return visitor.visitHole(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def hole(self):
+
+ localctx = TacticNotationsParser.HoleContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 16, self.RULE_hole)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 67
+ self.match(TacticNotationsParser.ID)
+ self.state = 69
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==TacticNotationsParser.SUB:
+ self.state = 68
+ self.match(TacticNotationsParser.SUB)
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
diff --git a/doc/tools/coqrst/notations/TacticNotationsVisitor.py b/doc/tools/coqrst/notations/TacticNotationsVisitor.py
new file mode 100644
index 00000000..c0bcc4af
--- /dev/null
+++ b/doc/tools/coqrst/notations/TacticNotationsVisitor.py
@@ -0,0 +1,58 @@
+# Generated from TacticNotations.g by ANTLR 4.7
+from antlr4 import *
+if __name__ is not None and "." in __name__:
+ from .TacticNotationsParser import TacticNotationsParser
+else:
+ from TacticNotationsParser import TacticNotationsParser
+
+# This class defines a complete generic visitor for a parse tree produced by TacticNotationsParser.
+
+class TacticNotationsVisitor(ParseTreeVisitor):
+
+ # Visit a parse tree produced by TacticNotationsParser#top.
+ def visitTop(self, ctx:TacticNotationsParser.TopContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by TacticNotationsParser#blocks.
+ def visitBlocks(self, ctx:TacticNotationsParser.BlocksContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by TacticNotationsParser#block.
+ def visitBlock(self, ctx:TacticNotationsParser.BlockContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by TacticNotationsParser#repeat.
+ def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by TacticNotationsParser#curlies.
+ def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by TacticNotationsParser#whitespace.
+ def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by TacticNotationsParser#meta.
+ def visitMeta(self, ctx:TacticNotationsParser.MetaContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by TacticNotationsParser#atomic.
+ def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext):
+ return self.visitChildren(ctx)
+
+
+ # Visit a parse tree produced by TacticNotationsParser#hole.
+ def visitHole(self, ctx:TacticNotationsParser.HoleContext):
+ return self.visitChildren(ctx)
+
+
+
+del TacticNotationsParser
diff --git a/doc/tools/coqrst/notations/__init__.py b/doc/tools/coqrst/notations/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/doc/tools/coqrst/notations/__init__.py
diff --git a/doc/tools/coqrst/notations/fontsupport.py b/doc/tools/coqrst/notations/fontsupport.py
new file mode 100755
index 00000000..a3efd97f
--- /dev/null
+++ b/doc/tools/coqrst/notations/fontsupport.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""Transform a font to center each of its characters in square bounding boxes.
+
+See https://stackoverflow.com/questions/37377476/ for background information.
+"""
+
+from collections import Counter
+
+try:
+ import fontforge
+ import psMat
+except ImportError:
+ print("This program requires FontForge's python bindings:")
+ print(" git clone https://github.com/fontforge/fontforge")
+ print(" cd fontforge")
+ print(" ./bootstrap")
+ print(" ./configure")
+ print(" make -j8")
+ print(" sudo make install")
+ raise
+
+def glyph_height(glyph):
+ _, ylo, _, yhi = glyph.boundingBox()
+ return yhi - ylo
+
+def scale_single_glyph(glyph, width, height):
+ """Center glyph in a box of size width*height"""
+ # Some glyphs (such as ‘;’) contain references (‘.’ + ‘,’), and moving the
+ # referenced glyphs moves them in all glyphs that reference them.
+ # Unlinking copies references into this glyph
+ glyph.unlinkThisGlyph()
+ # Width
+ deltaw = width - glyph.width
+ glyph.left_side_bearing += deltaw / 2
+ glyph.right_side_bearing += deltaw - glyph.left_side_bearing
+ glyph.width = width
+ # Height
+ ylo = glyph.boundingBox()[1]
+ deltah = height - glyph_height(glyph)
+ glyph.transform(psMat.translate(0, deltah / 2.0 - ylo))
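+    # After this translate, the glyph's bounding box spans [deltah/2,
+    # height - deltah/2] vertically, i.e. the glyph is centered in a box of
+    # the requested height.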
+
+def avg(lst):
+ lst = list(lst)
+ return sum(lst) / float(len(lst))
+
+def trim_font(fnt):
+ """Remove characters codes beyond 191 front fnt"""
+ for g in fnt.glyphs():
+ if g.unicode >= 191:
+ fnt.removeGlyph(g)
+ return fnt
+
+def center_glyphs(src_font_path, dst_font_path, dst_name):
+ fnt = trim_font(fontforge.open(src_font_path))
+
+ size = max(g.width for g in fnt.glyphs())
+ fnt.ascent, fnt.descent = size, 0
+ for glyph in fnt.glyphs():
+ scale_single_glyph(glyph, size, size)
+
+ fnt.sfnt_names = []
+ fnt.fontname = fnt.familyname = fnt.fullname = dst_name
+ fnt.generate(dst_font_path)
+
+if __name__ == '__main__':
+ from os.path import dirname, join, abspath
+ curdir = dirname(abspath(__file__))
+ ubuntumono_path = join(curdir, "UbuntuMono-B.ttf")
+ ubuntumono_mod_path = join(curdir, "CoqNotations.ttf")
+ center_glyphs(ubuntumono_path, ubuntumono_mod_path, "CoqNotations")
diff --git a/doc/tools/coqrst/notations/html.py b/doc/tools/coqrst/notations/html.py
new file mode 100644
index 00000000..87a41cf9
--- /dev/null
+++ b/doc/tools/coqrst/notations/html.py
@@ -0,0 +1,75 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""A visitor for ANTLR notation ASTs, producing raw HTML.
+
+Uses the dominate package.
+"""
+
+from dominate import tags
+
+from .parsing import parse
+from .TacticNotationsParser import TacticNotationsParser
+from .TacticNotationsVisitor import TacticNotationsVisitor
+
+class TacticNotationsToHTMLVisitor(TacticNotationsVisitor):
+ def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext):
+ with tags.span(_class="repeat-wrapper"):
+ with tags.span(_class="repeat"):
+ self.visitChildren(ctx)
+ repeat_marker = ctx.LGROUP().getText()[1]
+ separator = ctx.ATOM()
+ tags.sup(repeat_marker)
+ if separator:
+ tags.sub(separator.getText())
+
+ def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext):
+ sp = tags.span(_class="curlies")
+ sp.add("{")
+ with sp:
+ self.visitChildren(ctx)
+ sp.add("}")
+
+ def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext):
+ tags.span(ctx.ATOM().getText())
+
+ def visitHole(self, ctx:TacticNotationsParser.HoleContext):
+ tags.span(ctx.ID().getText()[1:], _class="hole")
+ sub = ctx.SUB()
+ if sub:
+ tags.sub(sub.getText()[1:])
+
+ def visitMeta(self, ctx:TacticNotationsParser.MetaContext):
+ txt = ctx.METACHAR().getText()[1:]
+ if (txt == "{") or (txt == "}"):
+ tags.span(txt)
+ else:
+ tags.span(txt, _class="meta")
+
+ def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext):
+ tags.span(" ") # TODO: no need for a <span> here
+
+def htmlize(notation):
+ """Translate notation to a dominate HTML tree"""
+ top = tags.span(_class='notation')
+ with top:
+ TacticNotationsToHTMLVisitor().visit(parse(notation))
+ return top
+
+def htmlize_str(notation):
+ """Translate notation to a raw HTML document"""
+ # ‘pretty=True’ introduces spurious spaces
+ return htmlize(notation).render(pretty=False)
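+# Rough usage sketch (hypothetical input): htmlize_str("@id") should produce
+# markup along the lines of
+#   <span class="notation"><span class="hole">id</span></span>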
+
+def htmlize_p(notation):
+ """Like `htmlize`, wrapped in a ‘p’.
+    Returns nothing; it must instead be run inside an enclosing dominate context.
+ """
+ with tags.p():
+ htmlize(notation)
diff --git a/doc/tools/coqrst/notations/parsing.py b/doc/tools/coqrst/notations/parsing.py
new file mode 100644
index 00000000..506240d9
--- /dev/null
+++ b/doc/tools/coqrst/notations/parsing.py
@@ -0,0 +1,37 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+from .TacticNotationsLexer import TacticNotationsLexer
+from .TacticNotationsParser import TacticNotationsParser
+
+from antlr4 import CommonTokenStream, InputStream
+
+SUBSTITUTIONS = [#("@bindings_list", "{+ (@id := @val) }"),
+ ("@qualid_or_string", "@id|@string")]
+
+def substitute(notation):
+ """Perform common substitutions in the notation string.
+
+ Nested notations quickly became unwieldy in the original ‘…’-based format,
+ so they were avoided and replaced by pointers to grammar rules. With the
+ new format, it's usually nicer to remove the indirection.
+ """
+ for (src, dst) in SUBSTITUTIONS:
+ notation = notation.replace(src, dst)
+ return notation
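+# For instance, substitute("assert @qualid_or_string") yields
+# "assert @id|@string" (per the SUBSTITUTIONS table above; "assert" is just a
+# hypothetical surrounding word).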
+
+def parse(notation):
+ """Parse a notation string.
+
+ :return: An ANTLR AST. Use one of the supplied visitors (or write your own)
+ to turn it into useful output.
+ """
+ substituted = substitute(notation)
+ lexer = TacticNotationsLexer(InputStream(substituted))
+ return TacticNotationsParser(CommonTokenStream(lexer)).top()
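+# Minimal usage sketch (hypothetical notation): the returned tree is a
+# TopContext; feed it to one of the visitors in this package
+# (html, plain, regexp, sphinx), e.g.
+#   tree = parse("intros {+, @id}")
+#   tree.blocks()   # -> BlocksContext for the parsed notation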
diff --git a/doc/tools/coqrst/notations/plain.py b/doc/tools/coqrst/notations/plain.py
new file mode 100644
index 00000000..f6e82fc6
--- /dev/null
+++ b/doc/tools/coqrst/notations/plain.py
@@ -0,0 +1,53 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""A visitor for ANTLR notation ASTs, producing plain text with ellipses.
+
+Somewhat closely approximates the rendering of the original manual.
+"""
+
+from io import StringIO
+
+from .parsing import parse
+from .TacticNotationsParser import TacticNotationsParser
+from .TacticNotationsVisitor import TacticNotationsVisitor
+
+class TacticNotationsToDotsVisitor(TacticNotationsVisitor):
+ def __init__(self):
+ self.buffer = StringIO()
+
+ def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext):
+ separator = ctx.ATOM()
+ self.visitChildren(ctx)
+ if ctx.LGROUP().getText()[1] == "+":
+ spacer = (separator.getText() + " " if separator else "")
+ self.buffer.write(spacer + "…" + spacer)
+ self.visitChildren(ctx)
+
+ def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext):
+ self.buffer.write("{")
+ self.visitChildren(ctx)
+ self.buffer.write("}")
+
+ def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext):
+ self.buffer.write(ctx.ATOM().getText())
+
+ def visitHole(self, ctx:TacticNotationsParser.HoleContext):
+ self.buffer.write("‘{}’".format(ctx.ID().getText()[1:]))
+
+ def visitMeta(self, ctx:TacticNotationsParser.MetaContext):
+ self.buffer.write(ctx.METACHAR().getText()[1:])
+
+ def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext):
+ self.buffer.write(" ")
+
+def stringify_with_ellipses(notation):
+ vs = TacticNotationsToDotsVisitor()
+ vs.visit(parse(notation))
+ return vs.buffer.getvalue()
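+# Rough example (hypothetical notation): stringify_with_ellipses("{+, @id}")
+# should produce something like "‘id’, …, ‘id’".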
diff --git a/doc/tools/coqrst/notations/regexp.py b/doc/tools/coqrst/notations/regexp.py
new file mode 100644
index 00000000..ea820c71
--- /dev/null
+++ b/doc/tools/coqrst/notations/regexp.py
@@ -0,0 +1,60 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""An experimental visitor for ANTLR notation ASTs, producing regular expressions."""
+
+import re
+from io import StringIO
+
+from .parsing import parse
+from .TacticNotationsParser import TacticNotationsParser
+from .TacticNotationsVisitor import TacticNotationsVisitor
+
+class TacticNotationsToRegexpVisitor(TacticNotationsVisitor):
+ def __init__(self):
+ self.buffer = StringIO()
+
+ def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext):
+ separator = ctx.ATOM()
+ repeat_marker = ctx.LGROUP().getText()[1]
+
+ self.buffer.write("(")
+ self.visitChildren(ctx)
+ self.buffer.write(")")
+
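+        # Note: a "*" marker is caught by the first branch below, so it is
+        # rendered as an optional group; only "+" yields an actual repetition.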
+ if repeat_marker in ["?", "*"]:
+ self.buffer.write("?")
+ elif repeat_marker in ["+", "*"]:
+ self.buffer.write("(")
+ self.buffer.write(r"\s*" + re.escape(separator.getText() if separator else " ") + r"\s*")
+ self.visitChildren(ctx)
+ self.buffer.write(")*")
+
+ def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext):
+ self.buffer.write(r"\{")
+ self.visitChildren(ctx)
+ self.buffer.write(r"\}")
+
+ def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext):
+ self.buffer.write(re.escape(ctx.ATOM().getText()))
+
+ def visitHole(self, ctx:TacticNotationsParser.HoleContext):
+ self.buffer.write("([^();. \n]+)") # FIXME could allow more things
+
+ def visitMeta(self, ctx:TacticNotationsParser.MetaContext):
+ self.buffer.write(re.escape(ctx.METACHAR().getText()[1:]))
+
+ def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext):
+ self.buffer.write(r"\s+")
+
+def regexpify(notation):
+ """Translate notation to a Python regular expression matching it"""
+ vs = TacticNotationsToRegexpVisitor()
+ vs.visit(parse(notation))
+ return vs.buffer.getvalue()
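+# Rough usage sketch (hypothetical notation):
+#   import re
+#   re.fullmatch(regexpify("intros {+ @id}"), "intros x y z")   # should match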
diff --git a/doc/tools/coqrst/notations/sphinx.py b/doc/tools/coqrst/notations/sphinx.py
new file mode 100644
index 00000000..e05b8341
--- /dev/null
+++ b/doc/tools/coqrst/notations/sphinx.py
@@ -0,0 +1,102 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""A visitor for ANTLR notation ASTs, producing Sphinx nodes.
+
+Unlike the HTML visitor, this produces Sphinx-friendly nodes that can be used by
+all backends. If you just want HTML output, use the HTML visitor.
+"""
+
+from .parsing import parse
+from .TacticNotationsParser import TacticNotationsParser
+from .TacticNotationsVisitor import TacticNotationsVisitor
+
+from docutils import nodes
+from sphinx import addnodes
+
+import sys
+
+class TacticNotationsToSphinxVisitor(TacticNotationsVisitor):
+ def defaultResult(self):
+ return []
+
+ def aggregateResult(self, aggregate, nextResult):
+ if nextResult:
+ aggregate.extend(nextResult)
+ return aggregate
+
+ def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext):
+ # Uses inline nodes instead of subscript and superscript to ensure that
+ # we get the right customization hooks at the LaTeX level
+ wrapper = nodes.inline('', '', classes=['repeat-wrapper'])
+ wrapper += nodes.inline('', '', *self.visitChildren(ctx), classes=["repeat"])
+
+ repeat_marker = ctx.LGROUP().getText()[1]
+ wrapper += nodes.inline(repeat_marker, repeat_marker, classes=['notation-sup'])
+
+ separator = ctx.ATOM()
+ if separator:
+ sep = separator.getText()
+ wrapper += nodes.inline(sep, sep, classes=['notation-sub'])
+
+ return [wrapper]
+
+ def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext):
+ sp = nodes.inline('', '', classes=["curlies"])
+ sp += nodes.Text("{")
+ sp.extend(self.visitChildren(ctx))
+ sp += nodes.Text("}")
+ return [sp]
+
+ def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext):
+ atom = ctx.ATOM().getText()
+ sub = ctx.SUB()
+ node = nodes.inline(atom, atom)
+
+ if sub:
+ sub_index = sub.getText()[2:]
+ node += nodes.subscript(sub_index, sub_index)
+
+ return [node]
+
+ def visitHole(self, ctx:TacticNotationsParser.HoleContext):
+ hole = ctx.ID().getText()
+ token_name = hole[1:]
+ node = nodes.inline(hole, token_name, classes=["hole"])
+
+ sub = ctx.SUB()
+ if sub:
+ sub_index = sub.getText()[2:]
+ node += nodes.subscript(sub_index, sub_index)
+
+ return [addnodes.pending_xref(token_name, node, reftype='token', refdomain='std', reftarget=token_name)]
+
+ def visitMeta(self, ctx:TacticNotationsParser.MetaContext):
+ meta = ctx.METACHAR().getText()
+ metachar = meta[1:] # remove escape char
+ token_name = metachar
+ if (metachar == "{") or (metachar == "}"):
+ classes=[]
+ else:
+ classes=["meta"]
+ return [nodes.inline(metachar, token_name, classes=classes)]
+
+ def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext):
+ return [nodes.Text(" ")]
+
+def sphinxify(notation):
+ """Translate notation into a Sphinx document tree"""
+ vs = TacticNotationsToSphinxVisitor()
+ return vs.visit(parse(notation))
+
+def main():
+ print(sphinxify("a := {+, b {+ c}}"))
+
+if __name__ == '__main__':
+ main()
diff --git a/doc/tools/coqrst/repl/__init__.py b/doc/tools/coqrst/repl/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/doc/tools/coqrst/repl/__init__.py
diff --git a/doc/tools/coqrst/repl/ansicolors.py b/doc/tools/coqrst/repl/ansicolors.py
new file mode 100644
index 00000000..495eea91
--- /dev/null
+++ b/doc/tools/coqrst/repl/ansicolors.py
@@ -0,0 +1,99 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""
+Parse Coq's ANSI output.
+========================
+
+Translated to Python from Coq's terminal.ml.
+"""
+
+# pylint: disable=too-many-return-statements, too-many-branches
+
+def parse_color(style, offset):
+ color = style[offset] % 10
+ if color == 0:
+ return ("black", 1)
+ elif color == 1:
+ return ("red", 1)
+ elif color == 2:
+ return ("green", 1)
+ elif color == 3:
+ return ("yellow", 1)
+ elif color == 4:
+ return ("blue", 1)
+ elif color == 5:
+ return ("magenta", 1)
+ elif color == 6:
+ return ("cyan", 1)
+ elif color == 7:
+ return ("white", 1)
+ elif color == 9:
+ return ("default", 1)
+ elif color == 8:
+ nxt = style[offset + 1]
+ if nxt == 5:
+ return ("index-{}".format(style[offset + 1]), 2)
+ elif nxt == 2:
+ return ("rgb-{}-{}-{}".format(*style[offset+1:offset+4]), 4)
+ else:
+ raise ValueError("{}, {}".format(style, offset))
+ else:
+ raise ValueError()
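+# Example: parse_color([92], 0) == ("green", 1); parse_style then turns this
+# into "fg-light-green" because 90 <= 92 < 100.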
+
+def parse_style(style, offset, acc):
+ offset = 0
+ while offset < len(style):
+ head = style[offset]
+
+ if head == 0:
+ acc.append("reset")
+ elif head == 1:
+ acc.append("bold")
+ elif head == 3:
+ acc.append("italic")
+ elif head == 4:
+ acc.append("underline")
+ elif head == 7:
+ acc.append("negative")
+ elif head == 22:
+ acc.append("no-bold")
+ elif head == 23:
+ acc.append("no-italic")
+ elif head == 24:
+ acc.append("no-underline")
+ elif head == 27:
+ acc.append("no-negative")
+ else:
+ color, suboffset = parse_color(style, offset)
+ offset += suboffset - 1
+ if 30 <= head < 40:
+ acc.append("fg-{}".format(color))
+ elif 40 <= head < 50:
+ acc.append("bg-{}".format(color))
+ elif 90 <= head < 100:
+ acc.append("fg-light-{}".format(color))
+ elif 100 <= head < 110:
+ acc.append("bg-light-{}".format(color))
+
+ offset += 1
+
+def parse_ansi(code):
+ """Parse an ansi code into a collection of CSS classes.
+
+ :param code: A sequence of ‘;’-separated ANSI codes. Do not include the
+ leading ‘^[[’ or the final ‘m’
+ """
+ classes = []
+ parse_style([int(c) for c in code.split(';')], 0, classes)
+ return ["ansi-" + cls for cls in classes]
+
+if __name__ == '__main__':
+ # As produced by Coq with ‘Check nat.’
+ print(parse_ansi("92;49;22;23;24;27"))
diff --git a/doc/tools/coqrst/repl/coqtop.py b/doc/tools/coqrst/repl/coqtop.py
new file mode 100644
index 00000000..aeadce4c
--- /dev/null
+++ b/doc/tools/coqrst/repl/coqtop.py
@@ -0,0 +1,103 @@
+##########################################################################
+## # The Coq Proof Assistant / The Coq Development Team ##
+## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
+## <O___,, # (see CREDITS file for the list of authors) ##
+## \VV/ ###############################################################
+## // # This file is distributed under the terms of the ##
+## # GNU Lesser General Public License Version 2.1 ##
+## # (see LICENSE file for the text of the license) ##
+##########################################################################
+"""
+Drive coqtop with Python!
+=========================
+
+This module is a simple pexpect-based driver for coqtop, based on the old
+REPL interface.
+"""
+
+import os
+import re
+
+import pexpect
+
+class CoqTop:
+ """Create an instance of coqtop.
+
+ Use this as a context manager: no instance of coqtop is created until
+ you call `__enter__`. coqtop is terminated when you `__exit__` the
+ context manager.
+
+ Sentence parsing is very basic for now (a "." in a quoted string will
+ confuse it).
+ """
+
+ COQTOP_PROMPT = re.compile("\r\n[^< ]+ < ")
+
+    def __init__(self, coqtop_bin=None, color=False, args=None):
+ """Configure a coqtop instance (but don't start it yet).
+
+ :param coqtop_bin: The path to coqtop; uses $COQBIN by default, falling back to "coqtop"
+ :param color: When True, tell coqtop to produce ANSI color codes (see
+ the ansicolors module)
+        :param args: Additional arguments to coqtop.
+ """
+ self.coqtop_bin = coqtop_bin or os.path.join(os.getenv('COQBIN', ""), "coqtop")
+ if not pexpect.utils.which(self.coqtop_bin):
+ raise ValueError("coqtop binary not found: '{}'".format(self.coqtop_bin))
+ self.args = (args or []) + ["-boot", "-color", "on"] * color
+ self.coqtop = None
+
+ def __enter__(self):
+ if self.coqtop:
+ raise ValueError("This module isn't re-entrant")
+ self.coqtop = pexpect.spawn(self.coqtop_bin, args=self.args, echo=False, encoding="utf-8")
+ # Disable delays (http://pexpect.readthedocs.io/en/stable/commonissues.html?highlight=delaybeforesend)
+ self.coqtop.delaybeforesend = 0
+ self.next_prompt()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.coqtop.kill(9)
+
+ def next_prompt(self):
+ "Wait for the next coqtop prompt, and return the output preceeding it."
+ self.coqtop.expect(CoqTop.COQTOP_PROMPT, timeout = 10)
+ return self.coqtop.before
+
+ def sendone(self, sentence):
+ """Send a single sentence to coqtop.
+
+        :param sentence: One Coq sentence (otherwise, coqtop will produce multiple
+ prompts and we'll get confused)
+ """
+ # Suppress newlines, but not spaces: they are significant in notations
+ sentence = re.sub(r"[\r\n]+", " ", sentence).strip()
+ self.coqtop.sendline(sentence)
+ try:
+ output = self.next_prompt()
+ except:
+ print("Error while sending the following sentence to coqtop: {}".format(sentence))
+ raise
+ # print("Got {}".format(repr(output)))
+ return output
+
+def sendmany(*sentences):
+ """A small demo: send each sentence in sentences and print the output"""
+ with CoqTop() as coqtop:
+ for sentence in sentences:
+ print("=====================================")
+ print(sentence)
+ print("-------------------------------------")
+ response = coqtop.sendone(sentence)
+ print(response)
+
+def main():
+ """Run a simple performance test and demo `sendmany`"""
+ with CoqTop() as coqtop:
+ for _ in range(200):
+ print(repr(coqtop.sendone("Check nat.")))
+ sendmany("Goal False -> True.", "Proof.", "intros H.",
+ "Check H.", "Chchc.", "apply I.", "Qed.")
+
+if __name__ == '__main__':
+ main()