From 9043add656177eeac1491a73d2f3ab92bec0013c Mon Sep 17 00:00:00 2001 From: Benjamin Barenblat Date: Sat, 29 Dec 2018 14:31:27 -0500 Subject: Imported Upstream version 8.8.2 --- doc/LICENSE | 26 +- doc/Makefile.rt | 43 - doc/common/styles/html/simple/style.css | 2 +- doc/stdlib/index-list.html.template | 46 +- doc/tools/coqrst/__init__.py | 10 + doc/tools/coqrst/checkdeps.py | 39 + doc/tools/coqrst/coqdoc/__init__.py | 10 + doc/tools/coqrst/coqdoc/main.py | 91 ++ doc/tools/coqrst/coqdomain.py | 1201 ++++++++++++++++++++ doc/tools/coqrst/notations/Makefile | 27 + doc/tools/coqrst/notations/TacticNotations.g | 33 + doc/tools/coqrst/notations/TacticNotations.tokens | 10 + doc/tools/coqrst/notations/TacticNotationsLexer.py | 71 ++ .../coqrst/notations/TacticNotationsLexer.tokens | 10 + .../coqrst/notations/TacticNotationsParser.py | 595 ++++++++++ .../coqrst/notations/TacticNotationsVisitor.py | 58 + doc/tools/coqrst/notations/__init__.py | 0 doc/tools/coqrst/notations/fontsupport.py | 80 ++ doc/tools/coqrst/notations/html.py | 75 ++ doc/tools/coqrst/notations/parsing.py | 37 + doc/tools/coqrst/notations/plain.py | 53 + doc/tools/coqrst/notations/regexp.py | 60 + doc/tools/coqrst/notations/sphinx.py | 102 ++ doc/tools/coqrst/repl/__init__.py | 0 doc/tools/coqrst/repl/ansicolors.py | 99 ++ doc/tools/coqrst/repl/coqtop.py | 103 ++ 26 files changed, 2790 insertions(+), 91 deletions(-) delete mode 100644 doc/Makefile.rt create mode 100644 doc/tools/coqrst/__init__.py create mode 100644 doc/tools/coqrst/checkdeps.py create mode 100644 doc/tools/coqrst/coqdoc/__init__.py create mode 100644 doc/tools/coqrst/coqdoc/main.py create mode 100644 doc/tools/coqrst/coqdomain.py create mode 100644 doc/tools/coqrst/notations/Makefile create mode 100644 doc/tools/coqrst/notations/TacticNotations.g create mode 100644 doc/tools/coqrst/notations/TacticNotations.tokens create mode 100644 doc/tools/coqrst/notations/TacticNotationsLexer.py create mode 100644 doc/tools/coqrst/notations/TacticNotationsLexer.tokens create mode 100644 doc/tools/coqrst/notations/TacticNotationsParser.py create mode 100644 doc/tools/coqrst/notations/TacticNotationsVisitor.py create mode 100644 doc/tools/coqrst/notations/__init__.py create mode 100755 doc/tools/coqrst/notations/fontsupport.py create mode 100644 doc/tools/coqrst/notations/html.py create mode 100644 doc/tools/coqrst/notations/parsing.py create mode 100644 doc/tools/coqrst/notations/plain.py create mode 100644 doc/tools/coqrst/notations/regexp.py create mode 100644 doc/tools/coqrst/notations/sphinx.py create mode 100644 doc/tools/coqrst/repl/__init__.py create mode 100644 doc/tools/coqrst/repl/ansicolors.py create mode 100644 doc/tools/coqrst/repl/coqtop.py (limited to 'doc') diff --git a/doc/LICENSE b/doc/LICENSE index ada22e66..c223a4e1 100644 --- a/doc/LICENSE +++ b/doc/LICENSE @@ -2,11 +2,17 @@ The Coq Reference Manual is a collective work from the Coq Development Team whose members are listed in the file CREDITS of the Coq source package. All related documents (the LaTeX and BibTeX sources, the embedded png files, and the PostScript, PDF and html outputs) are -copyright (c) INRIA 1999-2006. The material connected to the Reference -Manual may be distributed only subject to the terms and conditions set -forth in the Open Publication License, v1.0 or later (the latest -version is presently available at http://www.opencontent.org/openpub/). -Options A and B are *not* elected. 
+copyright (c) INRIA 1999-2006, with the exception of the Ubuntu font +file UbuntuMono-B.ttf, which is +Copyright 2010,2011 Canonical Ltd and licensed under the Ubuntu font +license, version 1.0 +(https://www.ubuntu.com/legal/terms-and-policies/font-licence), and its +derivative CoqNotations.ttf distributed under the same license. The +material connected to the Reference Manual may be distributed only +subject to the terms and conditions set forth in the Open Publication +License, v1.0 or later (the latest version is presently available at +http://www.opencontent.org/openpub/). Options A and B are *not* +elected. The Coq Tutorial is a work by Gérard Huet, Gilles Kahn and Christine Paulin-Mohring. All documents (the LaTeX source and the PostScript, @@ -25,16 +31,6 @@ the PostScript, PDF and html outputs) are copyright (c) INRIA distributed under the terms of the Lesser General Public License version 2.1 or later. -The FAQ (Coq for the Clueless) is a work by Pierre Castéran, Hugo -Herbelin, Florent Kirchner, Benjamin Monate, and Julien Narboux. All -documents (the LaTeX source and the PostScript, PDF and html outputs) -are copyright (c) INRIA 2004-2006. The material connected to the FAQ -(Coq for the Clueless) may be distributed only subject to the terms -and conditions set forth in the Open Publication License, v1.0 or -later (the latest version is presently available at -http://www.opencontent.org/openpub/). Options A and B are *not* -elected. - The Tutorial on [Co-]Inductive Types in Coq is a work by Pierre Castéran and Eduardo Gimenez. All related documents (the LaTeX and BibTeX sources and the PostScript, PDF and html outputs) are copyright diff --git a/doc/Makefile.rt b/doc/Makefile.rt deleted file mode 100644 index 6c328134..00000000 --- a/doc/Makefile.rt +++ /dev/null @@ -1,43 +0,0 @@ -# Makefile for building Coq Technical Reports - -# if coqc,coqtop,coq-tex are not in your PATH, you need the environment -# variable COQBIN to be correctly set -# (COQTOP is autodetected) -# (some files are preprocessed using Coq and some part of the documentation -# is automatically built from the theories sources) - -# To compile documentation, you need the following tools: -# Dvi: latex (latex2e), bibtex, makeindex, dviselect (package RPM dviutils) -# Ps: dvips, psutils (ftp://ftp.dcs.ed.ac.uk/pub/ajcd/psutils.tar.gz) -# Pdf: pdflatex -# Html: -# - hevea: http://para.inria.fr/~maranget/hevea/ -# - htmlSplit: http://coq.inria.fr/~delahaye -# Rapports INRIA: dviselect, rrkit (par Michel Mauny) - -include ./Makefile - -################### -# RT -################### -# Fabrication d'un RT INRIA (utilise rrkit de Michel Mauny) -rt/Reference-Manual-RT.dvi: refman/Reference-Manual.dvi rt/RefMan-cover.tex - dviselect -i refman/Reference-Manual.dvi -o rt/RefMan-body.dvi 3: - (cd rt; $(LATEX) RefMan-cover.tex) - set a=`tail -1 refman/Reference-Manual.log`;\ - set a=expr \("$$a" : '.*(\(.*\) pages.*'\) % 2;\ - (cd rt; if $(TEST) "$$a = 0";\ - then rrkit RefMan-cover.dvi RefMan-body.dvi Reference-Manual-RT.dvi;\ - else rrkit -odd RefMan-cover.dvi RefMan-body.dvi Reference-Manual-RT.dvi;\ - fi) - -# Fabrication d'un RT INRIA (utilise rrkit de Michel Mauny) -rt/Tutorial-RT.dvi : tutorial/Tutorial.v.dvi rt/Tutorial-cover.tex - dviselect -i rt/Tutorial.v.dvi -o rt/Tutorial-body.dvi 3: - (cd rt; $(LATEX) Tutorial-cover.tex) - set a=`tail -1 tutorial/Tutorial.v.log`;\ - set a=expr \("$$a" : '.*(\(.*\) pages.*'\) % 2;\ - (cd rt; if $(TEST) "$$a = 0";\ - then rrkit Tutorial-cover.dvi Tutorial-body.dvi Tutorial-RT.dvi;\ 
- else rrkit -odd Tutorial-cover.dvi Tutorial-body.dvi Tutorial-RT.dvi;\ - fi) diff --git a/doc/common/styles/html/simple/style.css b/doc/common/styles/html/simple/style.css index 0b1e640b..d1b2ce11 100644 --- a/doc/common/styles/html/simple/style.css +++ b/doc/common/styles/html/simple/style.css @@ -10,4 +10,4 @@ margin: 0pt; padding: .5ex 1em; list-style: none -} \ No newline at end of file +} diff --git a/doc/stdlib/index-list.html.template b/doc/stdlib/index-list.html.template index 9216c81f..8c09b23a 100644 --- a/doc/stdlib/index-list.html.template +++ b/doc/stdlib/index-list.html.template @@ -18,6 +18,7 @@ through the Require Import command.

theories/Init/Logic.v theories/Init/Logic_Type.v theories/Init/Nat.v + theories/Init/Decimal.v theories/Init/Peano.v theories/Init/Specif.v theories/Init/Tactics.v @@ -46,6 +47,7 @@ through the Require Import command.

theories/Logic/ClassicalDescription.v theories/Logic/ClassicalEpsilon.v theories/Logic/ClassicalUniqueChoice.v + theories/Logic/SetoidChoice.v theories/Logic/Berardi.v theories/Logic/Diaconescu.v theories/Logic/Hurkens.v @@ -55,7 +57,10 @@ through the Require Import command.

theories/Logic/Description.v theories/Logic/Epsilon.v theories/Logic/IndefiniteDescription.v + theories/Logic/PropExtensionality.v + theories/Logic/PropExtensionalityFacts.v theories/Logic/FunctionalExtensionality.v + theories/Logic/ExtensionalFunctionRepresentative.v theories/Logic/ExtensionalityFacts.v theories/Logic/WeakFan.v theories/Logic/WKL.v @@ -220,8 +225,13 @@ through the Require Import command.

theories/Numbers/BinNums.v theories/Numbers/NumPrelude.v - theories/Numbers/BigNumPrelude.v theories/Numbers/NaryFunctions.v + theories/Numbers/DecimalFacts.v + theories/Numbers/DecimalNat.v + theories/Numbers/DecimalPos.v + theories/Numbers/DecimalN.v + theories/Numbers/DecimalZ.v + theories/Numbers/DecimalString.v
  NatInt: @@ -252,16 +262,7 @@ through the Require Import command.

theories/Numbers/Cyclic/Abstract/CyclicAxioms.v theories/Numbers/Cyclic/Abstract/NZCyclic.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleAdd.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleBase.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleCyclic.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleDiv.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleDivn1.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleLift.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleMul.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleSqrt.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleSub.v - theories/Numbers/Cyclic/DoubleCyclic/DoubleType.v + theories/Numbers/Cyclic/Abstract/DoubleType.v theories/Numbers/Cyclic/Int31/Cyclic31.v theories/Numbers/Cyclic/Int31/Ring31.v theories/Numbers/Cyclic/Int31/Int31.v @@ -294,12 +295,6 @@ through the Require Import command.

theories/Numbers/Natural/Abstract/NProperties.v theories/Numbers/Natural/Binary/NBinary.v theories/Numbers/Natural/Peano/NPeano.v - theories/Numbers/Natural/SpecViaZ/NSig.v - theories/Numbers/Natural/SpecViaZ/NSigNAxioms.v - theories/Numbers/Natural/BigN/BigN.v - theories/Numbers/Natural/BigN/Nbasic.v - theories/Numbers/Natural/BigN/NMake.v - theories/Numbers/Natural/BigN/NMake_gen.v
  Integer: @@ -327,19 +322,6 @@ through the Require Import command.

theories/Numbers/Integer/Abstract/ZDivTrunc.v theories/Numbers/Integer/Binary/ZBinary.v theories/Numbers/Integer/NatPairs/ZNatPairs.v - theories/Numbers/Integer/SpecViaZ/ZSig.v - theories/Numbers/Integer/SpecViaZ/ZSigZAxioms.v - theories/Numbers/Integer/BigZ/BigZ.v - theories/Numbers/Integer/BigZ/ZMake.v - - -
  Rational: - Abstract and 31-bits-words-based rational arithmetic -
-
- theories/Numbers/Rational/SpecViaQ/QSig.v - theories/Numbers/Rational/BigQ/BigQ.v - theories/Numbers/Rational/BigQ/QMake.v
@@ -614,8 +596,8 @@ through the Require Import command.

theories/Compat/AdmitAxiom.v - theories/Compat/Coq84.v - theories/Compat/Coq85.v theories/Compat/Coq86.v + theories/Compat/Coq87.v + theories/Compat/Coq88.v
diff --git a/doc/tools/coqrst/__init__.py b/doc/tools/coqrst/__init__.py new file mode 100644 index 00000000..2dda7d92 --- /dev/null +++ b/doc/tools/coqrst/__init__.py @@ -0,0 +1,10 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## ", "<-", "<->", "=>", "<=", ">=", "<>", "~", "/\\", "\\/", "|-", "*", "forall", "exists"] +COQDOC_HEADER = "".join("(** remove printing {} *)".format(s) for s in COQDOC_SYMBOLS) + +def coqdoc(coq_code, coqdoc_bin=None): + """Get the output of coqdoc on coq_code.""" + coqdoc_bin = coqdoc_bin or os.path.join(os.getenv("COQBIN"), "coqdoc") + fd, filename = mkstemp(prefix="coqdoc-", suffix=".v") + if platform.system().startswith("CYGWIN"): + # coqdoc currently doesn't accept cygwin style paths in the form "/cygdrive/c/..." + filename = check_output(["cygpath", "-w", filename]).decode("utf-8").strip() + try: + os.write(fd, COQDOC_HEADER.encode("utf-8")) + os.write(fd, coq_code.encode("utf-8")) + os.close(fd) + return check_output([coqdoc_bin] + COQDOC_OPTIONS + [filename], timeout = 10).decode("utf-8") + finally: + os.remove(filename) + +def is_whitespace_string(elem): + return isinstance(elem, NavigableString) and elem.strip() == "" + +def strip_soup(soup, pred): + """Strip elements maching pred from front and tail of soup.""" + while soup.contents and pred(soup.contents[-1]): + soup.contents.pop() + + skip = 0 + for elem in soup.contents: + if not pred(elem): + break + skip += 1 + + soup.contents[:] = soup.contents[skip:] + +def lex(source): + """Convert source into a stream of (css_classes, token_string).""" + coqdoc_output = coqdoc(source) + soup = BeautifulSoup(coqdoc_output, "html.parser") + root = soup.find(class_='code') + strip_soup(root, is_whitespace_string) + for elem in root.children: + if isinstance(elem, NavigableString): + yield [], elem + elif elem.name == "span": + cls = "coqdoc-{}".format(elem['title']) + yield [cls], elem.string + elif elem.name == 'br': + pass + else: + raise ValueError(elem) + +def main(): + """Lex stdin (for testing purposes)""" + import sys + for classes, text in lex(sys.stdin.read()): + print(repr(text) + "\t" ' '.join(classes)) + +if __name__ == '__main__': + main() diff --git a/doc/tools/coqrst/coqdomain.py b/doc/tools/coqrst/coqdomain.py new file mode 100644 index 00000000..edf4e6ec --- /dev/null +++ b/doc/tools/coqrst/coqdomain.py @@ -0,0 +1,1201 @@ +# -*- coding: utf-8 -*- +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## `_. + + Unlike ``.. productionlist``\ s, this directive accepts notation syntax. + + + Usage:: + + .. prodn:: token += production + .. prodn:: token ::= production + + Example:: + + .. prodn:: term += let: @pattern := @term in @term + .. prodn:: occ_switch ::= { {? + %| - } {* @num } } + + """ + subdomain = "prodn" + #annotation = "Grammar production" + + def _render_signature(self, signature, signode): + raise NotImplementedError(self) + + SIG_ERROR = ("Invalid syntax in ``.. prodn::`` directive" + + "\nExpected ``name ::= ...`` or ``name += ...``" + + " (e.g. 
``pattern += constr:(@ident)``)") + + def handle_signature(self, signature, signode): + nsplits = 2 + parts = signature.split(maxsplit=nsplits) + if len(parts) != 3: + raise ExtensionError(ProductionObject.SIG_ERROR) + + lhs, op, rhs = (part.strip() for part in parts) + if op not in ["::=", "+="]: + raise ExtensionError(ProductionObject.SIG_ERROR) + + self._render_annotation(signode) + + lhs_op = '{} {} '.format(lhs, op) + lhs_node = nodes.literal(lhs_op, lhs_op) + + position = self.state_machine.get_source_and_line(self.lineno) + rhs_node = parse_notation(rhs, *position) + signode += addnodes.desc_name(signature, '', lhs_node, rhs_node) + + return ('token', lhs) if op == '::=' else None + + def _add_index_entry(self, name, target): + pass + + def _target_id(self, name): + # Use `name[1]` instead of ``nodes.make_id(name[1])`` to work around + # https://github.com/sphinx-doc/sphinx/issues/4983 + return 'grammar-token-{}'.format(name[1]) + + def _record_name(self, name, targetid): + env = self.state.document.settings.env + objects = env.domaindata['std']['objects'] + self._warn_if_duplicate_name(objects, name) + objects[name] = env.docname, targetid + +class ExceptionObject(NotationObject): + """An error raised by a Coq command or tactic. + + This commonly appears nested in the ``.. tacn::`` that raises the + exception. + + Example:: + + .. tacv:: assert @form by @tactic + + This tactic applies :n:`@tactic` to solve the subgoals generated by + ``assert``. + + .. exn:: Proof is not complete + + Raised if :n:`@tactic` does not fully solve the goal. + """ + subdomain = "exn" + index_suffix = "(err)" + annotation = "Error" + # Uses “exn” since “err” already is a CSS class added by “writer_aux”. + + # Generate names automatically + def _name_from_signature(self, signature): + return stringify_with_ellipses(signature) + +class WarningObject(NotationObject): + """An warning raised by a Coq command or tactic.. + + Do not mistake this for ``.. warning::``; this directive is for warning + messages produced by Coq. + + + Example:: + + .. warn:: Ambiguous path + + When the coercion :token:`qualid` is added to the inheritance graph, non + valid coercion paths are ignored. + """ + subdomain = "warn" + index_suffix = "(warn)" + annotation = "Warning" + + # Generate names automatically + def _name_from_signature(self, signature): + return stringify_with_ellipses(signature) + +def NotationRole(role, rawtext, text, lineno, inliner, options={}, content=[]): + #pylint: disable=unused-argument, dangerous-default-value + """Any text using the notation syntax (``@id``, ``{+, …}``, etc.). + + Use this to explain tactic equivalences. For example, you might write + this:: + + :n:`generalize @term as @ident` is just like :n:`generalize @term`, but + it names the introduced hypothesis :token:`ident`. + + Note that this example also uses ``:token:``. That's because ``ident`` is + defined in the Coq manual as a grammar production, and ``:token:`` + creates a link to that. When referring to a placeholder that happens to be + a grammar production, ``:token:`…``` is typically preferable to ``:n:`@…```. + """ + notation = utils.unescape(text, 1) + position = inliner.reporter.get_source_and_line(lineno) + return [nodes.literal(rawtext, '', parse_notation(notation, *position, rawtext=rawtext))], [] + +def coq_code_role(role, rawtext, text, lineno, inliner, options={}, content=[]): + #pylint: disable=dangerous-default-value + """Coq code. 
+ + Use this for Gallina and Ltac snippets:: + + :g:`apply plus_comm; reflexivity` + :g:`Set Printing All.` + :g:`forall (x: t), P(x)` + """ + options['language'] = 'Coq' + return code_role(role, rawtext, text, lineno, inliner, options, content) + ## Too heavy: + ## Forked from code_role to use our custom tokenizer; this doesn't work for + ## snippets though: for example CoqDoc swallows the parentheses around this: + ## “(a: A) (b: B)” + # set_classes(options) + # classes = ['code', 'coq'] + # code = utils.unescape(text, 1) + # node = nodes.literal(rawtext, '', *highlight_using_coqdoc(code), classes=classes) + # return [node], [] + +CoqCodeRole = coq_code_role + +class CoqtopDirective(Directive): + r"""A reST directive to describe interactions with Coqtop. + + Usage:: + + .. coqtop:: options… + + Coq code to send to coqtop + + Example:: + + .. coqtop:: in reset undo + + Print nat. + Definition a := 1. + + Here is a list of permissible options: + + - Display options + + - ``all``: Display input and output + - ``in``: Display only input + - ``out``: Display only output + - ``none``: Display neither (useful for setup commands) + + - Behavior options + + - ``reset``: Send a ``Reset Initial`` command before running this block + - ``undo``: Send an ``Undo n`` (``n`` = number of sentences) command after + running all the commands in this block + + ``coqtop``\ 's state is preserved across consecutive ``.. coqtop::`` blocks + of the same document (``coqrst`` creates a single ``coqtop`` process per + reST source file). Use the ``reset`` option to reset Coq's state. + """ + has_content = True + required_arguments = 0 + optional_arguments = 1 + final_argument_whitespace = True + option_spec = { 'name': directives.unchanged } + directive_name = "coqtop" + + def run(self): + # Uses a ‘container’ instead of a ‘literal_block’ to disable + # Pygments-based post-processing (we could also set rawsource to '') + content = '\n'.join(self.content) + args = self.arguments[0].split() if self.arguments else ['in'] + if 'all' in args: + args.extend(['in', 'out']) + node = nodes.container(content, coqtop_options = list(set(args)), + classes=['coqtop', 'literal-block']) + self.add_name(node) + return [node] + +class CoqdocDirective(Directive): + """A reST directive to display Coqtop-formatted source code. + + Usage:: + + .. coqdoc:: + + Coq code to highlight + + Example:: + + .. coqdoc:: + + Definition test := 1. + """ + # TODO implement this as a Pygments highlighter? + has_content = True + required_arguments = 0 + optional_arguments = 0 + final_argument_whitespace = True + option_spec = { 'name': directives.unchanged } + directive_name = "coqdoc" + + def run(self): + # Uses a ‘container’ instead of a ‘literal_block’ to disable + # Pygments-based post-processing (we could also set rawsource to '') + content = '\n'.join(self.content) + node = nodes.inline(content, '', *highlight_using_coqdoc(content)) + wrapper = nodes.container(content, node, classes=['coqdoc', 'literal-block']) + self.add_name(wrapper) + return [wrapper] + +class ExampleDirective(BaseAdmonition): + """A reST directive for examples. + + This behaves like a generic admonition; see + http://docutils.sourceforge.net/docs/ref/rst/directives.html#generic-admonition + for more details. + + Optionally, any text immediately following the ``.. example::`` header is + used as the example's title. + + Example:: + + .. example:: Adding a hint to a database + + The following adds ``plus_comm`` to the ``plu`` database: + + .. 
coqdoc:: + + Hint Resolve plus_comm : plu. + """ + node_class = nodes.admonition + directive_name = "example" + optional_arguments = 1 + + def run(self): + # ‘BaseAdmonition’ checks whether ‘node_class’ is ‘nodes.admonition’, + # and uses arguments[0] as the title in that case (in other cases, the + # title is unset, and it is instead set in the HTML visitor). + assert len(self.arguments) <= 1 + self.arguments = [": ".join(['Example'] + self.arguments)] + self.options['classes'] = ['admonition', 'note'] + return super().run() + +class PreambleDirective(Directive): + r"""A reST directive to include a TeX file. + + Mostly useful to let MathJax know about `\def`s and `\newcommand`s. The + contents of the TeX file are wrapped in a math environment, as MathJax + doesn't process LaTeX definitions otherwise. + + Usage:: + + .. preamble:: preamble.tex + """ + has_content = False + required_arguments = 1 + optional_arguments = 0 + final_argument_whitespace = True + option_spec = {} + directive_name = "preamble" + + def run(self): + document = self.state.document + env = document.settings.env + + if not document.settings.file_insertion_enabled: + msg = 'File insertion disabled' + return [document.reporter.warning(msg, line=self.lineno)] + + rel_fname, abs_fname = env.relfn2path(self.arguments[0]) + env.note_dependency(rel_fname) + + with open(abs_fname, encoding="utf-8") as ltx: + latex = ltx.read() + + node = make_math_node(latex, env.docname, nowrap=False) + node['classes'] = ["math-preamble"] + set_source_info(self, node) + return [node] + +class InferenceDirective(Directive): + r"""A reST directive to format inference rules. + + This also serves as a small illustration of the way to create new Sphinx + directives. + + Usage:: + + .. inference:: name + + newline-separated premises + -------------------------- + conclusion + + Example:: + + .. 
inference:: Prod-Pro + + \WTEG{T}{s} + s \in \Sort + \WTE{\Gamma::(x:T)}{U}{\Prop} + ----------------------------- + \WTEG{\forall~x:T,U}{\Prop} + """ + required_arguments = 1 + optional_arguments = 0 + has_content = True + final_argument_whitespace = True + directive_name = "inference" + + @staticmethod + def prepare_latex_operand(op): + # TODO: Could use a fancier inference class in LaTeX + return '%\n\\hspace{3em}%\n'.join(op.strip().splitlines()) + + def prepare_latex(self, content): + parts = re.split('^ *----+ *$', content, flags=re.MULTILINE) + if len(parts) != 2: + raise self.error('Expected two parts in ‘inference’ directive, separated by a rule (----).') + + top, bottom = tuple(InferenceDirective.prepare_latex_operand(p) for p in parts) + return "%\n".join(("\\frac{", top, "}{", bottom, "}")) + + def run(self): + self.assert_has_content() + + title = self.arguments[0] + content = '\n'.join(self.content) + latex = self.prepare_latex(content) + docname = self.state.document.settings.env.docname + math_node = make_math_node(latex, docname, nowrap=False) + + tid = nodes.make_id(title) + target = nodes.target('', '', ids=['inference-' + tid]) + self.state.document.note_explicit_target(target) + + term, desc = nodes.term('', title), nodes.description('', math_node) + dli = nodes.definition_list_item('', term, desc) + dl = nodes.definition_list(content, target, dli) + set_source_info(self, dl) + return [dl] + +class AnsiColorsParser(): + """Parse ANSI-colored output from Coqtop into Sphinx nodes.""" + + # Coqtop's output crashes ansi.py, because it contains a bunch of extended codes + # This class is a fork of the original ansi.py, released under a BSD license in sphinx-contribs + + COLOR_PATTERN = re.compile('\x1b\\[([^m]+)m') + + def __init__(self): + self.new_nodes, self.pending_nodes = [], [] + + def _finalize_pending_nodes(self): + self.new_nodes.extend(self.pending_nodes) + self.pending_nodes = [] + + def _add_text(self, raw, beg, end): + if beg < end: + text = raw[beg:end] + if self.pending_nodes: + self.pending_nodes[-1].append(nodes.Text(text)) + else: + self.new_nodes.append(nodes.inline('', text)) + + def colorize_str(self, raw): + """Parse raw (an ANSI-colored output string from Coqtop) into Sphinx nodes.""" + last_end = 0 + for match in AnsiColorsParser.COLOR_PATTERN.finditer(raw): + self._add_text(raw, last_end, match.start()) + last_end = match.end() + classes = ansicolors.parse_ansi(match.group(1)) + if 'ansi-reset' in classes: + self._finalize_pending_nodes() + else: + node = nodes.inline() + self.pending_nodes.append(node) + node['classes'].extend(classes) + self._add_text(raw, last_end, len(raw)) + self._finalize_pending_nodes() + return self.new_nodes + +class CoqtopBlocksTransform(Transform): + """Filter handling the actual work for the coqtop directive + + Adds coqtop's responses, colorizes input and output, and merges consecutive + coqtop directives for better visual rendition. + """ + default_priority = 10 + + @staticmethod + def is_coqtop_block(node): + return isinstance(node, nodes.Element) and 'coqtop_options' in node + + @staticmethod + def split_sentences(source): + """Split Coq sentences in source. Could be improved.""" + return re.split(r"(?<=(?`_ + and reference its tokens using ``:token:`…```. 
+ """ + #pylint: disable=dangerous-default-value, unused-argument + env = inliner.document.settings.env + targetid = 'grammar-token-{}'.format(text) + target = nodes.target('', '', ids=[targetid]) + inliner.document.note_explicit_target(target) + code = nodes.literal(rawtext, text, role=typ.lower()) + node = nodes.inline(rawtext, '', target, code, classes=['inline-grammar-production']) + set_role_source_info(inliner, lineno, node) + env.domaindata['std']['objects']['token', text] = env.docname, targetid + return [node], [] + +GrammarProductionRole.role_name = "production" + +class CoqDomain(Domain): + """A domain to document Coq code. + + Sphinx has a notion of “domains”, used to tailor it to a specific language. + Domains mostly consist in descriptions of the objects that we wish to + describe (for Coq, this includes tactics, tactic notations, options, + exceptions, etc.), as well as domain-specific roles and directives. + + Each domain is responsible for tracking its objects, and resolving + references to them. In the case of Coq, this leads us to define Coq + “subdomains”, which classify objects into categories in which names must be + unique. For example, a tactic and a theorem may share a name, but two + tactics cannot be named the same. + """ + + name = 'coq' + label = 'Coq' + + object_types = { + # ObjType (= directive type) → (Local name, *xref-roles) + 'cmd': ObjType('cmd', 'cmd'), + 'cmdv': ObjType('cmdv', 'cmd'), + 'tacn': ObjType('tacn', 'tacn'), + 'tacv': ObjType('tacv', 'tacn'), + 'opt': ObjType('opt', 'opt'), + 'flag': ObjType('flag', 'flag'), + 'table': ObjType('table', 'table'), + 'thm': ObjType('thm', 'thm'), + 'prodn': ObjType('prodn', 'prodn'), + 'exn': ObjType('exn', 'exn'), + 'warn': ObjType('warn', 'exn'), + 'index': ObjType('index', 'index', searchprio=-1) + } + + directives = { + # Note that some directives live in the same semantic subdomain; ie + # there's one directive per object type, but some object types map to + # the same role. 
+ 'cmd': VernacObject, + 'cmdv': VernacVariantObject, + 'tacn': TacticNotationObject, + 'tacv': TacticNotationVariantObject, + 'opt': OptionObject, + 'flag': FlagObject, + 'table': TableObject, + 'thm': GallinaObject, + 'prodn' : ProductionObject, + 'exn': ExceptionObject, + 'warn': WarningObject, + } + + roles = { + # Each of these roles lives in a different semantic “subdomain” + 'cmd': XRefRole(warn_dangling=True), + 'tacn': XRefRole(warn_dangling=True), + 'opt': XRefRole(warn_dangling=True), + 'flag': XRefRole(warn_dangling=True), + 'table': XRefRole(warn_dangling=True), + 'thm': XRefRole(warn_dangling=True), + 'prodn' : XRefRole(warn_dangling=True), + 'exn': XRefRole(warn_dangling=True), + 'warn': XRefRole(warn_dangling=True), + # This one is special + 'index': IndexXRefRole(), + # These are used for highlighting + 'n': NotationRole, + 'g': CoqCodeRole + } + + indices = [CoqVernacIndex, CoqTacticIndex, CoqOptionIndex, CoqGallinaIndex, CoqExceptionIndex] + + data_version = 1 + initial_data = { + # Collect everything under a key that we control, since Sphinx adds + # others, such as “version” + 'objects' : { # subdomain → name → docname, objtype, targetid + 'cmd': {}, + 'tacn': {}, + 'opt': {}, + 'flag': {}, + 'table': {}, + 'thm': {}, + 'prodn' : {}, + 'exn': {}, + 'warn': {}, + } + } + + @staticmethod + def find_index_by_name(targetid): + for index in CoqDomain.indices: + if index.name == targetid: + return index + + def get_objects(self): + # Used for searching and object inventories (intersphinx) + for _, objects in self.data['objects'].items(): + for name, (docname, objtype, targetid) in objects.items(): + yield (name, name, objtype, docname, targetid, self.object_types[objtype].attrs['searchprio']) + for index in self.indices: + yield (index.name, index.localname, 'index', "coq-" + index.name, '', -1) + + def merge_domaindata(self, docnames, otherdata): + DUP = "Duplicate declaration: '{}' also defined in '{}'.\n" + for subdomain, their_objects in otherdata['objects'].items(): + our_objects = self.data['objects'][subdomain] + for name, (docname, objtype, targetid) in their_objects.items(): + if docname in docnames: + if name in our_objects: + self.env.warn(docname, DUP.format(name, our_objects[name][0])) + our_objects[name] = (docname, objtype, targetid) + + def resolve_xref(self, env, fromdocname, builder, role, targetname, node, contnode): + # ‘target’ is the name that was written in the document + # ‘role’ is where this xref comes from; it's exactly one of our subdomains + if role == 'index': + index = CoqDomain.find_index_by_name(targetname) + if index: + return make_refnode(builder, fromdocname, "coq-" + index.name, '', contnode, index.localname) + else: + resolved = self.data['objects'][role].get(targetname) + if resolved: + (todocname, _, targetid) = resolved + return make_refnode(builder, fromdocname, todocname, targetid, contnode, targetname) + + def clear_doc(self, docname_to_clear): + for subdomain_objects in self.data['objects'].values(): + for name, (docname, _, _) in list(subdomain_objects.items()): + if docname == docname_to_clear: + del subdomain_objects[name] + +def is_coqtop_or_coqdoc_block(node): + return (isinstance(node, nodes.Element) and + ('coqtop' in node['classes'] or 'coqdoc' in node['classes'])) + +def simplify_source_code_blocks_for_latex(app, doctree, fromdocname): # pylint: disable=unused-argument + """Simplify coqdoc and coqtop blocks. 
+ + In HTML mode, this does nothing; in other formats, such as LaTeX, it + replaces coqdoc and coqtop blocks by plain text sources, which will use + pygments if available. This prevents the LaTeX builder from getting + confused. + """ + is_html = app.builder.tags.has("html") + for node in doctree.traverse(is_coqtop_or_coqdoc_block): + if is_html: + node.rawsource = '' # Prevent pygments from kicking in + elif 'coqtop-hidden' in node['classes']: + node.parent.remove(node) + else: + node.replace_self(nodes.literal_block(node.rawsource, node.rawsource, language="Coq")) + +COQ_ADDITIONAL_DIRECTIVES = [CoqtopDirective, + CoqdocDirective, + ExampleDirective, + InferenceDirective, + PreambleDirective] + +COQ_ADDITIONAL_ROLES = [GrammarProductionRole] + +def setup(app): + """Register the Coq domain""" + + # A few sanity checks: + subdomains = set(obj.subdomain for obj in CoqDomain.directives.values()) + assert subdomains.issuperset(chain(*(idx.subdomains for idx in CoqDomain.indices))) + assert subdomains.issubset(CoqDomain.roles.keys()) + + # Add domain, directives, and roles + app.add_domain(CoqDomain) + + for role in COQ_ADDITIONAL_ROLES: + app.add_role(role.role_name, role) + + for directive in COQ_ADDITIONAL_DIRECTIVES: + app.add_directive(directive.directive_name, directive) + + app.add_transform(CoqtopBlocksTransform) + app.connect('doctree-resolved', simplify_source_code_blocks_for_latex) + app.connect('doctree-resolved', CoqtopBlocksTransform.merge_consecutive_coqtop_blocks) + + # Add extra styles + app.add_stylesheet("fonts.css") + app.add_stylesheet("ansi.css") + app.add_stylesheet("coqdoc.css") + app.add_javascript("notations.js") + app.add_stylesheet("notations.css") + app.add_stylesheet("pre-text.css") + + # Tell Sphinx about extra settings + app.add_config_value("report_undocumented_coq_objects", None, 'env') + + # ``env_version`` is used by Sphinx to know when to invalidate + # coqdomain-specific bits in its caches. It should be incremented when the + # contents of ``env.domaindata['coq']`` change. See + # `https://github.com/sphinx-doc/sphinx/issues/4460`. 
+ meta = { "version": "0.1", + "env_version": 2, + "parallel_read_safe": True } + return meta diff --git a/doc/tools/coqrst/notations/Makefile b/doc/tools/coqrst/notations/Makefile new file mode 100644 index 00000000..c017aed9 --- /dev/null +++ b/doc/tools/coqrst/notations/Makefile @@ -0,0 +1,27 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## ../tests/antlr-notations.html diff --git a/doc/tools/coqrst/notations/TacticNotations.g b/doc/tools/coqrst/notations/TacticNotations.g new file mode 100644 index 00000000..a889ebda --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotations.g @@ -0,0 +1,33 @@ +/************************************************************************/ +/* * The Coq Proof Assistant / The Coq Development Team */ +/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */ +/* ", + "'{'", "'}'" ] + + symbolicNames = [ "", + "LGROUP", "LBRACE", "RBRACE", "METACHAR", "ATOM", "ID", "SUB", + "WHITESPACE" ] + + ruleNames = [ "LGROUP", "LBRACE", "RBRACE", "METACHAR", "ATOM", "ID", + "SUB", "WHITESPACE" ] + + grammarFileName = "TacticNotations.g" + + def __init__(self, input=None, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.7") + self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) + self._actions = None + self._predicates = None diff --git a/doc/tools/coqrst/notations/TacticNotationsLexer.tokens b/doc/tools/coqrst/notations/TacticNotationsLexer.tokens new file mode 100644 index 00000000..88b38f97 --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotationsLexer.tokens @@ -0,0 +1,10 @@ +LGROUP=1 +LBRACE=2 +RBRACE=3 +METACHAR=4 +ATOM=5 +ID=6 +SUB=7 +WHITESPACE=8 +'{'=2 +'}'=3 diff --git a/doc/tools/coqrst/notations/TacticNotationsParser.py b/doc/tools/coqrst/notations/TacticNotationsParser.py new file mode 100644 index 00000000..645f0789 --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotationsParser.py @@ -0,0 +1,595 @@ +# Generated from TacticNotations.g by ANTLR 4.7 +# encoding: utf-8 +from antlr4 import * +from io import StringIO +from typing.io import TextIO +import sys + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\n") + buf.write("J\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b") + buf.write("\t\b\4\t\t\t\4\n\t\n\3\2\3\2\3\2\3\3\3\3\5\3\32\n\3\3") + buf.write("\3\7\3\35\n\3\f\3\16\3 \13\3\3\4\3\4\3\4\3\4\3\4\5\4\'") + buf.write("\n\4\3\5\3\5\5\5+\n\5\3\5\3\5\3\5\5\5\60\n\5\3\5\3\5\3") + buf.write("\6\3\6\5\6\66\n\6\3\6\3\6\5\6:\n\6\3\6\3\6\3\7\3\7\3\b") + buf.write("\3\b\3\t\3\t\5\tD\n\t\3\n\3\n\5\nH\n\n\3\n\2\2\13\2\4") + buf.write("\6\b\n\f\16\20\22\2\2\2L\2\24\3\2\2\2\4\27\3\2\2\2\6&") + buf.write("\3\2\2\2\b(\3\2\2\2\n\63\3\2\2\2\f=\3\2\2\2\16?\3\2\2") + buf.write("\2\20A\3\2\2\2\22E\3\2\2\2\24\25\5\4\3\2\25\26\7\2\2\3") + buf.write("\26\3\3\2\2\2\27\36\5\6\4\2\30\32\5\f\7\2\31\30\3\2\2") + buf.write("\2\31\32\3\2\2\2\32\33\3\2\2\2\33\35\5\6\4\2\34\31\3\2") + buf.write("\2\2\35 \3\2\2\2\36\34\3\2\2\2\36\37\3\2\2\2\37\5\3\2") + buf.write("\2\2 \36\3\2\2\2!\'\5\20\t\2\"\'\5\16\b\2#\'\5\22\n\2") + buf.write("$\'\5\b\5\2%\'\5\n\6\2&!\3\2\2\2&\"\3\2\2\2&#\3\2\2\2") + buf.write("&$\3\2\2\2&%\3\2\2\2\'\7\3\2\2\2(*\7\3\2\2)+\7\7\2\2*") + buf.write(")\3\2\2\2*+\3\2\2\2+,\3\2\2\2,-\7\n\2\2-/\5\4\3\2.\60") + 
buf.write("\7\n\2\2/.\3\2\2\2/\60\3\2\2\2\60\61\3\2\2\2\61\62\7\5") + buf.write("\2\2\62\t\3\2\2\2\63\65\7\4\2\2\64\66\5\f\7\2\65\64\3") + buf.write("\2\2\2\65\66\3\2\2\2\66\67\3\2\2\2\679\5\4\3\28:\5\f\7") + buf.write("\298\3\2\2\29:\3\2\2\2:;\3\2\2\2;<\7\5\2\2<\13\3\2\2\2") + buf.write("=>\7\n\2\2>\r\3\2\2\2?@\7\6\2\2@\17\3\2\2\2AC\7\7\2\2") + buf.write("BD\7\t\2\2CB\3\2\2\2CD\3\2\2\2D\21\3\2\2\2EG\7\b\2\2F") + buf.write("H\7\t\2\2GF\3\2\2\2GH\3\2\2\2H\23\3\2\2\2\13\31\36&*/") + buf.write("\659CG") + return buf.getvalue() + + +class TacticNotationsParser ( Parser ): + + grammarFileName = "TacticNotations.g" + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + sharedContextCache = PredictionContextCache() + + literalNames = [ "", "", "'{'", "'}'" ] + + symbolicNames = [ "", "LGROUP", "LBRACE", "RBRACE", "METACHAR", + "ATOM", "ID", "SUB", "WHITESPACE" ] + + RULE_top = 0 + RULE_blocks = 1 + RULE_block = 2 + RULE_repeat = 3 + RULE_curlies = 4 + RULE_whitespace = 5 + RULE_meta = 6 + RULE_atomic = 7 + RULE_hole = 8 + + ruleNames = [ "top", "blocks", "block", "repeat", "curlies", "whitespace", + "meta", "atomic", "hole" ] + + EOF = Token.EOF + LGROUP=1 + LBRACE=2 + RBRACE=3 + METACHAR=4 + ATOM=5 + ID=6 + SUB=7 + WHITESPACE=8 + + def __init__(self, input:TokenStream, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.7") + self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) + self._predicates = None + + + + class TopContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def blocks(self): + return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0) + + + def EOF(self): + return self.getToken(TacticNotationsParser.EOF, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_top + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTop" ): + return visitor.visitTop(self) + else: + return visitor.visitChildren(self) + + + + + def top(self): + + localctx = TacticNotationsParser.TopContext(self, self._ctx, self.state) + self.enterRule(localctx, 0, self.RULE_top) + try: + self.enterOuterAlt(localctx, 1) + self.state = 18 + self.blocks() + self.state = 19 + self.match(TacticNotationsParser.EOF) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class BlocksContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def block(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(TacticNotationsParser.BlockContext) + else: + return self.getTypedRuleContext(TacticNotationsParser.BlockContext,i) + + + def whitespace(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(TacticNotationsParser.WhitespaceContext) + else: + return self.getTypedRuleContext(TacticNotationsParser.WhitespaceContext,i) + + + def getRuleIndex(self): + return TacticNotationsParser.RULE_blocks + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitBlocks" ): + return visitor.visitBlocks(self) + else: + return visitor.visitChildren(self) + + + + + def blocks(self): + + localctx = 
TacticNotationsParser.BlocksContext(self, self._ctx, self.state) + self.enterRule(localctx, 2, self.RULE_blocks) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 21 + self.block() + self.state = 28 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,1,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 23 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.WHITESPACE: + self.state = 22 + self.whitespace() + + + self.state = 25 + self.block() + self.state = 30 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,1,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class BlockContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def atomic(self): + return self.getTypedRuleContext(TacticNotationsParser.AtomicContext,0) + + + def meta(self): + return self.getTypedRuleContext(TacticNotationsParser.MetaContext,0) + + + def hole(self): + return self.getTypedRuleContext(TacticNotationsParser.HoleContext,0) + + + def repeat(self): + return self.getTypedRuleContext(TacticNotationsParser.RepeatContext,0) + + + def curlies(self): + return self.getTypedRuleContext(TacticNotationsParser.CurliesContext,0) + + + def getRuleIndex(self): + return TacticNotationsParser.RULE_block + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitBlock" ): + return visitor.visitBlock(self) + else: + return visitor.visitChildren(self) + + + + + def block(self): + + localctx = TacticNotationsParser.BlockContext(self, self._ctx, self.state) + self.enterRule(localctx, 4, self.RULE_block) + try: + self.state = 36 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [TacticNotationsParser.ATOM]: + self.enterOuterAlt(localctx, 1) + self.state = 31 + self.atomic() + pass + elif token in [TacticNotationsParser.METACHAR]: + self.enterOuterAlt(localctx, 2) + self.state = 32 + self.meta() + pass + elif token in [TacticNotationsParser.ID]: + self.enterOuterAlt(localctx, 3) + self.state = 33 + self.hole() + pass + elif token in [TacticNotationsParser.LGROUP]: + self.enterOuterAlt(localctx, 4) + self.state = 34 + self.repeat() + pass + elif token in [TacticNotationsParser.LBRACE]: + self.enterOuterAlt(localctx, 5) + self.state = 35 + self.curlies() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class RepeatContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def LGROUP(self): + return self.getToken(TacticNotationsParser.LGROUP, 0) + + def WHITESPACE(self, i:int=None): + if i is None: + return self.getTokens(TacticNotationsParser.WHITESPACE) + else: + return self.getToken(TacticNotationsParser.WHITESPACE, i) + + def blocks(self): + return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0) + + + def RBRACE(self): + return self.getToken(TacticNotationsParser.RBRACE, 0) + + def ATOM(self): + return 
self.getToken(TacticNotationsParser.ATOM, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_repeat + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitRepeat" ): + return visitor.visitRepeat(self) + else: + return visitor.visitChildren(self) + + + + + def repeat(self): + + localctx = TacticNotationsParser.RepeatContext(self, self._ctx, self.state) + self.enterRule(localctx, 6, self.RULE_repeat) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 38 + self.match(TacticNotationsParser.LGROUP) + self.state = 40 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.ATOM: + self.state = 39 + self.match(TacticNotationsParser.ATOM) + + + self.state = 42 + self.match(TacticNotationsParser.WHITESPACE) + self.state = 43 + self.blocks() + self.state = 45 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.WHITESPACE: + self.state = 44 + self.match(TacticNotationsParser.WHITESPACE) + + + self.state = 47 + self.match(TacticNotationsParser.RBRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class CurliesContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def LBRACE(self): + return self.getToken(TacticNotationsParser.LBRACE, 0) + + def blocks(self): + return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0) + + + def RBRACE(self): + return self.getToken(TacticNotationsParser.RBRACE, 0) + + def whitespace(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(TacticNotationsParser.WhitespaceContext) + else: + return self.getTypedRuleContext(TacticNotationsParser.WhitespaceContext,i) + + + def getRuleIndex(self): + return TacticNotationsParser.RULE_curlies + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitCurlies" ): + return visitor.visitCurlies(self) + else: + return visitor.visitChildren(self) + + + + + def curlies(self): + + localctx = TacticNotationsParser.CurliesContext(self, self._ctx, self.state) + self.enterRule(localctx, 8, self.RULE_curlies) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 49 + self.match(TacticNotationsParser.LBRACE) + self.state = 51 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.WHITESPACE: + self.state = 50 + self.whitespace() + + + self.state = 53 + self.blocks() + self.state = 55 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.WHITESPACE: + self.state = 54 + self.whitespace() + + + self.state = 57 + self.match(TacticNotationsParser.RBRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class WhitespaceContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def WHITESPACE(self): + return self.getToken(TacticNotationsParser.WHITESPACE, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_whitespace + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitWhitespace" ): + return 
visitor.visitWhitespace(self) + else: + return visitor.visitChildren(self) + + + + + def whitespace(self): + + localctx = TacticNotationsParser.WhitespaceContext(self, self._ctx, self.state) + self.enterRule(localctx, 10, self.RULE_whitespace) + try: + self.enterOuterAlt(localctx, 1) + self.state = 59 + self.match(TacticNotationsParser.WHITESPACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class MetaContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def METACHAR(self): + return self.getToken(TacticNotationsParser.METACHAR, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_meta + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitMeta" ): + return visitor.visitMeta(self) + else: + return visitor.visitChildren(self) + + + + + def meta(self): + + localctx = TacticNotationsParser.MetaContext(self, self._ctx, self.state) + self.enterRule(localctx, 12, self.RULE_meta) + try: + self.enterOuterAlt(localctx, 1) + self.state = 61 + self.match(TacticNotationsParser.METACHAR) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class AtomicContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def ATOM(self): + return self.getToken(TacticNotationsParser.ATOM, 0) + + def SUB(self): + return self.getToken(TacticNotationsParser.SUB, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_atomic + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitAtomic" ): + return visitor.visitAtomic(self) + else: + return visitor.visitChildren(self) + + + + + def atomic(self): + + localctx = TacticNotationsParser.AtomicContext(self, self._ctx, self.state) + self.enterRule(localctx, 14, self.RULE_atomic) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 63 + self.match(TacticNotationsParser.ATOM) + self.state = 65 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.SUB: + self.state = 64 + self.match(TacticNotationsParser.SUB) + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class HoleContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def ID(self): + return self.getToken(TacticNotationsParser.ID, 0) + + def SUB(self): + return self.getToken(TacticNotationsParser.SUB, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_hole + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitHole" ): + return visitor.visitHole(self) + else: + return visitor.visitChildren(self) + + + + + def hole(self): + + localctx = TacticNotationsParser.HoleContext(self, self._ctx, self.state) + self.enterRule(localctx, 16, self.RULE_hole) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 67 + 
self.match(TacticNotationsParser.ID) + self.state = 69 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.SUB: + self.state = 68 + self.match(TacticNotationsParser.SUB) + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx diff --git a/doc/tools/coqrst/notations/TacticNotationsVisitor.py b/doc/tools/coqrst/notations/TacticNotationsVisitor.py new file mode 100644 index 00000000..c0bcc4af --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotationsVisitor.py @@ -0,0 +1,58 @@ +# Generated from TacticNotations.g by ANTLR 4.7 +from antlr4 import * +if __name__ is not None and "." in __name__: + from .TacticNotationsParser import TacticNotationsParser +else: + from TacticNotationsParser import TacticNotationsParser + +# This class defines a complete generic visitor for a parse tree produced by TacticNotationsParser. + +class TacticNotationsVisitor(ParseTreeVisitor): + + # Visit a parse tree produced by TacticNotationsParser#top. + def visitTop(self, ctx:TacticNotationsParser.TopContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#blocks. + def visitBlocks(self, ctx:TacticNotationsParser.BlocksContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#block. + def visitBlock(self, ctx:TacticNotationsParser.BlockContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#repeat. + def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#curlies. + def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#whitespace. + def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#meta. + def visitMeta(self, ctx:TacticNotationsParser.MetaContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#atomic. + def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#hole. 
+ def visitHole(self, ctx:TacticNotationsParser.HoleContext): + return self.visitChildren(ctx) + + + +del TacticNotationsParser diff --git a/doc/tools/coqrst/notations/__init__.py b/doc/tools/coqrst/notations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/doc/tools/coqrst/notations/fontsupport.py b/doc/tools/coqrst/notations/fontsupport.py new file mode 100755 index 00000000..a3efd97f --- /dev/null +++ b/doc/tools/coqrst/notations/fontsupport.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python2 +# -*- coding: utf-8 -*- +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## = 191: + fnt.removeGlyph(g) + return fnt + +def center_glyphs(src_font_path, dst_font_path, dst_name): + fnt = trim_font(fontforge.open(src_font_path)) + + size = max(g.width for g in fnt.glyphs()) + fnt.ascent, fnt.descent = size, 0 + for glyph in fnt.glyphs(): + scale_single_glyph(glyph, size, size) + + fnt.sfnt_names = [] + fnt.fontname = fnt.familyname = fnt.fullname = dst_name + fnt.generate(dst_font_path) + +if __name__ == '__main__': + from os.path import dirname, join, abspath + curdir = dirname(abspath(__file__)) + ubuntumono_path = join(curdir, "UbuntuMono-B.ttf") + ubuntumono_mod_path = join(curdir, "CoqNotations.ttf") + center_glyphs(ubuntumono_path, ubuntumono_mod_path, "CoqNotations") diff --git a/doc/tools/coqrst/notations/html.py b/doc/tools/coqrst/notations/html.py new file mode 100644 index 00000000..87a41cf9 --- /dev/null +++ b/doc/tools/coqrst/notations/html.py @@ -0,0 +1,75 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## here + +def htmlize(notation): + """Translate notation to a dominate HTML tree""" + top = tags.span(_class='notation') + with top: + TacticNotationsToHTMLVisitor().visit(parse(notation)) + return top + +def htmlize_str(notation): + """Translate notation to a raw HTML document""" + # ‘pretty=True’ introduces spurious spaces + return htmlize(notation).render(pretty=False) + +def htmlize_p(notation): + """Like `htmlize`, wrapped in a ‘p’. + Does not return: instead, must be run in a dominate context. + """ + with tags.p(): + htmlize(notation) diff --git a/doc/tools/coqrst/notations/parsing.py b/doc/tools/coqrst/notations/parsing.py new file mode 100644 index 00000000..506240d9 --- /dev/null +++ b/doc/tools/coqrst/notations/parsing.py @@ -0,0 +1,37 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## str: + """Configure a coqtop instance (but don't start it yet). + + :param coqtop_bin: The path to coqtop; uses $COQBIN by default, falling back to "coqtop" + :param color: When True, tell coqtop to produce ANSI color codes (see + the ansicolors module) + :param args: Additional arugments to coqtop. 
+ """ + self.coqtop_bin = coqtop_bin or os.path.join(os.getenv('COQBIN', ""), "coqtop") + if not pexpect.utils.which(self.coqtop_bin): + raise ValueError("coqtop binary not found: '{}'".format(self.coqtop_bin)) + self.args = (args or []) + ["-boot", "-color", "on"] * color + self.coqtop = None + + def __enter__(self): + if self.coqtop: + raise ValueError("This module isn't re-entrant") + self.coqtop = pexpect.spawn(self.coqtop_bin, args=self.args, echo=False, encoding="utf-8") + # Disable delays (http://pexpect.readthedocs.io/en/stable/commonissues.html?highlight=delaybeforesend) + self.coqtop.delaybeforesend = 0 + self.next_prompt() + return self + + def __exit__(self, type, value, traceback): + self.coqtop.kill(9) + + def next_prompt(self): + "Wait for the next coqtop prompt, and return the output preceeding it." + self.coqtop.expect(CoqTop.COQTOP_PROMPT, timeout = 10) + return self.coqtop.before + + def sendone(self, sentence): + """Send a single sentence to coqtop. + + :sentence: One Coq sentence (otherwise, Coqtop will produce multiple + prompts and we'll get confused) + """ + # Suppress newlines, but not spaces: they are significant in notations + sentence = re.sub(r"[\r\n]+", " ", sentence).strip() + self.coqtop.sendline(sentence) + try: + output = self.next_prompt() + except: + print("Error while sending the following sentence to coqtop: {}".format(sentence)) + raise + # print("Got {}".format(repr(output))) + return output + +def sendmany(*sentences): + """A small demo: send each sentence in sentences and print the output""" + with CoqTop() as coqtop: + for sentence in sentences: + print("=====================================") + print(sentence) + print("-------------------------------------") + response = coqtop.sendone(sentence) + print(response) + +def main(): + """Run a simple performance test and demo `sendmany`""" + with CoqTop() as coqtop: + for _ in range(200): + print(repr(coqtop.sendone("Check nat."))) + sendmany("Goal False -> True.", "Proof.", "intros H.", + "Check H.", "Chchc.", "apply I.", "Qed.") + +if __name__ == '__main__': + main() -- cgit v1.2.3