author     Emilio Jesus Gallego Arias <e+git@x80.org>    2018-07-02 15:06:17 +0200
committer  Emilio Jesus Gallego Arias <e+git@x80.org>    2018-07-02 15:06:17 +0200
commit     02fe76c0c1c3f01c6fb4310dd4450b35f43005da (patch)
tree       1d1c7c47fff5688105d0f868f9ab89d479ede899
parent     f6f606232ae3c32e5c90d4fd427721875529b777 (diff)
parent     47bbe39d480f02dc92e4856fa8d872f52b9903a4 (diff)
Merge PR #7902: Use a homebrew parser to replace the GEXTEND extension points of Camlp5
-rw-r--r--   .gitignore                                                   6
-rw-r--r--   Makefile                                                     9
-rw-r--r--   Makefile.build                                              24
-rw-r--r--   configure.ml                                                13
-rw-r--r--   dev/ci/user-overlays/07902-ppedrot-camlp5-parser.sh          8
-rw-r--r--   dev/doc/changes.md                                          44
-rw-r--r--   grammar/coqpp_ast.mli                                       80
-rw-r--r--   grammar/coqpp_lex.mll                                      163
-rw-r--r--   grammar/coqpp_main.ml                                      292
-rw-r--r--   grammar/coqpp_parse.mly                                    190
-rw-r--r--   grammar/q_util.mlp                                           2
-rw-r--r--   parsing/extend.ml                                            2
-rw-r--r--   parsing/g_constr.mlg (renamed from parsing/g_constr.ml4)   351
-rw-r--r--   parsing/g_prim.mlg (renamed from parsing/g_prim.ml4)        69
-rw-r--r--   parsing/pcoq.ml                                             11
-rw-r--r--   parsing/pcoq.mli                                             3
-rw-r--r--   plugins/ltac/g_tactic.mlg (renamed from plugins/ltac/g_tactic.ml4)  452
-rw-r--r--   plugins/ltac/tacentries.ml                                   4
-rw-r--r--   toplevel/g_toplevel.mlg (renamed from toplevel/g_toplevel.ml4)       19
-rw-r--r--   vernac/egramcoq.ml                                           7
-rw-r--r--   vernac/g_proofs.ml4                                        131
-rw-r--r--   vernac/g_proofs.mlg                                        135
-rw-r--r--   vernac/g_vernac.ml4                                       1156
-rw-r--r--   vernac/g_vernac.mlg                                       1173
24 files changed, 2607 insertions, 1737 deletions
diff --git a/.gitignore b/.gitignore
index 6adbc9fb2..1eee3f94f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -115,6 +115,7 @@ dev/ocamldoc/*.css
# .mll files
+grammar/coqpp_lex.ml
dev/ocamlweb-doc/lex.ml
ide/coq_lex.ml
ide/config_lexer.ml
@@ -126,6 +127,11 @@ tools/ocamllibdep.ml
tools/coqdoc/cpretty.ml
ide/protocol/xml_lexer.ml
+# .mly files
+
+grammar/coqpp_parse.ml
+grammar/coqpp_parse.mli
+
# .ml4 / .mlp files
g_*.ml
diff --git a/Makefile b/Makefile
index 4787377ea..26d582c72 100644
--- a/Makefile
+++ b/Makefile
@@ -74,9 +74,11 @@ endef
## Files in the source tree
LEXFILES := $(call find, '*.mll')
+YACCFILES := $(call find, '*.mly')
export MLLIBFILES := $(call find, '*.mllib')
export MLPACKFILES := $(call find, '*.mlpack')
export ML4FILES := $(call find, '*.ml4')
+export MLGFILES := $(call find, '*.mlg')
export CFILES := $(call findindir, 'kernel/byterun', '*.c')
export MERLINFILES := $(call find, '.merlin')
@@ -90,7 +92,8 @@ EXISTINGMLI := $(call find, '*.mli')
## Files that will be generated
GENML4FILES:= $(ML4FILES:.ml4=.ml)
-export GENMLFILES:=$(LEXFILES:.mll=.ml) kernel/copcodes.ml
+GENMLGFILES:= $(MLGFILES:.mlg=.ml)
+export GENMLFILES:=$(LEXFILES:.mll=.ml) $(YACCFILES:.mly=.ml) $(GENMLGFILES) kernel/copcodes.ml
export GENHFILES:=kernel/byterun/coq_jumptbl.h
export GENFILES:=$(GENMLFILES) $(GENMLIFILES) $(GENHFILES)
@@ -100,7 +103,7 @@ export GENFILES:=$(GENMLFILES) $(GENMLIFILES) $(GENHFILES)
## More complex file lists
-export MLSTATICFILES := $(filter-out $(GENMLFILES) $(GENML4FILES), $(EXISTINGML))
+export MLSTATICFILES := $(filter-out $(GENMLFILES) $(GENML4FILES) $(GENMLGFILES), $(EXISTINGML))
export MLIFILES := $(sort $(GENMLIFILES) $(EXISTINGMLI))
include Makefile.common
@@ -223,7 +226,7 @@ clean-ide:
rm -rf $(COQIDEAPP)
ml4clean:
- rm -f $(GENML4FILES)
+ rm -f $(GENML4FILES) $(GENMLGFILES)
depclean:
find . $(FIND_SKIP_DIRS) '(' -name '*.d' ')' -print | xargs rm -f
diff --git a/Makefile.build b/Makefile.build
index ed251af77..c8ab85765 100644
--- a/Makefile.build
+++ b/Makefile.build
@@ -342,6 +342,7 @@ kernel/copcodes.ml: kernel/byterun/coq_instruct.h
GRAMBASEDEPS := grammar/q_util.cmi
GRAMCMO := grammar/q_util.cmo \
grammar/argextend.cmo grammar/tacextend.cmo grammar/vernacextend.cmo
+COQPPCMO := grammar/coqpp_parse.cmo grammar/coqpp_lex.cmo
grammar/argextend.cmo : $(GRAMBASEDEPS)
grammar/q_util.cmo : $(GRAMBASEDEPS)
@@ -349,6 +350,10 @@ grammar/tacextend.cmo : $(GRAMBASEDEPS) grammar/argextend.cmo
grammar/vernacextend.cmo : $(GRAMBASEDEPS) grammar/tacextend.cmo \
grammar/argextend.cmo
+grammar/coqpp_parse.cmi: grammar/coqpp_ast.cmi
+grammar/coqpp_parse.cmo: grammar/coqpp_ast.cmi grammar/coqpp_parse.cmi
+grammar/coqpp_lex.cmo: grammar/coqpp_ast.cmi grammar/coqpp_parse.cmo
+
## Ocaml compiler with the right options and -I for grammar
GRAMC := $(OCAMLFIND) ocamlc $(CAMLFLAGS) $(CAMLDEBUG) $(USERFLAGS) \
@@ -359,11 +364,15 @@ GRAMC := $(OCAMLFIND) ocamlc $(CAMLFLAGS) $(CAMLDEBUG) $(USERFLAGS) \
grammar/grammar.cma : $(GRAMCMO)
$(SHOW)'Testing $@'
@touch grammar/test.mlp
- $(HIDE)$(GRAMC) -pp '$(CAMLP5O) -I $(MYCAMLP5LIB) $^ -impl' -impl grammar/test.mlp -o grammar/test
+ $(HIDE)$(GRAMC) -pp '$(CAMLP5O) $^ -impl' -impl grammar/test.mlp -o grammar/test
@rm -f grammar/test.* grammar/test
$(SHOW)'OCAMLC -a $@'
$(HIDE)$(GRAMC) $^ -linkall -a -o $@
+grammar/coqpp: $(COQPPCMO) grammar/coqpp_main.ml
+ $(SHOW)'OCAMLC -a $@'
+ $(HIDE)$(GRAMC) $^ -linkall -o $@
+
## Support of Camlp5 and Camlp5
COMPATCMO:=
@@ -376,6 +385,10 @@ grammar/%.cmo: grammar/%.mlp | $(COMPATCMO)
$(SHOW)'OCAMLC -c -pp $<'
$(HIDE)$(GRAMC) -c -pp '$(GRAMPP)' -impl $<
+grammar/%.cmo: grammar/%.ml | $(COMPATCMO)
+ $(SHOW)'OCAMLC -c -pp $<'
+ $(HIDE)$(GRAMC) -c $<
+
grammar/%.cmi: grammar/%.mli
$(SHOW)'OCAMLC -c $<'
$(HIDE)$(GRAMC) -c $<
@@ -747,11 +760,18 @@ plugins/%.cmx: plugins/%.ml
$(SHOW)'OCAMLLEX $<'
$(HIDE)$(OCAMLLEX) -o $@ "$*.mll"
-%.ml: %.ml4 $(CAMLP5DEPS)
+%.ml %.mli: %.mly
+ $(SHOW)'OCAMLYACC $<'
+ $(HIDE)$(OCAMLYACC) --strict "$*.mly"
+
+%.ml: %.ml4 $(CAMLP5DEPS) grammar/coqpp
$(SHOW)'CAMLP5O $<'
$(HIDE)$(CAMLP5O) -I $(MYCAMLP5LIB) $(PR_O) \
$(CAMLP5DEPS) $(CAMLP5USE) $(CAMLP5COMPAT) -impl $< -o $@
+%.ml: %.mlg grammar/coqpp
+ $(SHOW)'COQPP $<'
+ $(HIDE)grammar/coqpp $<
###########################################################################
# Dependencies of ML code
diff --git a/configure.ml b/configure.ml
index b5d5a2419..c13c5e107 100644
--- a/configure.ml
+++ b/configure.ml
@@ -461,14 +461,19 @@ let _ = parse_args ()
type camlexec =
{ mutable find : string;
mutable top : string;
- mutable lex : string; }
+ mutable lex : string;
+ mutable yacc : string;
+ }
let camlexec =
{ find = "ocamlfind";
top = "ocaml";
- lex = "ocamllex"; }
+ lex = "ocamllex";
+ yacc = "ocamlyacc";
+ }
let reset_caml_lex c o = c.lex <- o
+let reset_caml_yacc c o = c.yacc <- o
let reset_caml_top c o = c.top <- o
let reset_caml_find c o = c.find <- o
@@ -580,6 +585,9 @@ let camlbin, caml_version, camllib, findlib_version =
if is_executable (camlbin / "ocamllex")
then reset_caml_lex camlexec (camlbin / "ocamllex") in
let () =
+ if is_executable (camlbin / "ocamlyacc")
+ then reset_caml_yacc camlexec (camlbin / "ocamlyacc") in
+ let () =
if is_executable (camlbin / "ocaml")
then reset_caml_top camlexec (camlbin / "ocaml") in
camlbin, caml_version, camllib, findlib_version
@@ -1280,6 +1288,7 @@ let write_makefile f =
pr "OCAML=%S\n" camlexec.top;
pr "OCAMLFIND=%S\n" camlexec.find;
pr "OCAMLLEX=%S\n" camlexec.lex;
+ pr "OCAMLYACC=%S\n" camlexec.yacc;
pr "# The best compiler: native (=opt) or bytecode (=byte)\n";
pr "BEST=%s\n\n" best_compiler;
pr "# Ocaml version number\n";
diff --git a/dev/ci/user-overlays/07902-ppedrot-camlp5-parser.sh b/dev/ci/user-overlays/07902-ppedrot-camlp5-parser.sh
new file mode 100644
index 000000000..735153ebd
--- /dev/null
+++ b/dev/ci/user-overlays/07902-ppedrot-camlp5-parser.sh
@@ -0,0 +1,8 @@
+_OVERLAY_BRANCH=camlp5-parser
+
+if [ "$CI_PULL_REQUEST" = "7902" ] || [ "$CI_BRANCH" = "$_OVERLAY_BRANCH" ]; then
+
+ ltac2_CI_BRANCH="$_OVERLAY_BRANCH"
+ ltac2_CI_GITURL=https://github.com/ppedrot/ltac2
+
+fi
diff --git a/dev/doc/changes.md b/dev/doc/changes.md
index e6d741159..4177513a1 100644
--- a/dev/doc/changes.md
+++ b/dev/doc/changes.md
@@ -93,6 +93,50 @@ Primitive number parsers
The test suite now allows writing unit tests against OCaml code in the Coq
code base. Those unit tests create a dependency on the OUnit test framework.
+### Transitioning away from Camlp5
+
+In an effort to reduce the dependency on Camlp5, the use of the Camlp5 GEXTEND
+macro is discouraged. Most plugin writers do not need this low-level interface,
+but for those who do, the transition path is fairly straightforward. To convert
+an ml4 file containing only standard OCaml code and GEXTEND statements, the
+following should be enough:
+- replace its "ml4" extension with "mlg"
+- replace GEXTEND with GRAMMAR EXTEND
+- wrap every occurrence of OCaml code into braces { }
+
+For instance, code of the form
+```
+let myval = 0
+
+GEXTEND Gram
+ GLOBAL: my_entry;
+
+my_entry:
+[ [ x = bar; y = qux -> do_something x y
+ | "("; z = LIST0 my_entry; ")" -> do_other_thing z
+] ];
+END
+```
+should be turned into
+```
+{
+let myval = 0
+}
+
+GRAMMAR EXTEND Gram
+ GLOBAL: my_entry;
+
+my_entry:
+[ [ x = bar; y = qux -> { do_something x y }
+ | "("; z = LIST0 my_entry; ")" -> { do_other_thing z }
+] ];
+END
+```
+
+Currently, mlg files can only contain GRAMMAR EXTEND statements. They support
+neither TACTIC, VERNAC nor ARGUMENT EXTEND. If a file contains both kinds of
+statements at the same time, it is recommended to split it in two.
+
## Changes between Coq 8.7 and Coq 8.8
### Bug tracker
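
For orientation, below is a rough, hand-written sketch of the kind of OCaml code the new grammar/coqpp tool emits for the first production of the `my_entry` example shown in the documentation above. It is reconstructed from GramExt.print_ast, print_entry, print_prod and print_fun in grammar/coqpp_main.ml later in this diff, not from an actual run; `my_entry`, `bar`, `qux` and `do_something` are the hypothetical names from that example, and `my_entry` is assumed to be GLOBAL so no local `Gram.Entry.create` is generated. Note that coqpp emits the semantic-action arguments in reverse symbol order, matching the `Extend.Next` nesting.

```
(* Hand-written sketch of coqpp output; layout is illustrative only. *)
let _ =
  let () =
    Gram.safe_extend my_entry
      (None,
       [(None, None,
         [Extend.Rule
            (Extend.Next
               (Extend.Next (Extend.Stop, (Extend.Aentry bar)),
                (Extend.Aentry qux)),
             (fun y x loc -> do_something x y))])])
  in ()
```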
diff --git a/grammar/coqpp_ast.mli b/grammar/coqpp_ast.mli
new file mode 100644
index 000000000..245e530ae
--- /dev/null
+++ b/grammar/coqpp_ast.mli
@@ -0,0 +1,80 @@
+(************************************************************************)
+(* v * The Coq Proof Assistant / The Coq Development Team *)
+(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(************************************************************************)
+
+type loc = {
+ loc_start : Lexing.position;
+ loc_end : Lexing.position;
+}
+
+type code = { code : string }
+
+type token_data =
+| TokNone
+| TokName of string
+| TokNameSep of string * string
+
+type ext_token =
+| ExtTerminal of string
+| ExtNonTerminal of string * token_data
+
+type tactic_rule = {
+ tac_toks : ext_token list;
+ tac_body : code;
+}
+
+type level = string
+
+type position =
+| First
+| Last
+| Before of level
+| After of level
+| Level of level
+
+type assoc =
+| LeftA
+| RightA
+| NonA
+
+type gram_symbol =
+| GSymbString of string
+| GSymbQualid of string * level option
+| GSymbParen of gram_symbol list
+| GSymbProd of gram_prod list
+
+and gram_prod = {
+ gprod_symbs : (string option * gram_symbol list) list;
+ gprod_body : code;
+}
+
+type gram_rule = {
+ grule_label : string option;
+ grule_assoc : assoc option;
+ grule_prods : gram_prod list;
+}
+
+type grammar_entry = {
+ gentry_name : string;
+ gentry_pos : position option;
+ gentry_rules : gram_rule list;
+}
+
+type grammar_ext = {
+ gramext_name : string;
+ gramext_globals : string list;
+ gramext_entries : grammar_entry list;
+}
+
+type node =
+| Code of code
+| Comment of string
+| GramExt of grammar_ext
+| VernacExt
+| TacticExt of string * tactic_rule list
+
+type t = node list
diff --git a/grammar/coqpp_lex.mll b/grammar/coqpp_lex.mll
new file mode 100644
index 000000000..f35766b16
--- /dev/null
+++ b/grammar/coqpp_lex.mll
@@ -0,0 +1,163 @@
+(************************************************************************)
+(* v * The Coq Proof Assistant / The Coq Development Team *)
+(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(************************************************************************)
+
+{
+
+open Lexing
+open Coqpp_parse
+
+type mode =
+| OCaml
+| Extend
+
+exception Lex_error of Coqpp_ast.loc * string
+
+let loc lexbuf = {
+ Coqpp_ast.loc_start = lexeme_start_p lexbuf;
+ Coqpp_ast.loc_end = lexeme_end_p lexbuf;
+}
+
+let newline lexbuf =
+ let pos = lexbuf.lex_curr_p in
+ let pos = { pos with pos_lnum = pos.pos_lnum + 1; pos_bol = pos.pos_cnum } in
+ lexbuf.lex_curr_p <- pos
+
+let num_comments = ref 0
+let num_braces = ref 0
+
+let mode () = if !num_braces = 0 then Extend else OCaml
+
+let comment_buf = Buffer.create 512
+let ocaml_buf = Buffer.create 512
+let string_buf = Buffer.create 512
+
+let lex_error lexbuf msg =
+ raise (Lex_error (loc lexbuf, msg))
+
+let lex_unexpected_eof lexbuf where =
+ lex_error lexbuf (Printf.sprintf "Unexpected end of file in %s" where)
+
+let start_comment _ =
+ let () = incr num_comments in
+ Buffer.add_string comment_buf "(*"
+
+let end_comment lexbuf =
+ let () = decr num_comments in
+ let () = Buffer.add_string comment_buf "*)" in
+ if !num_comments < 0 then lex_error lexbuf "Unexpected end of comment"
+ else if !num_comments = 0 then
+ let s = Buffer.contents comment_buf in
+ let () = Buffer.reset comment_buf in
+ Some (COMMENT s)
+ else
+ None
+
+let start_ocaml _ =
+ let () = match mode () with
+ | OCaml -> Buffer.add_string ocaml_buf "{"
+ | Extend -> ()
+ in
+ incr num_braces
+
+let end_ocaml lexbuf =
+ let () = decr num_braces in
+ if !num_braces < 0 then lex_error lexbuf "Unexpected end of OCaml code"
+ else if !num_braces = 0 then
+ let s = Buffer.contents ocaml_buf in
+ let () = Buffer.reset ocaml_buf in
+ Some (CODE { Coqpp_ast.code = s })
+ else
+ let () = Buffer.add_string ocaml_buf "}" in
+ None
+
+}
+
+let letter = ['a'-'z' 'A'-'Z']
+let letterlike = ['_' 'a'-'z' 'A'-'Z']
+let alphanum = ['_' 'a'-'z' 'A'-'Z' '0'-'9' '\'']
+let ident = letterlike alphanum*
+let qualid = ident ('.' ident)*
+let space = [' ' '\t' '\r']
+
+rule extend = parse
+| "(*" { start_comment (); comment lexbuf }
+| "{" { start_ocaml (); ocaml lexbuf }
+| "GRAMMAR" { GRAMMAR }
+| "VERNAC" { VERNAC }
+| "TACTIC" { TACTIC }
+| "EXTEND" { EXTEND }
+| "END" { END }
+(** Camlp5 specific keywords *)
+| "GLOBAL" { GLOBAL }
+| "FIRST" { FIRST }
+| "LAST" { LAST }
+| "BEFORE" { BEFORE }
+| "AFTER" { AFTER }
+| "LEVEL" { LEVEL }
+| "LEFTA" { LEFTA }
+| "RIGHTA" { RIGHTA }
+| "NONA" { NONA }
+(** Standard *)
+| ident { IDENT (Lexing.lexeme lexbuf) }
+| qualid { QUALID (Lexing.lexeme lexbuf) }
+| space { extend lexbuf }
+| '\"' { string lexbuf }
+| '\n' { newline lexbuf; extend lexbuf }
+| '[' { LBRACKET }
+| ']' { RBRACKET }
+| '|' { PIPE }
+| "->" { ARROW }
+| ',' { COMMA }
+| ':' { COLON }
+| ';' { SEMICOLON }
+| '(' { LPAREN }
+| ')' { RPAREN }
+| '=' { EQUAL }
+| _ { lex_error lexbuf "syntax error" }
+| eof { EOF }
+
+and ocaml = parse
+| "{" { start_ocaml (); ocaml lexbuf }
+| "}" { match end_ocaml lexbuf with Some tk -> tk | None -> ocaml lexbuf }
+| '\n' { newline lexbuf; Buffer.add_char ocaml_buf '\n'; ocaml lexbuf }
+| '\"' { Buffer.add_char ocaml_buf '\"'; ocaml_string lexbuf }
+| (_ as c) { Buffer.add_char ocaml_buf c; ocaml lexbuf }
+| eof { lex_unexpected_eof lexbuf "OCaml code" }
+
+and comment = parse
+| "*)" { match end_comment lexbuf with Some _ -> extend lexbuf | None -> comment lexbuf }
+| "(*" { start_comment lexbuf; comment lexbuf }
+| '\n' { newline lexbuf; Buffer.add_char comment_buf '\n'; comment lexbuf }
+| (_ as c) { Buffer.add_char comment_buf c; comment lexbuf }
+| eof { lex_unexpected_eof lexbuf "comment" }
+
+and string = parse
+| '\"'
+ {
+ let s = Buffer.contents string_buf in
+ let () = Buffer.reset string_buf in
+ STRING s
+ }
+| "\\\"" { Buffer.add_char string_buf '\"'; string lexbuf }
+| '\n' { newline lexbuf; Buffer.add_char string_buf '\n'; string lexbuf }
+| (_ as c) { Buffer.add_char string_buf c; string lexbuf }
+| eof { lex_unexpected_eof lexbuf "string" }
+
+and ocaml_string = parse
+| "\\\"" { Buffer.add_string ocaml_buf "\\\""; ocaml_string lexbuf }
+| '\"' { Buffer.add_char ocaml_buf '\"'; ocaml lexbuf }
+| (_ as c) { Buffer.add_char ocaml_buf c; ocaml_string lexbuf }
+| eof { lex_unexpected_eof lexbuf "OCaml string" }
+
+{
+
+let token lexbuf = match mode () with
+| OCaml -> ocaml lexbuf
+| Extend -> extend lexbuf
+
+}
diff --git a/grammar/coqpp_main.ml b/grammar/coqpp_main.ml
new file mode 100644
index 000000000..23a5104e2
--- /dev/null
+++ b/grammar/coqpp_main.ml
@@ -0,0 +1,292 @@
+(************************************************************************)
+(* v * The Coq Proof Assistant / The Coq Development Team *)
+(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(************************************************************************)
+
+open Lexing
+open Coqpp_ast
+open Format
+
+let fatal msg =
+ let () = Format.eprintf "Error: %s@\n%!" msg in
+ exit 1
+
+let pr_loc loc =
+ let file = loc.loc_start.pos_fname in
+ let line = loc.loc_start.pos_lnum in
+ let bpos = loc.loc_start.pos_cnum - loc.loc_start.pos_bol in
+ let epos = loc.loc_end.pos_cnum - loc.loc_start.pos_bol in
+ Printf.sprintf "File \"%s\", line %d, characters %d-%d:" file line bpos epos
+
+let parse_file f =
+ let chan = open_in f in
+ let lexbuf = Lexing.from_channel chan in
+ let () = lexbuf.lex_curr_p <- { lexbuf.lex_curr_p with pos_fname = f } in
+ let ans =
+ try Coqpp_parse.file Coqpp_lex.token lexbuf
+ with
+ | Coqpp_lex.Lex_error (loc, msg) ->
+ let () = close_in chan in
+ let () = Printf.eprintf "%s\n%!" (pr_loc loc) in
+ fatal msg
+ | Parsing.Parse_error ->
+ let () = close_in chan in
+ let loc = Coqpp_lex.loc lexbuf in
+ let () = Printf.eprintf "%s\n%!" (pr_loc loc) in
+ fatal "syntax error"
+ in
+ let () = close_in chan in
+ ans
+
+module StringSet = Set.Make(String)
+
+let string_split s =
+ let len = String.length s in
+ let rec split n =
+ try
+ let pos = String.index_from s n '.' in
+ let dir = String.sub s n (pos-n) in
+ dir :: split (succ pos)
+ with
+ | Not_found -> [String.sub s n (len-n)]
+ in
+ if len == 0 then [] else split 0
+
+module GramExt :
+sig
+
+val print_ast : Format.formatter -> grammar_ext -> unit
+
+end =
+struct
+
+let is_uident s = match s.[0] with
+| 'A'..'Z' -> true
+| _ -> false
+
+let is_qualified = is_uident
+
+let get_local_entries ext =
+ let global = StringSet.of_list ext.gramext_globals in
+ let map e = e.gentry_name in
+ let entries = List.map map ext.gramext_entries in
+ let local = List.filter (fun e -> not (is_qualified e || StringSet.mem e global)) entries in
+ let rec uniquize seen = function
+ | [] -> []
+ | id :: rem ->
+ let rem = uniquize (StringSet.add id seen) rem in
+ if StringSet.mem id seen then rem else id :: rem
+ in
+ uniquize StringSet.empty local
+
+let print_local chan ext =
+ let locals = get_local_entries ext in
+ match locals with
+ | [] -> ()
+ | e :: locals ->
+ let mk_e chan e = fprintf chan "%s.Entry.create \"%s\"" ext.gramext_name e in
+ let () = fprintf chan "@[<hv 2>let %s =@ @[%a@]@]@ " e mk_e e in
+ let iter e = fprintf chan "@[<hv 2>and %s =@ @[%a@]@]@ " e mk_e e in
+ let () = List.iter iter locals in
+ fprintf chan "in@ "
+
+let print_string chan s = fprintf chan "\"%s\"" s
+
+let print_list chan pr l =
+ let rec prl chan = function
+ | [] -> ()
+ | [x] -> fprintf chan "%a" pr x
+ | x :: l -> fprintf chan "%a;@ %a" pr x prl l
+ in
+ fprintf chan "@[<hv>[%a]@]" prl l
+
+let print_opt chan pr = function
+| None -> fprintf chan "None"
+| Some x -> fprintf chan "Some@ (%a)" pr x
+
+let print_position chan pos = match pos with
+| First -> fprintf chan "Extend.First"
+| Last -> fprintf chan "Extend.Last"
+| Before s -> fprintf chan "Extend.Before@ \"%s\"" s
+| After s -> fprintf chan "Extend.After@ \"%s\"" s
+| Level s -> fprintf chan "Extend.Level@ \"%s\"" s
+
+let print_assoc chan = function
+| LeftA -> fprintf chan "Extend.LeftA"
+| RightA -> fprintf chan "Extend.RightA"
+| NonA -> fprintf chan "Extend.NonA"
+
+type symb =
+| SymbToken of string * string option
+| SymbEntry of string * string option
+| SymbSelf
+| SymbNext
+| SymbList0 of symb * symb option
+| SymbList1 of symb * symb option
+| SymbOpt of symb
+| SymbRules of ((string option * symb) list * code) list
+
+let is_token s = match string_split s with
+| [s] -> is_uident s
+| _ -> false
+
+let rec parse_tokens = function
+| [GSymbString s] -> SymbToken ("", Some s)
+| [GSymbQualid ("SELF", None)] -> SymbSelf
+| [GSymbQualid ("NEXT", None)] -> SymbNext
+| [GSymbQualid ("LIST0", None); tkn] ->
+ SymbList0 (parse_token tkn, None)
+| [GSymbQualid ("LIST1", None); tkn] ->
+ SymbList1 (parse_token tkn, None)
+| [GSymbQualid ("LIST0", None); tkn; GSymbQualid ("SEP", None); tkn'] ->
+ SymbList0 (parse_token tkn, Some (parse_token tkn'))
+| [GSymbQualid ("LIST1", None); tkn; GSymbQualid ("SEP", None); tkn'] ->
+ SymbList1 (parse_token tkn, Some (parse_token tkn'))
+| [GSymbQualid ("OPT", None); tkn] ->
+ SymbOpt (parse_token tkn)
+| [GSymbQualid (e, None)] when is_token e -> SymbToken (e, None)
+| [GSymbQualid (e, None); GSymbString s] when is_token e -> SymbToken (e, Some s)
+| [GSymbQualid (e, lvl)] when not (is_token e) -> SymbEntry (e, lvl)
+| [GSymbParen tkns] -> parse_tokens tkns
+| [GSymbProd prds] ->
+ let map p =
+ let map (pat, tkns) = (pat, parse_tokens tkns) in
+ (List.map map p.gprod_symbs, p.gprod_body)
+ in
+ SymbRules (List.map map prds)
+| t ->
+ let rec db_token = function
+ | GSymbString s -> Printf.sprintf "\"%s\"" s
+ | GSymbQualid (s, _) -> Printf.sprintf "%s" s
+ | GSymbParen s -> Printf.sprintf "(%s)" (db_tokens s)
+ | GSymbProd _ -> Printf.sprintf "[...]"
+ and db_tokens tkns =
+ let s = List.map db_token tkns in
+ String.concat " " s
+ in
+ fatal (Printf.sprintf "Invalid token: %s" (db_tokens t))
+
+and parse_token tkn = parse_tokens [tkn]
+
+let print_fun chan (vars, body) =
+ let vars = List.rev vars in
+ let iter = function
+ | None -> fprintf chan "_@ "
+ | Some id -> fprintf chan "%s@ " id
+ in
+ let () = fprintf chan "fun@ " in
+ let () = List.iter iter vars in
+ (** FIXME: use Coq locations in the macros *)
+ let () = fprintf chan "loc ->@ @[%s@]" body.code in
+ ()
+
+(** Meta-program instead of calling Tok.of_pattern here because otherwise
+ it violates the value restriction *)
+let print_tok chan = function
+| "", s -> fprintf chan "Tok.KEYWORD %a" print_string s
+| "IDENT", s -> fprintf chan "Tok.IDENT %a" print_string s
+| "PATTERNIDENT", s -> fprintf chan "Tok.PATTERNIDENT %a" print_string s
+| "FIELD", s -> fprintf chan "Tok.FIELD %a" print_string s
+| "INT", s -> fprintf chan "Tok.INT %a" print_string s
+| "STRING", s -> fprintf chan "Tok.STRING %a" print_string s
+| "LEFTQMARK", _ -> fprintf chan "Tok.LEFTQMARK"
+| "BULLET", s -> fprintf chan "Tok.BULLET %a" print_string s
+| "EOI", _ -> fprintf chan "Tok.EOI"
+| _ -> failwith "Tok.of_pattern: not a constructor"
+
+let rec print_prod chan p =
+ let (vars, tkns) = List.split p.gprod_symbs in
+ let f = (vars, p.gprod_body) in
+ let tkn = List.rev_map parse_tokens tkns in
+ fprintf chan "@[Extend.Rule@ (@[%a@],@ @[(%a)@])@]" print_symbols tkn print_fun f
+
+and print_symbols chan = function
+| [] -> fprintf chan "Extend.Stop"
+| tkn :: tkns ->
+ fprintf chan "Extend.Next @[(%a,@ %a)@]" print_symbols tkns print_symbol tkn
+
+and print_symbol chan tkn = match tkn with
+| SymbToken (t, s) ->
+ let s = match s with None -> "" | Some s -> s in
+ fprintf chan "(Extend.Atoken (%a))" print_tok (t, s)
+| SymbEntry (e, None) ->
+ fprintf chan "(Extend.Aentry %s)" e
+| SymbEntry (e, Some l) ->
+ fprintf chan "(Extend.Aentryl (%s, %a))" e print_string l
+| SymbSelf ->
+ fprintf chan "Extend.Aself"
+| SymbNext ->
+ fprintf chan "Extend.Anext"
+| SymbList0 (s, None) ->
+ fprintf chan "(Extend.Alist0 %a)" print_symbol s
+| SymbList0 (s, Some sep) ->
+ fprintf chan "(Extend.Alist0sep (%a, %a))" print_symbol s print_symbol sep
+| SymbList1 (s, None) ->
+ fprintf chan "(Extend.Alist1 %a)" print_symbol s
+| SymbList1 (s, Some sep) ->
+ fprintf chan "(Extend.Alist1sep (%a, %a))" print_symbol s print_symbol sep
+| SymbOpt s ->
+ fprintf chan "(Extend.Aopt %a)" print_symbol s
+| SymbRules rules ->
+ let pr chan (r, body) =
+ let (vars, tkn) = List.split r in
+ let tkn = List.rev tkn in
+ fprintf chan "Extend.Rules @[({ Extend.norec_rule = %a },@ (%a))@]" print_symbols tkn print_fun (vars, body)
+ in
+ let pr chan rules = print_list chan pr rules in
+ fprintf chan "(Extend.Arules %a)" pr (List.rev rules)
+
+let print_rule chan r =
+ let pr_lvl chan lvl = print_opt chan print_string lvl in
+ let pr_asc chan asc = print_opt chan print_assoc asc in
+ let pr_prd chan prd = print_list chan print_prod prd in
+ fprintf chan "@[(%a,@ %a,@ %a)@]" pr_lvl r.grule_label pr_asc r.grule_assoc pr_prd (List.rev r.grule_prods)
+
+let print_entry chan gram e =
+ let print_position_opt chan pos = print_opt chan print_position pos in
+ let print_rules chan rules = print_list chan print_rule rules in
+ fprintf chan "let () =@ @[%s.safe_extend@ %s@ @[(%a, %a)@]@]@ in@ "
+ gram e.gentry_name print_position_opt e.gentry_pos print_rules e.gentry_rules
+
+let print_ast chan ext =
+ let () = fprintf chan "let _ = @[" in
+ let () = fprintf chan "@[<v>%a@]" print_local ext in
+ let () = List.iter (fun e -> print_entry chan ext.gramext_name e) ext.gramext_entries in
+ let () = fprintf chan "()@]\n" in
+ ()
+
+end
+
+let pr_ast chan = function
+| Code s -> fprintf chan "%s@\n" s.code
+| Comment s -> fprintf chan "%s@\n" s
+| GramExt gram -> fprintf chan "%a@\n" GramExt.print_ast gram
+| VernacExt -> fprintf chan "VERNACEXT@\n"
+| TacticExt (id, rules) ->
+ let pr_tok = function
+ | ExtTerminal s -> Printf.sprintf "%s" s
+ | ExtNonTerminal (s, _) -> Printf.sprintf "%s" s
+ in
+ let pr_tacrule r =
+ let toks = String.concat " " (List.map pr_tok r.tac_toks) in
+ Printf.sprintf "[%s] -> {%s}" toks r.tac_body.code
+ in
+ let rules = String.concat ", " (List.map pr_tacrule rules) in
+ fprintf chan "TACTICEXT (%s, [%s])@\n" id rules
+
+let () =
+ let () =
+ if Array.length Sys.argv <> 2 then fatal "Expected exactly one command line argument"
+ in
+ let file = Sys.argv.(1) in
+ let output = Filename.chop_extension file ^ ".ml" in
+ let ast = parse_file file in
+ let chan = open_out output in
+ let fmt = formatter_of_out_channel chan in
+ let iter ast = Format.fprintf fmt "@[%a@]%!" pr_ast ast in
+ let () = List.iter iter ast in
+ let () = close_out chan in
+ exit 0
diff --git a/grammar/coqpp_parse.mly b/grammar/coqpp_parse.mly
new file mode 100644
index 000000000..aa22d2717
--- /dev/null
+++ b/grammar/coqpp_parse.mly
@@ -0,0 +1,190 @@
+/************************************************************************/
+/* v * The Coq Proof Assistant / The Coq Development Team */
+/* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 */
+/* \VV/ **************************************************************/
+/* // * This file is distributed under the terms of the */
+/* * GNU Lesser General Public License Version 2.1 */
+/************************************************************************/
+
+%{
+
+open Coqpp_ast
+
+%}
+
+%token <Coqpp_ast.code> CODE
+%token <string> COMMENT
+%token <string> IDENT QUALID
+%token <string> STRING
+%token VERNAC TACTIC GRAMMAR EXTEND END
+%token LBRACKET RBRACKET PIPE ARROW COMMA EQUAL
+%token LPAREN RPAREN COLON SEMICOLON
+%token GLOBAL FIRST LAST BEFORE AFTER LEVEL LEFTA RIGHTA NONA
+%token EOF
+
+%type <Coqpp_ast.t> file
+%start file
+
+%%
+
+file:
+| nodes EOF
+ { $1 }
+;
+
+nodes:
+|
+ { [] }
+| node nodes
+ { $1 :: $2 }
+;
+
+node:
+| CODE { Code $1 }
+| COMMENT { Comment $1 }
+| grammar_extend { $1 }
+| vernac_extend { $1 }
+| tactic_extend { $1 }
+;
+
+grammar_extend:
+| GRAMMAR EXTEND qualid_or_ident globals gram_entries END
+ { GramExt { gramext_name = $3; gramext_globals = $4; gramext_entries = $5 } }
+;
+
+vernac_extend:
+| VERNAC EXTEND IDENT END { VernacExt }
+;
+
+tactic_extend:
+| TACTIC EXTEND IDENT tactic_rules END { TacticExt ($3, $4) }
+;
+
+tactic_rules:
+| tactic_rule { [$1] }
+| tactic_rule tactic_rules { $1 :: $2 }
+;
+
+tactic_rule:
+| PIPE LBRACKET ext_tokens RBRACKET ARROW CODE
+ { { tac_toks = $3; tac_body = $6 } }
+;
+
+ext_tokens:
+| { [] }
+| ext_token ext_tokens { $1 :: $2 }
+;
+
+ext_token:
+| STRING { ExtTerminal $1 }
+| IDENT { ExtNonTerminal ($1, TokNone) }
+| IDENT LPAREN IDENT RPAREN { ExtNonTerminal ($1, TokName $3) }
+| IDENT LPAREN IDENT COMMA STRING RPAREN { ExtNonTerminal ($1, TokNameSep ($3, $5)) }
+;
+
+qualid_or_ident:
+| QUALID { $1 }
+| IDENT { $1 }
+;
+
+globals:
+| { [] }
+| GLOBAL COLON idents SEMICOLON { $3 }
+;
+
+idents:
+| { [] }
+| qualid_or_ident idents { $1 :: $2 }
+;
+
+gram_entries:
+| { [] }
+| gram_entry gram_entries { $1 :: $2 }
+;
+
+gram_entry:
+| qualid_or_ident COLON position_opt LBRACKET levels RBRACKET SEMICOLON
+ { { gentry_name = $1; gentry_pos = $3; gentry_rules = $5; } }
+;
+
+position_opt:
+| { None }
+| position { Some $1 }
+;
+
+position:
+| FIRST { First }
+| LAST { Last }
+| BEFORE STRING { Before $2 }
+| AFTER STRING { After $2 }
+| LEVEL STRING { Level $2 }
+;
+
+string_opt:
+| { None }
+| STRING { Some $1 }
+;
+
+assoc_opt:
+| { None }
+| assoc { Some $1 }
+;
+
+assoc:
+| LEFTA { LeftA }
+| RIGHTA { RightA }
+| NONA { NonA }
+;
+
+levels:
+| level { [$1] }
+| level PIPE levels { $1 :: $3 }
+;
+
+level:
+| string_opt assoc_opt LBRACKET rules_opt RBRACKET
+ { { grule_label = $1; grule_assoc = $2; grule_prods = $4; } }
+;
+
+rules_opt:
+| { [] }
+| rules { $1 }
+;
+
+rules:
+| rule { [$1] }
+| rule PIPE rules { $1 :: $3 }
+;
+
+rule:
+| symbols_opt ARROW CODE
+ { { gprod_symbs = $1; gprod_body = $3; } }
+;
+
+symbols_opt:
+| { [] }
+| symbols { $1 }
+;
+
+symbols:
+| symbol { [$1] }
+| symbol SEMICOLON symbols { $1 :: $3 }
+;
+
+symbol:
+| IDENT EQUAL gram_tokens { (Some $1, $3) }
+| gram_tokens { (None, $1) }
+;
+
+gram_token:
+| qualid_or_ident { GSymbQualid ($1, None) }
+| qualid_or_ident LEVEL STRING { GSymbQualid ($1, Some $3) }
+| LPAREN gram_tokens RPAREN { GSymbParen $2 }
+| LBRACKET rules RBRACKET { GSymbProd $2 }
+| STRING { GSymbString $1 }
+;
+
+gram_tokens:
+| gram_token { [$1] }
+| gram_token gram_tokens { $1 :: $2 }
+;
diff --git a/grammar/q_util.mlp b/grammar/q_util.mlp
index 6cdd2ec19..0b8d7fda7 100644
--- a/grammar/q_util.mlp
+++ b/grammar/q_util.mlp
@@ -75,7 +75,7 @@ let rec mlexpr_of_prod_entry_key f = function
(** Keep in sync with Pcoq! *)
assert (e = "tactic");
if l = 5 then <:expr< Extend.Aentry Pltac.binder_tactic >>
- else <:expr< Extend.Aentryl (Pltac.tactic_expr) $mlexpr_of_int l$ >>
+ else <:expr< Extend.Aentryl (Pltac.tactic_expr) $mlexpr_of_string (string_of_int l)$ >>
let rec type_of_user_symbol = function
| Ulist1 s | Ulist1sep (s, _) | Ulist0 s | Ulist0sep (s, _) ->
diff --git a/parsing/extend.ml b/parsing/extend.ml
index 6805a96ed..f57e32c88 100644
--- a/parsing/extend.ml
+++ b/parsing/extend.ml
@@ -101,7 +101,7 @@ type ('self, 'a) symbol =
| Aself : ('self, 'self) symbol
| Anext : ('self, 'self) symbol
| Aentry : 'a entry -> ('self, 'a) symbol
-| Aentryl : 'a entry * int -> ('self, 'a) symbol
+| Aentryl : 'a entry * string -> ('self, 'a) symbol
| Arules : 'a rules list -> ('self, 'a) symbol
and ('self, _, 'r) rule =
diff --git a/parsing/g_constr.ml4 b/parsing/g_constr.mlg
index 1fa26b455..b2913d5d4 100644
--- a/parsing/g_constr.ml4
+++ b/parsing/g_constr.mlg
@@ -8,6 +8,8 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Names
open Constr
open Libnames
@@ -126,396 +128,399 @@ let name_colon =
let aliasvar = function { CAst.v = CPatAlias (_, na) } -> Some na | _ -> None
-GEXTEND Gram
+}
+
+GRAMMAR EXTEND Gram
GLOBAL: binder_constr lconstr constr operconstr universe_level sort sort_family
global constr_pattern lconstr_pattern Constr.ident
closed_binder open_binders binder binders binders_fixannot
record_declaration typeclass_constraint pattern appl_arg;
Constr.ident:
- [ [ id = Prim.ident -> id ] ]
+ [ [ id = Prim.ident -> { id } ] ]
;
Prim.name:
- [ [ "_" -> CAst.make ~loc:!@loc Anonymous ] ]
+ [ [ "_" -> { CAst.make ~loc Anonymous } ] ]
;
global:
- [ [ r = Prim.reference -> r ] ]
+ [ [ r = Prim.reference -> { r } ] ]
;
constr_pattern:
- [ [ c = constr -> c ] ]
+ [ [ c = constr -> { c } ] ]
;
lconstr_pattern:
- [ [ c = lconstr -> c ] ]
+ [ [ c = lconstr -> { c } ] ]
;
sort:
- [ [ "Set" -> GSet
- | "Prop" -> GProp
- | "Type" -> GType []
- | "Type"; "@{"; u = universe; "}" -> GType u
+ [ [ "Set" -> { GSet }
+ | "Prop" -> { GProp }
+ | "Type" -> { GType [] }
+ | "Type"; "@{"; u = universe; "}" -> { GType u }
] ]
;
sort_family:
- [ [ "Set" -> Sorts.InSet
- | "Prop" -> Sorts.InProp
- | "Type" -> Sorts.InType
+ [ [ "Set" -> { Sorts.InSet }
+ | "Prop" -> { Sorts.InProp }
+ | "Type" -> { Sorts.InType }
] ]
;
universe_expr:
- [ [ id = global; "+"; n = natural -> Some (id,n)
- | id = global -> Some (id,0)
- | "_" -> None
+ [ [ id = global; "+"; n = natural -> { Some (id,n) }
+ | id = global -> { Some (id,0) }
+ | "_" -> { None }
] ]
;
universe:
- [ [ IDENT "max"; "("; ids = LIST1 universe_expr SEP ","; ")" -> ids
- | u = universe_expr -> [u]
+ [ [ IDENT "max"; "("; ids = LIST1 universe_expr SEP ","; ")" -> { ids }
+ | u = universe_expr -> { [u] }
] ]
;
lconstr:
- [ [ c = operconstr LEVEL "200" -> c ] ]
+ [ [ c = operconstr LEVEL "200" -> { c } ] ]
;
constr:
- [ [ c = operconstr LEVEL "8" -> c
- | "@"; f=global; i = instance -> CAst.make ~loc:!@loc @@ CAppExpl((None,f,i),[]) ] ]
+ [ [ c = operconstr LEVEL "8" -> { c }
+ | "@"; f=global; i = instance -> { CAst.make ~loc @@ CAppExpl((None,f,i),[]) } ] ]
;
operconstr:
[ "200" RIGHTA
- [ c = binder_constr -> c ]
+ [ c = binder_constr -> { c } ]
| "100" RIGHTA
[ c1 = operconstr; "<:"; c2 = binder_constr ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastVM c2)
+ { CAst.make ~loc @@ CCast(c1, CastVM c2) }
| c1 = operconstr; "<:"; c2 = SELF ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastVM c2)
+ { CAst.make ~loc @@ CCast(c1, CastVM c2) }
| c1 = operconstr; "<<:"; c2 = binder_constr ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastNative c2)
+ { CAst.make ~loc @@ CCast(c1, CastNative c2) }
| c1 = operconstr; "<<:"; c2 = SELF ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastNative c2)
+ { CAst.make ~loc @@ CCast(c1, CastNative c2) }
| c1 = operconstr; ":";c2 = binder_constr ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastConv c2)
+ { CAst.make ~loc @@ CCast(c1, CastConv c2) }
| c1 = operconstr; ":"; c2 = SELF ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastConv c2)
+ { CAst.make ~loc @@ CCast(c1, CastConv c2) }
| c1 = operconstr; ":>" ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastCoerce) ]
+ { CAst.make ~loc @@ CCast(c1, CastCoerce) } ]
| "99" RIGHTA [ ]
| "90" RIGHTA [ ]
| "10" LEFTA
- [ f=operconstr; args=LIST1 appl_arg -> CAst.make ~loc:(!@loc) @@ CApp((None,f),args)
- | "@"; f=global; i = instance; args=LIST0 NEXT -> CAst.make ~loc:!@loc @@ CAppExpl((None,f,i),args)
+ [ f=operconstr; args=LIST1 appl_arg -> { CAst.make ~loc @@ CApp((None,f),args) }
+ | "@"; f=global; i = instance; args=LIST0 NEXT -> { CAst.make ~loc @@ CAppExpl((None,f,i),args) }
| "@"; lid = pattern_identref; args=LIST1 identref ->
- let { CAst.loc = locid; v = id } = lid in
+ { let { CAst.loc = locid; v = id } = lid in
let args = List.map (fun x -> CAst.make @@ CRef (qualid_of_ident ?loc:x.CAst.loc x.CAst.v, None), None) args in
- CAst.make ~loc:(!@loc) @@ CApp((None, CAst.make ?loc:locid @@ CPatVar id),args) ]
+ CAst.make ~loc @@ CApp((None, CAst.make ?loc:locid @@ CPatVar id),args) } ]
| "9"
[ ".."; c = operconstr LEVEL "0"; ".." ->
- CAst.make ~loc:!@loc @@ CAppExpl ((None, (qualid_of_ident ~loc:!@loc ldots_var), None),[c]) ]
+ { CAst.make ~loc @@ CAppExpl ((None, (qualid_of_ident ~loc ldots_var), None),[c]) } ]
| "8" [ ]
| "1" LEFTA
[ c=operconstr; ".("; f=global; args=LIST0 appl_arg; ")" ->
- CAst.make ~loc:(!@loc) @@ CApp((Some (List.length args+1), CAst.make @@ CRef (f,None)),args@[c,None])
+ { CAst.make ~loc @@ CApp((Some (List.length args+1), CAst.make @@ CRef (f,None)),args@[c,None]) }
| c=operconstr; ".("; "@"; f=global;
args=LIST0 (operconstr LEVEL "9"); ")" ->
- CAst.make ~loc:(!@loc) @@ CAppExpl((Some (List.length args+1),f,None),args@[c])
- | c=operconstr; "%"; key=IDENT -> CAst.make ~loc:(!@loc) @@ CDelimiters (key,c) ]
+ { CAst.make ~loc @@ CAppExpl((Some (List.length args+1),f,None),args@[c]) }
+ | c=operconstr; "%"; key=IDENT -> { CAst.make ~loc @@ CDelimiters (key,c) } ]
| "0"
- [ c=atomic_constr -> c
- | c=match_constr -> c
+ [ c=atomic_constr -> { c }
+ | c=match_constr -> { c }
| "("; c = operconstr LEVEL "200"; ")" ->
- (match c.CAst.v with
+ { (match c.CAst.v with
| CPrim (Numeral (n,true)) ->
- CAst.make ~loc:(!@loc) @@ CNotation("( _ )",([c],[],[],[]))
- | _ -> c)
- | "{|"; c = record_declaration; "|}" -> c
+ CAst.make ~loc @@ CNotation("( _ )",([c],[],[],[]))
+ | _ -> c) }
+ | "{|"; c = record_declaration; "|}" -> { c }
| "{"; c = binder_constr ; "}" ->
- CAst.make ~loc:(!@loc) @@ CNotation(("{ _ }"),([c],[],[],[]))
+ { CAst.make ~loc @@ CNotation(("{ _ }"),([c],[],[],[])) }
| "`{"; c = operconstr LEVEL "200"; "}" ->
- CAst.make ~loc:(!@loc) @@ CGeneralization (Implicit, None, c)
+ { CAst.make ~loc @@ CGeneralization (Implicit, None, c) }
| "`("; c = operconstr LEVEL "200"; ")" ->
- CAst.make ~loc:(!@loc) @@ CGeneralization (Explicit, None, c)
+ { CAst.make ~loc @@ CGeneralization (Explicit, None, c) }
] ]
;
record_declaration:
- [ [ fs = record_fields -> CAst.make ~loc:(!@loc) @@ CRecord fs ] ]
+ [ [ fs = record_fields -> { CAst.make ~loc @@ CRecord fs } ] ]
;
record_fields:
- [ [ f = record_field_declaration; ";"; fs = record_fields -> f :: fs
- | f = record_field_declaration -> [f]
- | -> []
+ [ [ f = record_field_declaration; ";"; fs = record_fields -> { f :: fs }
+ | f = record_field_declaration -> { [f] }
+ | -> { [] }
] ]
;
record_field_declaration:
[ [ id = global; bl = binders; ":="; c = lconstr ->
- (id, mkCLambdaN ~loc:!@loc bl c) ] ]
+ { (id, mkCLambdaN ~loc bl c) } ] ]
;
binder_constr:
[ [ "forall"; bl = open_binders; ","; c = operconstr LEVEL "200" ->
- mkCProdN ~loc:!@loc bl c
+ { mkCProdN ~loc bl c }
| "fun"; bl = open_binders; "=>"; c = operconstr LEVEL "200" ->
- mkCLambdaN ~loc:!@loc bl c
+ { mkCLambdaN ~loc bl c }
| "let"; id=name; bl = binders; ty = type_cstr; ":=";
c1 = operconstr LEVEL "200"; "in"; c2 = operconstr LEVEL "200" ->
- let ty,c1 = match ty, c1 with
+ { let ty,c1 = match ty, c1 with
| (_,None), { CAst.v = CCast(c, CastConv t) } -> (Loc.tag ?loc:(constr_loc t) @@ Some t), c (* Tolerance, see G_vernac.def_body *)
| _, _ -> ty, c1 in
- CAst.make ~loc:!@loc @@ CLetIn(id,mkCLambdaN ?loc:(constr_loc c1) bl c1,
- Option.map (mkCProdN ?loc:(fst ty) bl) (snd ty), c2)
+ CAst.make ~loc @@ CLetIn(id,mkCLambdaN ?loc:(constr_loc c1) bl c1,
+ Option.map (mkCProdN ?loc:(fst ty) bl) (snd ty), c2) }
| "let"; fx = single_fix; "in"; c = operconstr LEVEL "200" ->
- let fixp = mk_single_fix fx in
+ { let fixp = mk_single_fix fx in
let { CAst.loc = li; v = id } = match fixp.CAst.v with
CFix(id,_) -> id
| CCoFix(id,_) -> id
| _ -> assert false in
- CAst.make ~loc:!@loc @@ CLetIn( CAst.make ?loc:li @@ Name id,fixp,None,c)
- | "let"; lb = ["("; l=LIST0 name SEP ","; ")" -> l | "()" -> []];
+ CAst.make ~loc @@ CLetIn( CAst.make ?loc:li @@ Name id,fixp,None,c) }
+ | "let"; lb = ["("; l=LIST0 name SEP ","; ")" -> { l } | "()" -> { [] } ];
po = return_type;
":="; c1 = operconstr LEVEL "200"; "in";
c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@ CLetTuple (lb,po,c1,c2)
+ { CAst.make ~loc @@ CLetTuple (lb,po,c1,c2) }
| "let"; "'"; p=pattern; ":="; c1 = operconstr LEVEL "200";
"in"; c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@
- CCases (LetPatternStyle, None, [c1, None, None], [CAst.make ~loc:!@loc ([[p]], c2)])
+ { CAst.make ~loc @@
+ CCases (LetPatternStyle, None, [c1, None, None], [CAst.make ~loc ([[p]], c2)]) }
| "let"; "'"; p=pattern; ":="; c1 = operconstr LEVEL "200";
rt = case_type; "in"; c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@
- CCases (LetPatternStyle, Some rt, [c1, aliasvar p, None], [CAst.make ~loc:!@loc ([[p]], c2)])
+ { CAst.make ~loc @@
+ CCases (LetPatternStyle, Some rt, [c1, aliasvar p, None], [CAst.make ~loc ([[p]], c2)]) }
| "let"; "'"; p=pattern; "in"; t = pattern LEVEL "200";
":="; c1 = operconstr LEVEL "200"; rt = case_type;
"in"; c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@
- CCases (LetPatternStyle, Some rt, [c1, aliasvar p, Some t], [CAst.make ~loc:!@loc ([[p]], c2)])
+ { CAst.make ~loc @@
+ CCases (LetPatternStyle, Some rt, [c1, aliasvar p, Some t], [CAst.make ~loc ([[p]], c2)]) }
| "if"; c=operconstr LEVEL "200"; po = return_type;
"then"; b1=operconstr LEVEL "200";
"else"; b2=operconstr LEVEL "200" ->
- CAst.make ~loc:(!@loc) @@ CIf (c, po, b1, b2)
- | c=fix_constr -> c ] ]
+ { CAst.make ~loc @@ CIf (c, po, b1, b2) }
+ | c=fix_constr -> { c } ] ]
;
appl_arg:
[ [ id = lpar_id_coloneq; c=lconstr; ")" ->
- (c,Some (CAst.make ~loc:!@loc @@ ExplByName id))
- | c=operconstr LEVEL "9" -> (c,None) ] ]
+ { (c,Some (CAst.make ~loc @@ ExplByName id)) }
+ | c=operconstr LEVEL "9" -> { (c,None) } ] ]
;
atomic_constr:
- [ [ g=global; i=instance -> CAst.make ~loc:!@loc @@ CRef (g,i)
- | s=sort -> CAst.make ~loc:!@loc @@ CSort s
- | n=INT -> CAst.make ~loc:!@loc @@ CPrim (Numeral (n,true))
- | s=string -> CAst.make ~loc:!@loc @@ CPrim (String s)
- | "_" -> CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None)
- | "?"; "["; id=ident; "]" -> CAst.make ~loc:!@loc @@ CHole (None, IntroIdentifier id, None)
- | "?"; "["; id=pattern_ident; "]" -> CAst.make ~loc:!@loc @@ CHole (None, IntroFresh id, None)
- | id=pattern_ident; inst = evar_instance -> CAst.make ~loc:!@loc @@ CEvar(id,inst) ] ]
+ [ [ g=global; i=instance -> { CAst.make ~loc @@ CRef (g,i) }
+ | s=sort -> { CAst.make ~loc @@ CSort s }
+ | n=INT -> { CAst.make ~loc @@ CPrim (Numeral (n,true)) }
+ | s=string -> { CAst.make ~loc @@ CPrim (String s) }
+ | "_" -> { CAst.make ~loc @@ CHole (None, IntroAnonymous, None) }
+ | "?"; "["; id=ident; "]" -> { CAst.make ~loc @@ CHole (None, IntroIdentifier id, None) }
+ | "?"; "["; id=pattern_ident; "]" -> { CAst.make ~loc @@ CHole (None, IntroFresh id, None) }
+ | id=pattern_ident; inst = evar_instance -> { CAst.make ~loc @@ CEvar(id,inst) } ] ]
;
inst:
- [ [ id = ident; ":="; c = lconstr -> (id,c) ] ]
+ [ [ id = ident; ":="; c = lconstr -> { (id,c) } ] ]
;
evar_instance:
- [ [ "@{"; l = LIST1 inst SEP ";"; "}" -> l
- | -> [] ] ]
+ [ [ "@{"; l = LIST1 inst SEP ";"; "}" -> { l }
+ | -> { [] } ] ]
;
instance:
- [ [ "@{"; l = LIST0 universe_level; "}" -> Some l
- | -> None ] ]
+ [ [ "@{"; l = LIST0 universe_level; "}" -> { Some l }
+ | -> { None } ] ]
;
universe_level:
- [ [ "Set" -> GSet
- | "Prop" -> GProp
- | "Type" -> GType UUnknown
- | "_" -> GType UAnonymous
- | id = global -> GType (UNamed id)
+ [ [ "Set" -> { GSet }
+ | "Prop" -> { GProp }
+ | "Type" -> { GType UUnknown }
+ | "_" -> { GType UAnonymous }
+ | id = global -> { GType (UNamed id) }
] ]
;
fix_constr:
- [ [ fx1=single_fix -> mk_single_fix fx1
- | (_,kw,dcl1)=single_fix; "with"; dcls=LIST1 fix_decl SEP "with";
+ [ [ fx1=single_fix -> { mk_single_fix fx1 }
+ | f=single_fix; "with"; dcls=LIST1 fix_decl SEP "with";
"for"; id=identref ->
- mk_fix(!@loc,kw,id,dcl1::dcls)
+ { let (_,kw,dcl1) = f in
+ mk_fix(loc,kw,id,dcl1::dcls) }
] ]
;
single_fix:
- [ [ kw=fix_kw; dcl=fix_decl -> (!@loc,kw,dcl) ] ]
+ [ [ kw=fix_kw; dcl=fix_decl -> { (loc,kw,dcl) } ] ]
;
fix_kw:
- [ [ "fix" -> true
- | "cofix" -> false ] ]
+ [ [ "fix" -> { true }
+ | "cofix" -> { false } ] ]
;
fix_decl:
[ [ id=identref; bl=binders_fixannot; ty=type_cstr; ":=";
c=operconstr LEVEL "200" ->
- (id,fst bl,snd bl,c,ty) ] ]
+ { (id,fst bl,snd bl,c,ty) } ] ]
;
match_constr:
[ [ "match"; ci=LIST1 case_item SEP ","; ty=OPT case_type; "with";
- br=branches; "end" -> CAst.make ~loc:!@loc @@ CCases(RegularStyle,ty,ci,br) ] ]
+ br=branches; "end" -> { CAst.make ~loc @@ CCases(RegularStyle,ty,ci,br) } ] ]
;
case_item:
[ [ c=operconstr LEVEL "100";
- ona = OPT ["as"; id=name -> id];
- ty = OPT ["in"; t=pattern -> t] ->
- (c,ona,ty) ] ]
+ ona = OPT ["as"; id=name -> { id } ];
+ ty = OPT ["in"; t=pattern -> { t } ] ->
+ { (c,ona,ty) } ] ]
;
case_type:
- [ [ "return"; ty = operconstr LEVEL "100" -> ty ] ]
+ [ [ "return"; ty = operconstr LEVEL "100" -> { ty } ] ]
;
return_type:
- [ [ a = OPT [ na = OPT["as"; na=name -> na];
- ty = case_type -> (na,ty) ] ->
- match a with
+ [ [ a = OPT [ na = OPT["as"; na=name -> { na } ];
+ ty = case_type -> { (na,ty) } ] ->
+ { match a with
| None -> None, None
- | Some (na,t) -> (na, Some t)
+ | Some (na,t) -> (na, Some t) }
] ]
;
branches:
- [ [ OPT"|"; br=LIST0 eqn SEP "|" -> br ] ]
+ [ [ OPT"|"; br=LIST0 eqn SEP "|" -> { br } ] ]
;
mult_pattern:
- [ [ pl = LIST1 pattern LEVEL "99" SEP "," -> pl ] ]
+ [ [ pl = LIST1 pattern LEVEL "99" SEP "," -> { pl } ] ]
;
eqn:
[ [ pll = LIST1 mult_pattern SEP "|";
- "=>"; rhs = lconstr -> (CAst.make ~loc:!@loc (pll,rhs)) ] ]
+ "=>"; rhs = lconstr -> { (CAst.make ~loc (pll,rhs)) } ] ]
;
record_pattern:
- [ [ id = global; ":="; pat = pattern -> (id, pat) ] ]
+ [ [ id = global; ":="; pat = pattern -> { (id, pat) } ] ]
;
record_patterns:
- [ [ p = record_pattern; ";"; ps = record_patterns -> p :: ps
- | p = record_pattern; ";" -> [p]
- | p = record_pattern-> [p]
- | -> []
+ [ [ p = record_pattern; ";"; ps = record_patterns -> { p :: ps }
+ | p = record_pattern; ";" -> { [p] }
+ | p = record_pattern-> { [p] }
+ | -> { [] }
] ]
;
pattern:
[ "200" RIGHTA [ ]
| "100" RIGHTA
- [ p = pattern; "|"; pl = LIST1 pattern SEP "|" -> CAst.make ~loc:!@loc @@ CPatOr (p::pl) ]
+ [ p = pattern; "|"; pl = LIST1 pattern SEP "|" -> { CAst.make ~loc @@ CPatOr (p::pl) } ]
| "99" RIGHTA [ ]
| "90" RIGHTA [ ]
| "10" LEFTA
[ p = pattern; "as"; na = name ->
- CAst.make ~loc:!@loc @@ CPatAlias (p, na)
- | p = pattern; lp = LIST1 NEXT -> mkAppPattern ~loc:!@loc p lp
+ { CAst.make ~loc @@ CPatAlias (p, na) }
+ | p = pattern; lp = LIST1 NEXT -> { mkAppPattern ~loc p lp }
| "@"; r = Prim.reference; lp = LIST0 NEXT ->
- CAst.make ~loc:!@loc @@ CPatCstr (r, Some lp, []) ]
+ { CAst.make ~loc @@ CPatCstr (r, Some lp, []) } ]
| "1" LEFTA
- [ c = pattern; "%"; key=IDENT -> CAst.make ~loc:!@loc @@ CPatDelimiters (key,c) ]
+ [ c = pattern; "%"; key=IDENT -> { CAst.make ~loc @@ CPatDelimiters (key,c) } ]
| "0"
- [ r = Prim.reference -> CAst.make ~loc:!@loc @@ CPatAtom (Some r)
- | "{|"; pat = record_patterns; "|}" -> CAst.make ~loc:!@loc @@ CPatRecord pat
- | "_" -> CAst.make ~loc:!@loc @@ CPatAtom None
+ [ r = Prim.reference -> { CAst.make ~loc @@ CPatAtom (Some r) }
+ | "{|"; pat = record_patterns; "|}" -> { CAst.make ~loc @@ CPatRecord pat }
+ | "_" -> { CAst.make ~loc @@ CPatAtom None }
| "("; p = pattern LEVEL "200"; ")" ->
- (match p.CAst.v with
+ { (match p.CAst.v with
| CPatPrim (Numeral (n,true)) ->
- CAst.make ~loc:!@loc @@ CPatNotation("( _ )",([p],[]),[])
- | _ -> p)
+ CAst.make ~loc @@ CPatNotation("( _ )",([p],[]),[])
+ | _ -> p) }
| "("; p = pattern LEVEL "200"; ":"; ty = lconstr; ")" ->
- let p =
+ { let p =
match p with
| { CAst.v = CPatPrim (Numeral (n,true)) } ->
- CAst.make ~loc:!@loc @@ CPatNotation("( _ )",([p],[]),[])
+ CAst.make ~loc @@ CPatNotation("( _ )",([p],[]),[])
| _ -> p
in
- CAst.make ~loc:!@loc @@ CPatCast (p, ty)
- | n = INT -> CAst.make ~loc:!@loc @@ CPatPrim (Numeral (n,true))
- | s = string -> CAst.make ~loc:!@loc @@ CPatPrim (String s) ] ]
+ CAst.make ~loc @@ CPatCast (p, ty) }
+ | n = INT -> { CAst.make ~loc @@ CPatPrim (Numeral (n,true)) }
+ | s = string -> { CAst.make ~loc @@ CPatPrim (String s) } ] ]
;
impl_ident_tail:
- [ [ "}" -> binder_of_name Implicit
+ [ [ "}" -> { binder_of_name Implicit }
| nal=LIST1 name; ":"; c=lconstr; "}" ->
- (fun na -> CLocalAssum (na::nal,Default Implicit,c))
+ { (fun na -> CLocalAssum (na::nal,Default Implicit,c)) }
| nal=LIST1 name; "}" ->
- (fun na -> CLocalAssum (na::nal,Default Implicit,
- CAst.make ?loc:(Loc.merge_opt na.CAst.loc (Some !@loc)) @@
- CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None)))
+ { (fun na -> CLocalAssum (na::nal,Default Implicit,
+ CAst.make ?loc:(Loc.merge_opt na.CAst.loc (Some loc)) @@
+ CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None))) }
| ":"; c=lconstr; "}" ->
- (fun na -> CLocalAssum ([na],Default Implicit,c))
+ { (fun na -> CLocalAssum ([na],Default Implicit,c)) }
] ]
;
fixannot:
- [ [ "{"; IDENT "struct"; id=identref; "}" -> (Some id, CStructRec)
- | "{"; IDENT "wf"; rel=constr; id=OPT identref; "}" -> (id, CWfRec rel)
+ [ [ "{"; IDENT "struct"; id=identref; "}" -> { (Some id, CStructRec) }
+ | "{"; IDENT "wf"; rel=constr; id=OPT identref; "}" -> { (id, CWfRec rel) }
| "{"; IDENT "measure"; m=constr; id=OPT identref;
- rel=OPT constr; "}" -> (id, CMeasureRec (m,rel))
+ rel=OPT constr; "}" -> { (id, CMeasureRec (m,rel)) }
] ]
;
impl_name_head:
- [ [ id = impl_ident_head -> (CAst.make ~loc:!@loc @@ Name id) ] ]
+ [ [ id = impl_ident_head -> { CAst.make ~loc @@ Name id } ] ]
;
binders_fixannot:
[ [ na = impl_name_head; assum = impl_ident_tail; bl = binders_fixannot ->
- (assum na :: fst bl), snd bl
- | f = fixannot -> [], f
- | b = binder; bl = binders_fixannot -> b @ fst bl, snd bl
- | -> [], (None, CStructRec)
+ { (assum na :: fst bl), snd bl }
+ | f = fixannot -> { [], f }
+ | b = binder; bl = binders_fixannot -> { b @ fst bl, snd bl }
+ | -> { [], (None, CStructRec) }
] ]
;
open_binders:
(* Same as binders but parentheses around a closed binder are optional if
the latter is unique *)
- [ [ (* open binder *)
+ [ [
id = name; idl = LIST0 name; ":"; c = lconstr ->
- [CLocalAssum (id::idl,Default Explicit,c)]
- (* binders factorized with open binder *)
+ { [CLocalAssum (id::idl,Default Explicit,c)]
+ (* binders factorized with open binder *) }
| id = name; idl = LIST0 name; bl = binders ->
- binders_of_names (id::idl) @ bl
+ { binders_of_names (id::idl) @ bl }
| id1 = name; ".."; id2 = name ->
- [CLocalAssum ([id1;(CAst.make ~loc:!@loc (Name ldots_var));id2],
- Default Explicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
+ { [CLocalAssum ([id1;(CAst.make ~loc (Name ldots_var));id2],
+ Default Explicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))] }
| bl = closed_binder; bl' = binders ->
- bl@bl'
+ { bl@bl' }
] ]
;
binders:
- [ [ l = LIST0 binder -> List.flatten l ] ]
+ [ [ l = LIST0 binder -> { List.flatten l } ] ]
;
binder:
- [ [ id = name -> [CLocalAssum ([id],Default Explicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
- | bl = closed_binder -> bl ] ]
+ [ [ id = name -> { [CLocalAssum ([id],Default Explicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))] }
+ | bl = closed_binder -> { bl } ] ]
;
closed_binder:
[ [ "("; id=name; idl=LIST1 name; ":"; c=lconstr; ")" ->
- [CLocalAssum (id::idl,Default Explicit,c)]
+ { [CLocalAssum (id::idl,Default Explicit,c)] }
| "("; id=name; ":"; c=lconstr; ")" ->
- [CLocalAssum ([id],Default Explicit,c)]
+ { [CLocalAssum ([id],Default Explicit,c)] }
| "("; id=name; ":="; c=lconstr; ")" ->
- (match c.CAst.v with
+ { (match c.CAst.v with
| CCast(c, CastConv t) -> [CLocalDef (id,c,Some t)]
- | _ -> [CLocalDef (id,c,None)])
+ | _ -> [CLocalDef (id,c,None)]) }
| "("; id=name; ":"; t=lconstr; ":="; c=lconstr; ")" ->
- [CLocalDef (id,c,Some t)]
+ { [CLocalDef (id,c,Some t)] }
| "{"; id=name; "}" ->
- [CLocalAssum ([id],Default Implicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
+ { [CLocalAssum ([id],Default Implicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))] }
| "{"; id=name; idl=LIST1 name; ":"; c=lconstr; "}" ->
- [CLocalAssum (id::idl,Default Implicit,c)]
+ { [CLocalAssum (id::idl,Default Implicit,c)] }
| "{"; id=name; ":"; c=lconstr; "}" ->
- [CLocalAssum ([id],Default Implicit,c)]
+ { [CLocalAssum ([id],Default Implicit,c)] }
| "{"; id=name; idl=LIST1 name; "}" ->
- List.map (fun id -> CLocalAssum ([id],Default Implicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))) (id::idl)
+ { List.map (fun id -> CLocalAssum ([id],Default Implicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))) (id::idl) }
| "`("; tc = LIST1 typeclass_constraint SEP "," ; ")" ->
- List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Explicit, b), t)) tc
+ { List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Explicit, b), t)) tc }
| "`{"; tc = LIST1 typeclass_constraint SEP "," ; "}" ->
- List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Implicit, b), t)) tc
+ { List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Implicit, b), t)) tc }
| "'"; p = pattern LEVEL "0" ->
- let (p, ty) =
+ { let (p, ty) =
match p.CAst.v with
| CPatCast (p, ty) -> (p, Some ty)
| _ -> (p, None)
in
- [CLocalPattern (CAst.make ~loc:!@loc (p, ty))]
+ [CLocalPattern (CAst.make ~loc (p, ty))] }
] ]
;
typeclass_constraint:
- [ [ "!" ; c = operconstr LEVEL "200" -> (CAst.make ~loc:!@loc Anonymous), true, c
- | "{"; id = name; "}"; ":" ; expl = [ "!" -> true | -> false ] ; c = operconstr LEVEL "200" ->
- id, expl, c
- | iid=name_colon ; expl = [ "!" -> true | -> false ] ; c = operconstr LEVEL "200" ->
- (CAst.make ~loc:!@loc iid), expl, c
+ [ [ "!" ; c = operconstr LEVEL "200" -> { (CAst.make ~loc Anonymous), true, c }
+ | "{"; id = name; "}"; ":" ; expl = [ "!" -> { true } | -> { false } ] ; c = operconstr LEVEL "200" ->
+ { id, expl, c }
+ | iid=name_colon ; expl = [ "!" -> { true } | -> { false } ] ; c = operconstr LEVEL "200" ->
+ { (CAst.make ~loc iid), expl, c }
| c = operconstr LEVEL "200" ->
- (CAst.make ~loc:!@loc Anonymous), false, c
+ { (CAst.make ~loc Anonymous), false, c }
] ]
;
type_cstr:
- [ [ c=OPT [":"; c=lconstr -> c] -> Loc.tag ~loc:!@loc c ] ]
+ [ [ c=OPT [":"; c=lconstr -> { c } ] -> { Loc.tag ~loc c } ] ]
;
- END;;
+ END
diff --git a/parsing/g_prim.ml4 b/parsing/g_prim.mlg
index 91dce27fe..774db97f2 100644
--- a/parsing/g_prim.ml4
+++ b/parsing/g_prim.mlg
@@ -8,6 +8,8 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Names
open Libnames
@@ -30,93 +32,96 @@ let my_int_of_string loc s =
with Failure _ | Exit ->
CErrors.user_err ~loc (Pp.str "Cannot support a so large number.")
-GEXTEND Gram
+}
+
+GRAMMAR EXTEND Gram
GLOBAL:
bigint natural integer identref name ident var preident
fullyqualid qualid reference dirpath ne_lstring
ne_string string lstring pattern_ident pattern_identref by_notation smart_global;
preident:
- [ [ s = IDENT -> s ] ]
+ [ [ s = IDENT -> { s } ] ]
;
ident:
- [ [ s = IDENT -> Id.of_string s ] ]
+ [ [ s = IDENT -> { Id.of_string s } ] ]
;
pattern_ident:
- [ [ LEFTQMARK; id = ident -> id ] ]
+ [ [ LEFTQMARK; id = ident -> { id } ] ]
;
pattern_identref:
- [ [ id = pattern_ident -> CAst.make ~loc:!@loc id ] ]
+ [ [ id = pattern_ident -> { CAst.make ~loc id } ] ]
;
var: (* as identref, but interpret as a term identifier in ltac *)
- [ [ id = ident -> CAst.make ~loc:!@loc id ] ]
+ [ [ id = ident -> { CAst.make ~loc id } ] ]
;
identref:
- [ [ id = ident -> CAst.make ~loc:!@loc id ] ]
+ [ [ id = ident -> { CAst.make ~loc id } ] ]
;
field:
- [ [ s = FIELD -> Id.of_string s ] ]
+ [ [ s = FIELD -> { Id.of_string s } ] ]
;
fields:
- [ [ id = field; (l,id') = fields -> (l@[id],id')
- | id = field -> ([],id)
+ [ [ id = field; f = fields -> { let (l,id') = f in (l@[id],id') }
+ | id = field -> { ([],id) }
] ]
;
fullyqualid:
- [ [ id = ident; (l,id')=fields -> CAst.make ~loc:!@loc @@ id::List.rev (id'::l)
- | id = ident -> CAst.make ~loc:!@loc [id]
+ [ [ id = ident; f=fields -> { let (l,id') = f in CAst.make ~loc @@ id::List.rev (id'::l) }
+ | id = ident -> { CAst.make ~loc [id] }
] ]
;
basequalid:
- [ [ id = ident; (l,id')=fields -> local_make_qualid !@loc (l@[id]) id'
- | id = ident -> qualid_of_ident ~loc:!@loc id
+ [ [ id = ident; f=fields -> { let (l,id') = f in local_make_qualid loc (l@[id]) id' }
+ | id = ident -> { qualid_of_ident ~loc id }
] ]
;
name:
- [ [ IDENT "_" -> CAst.make ~loc:!@loc Anonymous
- | id = ident -> CAst.make ~loc:!@loc @@ Name id ] ]
+ [ [ IDENT "_" -> { CAst.make ~loc Anonymous }
+ | id = ident -> { CAst.make ~loc @@ Name id } ] ]
;
reference:
- [ [ id = ident; (l,id') = fields ->
- local_make_qualid !@loc (l@[id]) id'
- | id = ident -> local_make_qualid !@loc [] id
+ [ [ id = ident; f = fields -> {
+ let (l,id') = f in
+ local_make_qualid loc (l@[id]) id' }
+ | id = ident -> { local_make_qualid loc [] id }
] ]
;
by_notation:
- [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> key ] -> (s, sc) ] ]
+ [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> { key } ] -> { (s, sc) } ] ]
;
smart_global:
- [ [ c = reference -> CAst.make ~loc:!@loc @@ Constrexpr.AN c
- | ntn = by_notation -> CAst.make ~loc:!@loc @@ Constrexpr.ByNotation ntn ] ]
+ [ [ c = reference -> { CAst.make ~loc @@ Constrexpr.AN c }
+ | ntn = by_notation -> { CAst.make ~loc @@ Constrexpr.ByNotation ntn } ] ]
;
qualid:
- [ [ qid = basequalid -> qid ] ]
+ [ [ qid = basequalid -> { qid } ] ]
;
ne_string:
[ [ s = STRING ->
- if s="" then CErrors.user_err ~loc:!@loc (Pp.str"Empty string."); s
+ { if s="" then CErrors.user_err ~loc (Pp.str"Empty string."); s }
] ]
;
ne_lstring:
- [ [ s = ne_string -> CAst.make ~loc:!@loc s ] ]
+ [ [ s = ne_string -> { CAst.make ~loc s } ] ]
;
dirpath:
[ [ id = ident; l = LIST0 field ->
- DirPath.make (List.rev (id::l)) ] ]
+ { DirPath.make (List.rev (id::l)) } ] ]
;
string:
- [ [ s = STRING -> s ] ]
+ [ [ s = STRING -> { s } ] ]
;
lstring:
- [ [ s = string -> (CAst.make ~loc:!@loc s) ] ]
+ [ [ s = string -> { CAst.make ~loc s } ] ]
;
integer:
- [ [ i = INT -> my_int_of_string (!@loc) i
- | "-"; i = INT -> - my_int_of_string (!@loc) i ] ]
+ [ [ i = INT -> { my_int_of_string loc i }
+ | "-"; i = INT -> { - my_int_of_string loc i } ] ]
;
natural:
- [ [ i = INT -> my_int_of_string (!@loc) i ] ]
+ [ [ i = INT -> { my_int_of_string loc i } ] ]
;
bigint: (* Negative numbers are dealt with elsewhere *)
- [ [ i = INT -> i ] ]
+ [ [ i = INT -> { i } ] ]
;
END
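
The g_prim hunk above shows the conversion pattern used throughout this merge: under the new GRAMMAR EXTEND syntax every semantic action is wrapped in { ... }, including the actions of inline sub-rules such as OPT [ ... ], and the current location is available directly as loc instead of through the camlp5 antiquotation !@loc. For clarity, here is the by_notation rule from that hunk shown side by side rather than interleaved (illustration only):

    (* old .ml4 / GEXTEND form: bare actions, location via !@loc *)
    by_notation:
      [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> key ] -> (s, sc) ] ]
    ;

    (* new .mlg / GRAMMAR EXTEND form: every action, inner ones included, is braced *)
    by_notation:
      [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> { key } ] -> { (s, sc) } ] ]
    ;
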
diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml
index 6fdd9ea9b..171276a70 100644
--- a/parsing/pcoq.ml
+++ b/parsing/pcoq.ml
@@ -17,9 +17,17 @@ let curry f x y = f (x, y)
let uncurry f (x,y) = f x y
(** Location Utils *)
+let ploc_file_of_coq_file = function
+| Loc.ToplevelInput -> ""
+| Loc.InFile f -> f
+
let coq_file_of_ploc_file s =
if s = "" then Loc.ToplevelInput else Loc.InFile s
+let of_coqloc loc =
+ let open Loc in
+ Ploc.make_loc (ploc_file_of_coq_file loc.fname) loc.line_nb loc.bol_pos (loc.bp, loc.ep) ""
+
let to_coqloc loc =
{ Loc.fname = coq_file_of_ploc_file (Ploc.file_name loc);
Loc.line_nb = Ploc.line_nb loc;
@@ -236,7 +244,7 @@ let rec symbol_of_prod_entry_key : type s a. (s, a) symbol -> _ = function
| Aentry e ->
Symbols.snterm (G.Entry.obj e)
| Aentryl (e, n) ->
- Symbols.snterml (G.Entry.obj e, string_of_int n)
+ Symbols.snterml (G.Entry.obj e, n)
| Arules rs ->
G.srules' (List.map symbol_of_rules rs)
@@ -328,6 +336,7 @@ module Gram =
I'm not entirely sure it makes sense, but at least it would be more correct.
*)
G.delete_rule e pil
+ let safe_extend e ext = grammar_extend e None ext
end
(** Remove extensions
diff --git a/parsing/pcoq.mli b/parsing/pcoq.mli
index f959bd80c..48604e188 100644
--- a/parsing/pcoq.mli
+++ b/parsing/pcoq.mli
@@ -80,6 +80,8 @@ module type S =
(* Get comment parsing information from the Lexer *)
val comment_state : coq_parsable -> ((int * int) * string) list
+ val safe_extend : 'a entry -> 'a Extend.extend_statement -> unit
+
(* Apparently not used *)
val srules' : production_rule list -> symbol
val parse_tokens_after_filter : 'a entry -> Tok.t Stream.t -> 'a
@@ -299,6 +301,7 @@ val recover_grammar_command : 'a grammar_command -> 'a list
val with_grammar_rule_protection : ('a -> 'b) -> 'a -> 'b
(** Location Utils *)
+val of_coqloc : Loc.t -> Ploc.t
val to_coqloc : Ploc.t -> Loc.t
val (!@) : Ploc.t -> Loc.t
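
Besides of_coqloc (the converse of the existing to_coqloc, turning a Coq Loc.t back into a camlp5 Ploc.t), the Pcoq interface gains safe_extend, which simply calls grammar_extend with None as the reinitialisation argument. A minimal usage sketch, assuming the Coq parsing libraries are available; my_entry and my_ext are placeholders, not real values:

    (* Round-trip a location through the new converter and extend an
       entry through the new wrapper. *)
    let reparse_loc l = Pcoq.to_coqloc (Pcoq.of_coqloc l)
    let install my_entry my_ext = Pcoq.Gram.safe_extend my_entry my_ext
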
diff --git a/plugins/ltac/g_tactic.ml4 b/plugins/ltac/g_tactic.mlg
index 31bc34a32..2e1ce814a 100644
--- a/plugins/ltac/g_tactic.ml4
+++ b/plugins/ltac/g_tactic.mlg
@@ -8,6 +8,8 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Pp
open CErrors
open Util
@@ -215,486 +217,488 @@ let warn_deprecated_eqn_syntax =
open Pvernac.Vernac_
-GEXTEND Gram
+}
+
+GRAMMAR EXTEND Gram
GLOBAL: simple_tactic constr_with_bindings quantified_hypothesis
bindings red_expr int_or_var open_constr uconstr
simple_intropattern in_clause clause_dft_concl hypident destruction_arg;
int_or_var:
- [ [ n = integer -> ArgArg n
- | id = identref -> ArgVar id ] ]
+ [ [ n = integer -> { ArgArg n }
+ | id = identref -> { ArgVar id } ] ]
;
nat_or_var:
- [ [ n = natural -> ArgArg n
- | id = identref -> ArgVar id ] ]
+ [ [ n = natural -> { ArgArg n }
+ | id = identref -> { ArgVar id } ] ]
;
(* An identifier or a quotation meta-variable *)
id_or_meta:
- [ [ id = identref -> id ] ]
+ [ [ id = identref -> { id } ] ]
;
open_constr:
- [ [ c = constr -> c ] ]
+ [ [ c = constr -> { c } ] ]
;
uconstr:
- [ [ c = constr -> c ] ]
+ [ [ c = constr -> { c } ] ]
;
destruction_arg:
- [ [ n = natural -> (None,ElimOnAnonHyp n)
+ [ [ n = natural -> { (None,ElimOnAnonHyp n) }
| test_lpar_id_rpar; c = constr_with_bindings ->
- (Some false,destruction_arg_of_constr c)
- | c = constr_with_bindings_arg -> on_snd destruction_arg_of_constr c
+ { (Some false,destruction_arg_of_constr c) }
+ | c = constr_with_bindings_arg -> { on_snd destruction_arg_of_constr c }
] ]
;
constr_with_bindings_arg:
- [ [ ">"; c = constr_with_bindings -> (Some true,c)
- | c = constr_with_bindings -> (None,c) ] ]
+ [ [ ">"; c = constr_with_bindings -> { (Some true,c) }
+ | c = constr_with_bindings -> { (None,c) } ] ]
;
quantified_hypothesis:
- [ [ id = ident -> NamedHyp id
- | n = natural -> AnonHyp n ] ]
+ [ [ id = ident -> { NamedHyp id }
+ | n = natural -> { AnonHyp n } ] ]
;
conversion:
- [ [ c = constr -> (None, c)
- | c1 = constr; "with"; c2 = constr -> (Some (AllOccurrences,c1),c2)
+ [ [ c = constr -> { (None, c) }
+ | c1 = constr; "with"; c2 = constr -> { (Some (AllOccurrences,c1),c2) }
| c1 = constr; "at"; occs = occs_nums; "with"; c2 = constr ->
- (Some (occs,c1), c2) ] ]
+ { (Some (occs,c1), c2) } ] ]
;
occs_nums:
- [ [ nl = LIST1 nat_or_var -> OnlyOccurrences nl
+ [ [ nl = LIST1 nat_or_var -> { OnlyOccurrences nl }
| "-"; n = nat_or_var; nl = LIST0 int_or_var ->
(* have used int_or_var instead of nat_or_var for compatibility *)
- AllOccurrencesBut (List.map (map_int_or_var abs) (n::nl)) ] ]
+ { AllOccurrencesBut (List.map (map_int_or_var abs) (n::nl)) } ] ]
;
occs:
- [ [ "at"; occs = occs_nums -> occs | -> AllOccurrences ] ]
+ [ [ "at"; occs = occs_nums -> { occs } | -> { AllOccurrences } ] ]
;
pattern_occ:
- [ [ c = constr; nl = occs -> (nl,c) ] ]
+ [ [ c = constr; nl = occs -> { (nl,c) } ] ]
;
ref_or_pattern_occ:
(* If a string, it is interpreted as a ref
(anyway a Coq string does not reduce) *)
- [ [ c = smart_global; nl = occs -> nl,Inl c
- | c = constr; nl = occs -> nl,Inr c ] ]
+ [ [ c = smart_global; nl = occs -> { nl,Inl c }
+ | c = constr; nl = occs -> { nl,Inr c } ] ]
;
unfold_occ:
- [ [ c = smart_global; nl = occs -> (nl,c) ] ]
+ [ [ c = smart_global; nl = occs -> { (nl,c) } ] ]
;
intropatterns:
- [ [ l = LIST0 nonsimple_intropattern -> l ]]
+ [ [ l = LIST0 nonsimple_intropattern -> { l } ] ]
;
ne_intropatterns:
- [ [ l = LIST1 nonsimple_intropattern -> l ]]
+ [ [ l = LIST1 nonsimple_intropattern -> { l } ] ]
;
or_and_intropattern:
- [ [ "["; tc = LIST1 intropatterns SEP "|"; "]" -> IntroOrPattern tc
- | "()" -> IntroAndPattern []
- | "("; si = simple_intropattern; ")" -> IntroAndPattern [si]
+ [ [ "["; tc = LIST1 intropatterns SEP "|"; "]" -> { IntroOrPattern tc }
+ | "()" -> { IntroAndPattern [] }
+ | "("; si = simple_intropattern; ")" -> { IntroAndPattern [si] }
| "("; si = simple_intropattern; ",";
tc = LIST1 simple_intropattern SEP "," ; ")" ->
- IntroAndPattern (si::tc)
+ { IntroAndPattern (si::tc) }
| "("; si = simple_intropattern; "&";
tc = LIST1 simple_intropattern SEP "&" ; ")" ->
(* (A & B & C) is translated into (A,(B,C)) *)
- let rec pairify = function
+ { let rec pairify = function
| ([]|[_]|[_;_]) as l -> l
| t::q -> [t; CAst.make ?loc:(loc_of_ne_list q) (IntroAction (IntroOrAndPattern (IntroAndPattern (pairify q))))]
- in IntroAndPattern (pairify (si::tc)) ] ]
+ in IntroAndPattern (pairify (si::tc)) } ] ]
;
equality_intropattern:
- [ [ "->" -> IntroRewrite true
- | "<-" -> IntroRewrite false
- | "[="; tc = intropatterns; "]" -> IntroInjection tc ] ]
+ [ [ "->" -> { IntroRewrite true }
+ | "<-" -> { IntroRewrite false }
+ | "[="; tc = intropatterns; "]" -> { IntroInjection tc } ] ]
;
naming_intropattern:
- [ [ prefix = pattern_ident -> IntroFresh prefix
- | "?" -> IntroAnonymous
- | id = ident -> IntroIdentifier id ] ]
+ [ [ prefix = pattern_ident -> { IntroFresh prefix }
+ | "?" -> { IntroAnonymous }
+ | id = ident -> { IntroIdentifier id } ] ]
;
nonsimple_intropattern:
- [ [ l = simple_intropattern -> l
- | "*" -> CAst.make ~loc:!@loc @@ IntroForthcoming true
- | "**" -> CAst.make ~loc:!@loc @@ IntroForthcoming false ]]
+ [ [ l = simple_intropattern -> { l }
+ | "*" -> { CAst.make ~loc @@ IntroForthcoming true }
+ | "**" -> { CAst.make ~loc @@ IntroForthcoming false } ] ]
;
simple_intropattern:
[ [ pat = simple_intropattern_closed;
- l = LIST0 ["%"; c = operconstr LEVEL "0" -> c] ->
- let {CAst.loc=loc0;v=pat} = pat in
+ l = LIST0 ["%"; c = operconstr LEVEL "0" -> { c } ] ->
+ { let {CAst.loc=loc0;v=pat} = pat in
let f c pat =
let loc1 = Constrexpr_ops.constr_loc c in
let loc = Loc.merge_opt loc0 loc1 in
IntroAction (IntroApplyOn (CAst.(make ?loc:loc1 c),CAst.(make ?loc pat))) in
- CAst.make ~loc:!@loc @@ List.fold_right f l pat ] ]
+ CAst.make ~loc @@ List.fold_right f l pat } ] ]
;
simple_intropattern_closed:
- [ [ pat = or_and_intropattern -> CAst.make ~loc:!@loc @@ IntroAction (IntroOrAndPattern pat)
- | pat = equality_intropattern -> CAst.make ~loc:!@loc @@ IntroAction pat
- | "_" -> CAst.make ~loc:!@loc @@ IntroAction IntroWildcard
- | pat = naming_intropattern -> CAst.make ~loc:!@loc @@ IntroNaming pat ] ]
+ [ [ pat = or_and_intropattern -> { CAst.make ~loc @@ IntroAction (IntroOrAndPattern pat) }
+ | pat = equality_intropattern -> { CAst.make ~loc @@ IntroAction pat }
+ | "_" -> { CAst.make ~loc @@ IntroAction IntroWildcard }
+ | pat = naming_intropattern -> { CAst.make ~loc @@ IntroNaming pat } ] ]
;
simple_binding:
- [ [ "("; id = ident; ":="; c = lconstr; ")" -> CAst.make ~loc:!@loc (NamedHyp id, c)
- | "("; n = natural; ":="; c = lconstr; ")" -> CAst.make ~loc:!@loc (AnonHyp n, c) ] ]
+ [ [ "("; id = ident; ":="; c = lconstr; ")" -> { CAst.make ~loc (NamedHyp id, c) }
+ | "("; n = natural; ":="; c = lconstr; ")" -> { CAst.make ~loc (AnonHyp n, c) } ] ]
;
bindings:
[ [ test_lpar_idnum_coloneq; bl = LIST1 simple_binding ->
- ExplicitBindings bl
- | bl = LIST1 constr -> ImplicitBindings bl ] ]
+ { ExplicitBindings bl }
+ | bl = LIST1 constr -> { ImplicitBindings bl } ] ]
;
constr_with_bindings:
- [ [ c = constr; l = with_bindings -> (c, l) ] ]
+ [ [ c = constr; l = with_bindings -> { (c, l) } ] ]
;
with_bindings:
- [ [ "with"; bl = bindings -> bl | -> NoBindings ] ]
+ [ [ "with"; bl = bindings -> { bl } | -> { NoBindings } ] ]
;
red_flags:
- [ [ IDENT "beta" -> [FBeta]
- | IDENT "iota" -> [FMatch;FFix;FCofix]
- | IDENT "match" -> [FMatch]
- | IDENT "fix" -> [FFix]
- | IDENT "cofix" -> [FCofix]
- | IDENT "zeta" -> [FZeta]
- | IDENT "delta"; d = delta_flag -> [d]
+ [ [ IDENT "beta" -> { [FBeta] }
+ | IDENT "iota" -> { [FMatch;FFix;FCofix] }
+ | IDENT "match" -> { [FMatch] }
+ | IDENT "fix" -> { [FFix] }
+ | IDENT "cofix" -> { [FCofix] }
+ | IDENT "zeta" -> { [FZeta] }
+ | IDENT "delta"; d = delta_flag -> { [d] }
] ]
;
delta_flag:
- [ [ "-"; "["; idl = LIST1 smart_global; "]" -> FDeltaBut idl
- | "["; idl = LIST1 smart_global; "]" -> FConst idl
- | -> FDeltaBut []
+ [ [ "-"; "["; idl = LIST1 smart_global; "]" -> { FDeltaBut idl }
+ | "["; idl = LIST1 smart_global; "]" -> { FConst idl }
+ | -> { FDeltaBut [] }
] ]
;
strategy_flag:
- [ [ s = LIST1 red_flags -> Redops.make_red_flag (List.flatten s)
- | d = delta_flag -> all_with d
+ [ [ s = LIST1 red_flags -> { Redops.make_red_flag (List.flatten s) }
+ | d = delta_flag -> { all_with d }
] ]
;
red_expr:
- [ [ IDENT "red" -> Red false
- | IDENT "hnf" -> Hnf
- | IDENT "simpl"; d = delta_flag; po = OPT ref_or_pattern_occ -> Simpl (all_with d,po)
- | IDENT "cbv"; s = strategy_flag -> Cbv s
- | IDENT "cbn"; s = strategy_flag -> Cbn s
- | IDENT "lazy"; s = strategy_flag -> Lazy s
- | IDENT "compute"; delta = delta_flag -> Cbv (all_with delta)
- | IDENT "vm_compute"; po = OPT ref_or_pattern_occ -> CbvVm po
- | IDENT "native_compute"; po = OPT ref_or_pattern_occ -> CbvNative po
- | IDENT "unfold"; ul = LIST1 unfold_occ SEP "," -> Unfold ul
- | IDENT "fold"; cl = LIST1 constr -> Fold cl
- | IDENT "pattern"; pl = LIST1 pattern_occ SEP"," -> Pattern pl
- | s = IDENT -> ExtraRedExpr s ] ]
+ [ [ IDENT "red" -> { Red false }
+ | IDENT "hnf" -> { Hnf }
+ | IDENT "simpl"; d = delta_flag; po = OPT ref_or_pattern_occ -> { Simpl (all_with d,po) }
+ | IDENT "cbv"; s = strategy_flag -> { Cbv s }
+ | IDENT "cbn"; s = strategy_flag -> { Cbn s }
+ | IDENT "lazy"; s = strategy_flag -> { Lazy s }
+ | IDENT "compute"; delta = delta_flag -> { Cbv (all_with delta) }
+ | IDENT "vm_compute"; po = OPT ref_or_pattern_occ -> { CbvVm po }
+ | IDENT "native_compute"; po = OPT ref_or_pattern_occ -> { CbvNative po }
+ | IDENT "unfold"; ul = LIST1 unfold_occ SEP "," -> { Unfold ul }
+ | IDENT "fold"; cl = LIST1 constr -> { Fold cl }
+ | IDENT "pattern"; pl = LIST1 pattern_occ SEP"," -> { Pattern pl }
+ | s = IDENT -> { ExtraRedExpr s } ] ]
;
hypident:
[ [ id = id_or_meta ->
- let id : lident = id in
- id,InHyp
+ { let id : lident = id in
+ id,InHyp }
| "("; IDENT "type"; IDENT "of"; id = id_or_meta; ")" ->
- let id : lident = id in
- id,InHypTypeOnly
+ { let id : lident = id in
+ id,InHypTypeOnly }
| "("; IDENT "value"; IDENT "of"; id = id_or_meta; ")" ->
- let id : lident = id in
- id,InHypValueOnly
+ { let id : lident = id in
+ id,InHypValueOnly }
] ]
;
hypident_occ:
- [ [ (id,l)=hypident; occs=occs ->
+ [ [ h=hypident; occs=occs ->
+ { let (id,l) = h in
let id : lident = id in
- ((occs,id),l) ] ]
+ ((occs,id),l) } ] ]
;
in_clause:
[ [ "*"; occs=occs ->
- {onhyps=None; concl_occs=occs}
+ { {onhyps=None; concl_occs=occs} }
| "*"; "|-"; occs=concl_occ ->
- {onhyps=None; concl_occs=occs}
+ { {onhyps=None; concl_occs=occs} }
| hl=LIST0 hypident_occ SEP","; "|-"; occs=concl_occ ->
- {onhyps=Some hl; concl_occs=occs}
+ { {onhyps=Some hl; concl_occs=occs} }
| hl=LIST0 hypident_occ SEP"," ->
- {onhyps=Some hl; concl_occs=NoOccurrences} ] ]
+ { {onhyps=Some hl; concl_occs=NoOccurrences} } ] ]
;
clause_dft_concl:
- [ [ "in"; cl = in_clause -> cl
- | occs=occs -> {onhyps=Some[]; concl_occs=occs}
- | -> all_concl_occs_clause ] ]
+ [ [ "in"; cl = in_clause -> { cl }
+ | occs=occs -> { {onhyps=Some[]; concl_occs=occs} }
+ | -> { all_concl_occs_clause } ] ]
;
clause_dft_all:
- [ [ "in"; cl = in_clause -> cl
- | -> {onhyps=None; concl_occs=AllOccurrences} ] ]
+ [ [ "in"; cl = in_clause -> { cl }
+ | -> { {onhyps=None; concl_occs=AllOccurrences} } ] ]
;
opt_clause:
- [ [ "in"; cl = in_clause -> Some cl
- | "at"; occs = occs_nums -> Some {onhyps=Some[]; concl_occs=occs}
- | -> None ] ]
+ [ [ "in"; cl = in_clause -> { Some cl }
+ | "at"; occs = occs_nums -> { Some {onhyps=Some[]; concl_occs=occs} }
+ | -> { None } ] ]
;
concl_occ:
- [ [ "*"; occs = occs -> occs
- | -> NoOccurrences ] ]
+ [ [ "*"; occs = occs -> { occs }
+ | -> { NoOccurrences } ] ]
;
in_hyp_list:
- [ [ "in"; idl = LIST1 id_or_meta -> idl
- | -> [] ] ]
+ [ [ "in"; idl = LIST1 id_or_meta -> { idl }
+ | -> { [] } ] ]
;
in_hyp_as:
- [ [ "in"; id = id_or_meta; ipat = as_ipat -> Some (id,ipat)
- | -> None ] ]
+ [ [ "in"; id = id_or_meta; ipat = as_ipat -> { Some (id,ipat) }
+ | -> { None } ] ]
;
orient:
- [ [ "->" -> true
- | "<-" -> false
- | -> true ]]
+ [ [ "->" -> { true }
+ | "<-" -> { false }
+ | -> { true } ] ]
;
simple_binder:
- [ [ na=name -> ([na],Default Explicit, CAst.make ~loc:!@loc @@
- CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None))
- | "("; nal=LIST1 name; ":"; c=lconstr; ")" -> (nal,Default Explicit,c)
+ [ [ na=name -> { ([na],Default Explicit, CAst.make ~loc @@
+ CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None)) }
+ | "("; nal=LIST1 name; ":"; c=lconstr; ")" -> { (nal,Default Explicit,c) }
] ]
;
fixdecl:
[ [ "("; id = ident; bl=LIST0 simple_binder; ann=fixannot;
- ":"; ty=lconstr; ")" -> (!@loc, id, bl, ann, ty) ] ]
+ ":"; ty=lconstr; ")" -> { (loc, id, bl, ann, ty) } ] ]
;
fixannot:
- [ [ "{"; IDENT "struct"; id=name; "}" -> Some id
- | -> None ] ]
+ [ [ "{"; IDENT "struct"; id=name; "}" -> { Some id }
+ | -> { None } ] ]
;
cofixdecl:
[ [ "("; id = ident; bl=LIST0 simple_binder; ":"; ty=lconstr; ")" ->
- (!@loc, id, bl, None, ty) ] ]
+ { (loc, id, bl, None, ty) } ] ]
;
bindings_with_parameters:
[ [ check_for_coloneq; "("; id = ident; bl = LIST0 simple_binder;
- ":="; c = lconstr; ")" -> (id, mkCLambdaN_simple bl c) ] ]
+ ":="; c = lconstr; ")" -> { (id, mkCLambdaN_simple bl c) } ] ]
;
eliminator:
- [ [ "using"; el = constr_with_bindings -> el ] ]
+ [ [ "using"; el = constr_with_bindings -> { el } ] ]
;
as_ipat:
- [ [ "as"; ipat = simple_intropattern -> Some ipat
- | -> None ] ]
+ [ [ "as"; ipat = simple_intropattern -> { Some ipat }
+ | -> { None } ] ]
;
or_and_intropattern_loc:
- [ [ ipat = or_and_intropattern -> ArgArg (CAst.make ~loc:!@loc ipat)
- | locid = identref -> ArgVar locid ] ]
+ [ [ ipat = or_and_intropattern -> { ArgArg (CAst.make ~loc ipat) }
+ | locid = identref -> { ArgVar locid } ] ]
;
as_or_and_ipat:
- [ [ "as"; ipat = or_and_intropattern_loc -> Some ipat
- | -> None ] ]
+ [ [ "as"; ipat = or_and_intropattern_loc -> { Some ipat }
+ | -> { None } ] ]
;
eqn_ipat:
- [ [ IDENT "eqn"; ":"; pat = naming_intropattern -> Some (CAst.make ~loc:!@loc pat)
+ [ [ IDENT "eqn"; ":"; pat = naming_intropattern -> { Some (CAst.make ~loc pat) }
| IDENT "_eqn"; ":"; pat = naming_intropattern ->
- let loc = !@loc in
- warn_deprecated_eqn_syntax ~loc "H"; Some (CAst.make ~loc pat)
+ { warn_deprecated_eqn_syntax ~loc "H"; Some (CAst.make ~loc pat) }
| IDENT "_eqn" ->
- let loc = !@loc in
- warn_deprecated_eqn_syntax ~loc "?"; Some (CAst.make ~loc IntroAnonymous)
- | -> None ] ]
+ { warn_deprecated_eqn_syntax ~loc "?"; Some (CAst.make ~loc IntroAnonymous) }
+ | -> { None } ] ]
;
as_name:
- [ [ "as"; id = ident ->Names.Name.Name id | -> Names.Name.Anonymous ] ]
+ [ [ "as"; id = ident -> { Names.Name.Name id } | -> { Names.Name.Anonymous } ] ]
;
by_tactic:
- [ [ "by"; tac = tactic_expr LEVEL "3" -> Some tac
- | -> None ] ]
+ [ [ "by"; tac = tactic_expr LEVEL "3" -> { Some tac }
+ | -> { None } ] ]
;
rewriter :
- [ [ "!"; c = constr_with_bindings_arg -> (Equality.RepeatPlus,c)
- | ["?"| LEFTQMARK]; c = constr_with_bindings_arg -> (Equality.RepeatStar,c)
- | n = natural; "!"; c = constr_with_bindings_arg -> (Equality.Precisely n,c)
- | n = natural; ["?" | LEFTQMARK]; c = constr_with_bindings_arg -> (Equality.UpTo n,c)
- | n = natural; c = constr_with_bindings_arg -> (Equality.Precisely n,c)
- | c = constr_with_bindings_arg -> (Equality.Precisely 1, c)
+ [ [ "!"; c = constr_with_bindings_arg -> { (Equality.RepeatPlus,c) }
+ | ["?" -> { () } | LEFTQMARK -> { () } ]; c = constr_with_bindings_arg -> { (Equality.RepeatStar,c) }
+ | n = natural; "!"; c = constr_with_bindings_arg -> { (Equality.Precisely n,c) }
+ | n = natural; ["?" -> { () } | LEFTQMARK -> { () } ]; c = constr_with_bindings_arg -> { (Equality.UpTo n,c) }
+ | n = natural; c = constr_with_bindings_arg -> { (Equality.Precisely n,c) }
+ | c = constr_with_bindings_arg -> { (Equality.Precisely 1, c) }
] ]
;
oriented_rewriter :
- [ [ b = orient; p = rewriter -> let (m,c) = p in (b,m,c) ] ]
+ [ [ b = orient; p = rewriter -> { let (m,c) = p in (b,m,c) } ] ]
;
induction_clause:
[ [ c = destruction_arg; pat = as_or_and_ipat; eq = eqn_ipat;
- cl = opt_clause -> (c,(eq,pat),cl) ] ]
+ cl = opt_clause -> { (c,(eq,pat),cl) } ] ]
;
induction_clause_list:
[ [ ic = LIST1 induction_clause SEP ","; el = OPT eliminator;
cl_tolerance = opt_clause ->
(* Condition for accepting "in" at the end by compatibility *)
- match ic,el,cl_tolerance with
+ { match ic,el,cl_tolerance with
| [c,pat,None],Some _,Some _ -> ([c,pat,cl_tolerance],el)
| _,_,Some _ -> err ()
- | _,_,None -> (ic,el) ]]
+ | _,_,None -> (ic,el) } ] ]
;
simple_tactic:
[ [
(* Basic tactics *)
IDENT "intros"; pl = ne_intropatterns ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacIntroPattern (false,pl))
+ { TacAtom (Loc.tag ~loc @@ TacIntroPattern (false,pl)) }
| IDENT "intros" ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacIntroPattern (false,[CAst.make ~loc:!@loc @@IntroForthcoming false]))
+ { TacAtom (Loc.tag ~loc @@ TacIntroPattern (false,[CAst.make ~loc @@IntroForthcoming false])) }
| IDENT "eintros"; pl = ne_intropatterns ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacIntroPattern (true,pl))
+ { TacAtom (Loc.tag ~loc @@ TacIntroPattern (true,pl)) }
| IDENT "apply"; cl = LIST1 constr_with_bindings_arg SEP ",";
- inhyp = in_hyp_as -> TacAtom (Loc.tag ~loc:!@loc @@ TacApply (true,false,cl,inhyp))
+ inhyp = in_hyp_as -> { TacAtom (Loc.tag ~loc @@ TacApply (true,false,cl,inhyp)) }
| IDENT "eapply"; cl = LIST1 constr_with_bindings_arg SEP ",";
- inhyp = in_hyp_as -> TacAtom (Loc.tag ~loc:!@loc @@ TacApply (true,true,cl,inhyp))
+ inhyp = in_hyp_as -> { TacAtom (Loc.tag ~loc @@ TacApply (true,true,cl,inhyp)) }
| IDENT "simple"; IDENT "apply";
cl = LIST1 constr_with_bindings_arg SEP ",";
- inhyp = in_hyp_as -> TacAtom (Loc.tag ~loc:!@loc @@ TacApply (false,false,cl,inhyp))
+ inhyp = in_hyp_as -> { TacAtom (Loc.tag ~loc @@ TacApply (false,false,cl,inhyp)) }
| IDENT "simple"; IDENT "eapply";
cl = LIST1 constr_with_bindings_arg SEP",";
- inhyp = in_hyp_as -> TacAtom (Loc.tag ~loc:!@loc @@ TacApply (false,true,cl,inhyp))
+ inhyp = in_hyp_as -> { TacAtom (Loc.tag ~loc @@ TacApply (false,true,cl,inhyp)) }
| IDENT "elim"; cl = constr_with_bindings_arg; el = OPT eliminator ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacElim (false,cl,el))
+ { TacAtom (Loc.tag ~loc @@ TacElim (false,cl,el)) }
| IDENT "eelim"; cl = constr_with_bindings_arg; el = OPT eliminator ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacElim (true,cl,el))
- | IDENT "case"; icl = induction_clause_list -> TacAtom (Loc.tag ~loc:!@loc @@ mkTacCase false icl)
- | IDENT "ecase"; icl = induction_clause_list -> TacAtom (Loc.tag ~loc:!@loc @@ mkTacCase true icl)
+ { TacAtom (Loc.tag ~loc @@ TacElim (true,cl,el)) }
+ | IDENT "case"; icl = induction_clause_list -> { TacAtom (Loc.tag ~loc @@ mkTacCase false icl) }
+ | IDENT "ecase"; icl = induction_clause_list -> { TacAtom (Loc.tag ~loc @@ mkTacCase true icl) }
| "fix"; id = ident; n = natural; "with"; fd = LIST1 fixdecl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacMutualFix (id,n,List.map mk_fix_tac fd))
+ { TacAtom (Loc.tag ~loc @@ TacMutualFix (id,n,List.map mk_fix_tac fd)) }
| "cofix"; id = ident; "with"; fd = LIST1 cofixdecl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacMutualCofix (id,List.map mk_cofix_tac fd))
+ { TacAtom (Loc.tag ~loc @@ TacMutualCofix (id,List.map mk_cofix_tac fd)) }
- | IDENT "pose"; (id,b) = bindings_with_parameters ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,Names.Name.Name id,b,Locusops.nowhere,true,None))
+ | IDENT "pose"; bl = bindings_with_parameters ->
+ { let (id,b) = bl in TacAtom (Loc.tag ~loc @@ TacLetTac (false,Names.Name.Name id,b,Locusops.nowhere,true,None)) }
| IDENT "pose"; b = constr; na = as_name ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,na,b,Locusops.nowhere,true,None))
- | IDENT "epose"; (id,b) = bindings_with_parameters ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,Names.Name id,b,Locusops.nowhere,true,None))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (false,na,b,Locusops.nowhere,true,None)) }
+ | IDENT "epose"; bl = bindings_with_parameters ->
+ { let (id,b) = bl in TacAtom (Loc.tag ~loc @@ TacLetTac (true,Names.Name id,b,Locusops.nowhere,true,None)) }
| IDENT "epose"; b = constr; na = as_name ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,na,b,Locusops.nowhere,true,None))
- | IDENT "set"; (id,c) = bindings_with_parameters; p = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,Names.Name.Name id,c,p,true,None))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (true,na,b,Locusops.nowhere,true,None)) }
+ | IDENT "set"; bl = bindings_with_parameters; p = clause_dft_concl ->
+ { let (id,c) = bl in TacAtom (Loc.tag ~loc @@ TacLetTac (false,Names.Name.Name id,c,p,true,None)) }
| IDENT "set"; c = constr; na = as_name; p = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,na,c,p,true,None))
- | IDENT "eset"; (id,c) = bindings_with_parameters; p = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,Names.Name id,c,p,true,None))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (false,na,c,p,true,None)) }
+ | IDENT "eset"; bl = bindings_with_parameters; p = clause_dft_concl ->
+ { let (id,c) = bl in TacAtom (Loc.tag ~loc @@ TacLetTac (true,Names.Name id,c,p,true,None)) }
| IDENT "eset"; c = constr; na = as_name; p = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,na,c,p,true,None))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (true,na,c,p,true,None)) }
| IDENT "remember"; c = constr; na = as_name; e = eqn_ipat;
p = clause_dft_all ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,na,c,p,false,e))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (false,na,c,p,false,e)) }
| IDENT "eremember"; c = constr; na = as_name; e = eqn_ipat;
p = clause_dft_all ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,na,c,p,false,e))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (true,na,c,p,false,e)) }
(* Alternative syntax for "pose proof c as id" *)
| IDENT "assert"; test_lpar_id_coloneq; "("; lid = identref; ":=";
c = lconstr; ")" ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (false,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (false,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
| IDENT "eassert"; test_lpar_id_coloneq; "("; lid = identref; ":=";
c = lconstr; ")" ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (true,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (true,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
(* Alternative syntax for "assert c as id by tac" *)
| IDENT "assert"; test_lpar_id_colon; "("; lid = identref; ":";
c = lconstr; ")"; tac=by_tactic ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (false,true,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (false,true,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
| IDENT "eassert"; test_lpar_id_colon; "("; lid = identref; ":";
c = lconstr; ")"; tac=by_tactic ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (true,true,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (true,true,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
(* Alternative syntax for "enough c as id by tac" *)
| IDENT "enough"; test_lpar_id_colon; "("; lid = identref; ":";
c = lconstr; ")"; tac=by_tactic ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (false,false,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (false,false,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
| IDENT "eenough"; test_lpar_id_colon; "("; lid = identref; ":";
c = lconstr; ")"; tac=by_tactic ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (true,false,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (true,false,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
| IDENT "assert"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (false,true,Some tac,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (false,true,Some tac,ipat,c)) }
| IDENT "eassert"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (true,true,Some tac,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (true,true,Some tac,ipat,c)) }
| IDENT "pose"; IDENT "proof"; c = lconstr; ipat = as_ipat ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (false,true,None,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (false,true,None,ipat,c)) }
| IDENT "epose"; IDENT "proof"; c = lconstr; ipat = as_ipat ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (true,true,None,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (true,true,None,ipat,c)) }
| IDENT "enough"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (false,false,Some tac,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (false,false,Some tac,ipat,c)) }
| IDENT "eenough"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (true,false,Some tac,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (true,false,Some tac,ipat,c)) }
| IDENT "generalize"; c = constr ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacGeneralize [((AllOccurrences,c),Names.Name.Anonymous)])
+ { TacAtom (Loc.tag ~loc @@ TacGeneralize [((AllOccurrences,c),Names.Name.Anonymous)]) }
| IDENT "generalize"; c = constr; l = LIST1 constr ->
- let gen_everywhere c = ((AllOccurrences,c),Names.Name.Anonymous) in
- TacAtom (Loc.tag ~loc:!@loc @@ TacGeneralize (List.map gen_everywhere (c::l)))
+ { let gen_everywhere c = ((AllOccurrences,c),Names.Name.Anonymous) in
+ TacAtom (Loc.tag ~loc @@ TacGeneralize (List.map gen_everywhere (c::l))) }
| IDENT "generalize"; c = constr; lookup_at_as_comma; nl = occs;
na = as_name;
- l = LIST0 [","; c = pattern_occ; na = as_name -> (c,na)] ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacGeneralize (((nl,c),na)::l))
+ l = LIST0 [","; c = pattern_occ; na = as_name -> { (c,na) } ] ->
+ { TacAtom (Loc.tag ~loc @@ TacGeneralize (((nl,c),na)::l)) }
(* Derived basic tactics *)
| IDENT "induction"; ic = induction_clause_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInductionDestruct (true,false,ic))
+ { TacAtom (Loc.tag ~loc @@ TacInductionDestruct (true,false,ic)) }
| IDENT "einduction"; ic = induction_clause_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInductionDestruct(true,true,ic))
+ { TacAtom (Loc.tag ~loc @@ TacInductionDestruct(true,true,ic)) }
| IDENT "destruct"; icl = induction_clause_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInductionDestruct(false,false,icl))
+ { TacAtom (Loc.tag ~loc @@ TacInductionDestruct(false,false,icl)) }
| IDENT "edestruct"; icl = induction_clause_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInductionDestruct(false,true,icl))
+ { TacAtom (Loc.tag ~loc @@ TacInductionDestruct(false,true,icl)) }
(* Equality and inversion *)
| IDENT "rewrite"; l = LIST1 oriented_rewriter SEP ",";
- cl = clause_dft_concl; t=by_tactic -> TacAtom (Loc.tag ~loc:!@loc @@ TacRewrite (false,l,cl,t))
+ cl = clause_dft_concl; t=by_tactic -> { TacAtom (Loc.tag ~loc @@ TacRewrite (false,l,cl,t)) }
| IDENT "erewrite"; l = LIST1 oriented_rewriter SEP ",";
- cl = clause_dft_concl; t=by_tactic -> TacAtom (Loc.tag ~loc:!@loc @@ TacRewrite (true,l,cl,t))
+ cl = clause_dft_concl; t=by_tactic -> { TacAtom (Loc.tag ~loc @@ TacRewrite (true,l,cl,t)) }
| IDENT "dependent"; k =
- [ IDENT "simple"; IDENT "inversion" -> SimpleInversion
- | IDENT "inversion" -> FullInversion
- | IDENT "inversion_clear" -> FullInversionClear ];
+ [ IDENT "simple"; IDENT "inversion" -> { SimpleInversion }
+ | IDENT "inversion" -> { FullInversion }
+ | IDENT "inversion_clear" -> { FullInversionClear } ];
hyp = quantified_hypothesis;
- ids = as_or_and_ipat; co = OPT ["with"; c = constr -> c] ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (DepInversion (k,co,ids),hyp))
+ ids = as_or_and_ipat; co = OPT ["with"; c = constr -> { c } ] ->
+ { TacAtom (Loc.tag ~loc @@ TacInversion (DepInversion (k,co,ids),hyp)) }
| IDENT "simple"; IDENT "inversion";
hyp = quantified_hypothesis; ids = as_or_and_ipat;
cl = in_hyp_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (NonDepInversion (SimpleInversion, cl, ids), hyp))
+ { TacAtom (Loc.tag ~loc @@ TacInversion (NonDepInversion (SimpleInversion, cl, ids), hyp)) }
| IDENT "inversion";
hyp = quantified_hypothesis; ids = as_or_and_ipat;
cl = in_hyp_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (NonDepInversion (FullInversion, cl, ids), hyp))
+ { TacAtom (Loc.tag ~loc @@ TacInversion (NonDepInversion (FullInversion, cl, ids), hyp)) }
| IDENT "inversion_clear";
hyp = quantified_hypothesis; ids = as_or_and_ipat;
cl = in_hyp_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (NonDepInversion (FullInversionClear, cl, ids), hyp))
+ { TacAtom (Loc.tag ~loc @@ TacInversion (NonDepInversion (FullInversionClear, cl, ids), hyp)) }
| IDENT "inversion"; hyp = quantified_hypothesis;
"using"; c = constr; cl = in_hyp_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (InversionUsing (c,cl), hyp))
+ { TacAtom (Loc.tag ~loc @@ TacInversion (InversionUsing (c,cl), hyp)) }
(* Conversion *)
| IDENT "red"; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Red false, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Red false, cl)) }
| IDENT "hnf"; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Hnf, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Hnf, cl)) }
| IDENT "simpl"; d = delta_flag; po = OPT ref_or_pattern_occ; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Simpl (all_with d, po), cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Simpl (all_with d, po), cl)) }
| IDENT "cbv"; s = strategy_flag; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Cbv s, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Cbv s, cl)) }
| IDENT "cbn"; s = strategy_flag; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Cbn s, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Cbn s, cl)) }
| IDENT "lazy"; s = strategy_flag; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Lazy s, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Lazy s, cl)) }
| IDENT "compute"; delta = delta_flag; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Cbv (all_with delta), cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Cbv (all_with delta), cl)) }
| IDENT "vm_compute"; po = OPT ref_or_pattern_occ; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (CbvVm po, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (CbvVm po, cl)) }
| IDENT "native_compute"; po = OPT ref_or_pattern_occ; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (CbvNative po, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (CbvNative po, cl)) }
| IDENT "unfold"; ul = LIST1 unfold_occ SEP ","; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Unfold ul, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Unfold ul, cl)) }
| IDENT "fold"; l = LIST1 constr; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Fold l, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Fold l, cl)) }
| IDENT "pattern"; pl = LIST1 pattern_occ SEP","; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Pattern pl, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Pattern pl, cl)) }
(* Change ne doit pas s'appliquer dans un Definition t := Eval ... *)
- | IDENT "change"; (oc,c) = conversion; cl = clause_dft_concl ->
- let p,cl = merge_occurrences (!@loc) cl oc in
- TacAtom (Loc.tag ~loc:!@loc @@ TacChange (p,c,cl))
+ | IDENT "change"; c = conversion; cl = clause_dft_concl ->
+ { let (oc, c) = c in
+ let p,cl = merge_occurrences loc cl oc in
+ TacAtom (Loc.tag ~loc @@ TacChange (p,c,cl)) }
] ]
;
-END;;
+END
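
A second conversion detail is visible in this file (fields and fullyqualid in g_prim above show it too, as do hypident_occ, oriented_rewriter and the pose/set rules here): GEXTEND accepted a tuple pattern directly on the left of =, whereas the new parser binds a single name and the braced action destructures it. Likewise, token-only sub-alternatives such as ["?" | LEFTQMARK] now need explicit unit actions, written ["?" -> { () } | LEFTQMARK -> { () } ]. Schematically (a sketch of the rewriting pattern, not a complete rule):

    (* GEXTEND allowed a pattern binder: *)
    | IDENT "pose"; (id,b) = bindings_with_parameters -> ...

    (* GRAMMAR EXTEND binds one variable and destructures in the action: *)
    | IDENT "pose"; bl = bindings_with_parameters ->
      { let (id,b) = bl in ... }
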
diff --git a/plugins/ltac/tacentries.ml b/plugins/ltac/tacentries.ml
index 98d451536..876e6f320 100644
--- a/plugins/ltac/tacentries.ml
+++ b/plugins/ltac/tacentries.ml
@@ -45,7 +45,7 @@ let coincide s pat off =
let atactic n =
if n = 5 then Aentry Pltac.binder_tactic
- else Aentryl (Pltac.tactic_expr, n)
+ else Aentryl (Pltac.tactic_expr, string_of_int n)
type entry_name = EntryName :
'a raw_abstract_argument_type * (Tacexpr.raw_tactic_expr, 'a) Extend.symbol -> entry_name
@@ -398,7 +398,7 @@ let create_ltac_quotation name cast (e, l) =
let () = ltac_quotations := String.Set.add name !ltac_quotations in
let entry = match l with
| None -> Aentry e
- | Some l -> Aentryl (e, l)
+ | Some l -> Aentryl (e, string_of_int l)
in
(* let level = Some "1" in *)
let level = None in
diff --git a/toplevel/g_toplevel.ml4 b/toplevel/g_toplevel.mlg
index e3cefe236..53d3eef23 100644
--- a/toplevel/g_toplevel.ml4
+++ b/toplevel/g_toplevel.mlg
@@ -8,6 +8,7 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
open Pcoq
open Pcoq.Prim
open Vernacexpr
@@ -28,20 +29,26 @@ end
open Toplevel_
-GEXTEND Gram
+}
+
+GRAMMAR EXTEND Gram
GLOBAL: vernac_toplevel;
vernac_toplevel: FIRST
- [ [ IDENT "Drop"; "." -> CAst.make VernacDrop
- | IDENT "Quit"; "." -> CAst.make VernacQuit
+ [ [ IDENT "Drop"; "." -> { CAst.make VernacDrop }
+ | IDENT "Quit"; "." -> { CAst.make VernacQuit }
| IDENT "Backtrack"; n = natural ; m = natural ; p = natural; "." ->
- CAst.make (VernacBacktrack (n,m,p))
+ { CAst.make (VernacBacktrack (n,m,p)) }
| cmd = Pvernac.main_entry ->
- match cmd with
+ { match cmd with
| None -> raise Stm.End_of_input
- | Some (loc,c) -> CAst.make ~loc (VernacControl c)
+ | Some (loc,c) -> CAst.make ~loc (VernacControl c) }
]
]
;
END
+{
+
let parse_toplevel pa = Pcoq.Gram.entry_parse vernac_toplevel pa
+
+}
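
As the g_toplevel hunk shows, plain OCaml code in a .mlg file sits outside the GRAMMAR EXTEND block, wrapped in { ... } sections that the new coqpp preprocessor carries over into the generated .ml as prologue and epilogue code. A schematic file layout (illustrative; the entry and function names are placeholders):

    { (* prologue: ordinary OCaml *)
    open Pcoq
    let my_entry = Gram.entry_create "my_entry"
    }

    GRAMMAR EXTEND Gram
      GLOBAL: my_entry;
      my_entry: [ [ s = IDENT -> { s } ] ];
    END

    { (* epilogue: ordinary OCaml *)
    let parse_my_entry pa = Pcoq.Gram.entry_parse my_entry pa
    }
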
diff --git a/vernac/egramcoq.ml b/vernac/egramcoq.ml
index cc9be7b0e..48f225f97 100644
--- a/vernac/egramcoq.ml
+++ b/vernac/egramcoq.ml
@@ -274,7 +274,7 @@ let make_sep_rules = function
Arules [r]
let symbol_of_target : type s. _ -> _ -> _ -> s target -> (s, s) symbol = fun p assoc from forpat ->
- if is_binder_level from p then Aentryl (target_entry forpat, 200)
+ if is_binder_level from p then Aentryl (target_entry forpat, "200")
else if is_self from p then Aself
else
let g = target_entry forpat in
@@ -282,7 +282,7 @@ let symbol_of_target : type s. _ -> _ -> _ -> s target -> (s, s) symbol = fun p
begin match lev with
| None -> Aentry g
| Some None -> Anext
- | Some (Some (lev, cur)) -> Aentryl (g, lev)
+ | Some (Some (lev, cur)) -> Aentryl (g, string_of_int lev)
end
let symbol_of_entry : type s r. _ -> _ -> (s, r) entry -> (s, r) symbol = fun assoc from typ -> match typ with
@@ -291,7 +291,7 @@ let symbol_of_entry : type s r. _ -> _ -> (s, r) entry -> (s, r) symbol = fun as
Alist1 (symbol_of_target typ' assoc from forpat)
| TTConstrList (typ', tkl, forpat) ->
Alist1sep (symbol_of_target typ' assoc from forpat, make_sep_rules tkl)
-| TTPattern p -> Aentryl (Constr.pattern, p)
+| TTPattern p -> Aentryl (Constr.pattern, string_of_int p)
| TTClosedBinderList [] -> Alist1 (Aentry Constr.binder)
| TTClosedBinderList tkl -> Alist1sep (Aentry Constr.binder, make_sep_rules tkl)
| TTName -> Aentry Prim.name
@@ -428,6 +428,7 @@ let prepare_empty_levels forpat (pos,p4assoc,name,reinit) =
let rec pure_sublevels : type a b c. int option -> (a, b, c) rule -> int list = fun level r -> match r with
| Stop -> []
| Next (rem, Aentryl (_, i)) ->
+ let i = int_of_string i in
let rem = pure_sublevels level rem in
begin match level with
| Some j when Int.equal i j -> rem
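
The other recurring adjustment in this merge is that Aentryl now carries its level as a string (matching Symbols.snterml, as in the pcoq.ml hunk earlier), so producers convert integer levels with string_of_int (tacentries.ml and the hunks above) while pure_sublevels, which still compares levels as integers, parses them back with int_of_string. A minimal sketch of that convention, assuming only numeric levels are ever stored:

    (* Levels travel as strings through Aentryl; a non-numeric level would
       make int_of_string raise Failure at the consumer. *)
    let level_to_string (n : int) : string = string_of_int n
    let level_of_string (s : string) : int = int_of_string s
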
diff --git a/vernac/g_proofs.ml4 b/vernac/g_proofs.ml4
deleted file mode 100644
index 4b11276af..000000000
--- a/vernac/g_proofs.ml4
+++ /dev/null
@@ -1,131 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open Glob_term
-open Constrexpr
-open Vernacexpr
-open Proof_global
-
-open Pcoq
-open Pcoq.Prim
-open Pcoq.Constr
-open Pvernac.Vernac_
-
-let thm_token = G_vernac.thm_token
-
-let hint = Gram.entry_create "hint"
-
-let warn_deprecated_focus =
- CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
- (fun () ->
- Pp.strbrk
- "The Focus command is deprecated; use bullets or focusing brackets instead"
- )
-
-let warn_deprecated_focus_n n =
- CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
- (fun () ->
- Pp.(str "The Focus command is deprecated;" ++ spc ()
- ++ str "use '" ++ int n ++ str ": {' instead")
- )
-
-let warn_deprecated_unfocus =
- CWarnings.create ~name:"deprecated-unfocus" ~category:"deprecated"
- (fun () -> Pp.strbrk "The Unfocus command is deprecated")
-
-(* Proof commands *)
-GEXTEND Gram
- GLOBAL: hint command;
-
- opt_hintbases:
- [ [ -> []
- | ":"; l = LIST1 [id = IDENT -> id ] -> l ] ]
- ;
- command:
- [ [ IDENT "Goal"; c = lconstr ->
- VernacDefinition (Decl_kinds.(NoDischarge, Definition), ((CAst.make ~loc:!@loc Names.Anonymous), None), ProveBody ([], c))
- | IDENT "Proof" -> VernacProof (None,None)
- | IDENT "Proof" ; IDENT "Mode" ; mn = string -> VernacProofMode mn
- | IDENT "Proof"; c = lconstr -> VernacExactProof c
- | IDENT "Abort" -> VernacAbort None
- | IDENT "Abort"; IDENT "All" -> VernacAbortAll
- | IDENT "Abort"; id = identref -> VernacAbort (Some id)
- | IDENT "Existential"; n = natural; c = constr_body ->
- VernacSolveExistential (n,c)
- | IDENT "Admitted" -> VernacEndProof Admitted
- | IDENT "Qed" -> VernacEndProof (Proved (Opaque,None))
- | IDENT "Save"; id = identref ->
- VernacEndProof (Proved (Opaque, Some id))
- | IDENT "Defined" -> VernacEndProof (Proved (Transparent,None))
- | IDENT "Defined"; id=identref ->
- VernacEndProof (Proved (Transparent,Some id))
- | IDENT "Restart" -> VernacRestart
- | IDENT "Undo" -> VernacUndo 1
- | IDENT "Undo"; n = natural -> VernacUndo n
- | IDENT "Undo"; IDENT "To"; n = natural -> VernacUndoTo n
- | IDENT "Focus" ->
- warn_deprecated_focus ~loc:!@loc ();
- VernacFocus None
- | IDENT "Focus"; n = natural ->
- warn_deprecated_focus_n n ~loc:!@loc ();
- VernacFocus (Some n)
- | IDENT "Unfocus" ->
- warn_deprecated_unfocus ~loc:!@loc ();
- VernacUnfocus
- | IDENT "Unfocused" -> VernacUnfocused
- | IDENT "Show" -> VernacShow (ShowGoal OpenSubgoals)
- | IDENT "Show"; n = natural -> VernacShow (ShowGoal (NthGoal n))
- | IDENT "Show"; id = ident -> VernacShow (ShowGoal (GoalId id))
- | IDENT "Show"; IDENT "Script" -> VernacShow ShowScript
- | IDENT "Show"; IDENT "Existentials" -> VernacShow ShowExistentials
- | IDENT "Show"; IDENT "Universes" -> VernacShow ShowUniverses
- | IDENT "Show"; IDENT "Conjectures" -> VernacShow ShowProofNames
- | IDENT "Show"; IDENT "Proof" -> VernacShow ShowProof
- | IDENT "Show"; IDENT "Intro" -> VernacShow (ShowIntros false)
- | IDENT "Show"; IDENT "Intros" -> VernacShow (ShowIntros true)
- | IDENT "Show"; IDENT "Match"; id = reference -> VernacShow (ShowMatch id)
- | IDENT "Guarded" -> VernacCheckGuard
- (* Hints for Auto and EAuto *)
- | IDENT "Create"; IDENT "HintDb" ;
- id = IDENT ; b = [ "discriminated" -> true | -> false ] ->
- VernacCreateHintDb (id, b)
- | IDENT "Remove"; IDENT "Hints"; ids = LIST1 global; dbnames = opt_hintbases ->
- VernacRemoveHints (dbnames, ids)
- | IDENT "Hint"; h = hint; dbnames = opt_hintbases ->
- VernacHints (dbnames, h)
- ] ];
- reference_or_constr:
- [ [ r = global -> HintsReference r
- | c = constr -> HintsConstr c ] ]
- ;
- hint:
- [ [ IDENT "Resolve"; lc = LIST1 reference_or_constr; info = hint_info ->
- HintsResolve (List.map (fun x -> (info, true, x)) lc)
- | IDENT "Resolve"; "->"; lc = LIST1 global; n = OPT natural ->
- HintsResolveIFF (true, lc, n)
- | IDENT "Resolve"; "<-"; lc = LIST1 global; n = OPT natural ->
- HintsResolveIFF (false, lc, n)
- | IDENT "Immediate"; lc = LIST1 reference_or_constr -> HintsImmediate lc
- | IDENT "Transparent"; lc = LIST1 global -> HintsTransparency (lc, true)
- | IDENT "Opaque"; lc = LIST1 global -> HintsTransparency (lc, false)
- | IDENT "Mode"; l = global; m = mode -> HintsMode (l, m)
- | IDENT "Unfold"; lqid = LIST1 global -> HintsUnfold lqid
- | IDENT "Constructors"; lc = LIST1 global -> HintsConstructors lc ] ]
- ;
- constr_body:
- [ [ ":="; c = lconstr -> c
- | ":"; t = lconstr; ":="; c = lconstr -> CAst.make ~loc:!@loc @@ CCast(c,CastConv t) ] ]
- ;
- mode:
- [ [ l = LIST1 [ "+" -> ModeInput
- | "!" -> ModeNoHeadEvar
- | "-" -> ModeOutput ] -> l ] ]
- ;
-END
diff --git a/vernac/g_proofs.mlg b/vernac/g_proofs.mlg
new file mode 100644
index 000000000..72db53f68
--- /dev/null
+++ b/vernac/g_proofs.mlg
@@ -0,0 +1,135 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+{
+
+open Glob_term
+open Constrexpr
+open Vernacexpr
+open Proof_global
+
+open Pcoq
+open Pcoq.Prim
+open Pcoq.Constr
+open Pvernac.Vernac_
+
+let thm_token = G_vernac.thm_token
+
+let hint = Gram.entry_create "hint"
+
+let warn_deprecated_focus =
+ CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
+ (fun () ->
+ Pp.strbrk
+ "The Focus command is deprecated; use bullets or focusing brackets instead"
+ )
+
+let warn_deprecated_focus_n n =
+ CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
+ (fun () ->
+ Pp.(str "The Focus command is deprecated;" ++ spc ()
+ ++ str "use '" ++ int n ++ str ": {' instead")
+ )
+
+let warn_deprecated_unfocus =
+ CWarnings.create ~name:"deprecated-unfocus" ~category:"deprecated"
+ (fun () -> Pp.strbrk "The Unfocus command is deprecated")
+
+}
+
+(* Proof commands *)
+GRAMMAR EXTEND Gram
+ GLOBAL: hint command;
+
+ opt_hintbases:
+ [ [ -> { [] }
+ | ":"; l = LIST1 [id = IDENT -> { id } ] -> { l } ] ]
+ ;
+ command:
+ [ [ IDENT "Goal"; c = lconstr ->
+ { VernacDefinition (Decl_kinds.(NoDischarge, Definition), ((CAst.make ~loc Names.Anonymous), None), ProveBody ([], c)) }
+ | IDENT "Proof" -> { VernacProof (None,None) }
+ | IDENT "Proof" ; IDENT "Mode" ; mn = string -> { VernacProofMode mn }
+ | IDENT "Proof"; c = lconstr -> { VernacExactProof c }
+ | IDENT "Abort" -> { VernacAbort None }
+ | IDENT "Abort"; IDENT "All" -> { VernacAbortAll }
+ | IDENT "Abort"; id = identref -> { VernacAbort (Some id) }
+ | IDENT "Existential"; n = natural; c = constr_body ->
+ { VernacSolveExistential (n,c) }
+ | IDENT "Admitted" -> { VernacEndProof Admitted }
+ | IDENT "Qed" -> { VernacEndProof (Proved (Opaque,None)) }
+ | IDENT "Save"; id = identref ->
+ { VernacEndProof (Proved (Opaque, Some id)) }
+ | IDENT "Defined" -> { VernacEndProof (Proved (Transparent,None)) }
+ | IDENT "Defined"; id=identref ->
+ { VernacEndProof (Proved (Transparent,Some id)) }
+ | IDENT "Restart" -> { VernacRestart }
+ | IDENT "Undo" -> { VernacUndo 1 }
+ | IDENT "Undo"; n = natural -> { VernacUndo n }
+ | IDENT "Undo"; IDENT "To"; n = natural -> { VernacUndoTo n }
+ | IDENT "Focus" ->
+ { warn_deprecated_focus ~loc ();
+ VernacFocus None }
+ | IDENT "Focus"; n = natural ->
+ { warn_deprecated_focus_n n ~loc ();
+ VernacFocus (Some n) }
+ | IDENT "Unfocus" ->
+ { warn_deprecated_unfocus ~loc ();
+ VernacUnfocus }
+ | IDENT "Unfocused" -> { VernacUnfocused }
+ | IDENT "Show" -> { VernacShow (ShowGoal OpenSubgoals) }
+ | IDENT "Show"; n = natural -> { VernacShow (ShowGoal (NthGoal n)) }
+ | IDENT "Show"; id = ident -> { VernacShow (ShowGoal (GoalId id)) }
+ | IDENT "Show"; IDENT "Script" -> { VernacShow ShowScript }
+ | IDENT "Show"; IDENT "Existentials" -> { VernacShow ShowExistentials }
+ | IDENT "Show"; IDENT "Universes" -> { VernacShow ShowUniverses }
+ | IDENT "Show"; IDENT "Conjectures" -> { VernacShow ShowProofNames }
+ | IDENT "Show"; IDENT "Proof" -> { VernacShow ShowProof }
+ | IDENT "Show"; IDENT "Intro" -> { VernacShow (ShowIntros false) }
+ | IDENT "Show"; IDENT "Intros" -> { VernacShow (ShowIntros true) }
+ | IDENT "Show"; IDENT "Match"; id = reference -> { VernacShow (ShowMatch id) }
+ | IDENT "Guarded" -> { VernacCheckGuard }
+ (* Hints for Auto and EAuto *)
+ | IDENT "Create"; IDENT "HintDb" ;
+ id = IDENT ; b = [ "discriminated" -> { true } | -> { false } ] ->
+ { VernacCreateHintDb (id, b) }
+ | IDENT "Remove"; IDENT "Hints"; ids = LIST1 global; dbnames = opt_hintbases ->
+ { VernacRemoveHints (dbnames, ids) }
+ | IDENT "Hint"; h = hint; dbnames = opt_hintbases ->
+ { VernacHints (dbnames, h) }
+ ] ];
+ reference_or_constr:
+ [ [ r = global -> { HintsReference r }
+ | c = constr -> { HintsConstr c } ] ]
+ ;
+ hint:
+ [ [ IDENT "Resolve"; lc = LIST1 reference_or_constr; info = hint_info ->
+ { HintsResolve (List.map (fun x -> (info, true, x)) lc) }
+ | IDENT "Resolve"; "->"; lc = LIST1 global; n = OPT natural ->
+ { HintsResolveIFF (true, lc, n) }
+ | IDENT "Resolve"; "<-"; lc = LIST1 global; n = OPT natural ->
+ { HintsResolveIFF (false, lc, n) }
+ | IDENT "Immediate"; lc = LIST1 reference_or_constr -> { HintsImmediate lc }
+ | IDENT "Transparent"; lc = LIST1 global -> { HintsTransparency (lc, true) }
+ | IDENT "Opaque"; lc = LIST1 global -> { HintsTransparency (lc, false) }
+ | IDENT "Mode"; l = global; m = mode -> { HintsMode (l, m) }
+ | IDENT "Unfold"; lqid = LIST1 global -> { HintsUnfold lqid }
+ | IDENT "Constructors"; lc = LIST1 global -> { HintsConstructors lc } ] ]
+ ;
+ constr_body:
+ [ [ ":="; c = lconstr -> { c }
+ | ":"; t = lconstr; ":="; c = lconstr -> { CAst.make ~loc @@ CCast(c,CastConv t) } ] ]
+ ;
+ mode:
+ [ [ l = LIST1 [ "+" -> { ModeInput }
+ | "!" -> { ModeNoHeadEvar }
+ | "-" -> { ModeOutput } ] -> { l } ] ]
+ ;
+END
diff --git a/vernac/g_vernac.ml4 b/vernac/g_vernac.ml4
deleted file mode 100644
index 16934fc86..000000000
--- a/vernac/g_vernac.ml4
+++ /dev/null
@@ -1,1156 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open Pp
-open CErrors
-open Util
-open Names
-open Glob_term
-open Vernacexpr
-open Constrexpr
-open Constrexpr_ops
-open Extend
-open Decl_kinds
-open Declaremods
-open Declarations
-open Namegen
-open Tok (* necessary for camlp5 *)
-
-open Pcoq
-open Pcoq.Prim
-open Pcoq.Constr
-open Pcoq.Module
-open Pvernac.Vernac_
-
-let vernac_kw = [ ";"; ","; ">->"; ":<"; "<:"; "where"; "at" ]
-let _ = List.iter CLexer.add_keyword vernac_kw
-
-(* Rem: do not join the different GEXTEND into one, it breaks native *)
-(* compilation on PowerPC and Sun architectures *)
-
-let query_command = Gram.entry_create "vernac:query_command"
-
-let subprf = Gram.entry_create "vernac:subprf"
-
-let class_rawexpr = Gram.entry_create "vernac:class_rawexpr"
-let thm_token = Gram.entry_create "vernac:thm_token"
-let def_body = Gram.entry_create "vernac:def_body"
-let decl_notation = Gram.entry_create "vernac:decl_notation"
-let record_field = Gram.entry_create "vernac:record_field"
-let of_type_with_opt_coercion = Gram.entry_create "vernac:of_type_with_opt_coercion"
-let instance_name = Gram.entry_create "vernac:instance_name"
-let section_subset_expr = Gram.entry_create "vernac:section_subset_expr"
-
-let make_bullet s =
- let open Proof_bullet in
- let n = String.length s in
- match s.[0] with
- | '-' -> Dash n
- | '+' -> Plus n
- | '*' -> Star n
- | _ -> assert false
-
-let parse_compat_version ?(allow_old = true) = let open Flags in function
- | "8.8" -> Current
- | "8.7" -> V8_7
- | "8.6" -> V8_6
- | ("8.5" | "8.4" | "8.3" | "8.2" | "8.1" | "8.0") as s ->
- CErrors.user_err ~hdr:"get_compat_version"
- Pp.(str "Compatibility with version " ++ str s ++ str " not supported.")
- | s ->
- CErrors.user_err ~hdr:"get_compat_version"
- Pp.(str "Unknown compatibility version \"" ++ str s ++ str "\".")
-
-GEXTEND Gram
- GLOBAL: vernac_control gallina_ext noedit_mode subprf;
- vernac_control: FIRST
- [ [ IDENT "Time"; c = located_vernac -> VernacTime (false,c)
- | IDENT "Redirect"; s = ne_string; c = located_vernac -> VernacRedirect (s, c)
- | IDENT "Timeout"; n = natural; v = vernac_control -> VernacTimeout(n,v)
- | IDENT "Fail"; v = vernac_control -> VernacFail v
- | (f, v) = vernac -> VernacExpr(f, v) ]
- ]
- ;
- vernac:
- [ [ IDENT "Local"; (f, v) = vernac_poly -> (VernacLocal true :: f, v)
- | IDENT "Global"; (f, v) = vernac_poly -> (VernacLocal false :: f, v)
-
- | v = vernac_poly -> v ]
- ]
- ;
- vernac_poly:
- [ [ IDENT "Polymorphic"; (f, v) = vernac_aux -> (VernacPolymorphic true :: f, v)
- | IDENT "Monomorphic"; (f, v) = vernac_aux -> (VernacPolymorphic false :: f, v)
- | v = vernac_aux -> v ]
- ]
- ;
- vernac_aux:
- (* Better to parse "." here: in case of failure (e.g. in coerce_to_var), *)
- (* "." is still in the stream and discard_to_dot works correctly *)
- [ [ IDENT "Program"; g = gallina; "." -> ([VernacProgram], g)
- | IDENT "Program"; g = gallina_ext; "." -> ([VernacProgram], g)
- | g = gallina; "." -> ([], g)
- | g = gallina_ext; "." -> ([], g)
- | c = command; "." -> ([], c)
- | c = syntax; "." -> ([], c)
- | c = subprf -> ([], c)
- ] ]
- ;
- vernac_aux: LAST
- [ [ prfcom = command_entry -> ([], prfcom) ] ]
- ;
- noedit_mode:
- [ [ c = query_command -> c None] ]
- ;
-
- subprf:
- [ [ s = BULLET -> VernacBullet (make_bullet s)
- | "{" -> VernacSubproof None
- | "}" -> VernacEndSubproof
- ] ]
- ;
-
- located_vernac:
- [ [ v = vernac_control -> CAst.make ~loc:!@loc v ] ]
- ;
-END
-
-let warn_plural_command =
- CWarnings.create ~name:"plural-command" ~category:"pedantic" ~default:CWarnings.Disabled
- (fun kwd -> strbrk (Printf.sprintf "Command \"%s\" expects more than one assumption." kwd))
-
-let test_plural_form loc kwd = function
- | [(_,([_],_))] ->
- warn_plural_command ~loc:!@loc kwd
- | _ -> ()
-
-let test_plural_form_types loc kwd = function
- | [([_],_)] ->
- warn_plural_command ~loc:!@loc kwd
- | _ -> ()
-
-let lname_of_lident : lident -> lname =
- CAst.map (fun s -> Name s)
-
-let name_of_ident_decl : ident_decl -> name_decl =
- on_fst lname_of_lident
-
-(* Gallina declarations *)
-GEXTEND Gram
- GLOBAL: gallina gallina_ext thm_token def_body of_type_with_opt_coercion
- record_field decl_notation rec_definition ident_decl univ_decl;
-
- gallina:
- (* Definition, Theorem, Variable, Axiom, ... *)
- [ [ thm = thm_token; id = ident_decl; bl = binders; ":"; c = lconstr;
- l = LIST0
- [ "with"; id = ident_decl; bl = binders; ":"; c = lconstr ->
- (id,(bl,c)) ] ->
- VernacStartTheoremProof (thm, (id,(bl,c))::l)
- | stre = assumption_token; nl = inline; bl = assum_list ->
- VernacAssumption (stre, nl, bl)
- | (kwd,stre) = assumptions_token; nl = inline; bl = assum_list ->
- test_plural_form loc kwd bl;
- VernacAssumption (stre, nl, bl)
- | d = def_token; id = ident_decl; b = def_body ->
- VernacDefinition (d, name_of_ident_decl id, b)
- | IDENT "Let"; id = identref; b = def_body ->
- VernacDefinition ((DoDischarge, Let), (lname_of_lident id, None), b)
- (* Gallina inductive declarations *)
- | cum = OPT cumulativity_token; priv = private_token; f = finite_token;
- indl = LIST1 inductive_definition SEP "with" ->
- let (k,f) = f in
- let indl=List.map (fun ((a,b,c,d),e) -> ((a,b,c,k,d),e)) indl in
- VernacInductive (cum, priv,f,indl)
- | "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
- VernacFixpoint (NoDischarge, recs)
- | IDENT "Let"; "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
- VernacFixpoint (DoDischarge, recs)
- | "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
- VernacCoFixpoint (NoDischarge, corecs)
- | IDENT "Let"; "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
- VernacCoFixpoint (DoDischarge, corecs)
- | IDENT "Scheme"; l = LIST1 scheme SEP "with" -> VernacScheme l
- | IDENT "Combined"; IDENT "Scheme"; id = identref; IDENT "from";
- l = LIST1 identref SEP "," -> VernacCombinedScheme (id, l)
- | IDENT "Register"; IDENT "Inline"; id = identref ->
- VernacRegister(id, RegisterInline)
- | IDENT "Universe"; l = LIST1 identref -> VernacUniverse l
- | IDENT "Universes"; l = LIST1 identref -> VernacUniverse l
- | IDENT "Constraint"; l = LIST1 univ_constraint SEP "," -> VernacConstraint l
- ] ]
- ;
-
- thm_token:
- [ [ "Theorem" -> Theorem
- | IDENT "Lemma" -> Lemma
- | IDENT "Fact" -> Fact
- | IDENT "Remark" -> Remark
- | IDENT "Corollary" -> Corollary
- | IDENT "Proposition" -> Proposition
- | IDENT "Property" -> Property ] ]
- ;
- def_token:
- [ [ "Definition" -> (NoDischarge,Definition)
- | IDENT "Example" -> (NoDischarge,Example)
- | IDENT "SubClass" -> (NoDischarge,SubClass) ] ]
- ;
- assumption_token:
- [ [ "Hypothesis" -> (DoDischarge, Logical)
- | "Variable" -> (DoDischarge, Definitional)
- | "Axiom" -> (NoDischarge, Logical)
- | "Parameter" -> (NoDischarge, Definitional)
- | IDENT "Conjecture" -> (NoDischarge, Conjectural) ] ]
- ;
- assumptions_token:
- [ [ IDENT "Hypotheses" -> ("Hypotheses", (DoDischarge, Logical))
- | IDENT "Variables" -> ("Variables", (DoDischarge, Definitional))
- | IDENT "Axioms" -> ("Axioms", (NoDischarge, Logical))
- | IDENT "Parameters" -> ("Parameters", (NoDischarge, Definitional))
- | IDENT "Conjectures" -> ("Conjectures", (NoDischarge, Conjectural)) ] ]
- ;
- inline:
- [ [ IDENT "Inline"; "("; i = INT; ")" -> InlineAt (int_of_string i)
- | IDENT "Inline" -> DefaultInline
- | -> NoInline] ]
- ;
- univ_constraint:
- [ [ l = universe_level; ord = [ "<" -> Univ.Lt | "=" -> Univ.Eq | "<=" -> Univ.Le ];
- r = universe_level -> (l, ord, r) ] ]
- ;
- univ_decl :
- [ [ "@{" ; l = LIST0 identref; ext = [ "+" -> true | -> false ];
- cs = [ "|"; l' = LIST0 univ_constraint SEP ",";
- ext = [ "+" -> true | -> false ]; "}" -> (l',ext)
- | ext = [ "}" -> true | "|}" -> false ] -> ([], ext) ]
- ->
- let open UState in
- { univdecl_instance = l;
- univdecl_extensible_instance = ext;
- univdecl_constraints = fst cs;
- univdecl_extensible_constraints = snd cs }
- ] ]
- ;
- ident_decl:
- [ [ i = identref; l = OPT univ_decl -> (i, l)
- ] ]
- ;
- finite_token:
- [ [ IDENT "Inductive" -> (Inductive_kw,Finite)
- | IDENT "CoInductive" -> (CoInductive,CoFinite)
- | IDENT "Variant" -> (Variant,BiFinite)
- | IDENT "Record" -> (Record,BiFinite)
- | IDENT "Structure" -> (Structure,BiFinite)
- | IDENT "Class" -> (Class true,BiFinite) ] ]
- ;
- cumulativity_token:
- [ [ IDENT "Cumulative" -> VernacCumulative
- | IDENT "NonCumulative" -> VernacNonCumulative ] ]
- ;
- private_token:
- [ [ IDENT "Private" -> true | -> false ] ]
- ;
- (* Simple definitions *)
- def_body:
- [ [ bl = binders; ":="; red = reduce; c = lconstr ->
- if List.exists (function CLocalPattern _ -> true | _ -> false) bl
- then
-        (* FIXME: "red" will be applied to types in bl and the Cast will remain *)
- let c = mkCLambdaN ~loc:!@loc bl c in
- DefineBody ([], red, c, None)
- else
- (match c with
- | { CAst.v = CCast(c, CastConv t) } -> DefineBody (bl, red, c, Some t)
- | _ -> DefineBody (bl, red, c, None))
- | bl = binders; ":"; t = lconstr; ":="; red = reduce; c = lconstr ->
- let ((bl, c), tyo) =
- if List.exists (function CLocalPattern _ -> true | _ -> false) bl
- then
-        (* FIXME: "red" will be applied to types in bl and the Cast will remain *)
- let c = CAst.make ~loc:!@loc @@ CCast (c, CastConv t) in
- (([],mkCLambdaN ~loc:!@loc bl c), None)
- else ((bl, c), Some t)
- in
- DefineBody (bl, red, c, tyo)
- | bl = binders; ":"; t = lconstr ->
- ProveBody (bl, t) ] ]
- ;
- reduce:
- [ [ IDENT "Eval"; r = red_expr; "in" -> Some r
- | -> None ] ]
- ;
- one_decl_notation:
- [ [ ntn = ne_lstring; ":="; c = constr;
- scopt = OPT [ ":"; sc = IDENT -> sc] -> (ntn,c,scopt) ] ]
- ;
- decl_notation:
- [ [ "where"; l = LIST1 one_decl_notation SEP IDENT "and" -> l
- | -> [] ] ]
- ;
- (* Inductives and records *)
- opt_constructors_or_fields:
- [ [ ":="; lc = constructor_list_or_record_decl -> lc
- | -> RecordDecl (None, []) ] ]
- ;
- inductive_definition:
- [ [ oc = opt_coercion; id = ident_decl; indpar = binders;
- c = OPT [ ":"; c = lconstr -> c ];
- lc=opt_constructors_or_fields; ntn = decl_notation ->
- (((oc,id),indpar,c,lc),ntn) ] ]
- ;
- constructor_list_or_record_decl:
- [ [ "|"; l = LIST1 constructor SEP "|" -> Constructors l
- | id = identref ; c = constructor_type; "|"; l = LIST0 constructor SEP "|" ->
- Constructors ((c id)::l)
- | id = identref ; c = constructor_type -> Constructors [ c id ]
- | cstr = identref; "{"; fs = record_fields; "}" ->
- RecordDecl (Some cstr,fs)
- | "{";fs = record_fields; "}" -> RecordDecl (None,fs)
- | -> Constructors [] ] ]
- ;
-(*
- csort:
- [ [ s = sort -> CSort (loc,s) ] ]
- ;
-*)
- opt_coercion:
- [ [ ">" -> true
- | -> false ] ]
- ;
- (* (co)-fixpoints *)
- rec_definition:
- [ [ id = ident_decl;
- bl = binders_fixannot;
- ty = type_cstr;
- def = OPT [":="; def = lconstr -> def]; ntn = decl_notation ->
- let bl, annot = bl in ((id,annot,bl,ty,def),ntn) ] ]
- ;
- corec_definition:
- [ [ id = ident_decl; bl = binders; ty = type_cstr;
- def = OPT [":="; def = lconstr -> def]; ntn = decl_notation ->
- ((id,bl,ty,def),ntn) ] ]
- ;
- type_cstr:
- [ [ ":"; c=lconstr -> c
- | -> CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None) ] ]
- ;
- (* Inductive schemes *)
- scheme:
- [ [ kind = scheme_kind -> (None,kind)
- | id = identref; ":="; kind = scheme_kind -> (Some id,kind) ] ]
- ;
- scheme_kind:
- [ [ IDENT "Induction"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> InductionScheme(true,ind,s)
- | IDENT "Minimality"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> InductionScheme(false,ind,s)
- | IDENT "Elimination"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> CaseScheme(true,ind,s)
- | IDENT "Case"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> CaseScheme(false,ind,s)
- | IDENT "Equality"; "for" ; ind = smart_global -> EqualityScheme(ind) ] ]
- ;
- (* Various Binders *)
-(*
- (* ... without coercions *)
- binder_nodef:
- [ [ b = binder_let ->
- (match b with
- CLocalAssum(l,ty) -> (l,ty)
- | CLocalDef _ ->
- Util.user_err_loc
- (loc,"fix_param",Pp.str"defined binder not allowed here.")) ] ]
- ;
-*)
- (* ... with coercions *)
- record_field:
- [ [ bd = record_binder; pri = OPT [ "|"; n = natural -> n ];
- ntn = decl_notation -> (bd,pri),ntn ] ]
- ;
- record_fields:
- [ [ f = record_field; ";"; fs = record_fields -> f :: fs
- | f = record_field; ";" -> [f]
- | f = record_field -> [f]
- | -> []
- ] ]
- ;
- record_binder_body:
- [ [ l = binders; oc = of_type_with_opt_coercion;
- t = lconstr -> fun id -> (oc,AssumExpr (id,mkCProdN ~loc:!@loc l t))
- | l = binders; oc = of_type_with_opt_coercion;
- t = lconstr; ":="; b = lconstr -> fun id ->
- (oc,DefExpr (id,mkCLambdaN ~loc:!@loc l b,Some (mkCProdN ~loc:!@loc l t)))
- | l = binders; ":="; b = lconstr -> fun id ->
- match b.CAst.v with
- | CCast(b', (CastConv t|CastVM t|CastNative t)) ->
- (None,DefExpr(id,mkCLambdaN ~loc:!@loc l b',Some (mkCProdN ~loc:!@loc l t)))
- | _ ->
- (None,DefExpr(id,mkCLambdaN ~loc:!@loc l b,None)) ] ]
- ;
- record_binder:
- [ [ id = name -> (None,AssumExpr(id, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None)))
- | id = name; f = record_binder_body -> f id ] ]
- ;
- assum_list:
- [ [ bl = LIST1 assum_coe -> bl | b = simple_assum_coe -> [b] ] ]
- ;
- assum_coe:
- [ [ "("; a = simple_assum_coe; ")" -> a ] ]
- ;
- simple_assum_coe:
- [ [ idl = LIST1 ident_decl; oc = of_type_with_opt_coercion; c = lconstr ->
- (not (Option.is_empty oc),(idl,c)) ] ]
- ;
-
- constructor_type:
- [[ l = binders;
- t= [ coe = of_type_with_opt_coercion; c = lconstr ->
- fun l id -> (not (Option.is_empty coe),(id,mkCProdN ~loc:!@loc l c))
- | ->
- fun l id -> (false,(id,mkCProdN ~loc:!@loc l (CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None)))) ]
- -> t l
- ]]
-;
-
- constructor:
- [ [ id = identref; c=constructor_type -> c id ] ]
- ;
- of_type_with_opt_coercion:
- [ [ ":>>" -> Some false
- | ":>"; ">" -> Some false
- | ":>" -> Some true
- | ":"; ">"; ">" -> Some false
- | ":"; ">" -> Some true
- | ":" -> None ] ]
- ;
-END
-
-let only_starredidentrefs =
- Gram.Entry.of_parser "test_only_starredidentrefs"
- (fun strm ->
- let rec aux n =
- match Util.stream_nth n strm with
- | KEYWORD "." -> ()
- | KEYWORD ")" -> ()
- | (IDENT _ | KEYWORD "Type" | KEYWORD "*") -> aux (n+1)
- | _ -> raise Stream.Failure in
- aux 0)
-let starredidentreflist_to_expr l =
- match l with
- | [] -> SsEmpty
- | x :: xs -> List.fold_right (fun i acc -> SsUnion(i,acc)) xs x
-
-let warn_deprecated_include_type =
- CWarnings.create ~name:"deprecated-include-type" ~category:"deprecated"
- (fun () -> strbrk "Include Type is deprecated; use Include instead")
-
-(* Modules and Sections *)
-GEXTEND Gram
- GLOBAL: gallina_ext module_expr module_type section_subset_expr;
-
- gallina_ext:
- [ [ (* Interactive module declaration *)
- IDENT "Module"; export = export_token; id = identref;
- bl = LIST0 module_binder; sign = of_module_type;
- body = is_module_expr ->
- VernacDefineModule (export, id, bl, sign, body)
- | IDENT "Module"; "Type"; id = identref;
- bl = LIST0 module_binder; sign = check_module_types;
- body = is_module_type ->
- VernacDeclareModuleType (id, bl, sign, body)
- | IDENT "Declare"; IDENT "Module"; export = export_token; id = identref;
- bl = LIST0 module_binder; ":"; mty = module_type_inl ->
- VernacDeclareModule (export, id, bl, mty)
- (* Section beginning *)
- | IDENT "Section"; id = identref -> VernacBeginSection id
- | IDENT "Chapter"; id = identref -> VernacBeginSection id
-
-    (* This ends a Section, a Module or a Module Type *)
- | IDENT "End"; id = identref -> VernacEndSegment id
-
- (* Naming a set of section hyps *)
- | IDENT "Collection"; id = identref; ":="; expr = section_subset_expr ->
- VernacNameSectionHypSet (id, expr)
-
- (* Requiring an already compiled module *)
- | IDENT "Require"; export = export_token; qidl = LIST1 global ->
- VernacRequire (None, export, qidl)
- | IDENT "From" ; ns = global ; IDENT "Require"; export = export_token
- ; qidl = LIST1 global ->
- VernacRequire (Some ns, export, qidl)
- | IDENT "Import"; qidl = LIST1 global -> VernacImport (false,qidl)
- | IDENT "Export"; qidl = LIST1 global -> VernacImport (true,qidl)
- | IDENT "Include"; e = module_type_inl; l = LIST0 ext_module_expr ->
- VernacInclude(e::l)
- | IDENT "Include"; "Type"; e = module_type_inl; l = LIST0 ext_module_type ->
- warn_deprecated_include_type ~loc:!@loc ();
- VernacInclude(e::l) ] ]
- ;
- export_token:
- [ [ IDENT "Import" -> Some false
- | IDENT "Export" -> Some true
- | -> None ] ]
- ;
- ext_module_type:
- [ [ "<+"; mty = module_type_inl -> mty ] ]
- ;
- ext_module_expr:
- [ [ "<+"; mexpr = module_expr_inl -> mexpr ] ]
- ;
- check_module_type:
- [ [ "<:"; mty = module_type_inl -> mty ] ]
- ;
- check_module_types:
- [ [ mtys = LIST0 check_module_type -> mtys ] ]
- ;
- of_module_type:
- [ [ ":"; mty = module_type_inl -> Enforce mty
- | mtys = check_module_types -> Check mtys ] ]
- ;
- is_module_type:
- [ [ ":="; mty = module_type_inl ; l = LIST0 ext_module_type -> (mty::l)
- | -> [] ] ]
- ;
- is_module_expr:
- [ [ ":="; mexpr = module_expr_inl; l = LIST0 ext_module_expr -> (mexpr::l)
- | -> [] ] ]
- ;
- functor_app_annot:
- [ [ "["; IDENT "inline"; "at"; IDENT "level"; i = INT; "]" ->
- InlineAt (int_of_string i)
- | "["; IDENT "no"; IDENT "inline"; "]" -> NoInline
- | -> DefaultInline
- ] ]
- ;
- module_expr_inl:
- [ [ "!"; me = module_expr -> (me,NoInline)
- | me = module_expr; a = functor_app_annot -> (me,a) ] ]
- ;
- module_type_inl:
- [ [ "!"; me = module_type -> (me,NoInline)
- | me = module_type; a = functor_app_annot -> (me,a) ] ]
- ;
- (* Module binder *)
- module_binder:
- [ [ "("; export = export_token; idl = LIST1 identref; ":";
- mty = module_type_inl; ")" -> (export,idl,mty) ] ]
- ;
- (* Module expressions *)
- module_expr:
- [ [ me = module_expr_atom -> me
- | me1 = module_expr; me2 = module_expr_atom -> CAst.make ~loc:!@loc @@ CMapply (me1,me2)
- ] ]
- ;
- module_expr_atom:
- [ [ qid = qualid -> CAst.make ~loc:!@loc @@ CMident qid | "("; me = module_expr; ")" -> me ] ]
- ;
- with_declaration:
- [ [ "Definition"; fqid = fullyqualid; udecl = OPT univ_decl; ":="; c = Constr.lconstr ->
- CWith_Definition (fqid,udecl,c)
- | IDENT "Module"; fqid = fullyqualid; ":="; qid = qualid ->
- CWith_Module (fqid,qid)
- ] ]
- ;
- module_type:
- [ [ qid = qualid -> CAst.make ~loc:!@loc @@ CMident qid
- | "("; mt = module_type; ")" -> mt
- | mty = module_type; me = module_expr_atom ->
- CAst.make ~loc:!@loc @@ CMapply (mty,me)
- | mty = module_type; "with"; decl = with_declaration ->
- CAst.make ~loc:!@loc @@ CMwith (mty,decl)
- ] ]
- ;
- (* Proof using *)
- section_subset_expr:
- [ [ only_starredidentrefs; l = LIST0 starredidentref ->
- starredidentreflist_to_expr l
- | e = ssexpr -> e ]]
- ;
- starredidentref:
- [ [ i = identref -> SsSingl i
- | i = identref; "*" -> SsFwdClose(SsSingl i)
- | "Type" -> SsType
- | "Type"; "*" -> SsFwdClose SsType ]]
- ;
- ssexpr:
- [ "35"
- [ "-"; e = ssexpr -> SsCompl e ]
- | "50"
- [ e1 = ssexpr; "-"; e2 = ssexpr->SsSubstr(e1,e2)
- | e1 = ssexpr; "+"; e2 = ssexpr->SsUnion(e1,e2)]
- | "0"
- [ i = starredidentref -> i
- | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"->
- starredidentreflist_to_expr l
- | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"; "*" ->
- SsFwdClose(starredidentreflist_to_expr l)
- | "("; e = ssexpr; ")"-> e
- | "("; e = ssexpr; ")"; "*" -> SsFwdClose e ] ]
- ;
-END
-
-(* Extensions: implicits, coercions, etc. *)
-GEXTEND Gram
- GLOBAL: gallina_ext instance_name hint_info;
-
- gallina_ext:
- [ [ (* Transparent and Opaque *)
- IDENT "Transparent"; l = LIST1 smart_global ->
- VernacSetOpacity (Conv_oracle.transparent, l)
- | IDENT "Opaque"; l = LIST1 smart_global ->
- VernacSetOpacity (Conv_oracle.Opaque, l)
- | IDENT "Strategy"; l =
- LIST1 [ v=strategy_level; "["; q=LIST1 smart_global; "]" -> (v,q)] ->
- VernacSetStrategy l
- (* Canonical structure *)
- | IDENT "Canonical"; IDENT "Structure"; qid = global ->
- VernacCanonical CAst.(make ~loc:!@loc @@ AN qid)
- | IDENT "Canonical"; IDENT "Structure"; ntn = by_notation ->
- VernacCanonical CAst.(make ~loc:!@loc @@ ByNotation ntn)
- | IDENT "Canonical"; IDENT "Structure"; qid = global; d = def_body ->
- let s = coerce_reference_to_id qid in
- VernacDefinition ((NoDischarge,CanonicalStructure),((CAst.make (Name s)),None),d)
-
- (* Coercions *)
- | IDENT "Coercion"; qid = global; d = def_body ->
- let s = coerce_reference_to_id qid in
- VernacDefinition ((NoDischarge,Coercion),((CAst.make (Name s)),None),d)
- | IDENT "Identity"; IDENT "Coercion"; f = identref; ":";
- s = class_rawexpr; ">->"; t = class_rawexpr ->
- VernacIdentityCoercion (f, s, t)
- | IDENT "Coercion"; qid = global; ":"; s = class_rawexpr; ">->";
- t = class_rawexpr ->
- VernacCoercion (CAst.make ~loc:!@loc @@ AN qid, s, t)
- | IDENT "Coercion"; ntn = by_notation; ":"; s = class_rawexpr; ">->";
- t = class_rawexpr ->
- VernacCoercion (CAst.make ~loc:!@loc @@ ByNotation ntn, s, t)
-
- | IDENT "Context"; c = LIST1 binder ->
- VernacContext (List.flatten c)
-
- | IDENT "Instance"; namesup = instance_name; ":";
- expl = [ "!" -> Decl_kinds.Implicit | -> Decl_kinds.Explicit ] ; t = operconstr LEVEL "200";
- info = hint_info ;
- props = [ ":="; "{"; r = record_declaration; "}" -> Some (true,r) |
- ":="; c = lconstr -> Some (false,c) | -> None ] ->
- VernacInstance (false,snd namesup,(fst namesup,expl,t),props,info)
-
- | IDENT "Existing"; IDENT "Instance"; id = global;
- info = hint_info ->
- VernacDeclareInstances [id, info]
-
- | IDENT "Existing"; IDENT "Instances"; ids = LIST1 global;
- pri = OPT [ "|"; i = natural -> i ] ->
- let info = { Typeclasses.hint_priority = pri; hint_pattern = None } in
- let insts = List.map (fun i -> (i, info)) ids in
- VernacDeclareInstances insts
-
- | IDENT "Existing"; IDENT "Class"; is = global -> VernacDeclareClass is
-
- (* Arguments *)
- | IDENT "Arguments"; qid = smart_global;
- args = LIST0 argument_spec_block;
- more_implicits = OPT
- [ ","; impl = LIST1
- [ impl = LIST0 more_implicits_block -> List.flatten impl]
- SEP "," -> impl
- ];
- mods = OPT [ ":"; l = LIST1 arguments_modifier SEP "," -> l ] ->
- let mods = match mods with None -> [] | Some l -> List.flatten l in
- let slash_position = ref None in
- let rec parse_args i = function
- | [] -> []
- | `Id x :: args -> x :: parse_args (i+1) args
- | `Slash :: args ->
- if Option.is_empty !slash_position then
- (slash_position := Some i; parse_args i args)
- else
- user_err Pp.(str "The \"/\" modifier can occur only once")
- in
- let args = parse_args 0 (List.flatten args) in
- let more_implicits = Option.default [] more_implicits in
- VernacArguments (qid, args, more_implicits, !slash_position, mods)
-
- | IDENT "Implicit"; "Type"; bl = reserv_list ->
- VernacReserve bl
-
- | IDENT "Implicit"; IDENT "Types"; bl = reserv_list ->
- test_plural_form_types loc "Implicit Types" bl;
- VernacReserve bl
-
- | IDENT "Generalizable";
- gen = [IDENT "All"; IDENT "Variables" -> Some []
- | IDENT "No"; IDENT "Variables" -> None
- | ["Variable" | IDENT "Variables"];
- idl = LIST1 identref -> Some idl ] ->
- VernacGeneralizable gen ] ]
- ;
- arguments_modifier:
- [ [ IDENT "simpl"; IDENT "nomatch" -> [`ReductionDontExposeCase]
- | IDENT "simpl"; IDENT "never" -> [`ReductionNeverUnfold]
- | IDENT "default"; IDENT "implicits" -> [`DefaultImplicits]
- | IDENT "clear"; IDENT "implicits" -> [`ClearImplicits]
- | IDENT "clear"; IDENT "scopes" -> [`ClearScopes]
- | IDENT "rename" -> [`Rename]
- | IDENT "assert" -> [`Assert]
- | IDENT "extra"; IDENT "scopes" -> [`ExtraScopes]
- | IDENT "clear"; IDENT "scopes"; IDENT "and"; IDENT "implicits" ->
- [`ClearImplicits; `ClearScopes]
- | IDENT "clear"; IDENT "implicits"; IDENT "and"; IDENT "scopes" ->
- [`ClearImplicits; `ClearScopes]
- ] ]
- ;
- scope:
- [ [ "%"; key = IDENT -> key ] ]
- ;
- argument_spec: [
- [ b = OPT "!"; id = name ; s = OPT scope ->
- id.CAst.v, not (Option.is_empty b), Option.map (fun x -> CAst.make ~loc:!@loc x) s
- ]
- ];
- (* List of arguments implicit status, scope, modifiers *)
- argument_spec_block: [
- [ item = argument_spec ->
- let name, recarg_like, notation_scope = item in
- [`Id { name=name; recarg_like=recarg_like;
- notation_scope=notation_scope;
- implicit_status = NotImplicit}]
- | "/" -> [`Slash]
- | "("; items = LIST1 argument_spec; ")"; sc = OPT scope ->
- let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
- | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
- List.map (fun (name,recarg_like,notation_scope) ->
- `Id { name=name; recarg_like=recarg_like;
- notation_scope=f notation_scope;
- implicit_status = NotImplicit}) items
- | "["; items = LIST1 argument_spec; "]"; sc = OPT scope ->
- let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
- | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
- List.map (fun (name,recarg_like,notation_scope) ->
- `Id { name=name; recarg_like=recarg_like;
- notation_scope=f notation_scope;
- implicit_status = Implicit}) items
- | "{"; items = LIST1 argument_spec; "}"; sc = OPT scope ->
- let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
- | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
- List.map (fun (name,recarg_like,notation_scope) ->
- `Id { name=name; recarg_like=recarg_like;
- notation_scope=f notation_scope;
- implicit_status = MaximallyImplicit}) items
- ]
- ];
- (* Same as [argument_spec_block], but with only implicit status and names *)
- more_implicits_block: [
- [ name = name -> [(name.CAst.v, Vernacexpr.NotImplicit)]
- | "["; items = LIST1 name; "]" ->
- List.map (fun name -> (name.CAst.v, Vernacexpr.Implicit)) items
- | "{"; items = LIST1 name; "}" ->
- List.map (fun name -> (name.CAst.v, Vernacexpr.MaximallyImplicit)) items
- ]
- ];
- strategy_level:
- [ [ IDENT "expand" -> Conv_oracle.Expand
- | IDENT "opaque" -> Conv_oracle.Opaque
- | n=INT -> Conv_oracle.Level (int_of_string n)
- | "-"; n=INT -> Conv_oracle.Level (- int_of_string n)
- | IDENT "transparent" -> Conv_oracle.transparent ] ]
- ;
- instance_name:
- [ [ name = ident_decl; sup = OPT binders ->
- (CAst.map (fun id -> Name id) (fst name), snd name),
- (Option.default [] sup)
- | -> ((CAst.make ~loc:!@loc Anonymous), None), [] ] ]
- ;
- hint_info:
- [ [ "|"; i = OPT natural; pat = OPT constr_pattern ->
- { Typeclasses.hint_priority = i; hint_pattern = pat }
- | -> { Typeclasses.hint_priority = None; hint_pattern = None } ] ]
- ;
- reserv_list:
- [ [ bl = LIST1 reserv_tuple -> bl | b = simple_reserv -> [b] ] ]
- ;
- reserv_tuple:
- [ [ "("; a = simple_reserv; ")" -> a ] ]
- ;
- simple_reserv:
- [ [ idl = LIST1 identref; ":"; c = lconstr -> (idl,c) ] ]
- ;
-
-END
-
-GEXTEND Gram
- GLOBAL: command query_command class_rawexpr gallina_ext;
-
- gallina_ext:
- [ [ IDENT "Export"; "Set"; table = option_table; v = option_value ->
- VernacSetOption (true, table, v)
- | IDENT "Export"; IDENT "Unset"; table = option_table ->
- VernacUnsetOption (true, table)
- ] ];
-
- command:
- [ [ IDENT "Comments"; l = LIST0 comment -> VernacComments l
-
-    (* Hack! Should be in gallina_ext, but camlp5 factorizes badly *)
- | IDENT "Declare"; IDENT "Instance"; namesup = instance_name; ":";
- expl = [ "!" -> Decl_kinds.Implicit | -> Decl_kinds.Explicit ] ; t = operconstr LEVEL "200";
- info = hint_info ->
- VernacInstance (true, snd namesup, (fst namesup, expl, t), None, info)
-
- (* System directory *)
- | IDENT "Pwd" -> VernacChdir None
- | IDENT "Cd" -> VernacChdir None
- | IDENT "Cd"; dir = ne_string -> VernacChdir (Some dir)
-
- | IDENT "Load"; verbosely = [ IDENT "Verbose" -> true | -> false ];
- s = [ s = ne_string -> s | s = IDENT -> s ] ->
- VernacLoad (verbosely, s)
- | IDENT "Declare"; IDENT "ML"; IDENT "Module"; l = LIST1 ne_string ->
- VernacDeclareMLModule l
-
- | IDENT "Locate"; l = locatable -> VernacLocate l
-
- (* Managing load paths *)
- | IDENT "Add"; IDENT "LoadPath"; dir = ne_string; alias = as_dirpath ->
- VernacAddLoadPath (false, dir, alias)
- | IDENT "Add"; IDENT "Rec"; IDENT "LoadPath"; dir = ne_string;
- alias = as_dirpath -> VernacAddLoadPath (true, dir, alias)
- | IDENT "Remove"; IDENT "LoadPath"; dir = ne_string ->
- VernacRemoveLoadPath dir
-
- (* For compatibility *)
- | IDENT "AddPath"; dir = ne_string; "as"; alias = as_dirpath ->
- VernacAddLoadPath (false, dir, alias)
- | IDENT "AddRecPath"; dir = ne_string; "as"; alias = as_dirpath ->
- VernacAddLoadPath (true, dir, alias)
- | IDENT "DelPath"; dir = ne_string ->
- VernacRemoveLoadPath dir
-
-    (* Type-Checking (not in the refman) *)
- | "Type"; c = lconstr -> VernacGlobalCheck c
-
- (* Printing (careful factorization of entries) *)
- | IDENT "Print"; p = printable -> VernacPrint p
- | IDENT "Print"; qid = smart_global; l = OPT univ_name_list -> VernacPrint (PrintName (qid,l))
- | IDENT "Print"; IDENT "Module"; "Type"; qid = global ->
- VernacPrint (PrintModuleType qid)
- | IDENT "Print"; IDENT "Module"; qid = global ->
- VernacPrint (PrintModule qid)
- | IDENT "Print"; IDENT "Namespace" ; ns = dirpath ->
- VernacPrint (PrintNamespace ns)
- | IDENT "Inspect"; n = natural -> VernacPrint (PrintInspect n)
-
- | IDENT "Add"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
- VernacAddMLPath (false, dir)
- | IDENT "Add"; IDENT "Rec"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
- VernacAddMLPath (true, dir)
-
- (* For acting on parameter tables *)
- | "Set"; table = option_table; v = option_value ->
- VernacSetOption (false, table, v)
- | IDENT "Unset"; table = option_table ->
- VernacUnsetOption (false, table)
-
- | IDENT "Print"; IDENT "Table"; table = option_table ->
- VernacPrintOption table
-
- | IDENT "Add"; table = IDENT; field = IDENT; v = LIST1 option_ref_value
- -> VernacAddOption ([table;field], v)
- (* A global value below will be hidden by a field above! *)
- (* In fact, we give priority to secondary tables *)
- (* No syntax for tertiary tables due to conflict *)
- (* (but they are unused anyway) *)
- | IDENT "Add"; table = IDENT; v = LIST1 option_ref_value ->
- VernacAddOption ([table], v)
-
- | IDENT "Test"; table = option_table; "for"; v = LIST1 option_ref_value
- -> VernacMemOption (table, v)
- | IDENT "Test"; table = option_table ->
- VernacPrintOption table
-
- | IDENT "Remove"; table = IDENT; field = IDENT; v= LIST1 option_ref_value
- -> VernacRemoveOption ([table;field], v)
- | IDENT "Remove"; table = IDENT; v = LIST1 option_ref_value ->
- VernacRemoveOption ([table], v) ]]
- ;
-  query_command: (* TODO: bring Eval and Check closer together *)
- [ [ IDENT "Eval"; r = red_expr; "in"; c = lconstr; "." ->
- fun g -> VernacCheckMayEval (Some r, g, c)
- | IDENT "Compute"; c = lconstr; "." ->
- fun g -> VernacCheckMayEval (Some (Genredexpr.CbvVm None), g, c)
- | IDENT "Check"; c = lconstr; "." ->
- fun g -> VernacCheckMayEval (None, g, c)
- (* Searching the environment *)
- | IDENT "About"; qid = smart_global; l = OPT univ_name_list; "." ->
- fun g -> VernacPrint (PrintAbout (qid,l,g))
- | IDENT "SearchHead"; c = constr_pattern; l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchHead c,g, l)
- | IDENT "SearchPattern"; c = constr_pattern; l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchPattern c,g, l)
- | IDENT "SearchRewrite"; c = constr_pattern; l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchRewrite c,g, l)
- | IDENT "Search"; s = searchabout_query; l = searchabout_queries; "." ->
- let (sl,m) = l in fun g -> VernacSearch (SearchAbout (s::sl),g, m)
- (* compatibility: SearchAbout *)
- | IDENT "SearchAbout"; s = searchabout_query; l = searchabout_queries; "." ->
- fun g -> let (sl,m) = l in VernacSearch (SearchAbout (s::sl),g, m)
- (* compatibility: SearchAbout with "[ ... ]" *)
- | IDENT "SearchAbout"; "["; sl = LIST1 searchabout_query; "]";
- l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchAbout sl,g, l)
- ] ]
- ;
- printable:
- [ [ IDENT "Term"; qid = smart_global; l = OPT univ_name_list -> PrintName (qid,l)
- | IDENT "All" -> PrintFullContext
- | IDENT "Section"; s = global -> PrintSectionContext s
- | IDENT "Grammar"; ent = IDENT ->
-        (* This should be in the "syntax" section but is here for factorization *)
- PrintGrammar ent
- | IDENT "LoadPath"; dir = OPT dirpath -> PrintLoadPath dir
- | IDENT "Modules" ->
- user_err Pp.(str "Print Modules is obsolete; use Print Libraries instead")
- | IDENT "Libraries" -> PrintModules
-
- | IDENT "ML"; IDENT "Path" -> PrintMLLoadPath
- | IDENT "ML"; IDENT "Modules" -> PrintMLModules
- | IDENT "Debug"; IDENT "GC" -> PrintDebugGC
- | IDENT "Graph" -> PrintGraph
- | IDENT "Classes" -> PrintClasses
- | IDENT "TypeClasses" -> PrintTypeClasses
- | IDENT "Instances"; qid = smart_global -> PrintInstances qid
- | IDENT "Coercions" -> PrintCoercions
- | IDENT "Coercion"; IDENT "Paths"; s = class_rawexpr; t = class_rawexpr
- -> PrintCoercionPaths (s,t)
- | IDENT "Canonical"; IDENT "Projections" -> PrintCanonicalConversions
- | IDENT "Tables" -> PrintTables
-      | IDENT "Options" -> PrintTables (* A synonym for Tables *)
- | IDENT "Hint" -> PrintHintGoal
- | IDENT "Hint"; qid = smart_global -> PrintHint qid
- | IDENT "Hint"; "*" -> PrintHintDb
- | IDENT "HintDb"; s = IDENT -> PrintHintDbName s
- | IDENT "Scopes" -> PrintScopes
- | IDENT "Scope"; s = IDENT -> PrintScope s
- | IDENT "Visibility"; s = OPT [x = IDENT -> x ] -> PrintVisibility s
- | IDENT "Implicit"; qid = smart_global -> PrintImplicit qid
- | IDENT "Universes"; fopt = OPT ne_string -> PrintUniverses (false, fopt)
- | IDENT "Sorted"; IDENT "Universes"; fopt = OPT ne_string -> PrintUniverses (true, fopt)
- | IDENT "Assumptions"; qid = smart_global -> PrintAssumptions (false, false, qid)
- | IDENT "Opaque"; IDENT "Dependencies"; qid = smart_global -> PrintAssumptions (true, false, qid)
- | IDENT "Transparent"; IDENT "Dependencies"; qid = smart_global -> PrintAssumptions (false, true, qid)
- | IDENT "All"; IDENT "Dependencies"; qid = smart_global -> PrintAssumptions (true, true, qid)
- | IDENT "Strategy"; qid = smart_global -> PrintStrategy (Some qid)
- | IDENT "Strategies" -> PrintStrategy None ] ]
- ;
- class_rawexpr:
- [ [ IDENT "Funclass" -> FunClass
- | IDENT "Sortclass" -> SortClass
- | qid = smart_global -> RefClass qid ] ]
- ;
- locatable:
- [ [ qid = smart_global -> LocateAny qid
- | IDENT "Term"; qid = smart_global -> LocateTerm qid
- | IDENT "File"; f = ne_string -> LocateFile f
- | IDENT "Library"; qid = global -> LocateLibrary qid
- | IDENT "Module"; qid = global -> LocateModule qid ] ]
- ;
- option_value:
- [ [ -> BoolValue true
- | n = integer -> IntValue (Some n)
- | s = STRING -> StringValue s ] ]
- ;
- option_ref_value:
- [ [ id = global -> QualidRefValue id
- | s = STRING -> StringRefValue s ] ]
- ;
- option_table:
- [ [ fl = LIST1 [ x = IDENT -> x ] -> fl ]]
- ;
- as_dirpath:
- [ [ d = OPT [ "as"; d = dirpath -> d ] -> d ] ]
- ;
- ne_in_or_out_modules:
- [ [ IDENT "inside"; l = LIST1 global -> SearchInside l
- | IDENT "outside"; l = LIST1 global -> SearchOutside l ] ]
- ;
- in_or_out_modules:
- [ [ m = ne_in_or_out_modules -> m
- | -> SearchOutside [] ] ]
- ;
- comment:
- [ [ c = constr -> CommentConstr c
- | s = STRING -> CommentString s
- | n = natural -> CommentInt n ] ]
- ;
- positive_search_mark:
- [ [ "-" -> false | -> true ] ]
- ;
- scope:
- [ [ "%"; key = IDENT -> key ] ]
- ;
- searchabout_query:
- [ [ b = positive_search_mark; s = ne_string; sc = OPT scope ->
- (b, SearchString (s,sc))
- | b = positive_search_mark; p = constr_pattern ->
- (b, SearchSubPattern p)
- ] ]
- ;
- searchabout_queries:
- [ [ m = ne_in_or_out_modules -> ([],m)
- | s = searchabout_query; l = searchabout_queries ->
- let (sl,m) = l in (s::sl,m)
- | -> ([],SearchOutside [])
- ] ]
- ;
- univ_name_list:
- [ [ "@{" ; l = LIST0 name; "}" -> l ] ]
- ;
-END;
-
-GEXTEND Gram
- command:
- [ [
-(* State management *)
- IDENT "Write"; IDENT "State"; s = IDENT -> VernacWriteState s
- | IDENT "Write"; IDENT "State"; s = ne_string -> VernacWriteState s
- | IDENT "Restore"; IDENT "State"; s = IDENT -> VernacRestoreState s
- | IDENT "Restore"; IDENT "State"; s = ne_string -> VernacRestoreState s
-
-(* Resetting *)
- | IDENT "Reset"; IDENT "Initial" -> VernacResetInitial
- | IDENT "Reset"; id = identref -> VernacResetName id
- | IDENT "Back" -> VernacBack 1
- | IDENT "Back"; n = natural -> VernacBack n
- | IDENT "BackTo"; n = natural -> VernacBackTo n
-
-(* Tactic Debugger *)
- | IDENT "Debug"; IDENT "On" ->
- VernacSetOption (false, ["Ltac";"Debug"], BoolValue true)
-
- | IDENT "Debug"; IDENT "Off" ->
- VernacSetOption (false, ["Ltac";"Debug"], BoolValue false)
-
-(* registration of a custom reduction *)
-
- | IDENT "Declare"; IDENT "Reduction"; s = IDENT; ":=";
- r = red_expr ->
- VernacDeclareReduction (s,r)
-
- ] ];
- END
-;;
-
-(* Grammar extensions *)
-
-GEXTEND Gram
- GLOBAL: syntax;
-
- syntax:
- [ [ IDENT "Open"; IDENT "Scope"; sc = IDENT ->
- VernacOpenCloseScope (true,sc)
-
- | IDENT "Close"; IDENT "Scope"; sc = IDENT ->
- VernacOpenCloseScope (false,sc)
-
- | IDENT "Delimit"; IDENT "Scope"; sc = IDENT; "with"; key = IDENT ->
- VernacDelimiters (sc, Some key)
- | IDENT "Undelimit"; IDENT "Scope"; sc = IDENT ->
- VernacDelimiters (sc, None)
-
- | IDENT "Bind"; IDENT "Scope"; sc = IDENT; "with";
- refl = LIST1 class_rawexpr -> VernacBindScope (sc,refl)
-
- | IDENT "Infix"; op = ne_lstring; ":="; p = constr;
- modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ];
- sc = OPT [ ":"; sc = IDENT -> sc ] ->
- VernacInfix ((op,modl),p,sc)
- | IDENT "Notation"; id = identref;
- idl = LIST0 ident; ":="; c = constr; b = only_parsing ->
- VernacSyntacticDefinition
- (id,(idl,c),b)
- | IDENT "Notation"; s = lstring; ":=";
- c = constr;
- modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ];
- sc = OPT [ ":"; sc = IDENT -> sc ] ->
- VernacNotation (c,(s,modl),sc)
- | IDENT "Format"; IDENT "Notation"; n = STRING; s = STRING; fmt = STRING ->
- VernacNotationAddFormat (n,s,fmt)
-
- | IDENT "Reserved"; IDENT "Infix"; s = ne_lstring;
- l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ] ->
- let s = CAst.map (fun s -> "x '"^s^"' y") s in
- VernacSyntaxExtension (true,(s,l))
-
- | IDENT "Reserved"; IDENT "Notation";
- s = ne_lstring;
- l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ]
- -> VernacSyntaxExtension (false, (s,l))
-
- (* "Print" "Grammar" should be here but is in "command" entry in order
- to factorize with other "Print"-based vernac entries *)
- ] ]
- ;
- only_parsing:
- [ [ "("; IDENT "only"; IDENT "parsing"; ")" ->
- Some Flags.Current
- | "("; IDENT "compat"; s = STRING; ")" ->
- Some (parse_compat_version s)
- | -> None ] ]
- ;
- level:
- [ [ IDENT "level"; n = natural -> NumLevel n
- | IDENT "next"; IDENT "level" -> NextLevel ] ]
- ;
- syntax_modifier:
- [ [ "at"; IDENT "level"; n = natural -> SetLevel n
- | IDENT "left"; IDENT "associativity" -> SetAssoc LeftA
- | IDENT "right"; IDENT "associativity" -> SetAssoc RightA
- | IDENT "no"; IDENT "associativity" -> SetAssoc NonA
- | IDENT "only"; IDENT "printing" -> SetOnlyPrinting
- | IDENT "only"; IDENT "parsing" -> SetOnlyParsing
- | IDENT "compat"; s = STRING ->
- SetCompatVersion (parse_compat_version s)
- | IDENT "format"; s1 = [s = STRING -> CAst.make ~loc:!@loc s];
- s2 = OPT [s = STRING -> CAst.make ~loc:!@loc s] ->
- begin match s1, s2 with
- | { CAst.v = k }, Some s -> SetFormat(k,s)
- | s, None -> SetFormat ("text",s) end
- | x = IDENT; ","; l = LIST1 [id = IDENT -> id ] SEP ","; "at";
- lev = level -> SetItemLevel (x::l,lev)
- | x = IDENT; "at"; lev = level -> SetItemLevel ([x],lev)
- | x = IDENT; "at"; lev = level; b = constr_as_binder_kind -> SetItemLevelAsBinder ([x],b,Some lev)
- | x = IDENT; b = constr_as_binder_kind -> SetItemLevelAsBinder ([x],b,None)
- | x = IDENT; typ = syntax_extension_type -> SetEntryType (x,typ)
- ] ]
- ;
- syntax_extension_type:
- [ [ IDENT "ident" -> ETName | IDENT "global" -> ETReference
- | IDENT "bigint" -> ETBigint
- | IDENT "binder" -> ETBinder true
- | IDENT "constr"; n = OPT at_level; b = constr_as_binder_kind -> ETConstrAsBinder (b,n)
- | IDENT "pattern" -> ETPattern (false,None)
- | IDENT "pattern"; "at"; IDENT "level"; n = natural -> ETPattern (false,Some n)
- | IDENT "strict"; IDENT "pattern" -> ETPattern (true,None)
- | IDENT "strict"; IDENT "pattern"; "at"; IDENT "level"; n = natural -> ETPattern (true,Some n)
- | IDENT "closed"; IDENT "binder" -> ETBinder false
- ] ]
- ;
- at_level:
- [ [ "at"; n = level -> n ] ]
- ;
- constr_as_binder_kind:
- [ [ "as"; IDENT "ident" -> Notation_term.AsIdent
- | "as"; IDENT "pattern" -> Notation_term.AsIdentOrPattern
- | "as"; IDENT "strict"; IDENT "pattern" -> Notation_term.AsStrictPattern ] ]
- ;
-END
diff --git a/vernac/g_vernac.mlg b/vernac/g_vernac.mlg
new file mode 100644
index 000000000..3a01ce6df
--- /dev/null
+++ b/vernac/g_vernac.mlg
@@ -0,0 +1,1173 @@
+(************************************************************************)
+(*         *   The Coq Proof Assistant / The Coq Development Team       *)
+(*  v      *   INRIA, CNRS and contributors - Copyright 1999-2018       *)
+(* <O___,, *       (see CREDITS file for the list of authors)           *)
+(*   \VV/  **************************************************************)
+(*    //   *    This file is distributed under the terms of the         *)
+(*         *     GNU Lesser General Public License Version 2.1          *)
+(*         *     (see LICENSE file for the text of the license)         *)
+(************************************************************************)
+
+{
+
+open Pp
+open CErrors
+open Util
+open Names
+open Glob_term
+open Vernacexpr
+open Constrexpr
+open Constrexpr_ops
+open Extend
+open Decl_kinds
+open Declaremods
+open Declarations
+open Namegen
+open Tok (* necessary for camlp5 *)
+
+open Pcoq
+open Pcoq.Prim
+open Pcoq.Constr
+open Pcoq.Module
+open Pvernac.Vernac_
+
+let vernac_kw = [ ";"; ","; ">->"; ":<"; "<:"; "where"; "at" ]
+let _ = List.iter CLexer.add_keyword vernac_kw
+
+(* Rem: do not join the different grammar extensions into one, it breaks *)
+(* native compilation on PowerPC and Sun architectures *)
+
+let query_command = Gram.entry_create "vernac:query_command"
+
+let subprf = Gram.entry_create "vernac:subprf"
+
+let class_rawexpr = Gram.entry_create "vernac:class_rawexpr"
+let thm_token = Gram.entry_create "vernac:thm_token"
+let def_body = Gram.entry_create "vernac:def_body"
+let decl_notation = Gram.entry_create "vernac:decl_notation"
+let record_field = Gram.entry_create "vernac:record_field"
+let of_type_with_opt_coercion = Gram.entry_create "vernac:of_type_with_opt_coercion"
+let instance_name = Gram.entry_create "vernac:instance_name"
+let section_subset_expr = Gram.entry_create "vernac:section_subset_expr"
+
+let make_bullet s =
+ let open Proof_bullet in
+ let n = String.length s in
+ match s.[0] with
+ | '-' -> Dash n
+ | '+' -> Plus n
+ | '*' -> Star n
+ | _ -> assert false
+
+let parse_compat_version ?(allow_old = true) = let open Flags in function
+ | "8.8" -> Current
+ | "8.7" -> V8_7
+ | "8.6" -> V8_6
+ | ("8.5" | "8.4" | "8.3" | "8.2" | "8.1" | "8.0") as s ->
+ CErrors.user_err ~hdr:"get_compat_version"
+ Pp.(str "Compatibility with version " ++ str s ++ str " not supported.")
+ | s ->
+ CErrors.user_err ~hdr:"get_compat_version"
+ Pp.(str "Unknown compatibility version \"" ++ str s ++ str "\".")
+
+}
+
+GRAMMAR EXTEND Gram
+ GLOBAL: vernac_control gallina_ext noedit_mode subprf;
+ vernac_control: FIRST
+ [ [ IDENT "Time"; c = located_vernac -> { VernacTime (false,c) }
+ | IDENT "Redirect"; s = ne_string; c = located_vernac -> { VernacRedirect (s, c) }
+ | IDENT "Timeout"; n = natural; v = vernac_control -> { VernacTimeout(n,v) }
+ | IDENT "Fail"; v = vernac_control -> { VernacFail v }
+ | v = vernac -> { let (f, v) = v in VernacExpr(f, v) } ]
+ ]
+ ;
+ vernac:
+ [ [ IDENT "Local"; v = vernac_poly -> { let (f, v) = v in (VernacLocal true :: f, v) }
+ | IDENT "Global"; v = vernac_poly -> { let (f, v) = v in (VernacLocal false :: f, v) }
+
+ | v = vernac_poly -> { v } ]
+ ]
+ ;
+ vernac_poly:
+ [ [ IDENT "Polymorphic"; v = vernac_aux -> { let (f, v) = v in (VernacPolymorphic true :: f, v) }
+ | IDENT "Monomorphic"; v = vernac_aux -> { let (f, v) = v in (VernacPolymorphic false :: f, v) }
+ | v = vernac_aux -> { v } ]
+ ]
+ ;
+ vernac_aux:
+ (* Better to parse "." here: in case of failure (e.g. in coerce_to_var), *)
+ (* "." is still in the stream and discard_to_dot works correctly *)
+ [ [ IDENT "Program"; g = gallina; "." -> { ([VernacProgram], g) }
+ | IDENT "Program"; g = gallina_ext; "." -> { ([VernacProgram], g) }
+ | g = gallina; "." -> { ([], g) }
+ | g = gallina_ext; "." -> { ([], g) }
+ | c = command; "." -> { ([], c) }
+ | c = syntax; "." -> { ([], c) }
+ | c = subprf -> { ([], c) }
+ ] ]
+ ;
+ vernac_aux: LAST
+ [ [ prfcom = command_entry -> { ([], prfcom) } ] ]
+ ;
+ noedit_mode:
+ [ [ c = query_command -> { c None } ] ]
+ ;
+
+ subprf:
+ [ [ s = BULLET -> { VernacBullet (make_bullet s) }
+ | "{" -> { VernacSubproof None }
+ | "}" -> { VernacEndSubproof }
+ ] ]
+ ;
+
+ located_vernac:
+ [ [ v = vernac_control -> { CAst.make ~loc v } ] ]
+ ;
+END
+
+{
+
+let warn_plural_command =
+ CWarnings.create ~name:"plural-command" ~category:"pedantic" ~default:CWarnings.Disabled
+ (fun kwd -> strbrk (Printf.sprintf "Command \"%s\" expects more than one assumption." kwd))
+
+let test_plural_form loc kwd = function
+ | [(_,([_],_))] ->
+ warn_plural_command ~loc kwd
+ | _ -> ()
+
+let test_plural_form_types loc kwd = function
+ | [([_],_)] ->
+ warn_plural_command ~loc kwd
+ | _ -> ()
+
+let lname_of_lident : lident -> lname =
+ CAst.map (fun s -> Name s)
+
+let name_of_ident_decl : ident_decl -> name_decl =
+ on_fst lname_of_lident
+
+}
+
+(* Gallina declarations *)
+GRAMMAR EXTEND Gram
+ GLOBAL: gallina gallina_ext thm_token def_body of_type_with_opt_coercion
+ record_field decl_notation rec_definition ident_decl univ_decl;
+
+ gallina:
+ (* Definition, Theorem, Variable, Axiom, ... *)
+ [ [ thm = thm_token; id = ident_decl; bl = binders; ":"; c = lconstr;
+ l = LIST0
+ [ "with"; id = ident_decl; bl = binders; ":"; c = lconstr ->
+ { (id,(bl,c)) } ] ->
+ { VernacStartTheoremProof (thm, (id,(bl,c))::l) }
+ | stre = assumption_token; nl = inline; bl = assum_list ->
+ { VernacAssumption (stre, nl, bl) }
+ | tk = assumptions_token; nl = inline; bl = assum_list ->
+ { let (kwd,stre) = tk in
+ test_plural_form loc kwd bl;
+ VernacAssumption (stre, nl, bl) }
+ | d = def_token; id = ident_decl; b = def_body ->
+ { VernacDefinition (d, name_of_ident_decl id, b) }
+ | IDENT "Let"; id = identref; b = def_body ->
+ { VernacDefinition ((DoDischarge, Let), (lname_of_lident id, None), b) }
+ (* Gallina inductive declarations *)
+ | cum = OPT cumulativity_token; priv = private_token; f = finite_token;
+ indl = LIST1 inductive_definition SEP "with" ->
+ { let (k,f) = f in
+ let indl=List.map (fun ((a,b,c,d),e) -> ((a,b,c,k,d),e)) indl in
+ VernacInductive (cum, priv,f,indl) }
+ | "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
+ { VernacFixpoint (NoDischarge, recs) }
+ | IDENT "Let"; "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
+ { VernacFixpoint (DoDischarge, recs) }
+ | "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
+ { VernacCoFixpoint (NoDischarge, corecs) }
+ | IDENT "Let"; "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
+ { VernacCoFixpoint (DoDischarge, corecs) }
+ | IDENT "Scheme"; l = LIST1 scheme SEP "with" -> { VernacScheme l }
+ | IDENT "Combined"; IDENT "Scheme"; id = identref; IDENT "from";
+ l = LIST1 identref SEP "," -> { VernacCombinedScheme (id, l) }
+ | IDENT "Register"; IDENT "Inline"; id = identref ->
+ { VernacRegister(id, RegisterInline) }
+ | IDENT "Universe"; l = LIST1 identref -> { VernacUniverse l }
+ | IDENT "Universes"; l = LIST1 identref -> { VernacUniverse l }
+ | IDENT "Constraint"; l = LIST1 univ_constraint SEP "," -> { VernacConstraint l }
+ ] ]
+ ;
+
+ thm_token:
+ [ [ "Theorem" -> { Theorem }
+ | IDENT "Lemma" -> { Lemma }
+ | IDENT "Fact" -> { Fact }
+ | IDENT "Remark" -> { Remark }
+ | IDENT "Corollary" -> { Corollary }
+ | IDENT "Proposition" -> { Proposition }
+ | IDENT "Property" -> { Property } ] ]
+ ;
+ def_token:
+ [ [ "Definition" -> { (NoDischarge,Definition) }
+ | IDENT "Example" -> { (NoDischarge,Example) }
+ | IDENT "SubClass" -> { (NoDischarge,SubClass) } ] ]
+ ;
+ assumption_token:
+ [ [ "Hypothesis" -> { (DoDischarge, Logical) }
+ | "Variable" -> { (DoDischarge, Definitional) }
+ | "Axiom" -> { (NoDischarge, Logical) }
+ | "Parameter" -> { (NoDischarge, Definitional) }
+ | IDENT "Conjecture" -> { (NoDischarge, Conjectural) } ] ]
+ ;
+ assumptions_token:
+ [ [ IDENT "Hypotheses" -> { ("Hypotheses", (DoDischarge, Logical)) }
+ | IDENT "Variables" -> { ("Variables", (DoDischarge, Definitional)) }
+ | IDENT "Axioms" -> { ("Axioms", (NoDischarge, Logical)) }
+ | IDENT "Parameters" -> { ("Parameters", (NoDischarge, Definitional)) }
+ | IDENT "Conjectures" -> { ("Conjectures", (NoDischarge, Conjectural)) } ] ]
+ ;
+ inline:
+ [ [ IDENT "Inline"; "("; i = INT; ")" -> { InlineAt (int_of_string i) }
+ | IDENT "Inline" -> { DefaultInline }
+ | -> { NoInline } ] ]
+ ;
+ univ_constraint:
+ [ [ l = universe_level; ord = [ "<" -> { Univ.Lt } | "=" -> { Univ.Eq } | "<=" -> { Univ.Le } ];
+ r = universe_level -> { (l, ord, r) } ] ]
+ ;
+ univ_decl :
+ [ [ "@{" ; l = LIST0 identref; ext = [ "+" -> { true } | -> { false } ];
+ cs = [ "|"; l' = LIST0 univ_constraint SEP ",";
+ ext = [ "+" -> { true } | -> { false } ]; "}" -> { (l',ext) }
+ | ext = [ "}" -> { true } | "|}" -> { false } ] -> { ([], ext) } ]
+ ->
+ { let open UState in
+ { univdecl_instance = l;
+ univdecl_extensible_instance = ext;
+ univdecl_constraints = fst cs;
+ univdecl_extensible_constraints = snd cs } }
+ ] ]
+ ;
+ ident_decl:
+ [ [ i = identref; l = OPT univ_decl -> { (i, l) }
+ ] ]
+ ;
+ finite_token:
+ [ [ IDENT "Inductive" -> { (Inductive_kw,Finite) }
+ | IDENT "CoInductive" -> { (CoInductive,CoFinite) }
+ | IDENT "Variant" -> { (Variant,BiFinite) }
+ | IDENT "Record" -> { (Record,BiFinite) }
+ | IDENT "Structure" -> { (Structure,BiFinite) }
+ | IDENT "Class" -> { (Class true,BiFinite) } ] ]
+ ;
+ cumulativity_token:
+ [ [ IDENT "Cumulative" -> { VernacCumulative }
+ | IDENT "NonCumulative" -> { VernacNonCumulative } ] ]
+ ;
+ private_token:
+ [ [ IDENT "Private" -> { true } | -> { false } ] ]
+ ;
+ (* Simple definitions *)
+ def_body:
+ [ [ bl = binders; ":="; red = reduce; c = lconstr ->
+ { if List.exists (function CLocalPattern _ -> true | _ -> false) bl
+ then
+          (* FIXME: "red" will be applied to types in bl and the Cast will remain *)
+ let c = mkCLambdaN ~loc bl c in
+ DefineBody ([], red, c, None)
+ else
+ (match c with
+ | { CAst.v = CCast(c, CastConv t) } -> DefineBody (bl, red, c, Some t)
+ | _ -> DefineBody (bl, red, c, None)) }
+ | bl = binders; ":"; t = lconstr; ":="; red = reduce; c = lconstr ->
+ { let ((bl, c), tyo) =
+ if List.exists (function CLocalPattern _ -> true | _ -> false) bl
+ then
+          (* FIXME: "red" will be applied to types in bl and the Cast will remain *)
+ let c = CAst.make ~loc @@ CCast (c, CastConv t) in
+ (([],mkCLambdaN ~loc bl c), None)
+ else ((bl, c), Some t)
+ in
+ DefineBody (bl, red, c, tyo) }
+ | bl = binders; ":"; t = lconstr ->
+ { ProveBody (bl, t) } ] ]
+ ;
+ reduce:
+ [ [ IDENT "Eval"; r = red_expr; "in" -> { Some r }
+ | -> { None } ] ]
+ ;
+ one_decl_notation:
+ [ [ ntn = ne_lstring; ":="; c = constr;
+ scopt = OPT [ ":"; sc = IDENT -> { sc } ] -> { (ntn,c,scopt) } ] ]
+ ;
+ decl_sep:
+ [ [ IDENT "and" -> { () } ] ]
+ ;
+ decl_notation:
+ [ [ "where"; l = LIST1 one_decl_notation SEP decl_sep -> { l }
+ | -> { [] } ] ]
+ ;
+ (* Inductives and records *)
+ opt_constructors_or_fields:
+ [ [ ":="; lc = constructor_list_or_record_decl -> { lc }
+ | -> { RecordDecl (None, []) } ] ]
+ ;
+ inductive_definition:
+ [ [ oc = opt_coercion; id = ident_decl; indpar = binders;
+ c = OPT [ ":"; c = lconstr -> { c } ];
+ lc=opt_constructors_or_fields; ntn = decl_notation ->
+ { (((oc,id),indpar,c,lc),ntn) } ] ]
+ ;
+ constructor_list_or_record_decl:
+ [ [ "|"; l = LIST1 constructor SEP "|" -> { Constructors l }
+ | id = identref ; c = constructor_type; "|"; l = LIST0 constructor SEP "|" ->
+ { Constructors ((c id)::l) }
+ | id = identref ; c = constructor_type -> { Constructors [ c id ] }
+ | cstr = identref; "{"; fs = record_fields; "}" ->
+ { RecordDecl (Some cstr,fs) }
+ | "{";fs = record_fields; "}" -> { RecordDecl (None,fs) }
+ | -> { Constructors [] } ] ]
+ ;
+(*
+ csort:
+ [ [ s = sort -> CSort (loc,s) ] ]
+ ;
+*)
+ opt_coercion:
+ [ [ ">" -> { true }
+ | -> { false } ] ]
+ ;
+ (* (co)-fixpoints *)
+ rec_definition:
+ [ [ id = ident_decl;
+ bl = binders_fixannot;
+ ty = type_cstr;
+ def = OPT [":="; def = lconstr -> { def } ]; ntn = decl_notation ->
+ { let bl, annot = bl in ((id,annot,bl,ty,def),ntn) } ] ]
+ ;
+ corec_definition:
+ [ [ id = ident_decl; bl = binders; ty = type_cstr;
+ def = OPT [":="; def = lconstr -> { def }]; ntn = decl_notation ->
+ { ((id,bl,ty,def),ntn) } ] ]
+ ;
+ type_cstr:
+ [ [ ":"; c=lconstr -> { c }
+ | -> { CAst.make ~loc @@ CHole (None, IntroAnonymous, None) } ] ]
+ ;
+ (* Inductive schemes *)
+ scheme:
+ [ [ kind = scheme_kind -> { (None,kind) }
+ | id = identref; ":="; kind = scheme_kind -> { (Some id,kind) } ] ]
+ ;
+ scheme_kind:
+ [ [ IDENT "Induction"; "for"; ind = smart_global;
+ IDENT "Sort"; s = sort_family-> { InductionScheme(true,ind,s) }
+ | IDENT "Minimality"; "for"; ind = smart_global;
+ IDENT "Sort"; s = sort_family-> { InductionScheme(false,ind,s) }
+ | IDENT "Elimination"; "for"; ind = smart_global;
+ IDENT "Sort"; s = sort_family-> { CaseScheme(true,ind,s) }
+ | IDENT "Case"; "for"; ind = smart_global;
+ IDENT "Sort"; s = sort_family-> { CaseScheme(false,ind,s) }
+ | IDENT "Equality"; "for" ; ind = smart_global -> { EqualityScheme(ind) } ] ]
+ ;
+ (* Various Binders *)
+(*
+ (* ... without coercions *)
+ binder_nodef:
+ [ [ b = binder_let ->
+ (match b with
+ CLocalAssum(l,ty) -> (l,ty)
+ | CLocalDef _ ->
+ Util.user_err_loc
+ (loc,"fix_param",Pp.str"defined binder not allowed here.")) ] ]
+ ;
+*)
+ (* ... with coercions *)
+ record_field:
+ [ [ bd = record_binder; pri = OPT [ "|"; n = natural -> { n } ];
+ ntn = decl_notation -> { (bd,pri),ntn } ] ]
+ ;
+ record_fields:
+ [ [ f = record_field; ";"; fs = record_fields -> { f :: fs }
+ | f = record_field; ";" -> { [f] }
+ | f = record_field -> { [f] }
+ | -> { [] }
+ ] ]
+ ;
+ record_binder_body:
+ [ [ l = binders; oc = of_type_with_opt_coercion;
+ t = lconstr -> { fun id -> (oc,AssumExpr (id,mkCProdN ~loc l t)) }
+ | l = binders; oc = of_type_with_opt_coercion;
+ t = lconstr; ":="; b = lconstr -> { fun id ->
+ (oc,DefExpr (id,mkCLambdaN ~loc l b,Some (mkCProdN ~loc l t))) }
+ | l = binders; ":="; b = lconstr -> { fun id ->
+ match b.CAst.v with
+ | CCast(b', (CastConv t|CastVM t|CastNative t)) ->
+ (None,DefExpr(id,mkCLambdaN ~loc l b',Some (mkCProdN ~loc l t)))
+ | _ ->
+ (None,DefExpr(id,mkCLambdaN ~loc l b,None)) } ] ]
+ ;
+ record_binder:
+ [ [ id = name -> { (None,AssumExpr(id, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))) }
+ | id = name; f = record_binder_body -> { f id } ] ]
+ ;
+ assum_list:
+ [ [ bl = LIST1 assum_coe -> { bl } | b = simple_assum_coe -> { [b] } ] ]
+ ;
+ assum_coe:
+ [ [ "("; a = simple_assum_coe; ")" -> { a } ] ]
+ ;
+ simple_assum_coe:
+ [ [ idl = LIST1 ident_decl; oc = of_type_with_opt_coercion; c = lconstr ->
+ { (not (Option.is_empty oc),(idl,c)) } ] ]
+ ;
+
+ constructor_type:
+ [[ l = binders;
+ t= [ coe = of_type_with_opt_coercion; c = lconstr ->
+ { fun l id -> (not (Option.is_empty coe),(id,mkCProdN ~loc l c)) }
+ | ->
+ { fun l id -> (false,(id,mkCProdN ~loc l (CAst.make ~loc @@ CHole (None, IntroAnonymous, None)))) } ]
+ -> { t l }
+ ]]
+;
+
+ constructor:
+ [ [ id = identref; c=constructor_type -> { c id } ] ]
+ ;
+ of_type_with_opt_coercion:
+ [ [ ":>>" -> { Some false }
+ | ":>"; ">" -> { Some false }
+ | ":>" -> { Some true }
+ | ":"; ">"; ">" -> { Some false }
+ | ":"; ">" -> { Some true }
+ | ":" -> { None } ] ]
+ ;
+END
+
+{
+
+let only_starredidentrefs =
+ Gram.Entry.of_parser "test_only_starredidentrefs"
+ (fun strm ->
+ let rec aux n =
+ match Util.stream_nth n strm with
+ | KEYWORD "." -> ()
+ | KEYWORD ")" -> ()
+ | (IDENT _ | KEYWORD "Type" | KEYWORD "*") -> aux (n+1)
+ | _ -> raise Stream.Failure in
+ aux 0)
+let starredidentreflist_to_expr l =
+ match l with
+ | [] -> SsEmpty
+ | x :: xs -> List.fold_right (fun i acc -> SsUnion(i,acc)) xs x
+
+let warn_deprecated_include_type =
+ CWarnings.create ~name:"deprecated-include-type" ~category:"deprecated"
+ (fun () -> strbrk "Include Type is deprecated; use Include instead")
+
+}
+
+(* Modules and Sections *)
+GRAMMAR EXTEND Gram
+ GLOBAL: gallina_ext module_expr module_type section_subset_expr;
+
+ gallina_ext:
+ [ [ (* Interactive module declaration *)
+ IDENT "Module"; export = export_token; id = identref;
+ bl = LIST0 module_binder; sign = of_module_type;
+ body = is_module_expr ->
+ { VernacDefineModule (export, id, bl, sign, body) }
+ | IDENT "Module"; "Type"; id = identref;
+ bl = LIST0 module_binder; sign = check_module_types;
+ body = is_module_type ->
+ { VernacDeclareModuleType (id, bl, sign, body) }
+ | IDENT "Declare"; IDENT "Module"; export = export_token; id = identref;
+ bl = LIST0 module_binder; ":"; mty = module_type_inl ->
+ { VernacDeclareModule (export, id, bl, mty) }
+ (* Section beginning *)
+ | IDENT "Section"; id = identref -> { VernacBeginSection id }
+ | IDENT "Chapter"; id = identref -> { VernacBeginSection id }
+
+    (* This ends a Section, a Module or a Module Type *)
+ | IDENT "End"; id = identref -> { VernacEndSegment id }
+
+ (* Naming a set of section hyps *)
+ | IDENT "Collection"; id = identref; ":="; expr = section_subset_expr ->
+ { VernacNameSectionHypSet (id, expr) }
+
+ (* Requiring an already compiled module *)
+ | IDENT "Require"; export = export_token; qidl = LIST1 global ->
+ { VernacRequire (None, export, qidl) }
+ | IDENT "From" ; ns = global ; IDENT "Require"; export = export_token
+ ; qidl = LIST1 global ->
+ { VernacRequire (Some ns, export, qidl) }
+ | IDENT "Import"; qidl = LIST1 global -> { VernacImport (false,qidl) }
+ | IDENT "Export"; qidl = LIST1 global -> { VernacImport (true,qidl) }
+ | IDENT "Include"; e = module_type_inl; l = LIST0 ext_module_expr ->
+ { VernacInclude(e::l) }
+ | IDENT "Include"; "Type"; e = module_type_inl; l = LIST0 ext_module_type ->
+ { warn_deprecated_include_type ~loc ();
+ VernacInclude(e::l) } ] ]
+ ;
+ export_token:
+ [ [ IDENT "Import" -> { Some false }
+ | IDENT "Export" -> { Some true }
+ | -> { None } ] ]
+ ;
+ ext_module_type:
+ [ [ "<+"; mty = module_type_inl -> { mty } ] ]
+ ;
+ ext_module_expr:
+ [ [ "<+"; mexpr = module_expr_inl -> { mexpr } ] ]
+ ;
+ check_module_type:
+ [ [ "<:"; mty = module_type_inl -> { mty } ] ]
+ ;
+ check_module_types:
+ [ [ mtys = LIST0 check_module_type -> { mtys } ] ]
+ ;
+ of_module_type:
+ [ [ ":"; mty = module_type_inl -> { Enforce mty }
+ | mtys = check_module_types -> { Check mtys } ] ]
+ ;
+ is_module_type:
+ [ [ ":="; mty = module_type_inl ; l = LIST0 ext_module_type -> { (mty::l) }
+ | -> { [] } ] ]
+ ;
+ is_module_expr:
+ [ [ ":="; mexpr = module_expr_inl; l = LIST0 ext_module_expr -> { (mexpr::l) }
+ | -> { [] } ] ]
+ ;
+ functor_app_annot:
+ [ [ "["; IDENT "inline"; "at"; IDENT "level"; i = INT; "]" ->
+ { InlineAt (int_of_string i) }
+ | "["; IDENT "no"; IDENT "inline"; "]" -> { NoInline }
+ | -> { DefaultInline }
+ ] ]
+ ;
+ module_expr_inl:
+ [ [ "!"; me = module_expr -> { (me,NoInline) }
+ | me = module_expr; a = functor_app_annot -> { (me,a) } ] ]
+ ;
+ module_type_inl:
+ [ [ "!"; me = module_type -> { (me,NoInline) }
+ | me = module_type; a = functor_app_annot -> { (me,a) } ] ]
+ ;
+ (* Module binder *)
+ module_binder:
+ [ [ "("; export = export_token; idl = LIST1 identref; ":";
+ mty = module_type_inl; ")" -> { (export,idl,mty) } ] ]
+ ;
+ (* Module expressions *)
+ module_expr:
+ [ [ me = module_expr_atom -> { me }
+ | me1 = module_expr; me2 = module_expr_atom -> { CAst.make ~loc @@ CMapply (me1,me2) }
+ ] ]
+ ;
+ module_expr_atom:
+ [ [ qid = qualid -> { CAst.make ~loc @@ CMident qid } | "("; me = module_expr; ")" -> { me } ] ]
+ ;
+ with_declaration:
+ [ [ "Definition"; fqid = fullyqualid; udecl = OPT univ_decl; ":="; c = Constr.lconstr ->
+ { CWith_Definition (fqid,udecl,c) }
+ | IDENT "Module"; fqid = fullyqualid; ":="; qid = qualid ->
+ { CWith_Module (fqid,qid) }
+ ] ]
+ ;
+ module_type:
+ [ [ qid = qualid -> { CAst.make ~loc @@ CMident qid }
+ | "("; mt = module_type; ")" -> { mt }
+ | mty = module_type; me = module_expr_atom ->
+ { CAst.make ~loc @@ CMapply (mty,me) }
+ | mty = module_type; "with"; decl = with_declaration ->
+ { CAst.make ~loc @@ CMwith (mty,decl) }
+ ] ]
+ ;
+ (* Proof using *)
+ section_subset_expr:
+ [ [ only_starredidentrefs; l = LIST0 starredidentref ->
+ { starredidentreflist_to_expr l }
+ | e = ssexpr -> { e } ]]
+ ;
+ starredidentref:
+ [ [ i = identref -> { SsSingl i }
+ | i = identref; "*" -> { SsFwdClose(SsSingl i) }
+ | "Type" -> { SsType }
+ | "Type"; "*" -> { SsFwdClose SsType } ]]
+ ;
+ ssexpr:
+ [ "35"
+ [ "-"; e = ssexpr -> { SsCompl e } ]
+ | "50"
+ [ e1 = ssexpr; "-"; e2 = ssexpr-> { SsSubstr(e1,e2) }
+ | e1 = ssexpr; "+"; e2 = ssexpr-> { SsUnion(e1,e2) } ]
+ | "0"
+ [ i = starredidentref -> { i }
+ | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"->
+ { starredidentreflist_to_expr l }
+ | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"; "*" ->
+ { SsFwdClose(starredidentreflist_to_expr l) }
+ | "("; e = ssexpr; ")"-> { e }
+ | "("; e = ssexpr; ")"; "*" -> { SsFwdClose e } ] ]
+ ;
+END
+
+(* Extensions: implicits, coercions, etc. *)
+GRAMMAR EXTEND Gram
+ GLOBAL: gallina_ext instance_name hint_info;
+
+ gallina_ext:
+ [ [ (* Transparent and Opaque *)
+ IDENT "Transparent"; l = LIST1 smart_global ->
+ { VernacSetOpacity (Conv_oracle.transparent, l) }
+ | IDENT "Opaque"; l = LIST1 smart_global ->
+ { VernacSetOpacity (Conv_oracle.Opaque, l) }
+ | IDENT "Strategy"; l =
+ LIST1 [ v=strategy_level; "["; q=LIST1 smart_global; "]" -> { (v,q) } ] ->
+ { VernacSetStrategy l }
+ (* Canonical structure *)
+ | IDENT "Canonical"; IDENT "Structure"; qid = global ->
+ { VernacCanonical CAst.(make ~loc @@ AN qid) }
+ | IDENT "Canonical"; IDENT "Structure"; ntn = by_notation ->
+ { VernacCanonical CAst.(make ~loc @@ ByNotation ntn) }
+ | IDENT "Canonical"; IDENT "Structure"; qid = global; d = def_body ->
+ { let s = coerce_reference_to_id qid in
+ VernacDefinition ((NoDischarge,CanonicalStructure),((CAst.make (Name s)),None),d) }
+
+ (* Coercions *)
+ | IDENT "Coercion"; qid = global; d = def_body ->
+ { let s = coerce_reference_to_id qid in
+ VernacDefinition ((NoDischarge,Coercion),((CAst.make (Name s)),None),d) }
+ | IDENT "Identity"; IDENT "Coercion"; f = identref; ":";
+ s = class_rawexpr; ">->"; t = class_rawexpr ->
+ { VernacIdentityCoercion (f, s, t) }
+ | IDENT "Coercion"; qid = global; ":"; s = class_rawexpr; ">->";
+ t = class_rawexpr ->
+ { VernacCoercion (CAst.make ~loc @@ AN qid, s, t) }
+ | IDENT "Coercion"; ntn = by_notation; ":"; s = class_rawexpr; ">->";
+ t = class_rawexpr ->
+ { VernacCoercion (CAst.make ~loc @@ ByNotation ntn, s, t) }
+
+ | IDENT "Context"; c = LIST1 binder ->
+ { VernacContext (List.flatten c) }
+
+ | IDENT "Instance"; namesup = instance_name; ":";
+ expl = [ "!" -> { Decl_kinds.Implicit } | -> { Decl_kinds.Explicit } ] ; t = operconstr LEVEL "200";
+ info = hint_info ;
+ props = [ ":="; "{"; r = record_declaration; "}" -> { Some (true,r) } |
+ ":="; c = lconstr -> { Some (false,c) } | -> { None } ] ->
+ { VernacInstance (false,snd namesup,(fst namesup,expl,t),props,info) }
+
+ | IDENT "Existing"; IDENT "Instance"; id = global;
+ info = hint_info ->
+ { VernacDeclareInstances [id, info] }
+
+ | IDENT "Existing"; IDENT "Instances"; ids = LIST1 global;
+ pri = OPT [ "|"; i = natural -> { i } ] ->
+ { let info = { Typeclasses.hint_priority = pri; hint_pattern = None } in
+ let insts = List.map (fun i -> (i, info)) ids in
+ VernacDeclareInstances insts }
+
+ | IDENT "Existing"; IDENT "Class"; is = global -> { VernacDeclareClass is }
+
+ (* Arguments *)
+ | IDENT "Arguments"; qid = smart_global;
+ args = LIST0 argument_spec_block;
+ more_implicits = OPT
+ [ ","; impl = LIST1
+ [ impl = LIST0 more_implicits_block -> { List.flatten impl } ]
+ SEP "," -> { impl }
+ ];
+ mods = OPT [ ":"; l = LIST1 arguments_modifier SEP "," -> { l } ] ->
+ { let mods = match mods with None -> [] | Some l -> List.flatten l in
+ let slash_position = ref None in
+ let rec parse_args i = function
+ | [] -> []
+ | `Id x :: args -> x :: parse_args (i+1) args
+ | `Slash :: args ->
+ if Option.is_empty !slash_position then
+ (slash_position := Some i; parse_args i args)
+ else
+ user_err Pp.(str "The \"/\" modifier can occur only once")
+ in
+ let args = parse_args 0 (List.flatten args) in
+ let more_implicits = Option.default [] more_implicits in
+ VernacArguments (qid, args, more_implicits, !slash_position, mods) }
+
+ | IDENT "Implicit"; "Type"; bl = reserv_list ->
+ { VernacReserve bl }
+
+ | IDENT "Implicit"; IDENT "Types"; bl = reserv_list ->
+ { test_plural_form_types loc "Implicit Types" bl;
+ VernacReserve bl }
+
+ | IDENT "Generalizable";
+ gen = [IDENT "All"; IDENT "Variables" -> { Some [] }
+ | IDENT "No"; IDENT "Variables" -> { None }
+ | ["Variable" -> { () } | IDENT "Variables" -> { () } ];
+ idl = LIST1 identref -> { Some idl } ] ->
+ { VernacGeneralizable gen } ] ]
+ ;
+ arguments_modifier:
+ [ [ IDENT "simpl"; IDENT "nomatch" -> { [`ReductionDontExposeCase] }
+ | IDENT "simpl"; IDENT "never" -> { [`ReductionNeverUnfold] }
+ | IDENT "default"; IDENT "implicits" -> { [`DefaultImplicits] }
+ | IDENT "clear"; IDENT "implicits" -> { [`ClearImplicits] }
+ | IDENT "clear"; IDENT "scopes" -> { [`ClearScopes] }
+ | IDENT "rename" -> { [`Rename] }
+ | IDENT "assert" -> { [`Assert] }
+ | IDENT "extra"; IDENT "scopes" -> { [`ExtraScopes] }
+ | IDENT "clear"; IDENT "scopes"; IDENT "and"; IDENT "implicits" ->
+ { [`ClearImplicits; `ClearScopes] }
+ | IDENT "clear"; IDENT "implicits"; IDENT "and"; IDENT "scopes" ->
+ { [`ClearImplicits; `ClearScopes] }
+ ] ]
+ ;
+ scope:
+ [ [ "%"; key = IDENT -> { key } ] ]
+ ;
+ argument_spec: [
+ [ b = OPT "!"; id = name ; s = OPT scope ->
+ { id.CAst.v, not (Option.is_empty b), Option.map (fun x -> CAst.make ~loc x) s }
+ ]
+ ];
+ (* List of arguments implicit status, scope, modifiers *)
+ argument_spec_block: [
+ [ item = argument_spec ->
+ { let name, recarg_like, notation_scope = item in
+ [`Id { name=name; recarg_like=recarg_like;
+ notation_scope=notation_scope;
+ implicit_status = NotImplicit}] }
+ | "/" -> { [`Slash] }
+ | "("; items = LIST1 argument_spec; ")"; sc = OPT scope ->
+ { let f x = match sc, x with
+ | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x
+ | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
+ List.map (fun (name,recarg_like,notation_scope) ->
+ `Id { name=name; recarg_like=recarg_like;
+ notation_scope=f notation_scope;
+ implicit_status = NotImplicit}) items }
+ | "["; items = LIST1 argument_spec; "]"; sc = OPT scope ->
+ { let f x = match sc, x with
+ | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x
+ | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
+ List.map (fun (name,recarg_like,notation_scope) ->
+ `Id { name=name; recarg_like=recarg_like;
+ notation_scope=f notation_scope;
+ implicit_status = Implicit}) items }
+ | "{"; items = LIST1 argument_spec; "}"; sc = OPT scope ->
+ { let f x = match sc, x with
+ | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x
+ | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
+ List.map (fun (name,recarg_like,notation_scope) ->
+ `Id { name=name; recarg_like=recarg_like;
+ notation_scope=f notation_scope;
+ implicit_status = MaximallyImplicit}) items }
+ ]
+ ];
+ (* Same as [argument_spec_block], but with only implicit status and names *)
+ more_implicits_block: [
+ [ name = name -> { [(name.CAst.v, Vernacexpr.NotImplicit)] }
+ | "["; items = LIST1 name; "]" ->
+ { List.map (fun name -> (name.CAst.v, Vernacexpr.Implicit)) items }
+ | "{"; items = LIST1 name; "}" ->
+ { List.map (fun name -> (name.CAst.v, Vernacexpr.MaximallyImplicit)) items }
+ ]
+ ];
+ strategy_level:
+ [ [ IDENT "expand" -> { Conv_oracle.Expand }
+ | IDENT "opaque" -> { Conv_oracle.Opaque }
+ | n=INT -> { Conv_oracle.Level (int_of_string n) }
+ | "-"; n=INT -> { Conv_oracle.Level (- int_of_string n) }
+ | IDENT "transparent" -> { Conv_oracle.transparent } ] ]
+ ;
+ instance_name:
+ [ [ name = ident_decl; sup = OPT binders ->
+ { (CAst.map (fun id -> Name id) (fst name), snd name),
+ (Option.default [] sup) }
+ | -> { ((CAst.make ~loc Anonymous), None), [] } ] ]
+ ;
+ hint_info:
+ [ [ "|"; i = OPT natural; pat = OPT constr_pattern ->
+ { { Typeclasses.hint_priority = i; hint_pattern = pat } }
+ | -> { { Typeclasses.hint_priority = None; hint_pattern = None } } ] ]
+ ;
+ reserv_list:
+ [ [ bl = LIST1 reserv_tuple -> { bl } | b = simple_reserv -> { [b] } ] ]
+ ;
+ reserv_tuple:
+ [ [ "("; a = simple_reserv; ")" -> { a } ] ]
+ ;
+ simple_reserv:
+ [ [ idl = LIST1 identref; ":"; c = lconstr -> { (idl,c) } ] ]
+ ;
+
+END
+
+GRAMMAR EXTEND Gram
+ GLOBAL: command query_command class_rawexpr gallina_ext;
+
+ gallina_ext:
+ [ [ IDENT "Export"; "Set"; table = option_table; v = option_value ->
+ { VernacSetOption (true, table, v) }
+ | IDENT "Export"; IDENT "Unset"; table = option_table ->
+ { VernacUnsetOption (true, table) }
+ ] ];
+
+ command:
+ [ [ IDENT "Comments"; l = LIST0 comment -> { VernacComments l }
+
+ (* Hack! Should be in grammar_ext, but camlp5 factorizes badly *)
+ | IDENT "Declare"; IDENT "Instance"; namesup = instance_name; ":";
+ expl = [ "!" -> { Decl_kinds.Implicit } | -> { Decl_kinds.Explicit } ] ; t = operconstr LEVEL "200";
+ info = hint_info ->
+ { VernacInstance (true, snd namesup, (fst namesup, expl, t), None, info) }
+
+ (* System directory *)
+ | IDENT "Pwd" -> { VernacChdir None }
+ | IDENT "Cd" -> { VernacChdir None }
+ | IDENT "Cd"; dir = ne_string -> { VernacChdir (Some dir) }
+
+ | IDENT "Load"; verbosely = [ IDENT "Verbose" -> { true } | -> { false } ];
+ s = [ s = ne_string -> { s } | s = IDENT -> { s } ] ->
+ { VernacLoad (verbosely, s) }
+ | IDENT "Declare"; IDENT "ML"; IDENT "Module"; l = LIST1 ne_string ->
+ { VernacDeclareMLModule l }
+
+ | IDENT "Locate"; l = locatable -> { VernacLocate l }
+
+ (* Managing load paths *)
+ | IDENT "Add"; IDENT "LoadPath"; dir = ne_string; alias = as_dirpath ->
+ { VernacAddLoadPath (false, dir, alias) }
+ | IDENT "Add"; IDENT "Rec"; IDENT "LoadPath"; dir = ne_string;
+ alias = as_dirpath -> { VernacAddLoadPath (true, dir, alias) }
+ | IDENT "Remove"; IDENT "LoadPath"; dir = ne_string ->
+ { VernacRemoveLoadPath dir }
+
+ (* For compatibility *)
+ | IDENT "AddPath"; dir = ne_string; "as"; alias = as_dirpath ->
+ { VernacAddLoadPath (false, dir, alias) }
+ | IDENT "AddRecPath"; dir = ne_string; "as"; alias = as_dirpath ->
+ { VernacAddLoadPath (true, dir, alias) }
+ | IDENT "DelPath"; dir = ne_string ->
+ { VernacRemoveLoadPath dir }
+
+ (* Type-Checking (not in the refman) *)
+ | "Type"; c = lconstr -> { VernacGlobalCheck c }
+
+ (* Printing (careful factorization of entries) *)
+ | IDENT "Print"; p = printable -> { VernacPrint p }
+ | IDENT "Print"; qid = smart_global; l = OPT univ_name_list -> { VernacPrint (PrintName (qid,l)) }
+ | IDENT "Print"; IDENT "Module"; "Type"; qid = global ->
+ { VernacPrint (PrintModuleType qid) }
+ | IDENT "Print"; IDENT "Module"; qid = global ->
+ { VernacPrint (PrintModule qid) }
+ | IDENT "Print"; IDENT "Namespace" ; ns = dirpath ->
+ { VernacPrint (PrintNamespace ns) }
+ | IDENT "Inspect"; n = natural -> { VernacPrint (PrintInspect n) }
+
+ | IDENT "Add"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
+ { VernacAddMLPath (false, dir) }
+ | IDENT "Add"; IDENT "Rec"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
+ { VernacAddMLPath (true, dir) }
+
+ (* For acting on parameter tables *)
+ | "Set"; table = option_table; v = option_value ->
+ { VernacSetOption (false, table, v) }
+ | IDENT "Unset"; table = option_table ->
+ { VernacUnsetOption (false, table) }
+
+ | IDENT "Print"; IDENT "Table"; table = option_table ->
+ { VernacPrintOption table }
+
+ | IDENT "Add"; table = IDENT; field = IDENT; v = LIST1 option_ref_value
+ -> { VernacAddOption ([table;field], v) }
+ (* A global value below will be hidden by a field above! *)
+ (* In fact, we give priority to secondary tables *)
+ (* No syntax for tertiary tables due to conflict *)
+ (* (but they are unused anyway) *)
+ | IDENT "Add"; table = IDENT; v = LIST1 option_ref_value ->
+ { VernacAddOption ([table], v) }
+
+ | IDENT "Test"; table = option_table; "for"; v = LIST1 option_ref_value
+ -> { VernacMemOption (table, v) }
+ | IDENT "Test"; table = option_table ->
+ { VernacPrintOption table }
+
+ | IDENT "Remove"; table = IDENT; field = IDENT; v= LIST1 option_ref_value
+ -> { VernacRemoveOption ([table;field], v) }
+ | IDENT "Remove"; table = IDENT; v = LIST1 option_ref_value ->
+ { VernacRemoveOption ([table], v) } ]]
+ ;
+ query_command: (* TODO: bring Eval and Check closer together *)
+ [ [ IDENT "Eval"; r = red_expr; "in"; c = lconstr; "." ->
+ { fun g -> VernacCheckMayEval (Some r, g, c) }
+ | IDENT "Compute"; c = lconstr; "." ->
+ { fun g -> VernacCheckMayEval (Some (Genredexpr.CbvVm None), g, c) }
+ | IDENT "Check"; c = lconstr; "." ->
+ { fun g -> VernacCheckMayEval (None, g, c) }
+ (* Searching the environment *)
+ | IDENT "About"; qid = smart_global; l = OPT univ_name_list; "." ->
+ { fun g -> VernacPrint (PrintAbout (qid,l,g)) }
+ | IDENT "SearchHead"; c = constr_pattern; l = in_or_out_modules; "." ->
+ { fun g -> VernacSearch (SearchHead c,g, l) }
+ | IDENT "SearchPattern"; c = constr_pattern; l = in_or_out_modules; "." ->
+ { fun g -> VernacSearch (SearchPattern c,g, l) }
+ | IDENT "SearchRewrite"; c = constr_pattern; l = in_or_out_modules; "." ->
+ { fun g -> VernacSearch (SearchRewrite c,g, l) }
+ | IDENT "Search"; s = searchabout_query; l = searchabout_queries; "." ->
+ { let (sl,m) = l in fun g -> VernacSearch (SearchAbout (s::sl),g, m) }
+ (* compatibility: SearchAbout *)
+ | IDENT "SearchAbout"; s = searchabout_query; l = searchabout_queries; "." ->
+ { fun g -> let (sl,m) = l in VernacSearch (SearchAbout (s::sl),g, m) }
+ (* compatibility: SearchAbout with "[ ... ]" *)
+ | IDENT "SearchAbout"; "["; sl = LIST1 searchabout_query; "]";
+ l = in_or_out_modules; "." ->
+ { fun g -> VernacSearch (SearchAbout sl,g, l) }
+ ] ]
+ ;
+ printable:
+ [ [ IDENT "Term"; qid = smart_global; l = OPT univ_name_list -> { PrintName (qid,l) }
+ | IDENT "All" -> { PrintFullContext }
+ | IDENT "Section"; s = global -> { PrintSectionContext s }
+ | IDENT "Grammar"; ent = IDENT ->
+ (* This should be in the "syntax" section but is here for factorization *)
+ { PrintGrammar ent }
+ | IDENT "LoadPath"; dir = OPT dirpath -> { PrintLoadPath dir }
+ | IDENT "Modules" ->
+ { user_err Pp.(str "Print Modules is obsolete; use Print Libraries instead") }
+ | IDENT "Libraries" -> { PrintModules }
+
+ | IDENT "ML"; IDENT "Path" -> { PrintMLLoadPath }
+ | IDENT "ML"; IDENT "Modules" -> { PrintMLModules }
+ | IDENT "Debug"; IDENT "GC" -> { PrintDebugGC }
+ | IDENT "Graph" -> { PrintGraph }
+ | IDENT "Classes" -> { PrintClasses }
+ | IDENT "TypeClasses" -> { PrintTypeClasses }
+ | IDENT "Instances"; qid = smart_global -> { PrintInstances qid }
+ | IDENT "Coercions" -> { PrintCoercions }
+ | IDENT "Coercion"; IDENT "Paths"; s = class_rawexpr; t = class_rawexpr
+ -> { PrintCoercionPaths (s,t) }
+ | IDENT "Canonical"; IDENT "Projections" -> { PrintCanonicalConversions }
+ | IDENT "Tables" -> { PrintTables }
+ | IDENT "Options" -> { PrintTables (* A Synonymous to Tables *) }
+ | IDENT "Hint" -> { PrintHintGoal }
+ | IDENT "Hint"; qid = smart_global -> { PrintHint qid }
+ | IDENT "Hint"; "*" -> { PrintHintDb }
+ | IDENT "HintDb"; s = IDENT -> { PrintHintDbName s }
+ | IDENT "Scopes" -> { PrintScopes }
+ | IDENT "Scope"; s = IDENT -> { PrintScope s }
+ | IDENT "Visibility"; s = OPT IDENT -> { PrintVisibility s }
+ | IDENT "Implicit"; qid = smart_global -> { PrintImplicit qid }
+ | IDENT "Universes"; fopt = OPT ne_string -> { PrintUniverses (false, fopt) }
+ | IDENT "Sorted"; IDENT "Universes"; fopt = OPT ne_string -> { PrintUniverses (true, fopt) }
+ | IDENT "Assumptions"; qid = smart_global -> { PrintAssumptions (false, false, qid) }
+ | IDENT "Opaque"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (true, false, qid) }
+ | IDENT "Transparent"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (false, true, qid) }
+ | IDENT "All"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (true, true, qid) }
+ | IDENT "Strategy"; qid = smart_global -> { PrintStrategy (Some qid) }
+ | IDENT "Strategies" -> { PrintStrategy None } ] ]
+ ;
+ class_rawexpr:
+ [ [ IDENT "Funclass" -> { FunClass }
+ | IDENT "Sortclass" -> { SortClass }
+ | qid = smart_global -> { RefClass qid } ] ]
+ ;
+ locatable:
+ [ [ qid = smart_global -> { LocateAny qid }
+ | IDENT "Term"; qid = smart_global -> { LocateTerm qid }
+ | IDENT "File"; f = ne_string -> { LocateFile f }
+ | IDENT "Library"; qid = global -> { LocateLibrary qid }
+ | IDENT "Module"; qid = global -> { LocateModule qid } ] ]
+ ;
+ option_value:
+ [ [ -> { BoolValue true }
+ | n = integer -> { IntValue (Some n) }
+ | s = STRING -> { StringValue s } ] ]
+ ;
+ option_ref_value:
+ [ [ id = global -> { QualidRefValue id }
+ | s = STRING -> { StringRefValue s } ] ]
+ ;
+ option_table:
+ [ [ fl = LIST1 [ x = IDENT -> { x } ] -> { fl } ]]
+ ;
+ as_dirpath:
+ [ [ d = OPT [ "as"; d = dirpath -> { d } ] -> { d } ] ]
+ ;
+ ne_in_or_out_modules:
+ [ [ IDENT "inside"; l = LIST1 global -> { SearchInside l }
+ | IDENT "outside"; l = LIST1 global -> { SearchOutside l } ] ]
+ ;
+ in_or_out_modules:
+ [ [ m = ne_in_or_out_modules -> { m }
+ | -> { SearchOutside [] } ] ]
+ ;
+ comment:
+ [ [ c = constr -> { CommentConstr c }
+ | s = STRING -> { CommentString s }
+ | n = natural -> { CommentInt n } ] ]
+ ;
+ positive_search_mark:
+ [ [ "-" -> { false } | -> { true } ] ]
+ ;
+ scope:
+ [ [ "%"; key = IDENT -> { key } ] ]
+ ;
+ searchabout_query:
+ [ [ b = positive_search_mark; s = ne_string; sc = OPT scope ->
+ { (b, SearchString (s,sc)) }
+ | b = positive_search_mark; p = constr_pattern ->
+ { (b, SearchSubPattern p) }
+ ] ]
+ ;
+ searchabout_queries:
+ [ [ m = ne_in_or_out_modules -> { ([],m) }
+ | s = searchabout_query; l = searchabout_queries ->
+ { let (sl,m) = l in (s::sl,m) }
+ | -> { ([],SearchOutside []) }
+ ] ]
+ ;
+ univ_name_list:
+ [ [ "@{" ; l = LIST0 name; "}" -> { l } ] ]
+ ;
+END
+
+GRAMMAR EXTEND Gram
+ GLOBAL: command;
+
+ command:
+ [ [
+(* State management *)
+ IDENT "Write"; IDENT "State"; s = IDENT -> { VernacWriteState s }
+ | IDENT "Write"; IDENT "State"; s = ne_string -> { VernacWriteState s }
+ | IDENT "Restore"; IDENT "State"; s = IDENT -> { VernacRestoreState s }
+ | IDENT "Restore"; IDENT "State"; s = ne_string -> { VernacRestoreState s }
+
+(* Resetting *)
+ | IDENT "Reset"; IDENT "Initial" -> { VernacResetInitial }
+ | IDENT "Reset"; id = identref -> { VernacResetName id }
+ | IDENT "Back" -> { VernacBack 1 }
+ | IDENT "Back"; n = natural -> { VernacBack n }
+ | IDENT "BackTo"; n = natural -> { VernacBackTo n }
+
+(* Tactic Debugger *)
+ | IDENT "Debug"; IDENT "On" ->
+ { VernacSetOption (false, ["Ltac";"Debug"], BoolValue true) }
+
+ | IDENT "Debug"; IDENT "Off" ->
+ { VernacSetOption (false, ["Ltac";"Debug"], BoolValue false) }
+
+(* registration of a custom reduction *)
+
+ | IDENT "Declare"; IDENT "Reduction"; s = IDENT; ":=";
+ r = red_expr ->
+ { VernacDeclareReduction (s,r) }
+
+ ] ];
+ END
+
+(* Grammar extensions *)
+
+GRAMMAR EXTEND Gram
+ GLOBAL: syntax;
+
+ syntax:
+ [ [ IDENT "Open"; IDENT "Scope"; sc = IDENT ->
+ { VernacOpenCloseScope (true,sc) }
+
+ | IDENT "Close"; IDENT "Scope"; sc = IDENT ->
+ { VernacOpenCloseScope (false,sc) }
+
+ | IDENT "Delimit"; IDENT "Scope"; sc = IDENT; "with"; key = IDENT ->
+ { VernacDelimiters (sc, Some key) }
+ | IDENT "Undelimit"; IDENT "Scope"; sc = IDENT ->
+ { VernacDelimiters (sc, None) }
+
+ | IDENT "Bind"; IDENT "Scope"; sc = IDENT; "with";
+ refl = LIST1 class_rawexpr -> { VernacBindScope (sc,refl) }
+
+ | IDENT "Infix"; op = ne_lstring; ":="; p = constr;
+ modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> { l } | -> { [] } ];
+ sc = OPT [ ":"; sc = IDENT -> { sc } ] ->
+ { VernacInfix ((op,modl),p,sc) }
+ | IDENT "Notation"; id = identref;
+ idl = LIST0 ident; ":="; c = constr; b = only_parsing ->
+ { VernacSyntacticDefinition
+ (id,(idl,c),b) }
+ | IDENT "Notation"; s = lstring; ":=";
+ c = constr;
+ modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> { l } | -> { [] } ];
+ sc = OPT [ ":"; sc = IDENT -> { sc } ] ->
+ { VernacNotation (c,(s,modl),sc) }
+ | IDENT "Format"; IDENT "Notation"; n = STRING; s = STRING; fmt = STRING ->
+ { VernacNotationAddFormat (n,s,fmt) }
+
+ | IDENT "Reserved"; IDENT "Infix"; s = ne_lstring;
+ l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> { l } | -> { [] } ] ->
+ { let s = CAst.map (fun s -> "x '"^s^"' y") s in
+ VernacSyntaxExtension (true,(s,l)) }
+
+ | IDENT "Reserved"; IDENT "Notation";
+ s = ne_lstring;
+ l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> { l } | -> { [] } ]
+ -> { VernacSyntaxExtension (false, (s,l)) }
+
+ (* "Print" "Grammar" should be here but is in "command" entry in order
+ to factorize with other "Print"-based vernac entries *)
+ ] ]
+ ;
+ only_parsing:
+ [ [ "("; IDENT "only"; IDENT "parsing"; ")" ->
+ { Some Flags.Current }
+ | "("; IDENT "compat"; s = STRING; ")" ->
+ { Some (parse_compat_version s) }
+ | -> { None } ] ]
+ ;
+ level:
+ [ [ IDENT "level"; n = natural -> { NumLevel n }
+ | IDENT "next"; IDENT "level" -> { NextLevel } ] ]
+ ;
+ syntax_modifier:
+ [ [ "at"; IDENT "level"; n = natural -> { SetLevel n }
+ | IDENT "left"; IDENT "associativity" -> { SetAssoc LeftA }
+ | IDENT "right"; IDENT "associativity" -> { SetAssoc RightA }
+ | IDENT "no"; IDENT "associativity" -> { SetAssoc NonA }
+ | IDENT "only"; IDENT "printing" -> { SetOnlyPrinting }
+ | IDENT "only"; IDENT "parsing" -> { SetOnlyParsing }
+ | IDENT "compat"; s = STRING ->
+ { SetCompatVersion (parse_compat_version s) }
+ | IDENT "format"; s1 = [s = STRING -> { CAst.make ~loc s } ];
+ s2 = OPT [s = STRING -> { CAst.make ~loc s } ] ->
+ { begin match s1, s2 with
+ | { CAst.v = k }, Some s -> SetFormat(k,s)
+ | s, None -> SetFormat ("text",s) end }
+ | x = IDENT; ","; l = LIST1 [id = IDENT -> { id } ] SEP ","; "at";
+ lev = level -> { SetItemLevel (x::l,lev) }
+ | x = IDENT; "at"; lev = level -> { SetItemLevel ([x],lev) }
+ | x = IDENT; "at"; lev = level; b = constr_as_binder_kind -> { SetItemLevelAsBinder ([x],b,Some lev) }
+ | x = IDENT; b = constr_as_binder_kind -> { SetItemLevelAsBinder ([x],b,None) }
+ | x = IDENT; typ = syntax_extension_type -> { SetEntryType (x,typ) }
+ ] ]
+ ;
+ syntax_extension_type:
+ [ [ IDENT "ident" -> { ETName } | IDENT "global" -> { ETReference }
+ | IDENT "bigint" -> { ETBigint }
+ | IDENT "binder" -> { ETBinder true }
+ | IDENT "constr"; n = OPT at_level; b = constr_as_binder_kind -> { ETConstrAsBinder (b,n) }
+ | IDENT "pattern" -> { ETPattern (false,None) }
+ | IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (false,Some n) }
+ | IDENT "strict"; IDENT "pattern" -> { ETPattern (true,None) }
+ | IDENT "strict"; IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (true,Some n) }
+ | IDENT "closed"; IDENT "binder" -> { ETBinder false }
+ ] ]
+ ;
+ at_level:
+ [ [ "at"; n = level -> { n } ] ]
+ ;
+ constr_as_binder_kind:
+ [ [ "as"; IDENT "ident" -> { Notation_term.AsIdent }
+ | "as"; IDENT "pattern" -> { Notation_term.AsIdentOrPattern }
+ | "as"; IDENT "strict"; IDENT "pattern" -> { Notation_term.AsStrictPattern } ] ]
+ ;
+END