author     Benjamin Barenblat <bbaren@debian.org>  2018-12-29 14:31:27 -0500
committer  Benjamin Barenblat <bbaren@debian.org>  2018-12-29 14:31:27 -0500
commit     9043add656177eeac1491a73d2f3ab92bec0013c (patch)
tree       2b0092c84bfbf718eca10c81f60b2640dc8cab05 /parsing
parent     a4c7f8bd98be2a200489325ff7c5061cf80ab4f3 (diff)
Imported Upstream version 8.8.2 (upstream/8.8.2)
Diffstat (limited to 'parsing')
-rw-r--r--  parsing/cLexer.ml4         203
-rw-r--r--  parsing/cLexer.mli          60
-rw-r--r--  parsing/compat.ml4         421
-rw-r--r--  parsing/doc.tex              9
-rw-r--r--  parsing/egramcoq.ml        163
-rw-r--r--  parsing/egramcoq.mli        22
-rw-r--r--  parsing/egramml.ml          16
-rw-r--r--  parsing/egramml.mli         16
-rw-r--r--  parsing/g_constr.ml4       317
-rw-r--r--  parsing/g_prim.ml4          53
-rw-r--r--  parsing/g_proofs.ml4        98
-rw-r--r--  parsing/g_tactic.ml4       663
-rw-r--r--  parsing/g_vernac.ml4       488
-rw-r--r--  parsing/highparsing.mllib    5
-rw-r--r--  parsing/parsing.mllib        5
-rw-r--r--  parsing/pcoq.ml            362
-rw-r--r--  parsing/pcoq.mli           169
-rw-r--r--  parsing/tok.ml              12
-rw-r--r--  parsing/tok.mli             12
19 files changed, 1114 insertions, 1980 deletions
diff --git a/parsing/cLexer.ml4 b/parsing/cLexer.ml4
index aec6a326..d65b35c4 100644
--- a/parsing/cLexer.ml4
+++ b/parsing/cLexer.ml4
@@ -1,15 +1,38 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
open Pp
open Util
open Tok
-open Compat
+
+(** Location utilities *)
+let ploc_file_of_coq_file = function
+| Loc.ToplevelInput -> ""
+| Loc.InFile f -> f
+
+let coq_file_of_ploc_file s =
+ if s = "" then Loc.ToplevelInput else Loc.InFile s
+
+let from_coqloc fname line_nb bol_pos bp ep =
+ Ploc.make_loc (ploc_file_of_coq_file fname) line_nb bol_pos (bp, ep) ""
+
+let to_coqloc loc =
+ { Loc.fname = coq_file_of_ploc_file (Ploc.file_name loc);
+ Loc.line_nb = Ploc.line_nb loc;
+ Loc.bol_pos = Ploc.bol_pos loc;
+ Loc.bp = Ploc.first_pos loc;
+ Loc.ep = Ploc.last_pos loc;
+ Loc.line_nb_last = Ploc.line_nb_last loc;
+ Loc.bol_pos_last = Ploc.bol_pos_last loc; }
+
+let (!@) = to_coqloc
(* Dictionaries: trees annotated with string options, each node being a map
from chars to dictionaries (the subtrees). A trie, in other words. *)
@@ -89,7 +112,6 @@ module Error = struct
| Unterminated_string
| Undefined_token
| Bad_token of string
- | UnsupportedUnicode of int
exception E of t
@@ -100,21 +122,45 @@ module Error = struct
| Unterminated_comment -> "Unterminated comment"
| Unterminated_string -> "Unterminated string"
| Undefined_token -> "Undefined token"
- | Bad_token tok -> Format.sprintf "Bad token %S" tok
- | UnsupportedUnicode x ->
- Printf.sprintf "Unsupported Unicode character (0x%x)" x)
-
- (* Require to fix the Camlp4 signature *)
- let print ppf x = Pp.pp_with ~pp_tag:Ppstyle.pp_tag ppf (Pp.str (to_string x))
+ | Bad_token tok -> Format.sprintf "Bad token %S" tok)
end
open Error
-let err loc str = Loc.raise (Compat.to_coqloc loc) (Error.E str)
+let err loc str = Loc.raise ~loc:(to_coqloc loc) (Error.E str)
let bad_token str = raise (Error.E (Bad_token str))
-(* Lexer conventions on tokens *)
+(* Update a loc without allocating an intermediate pair *)
+let set_loc_pos loc bp ep =
+ Ploc.sub loc (bp - Ploc.first_pos loc) (ep - bp)
+
+(* Increase line number by 1 and update position of beginning of line *)
+let bump_loc_line loc bol_pos =
+ Ploc.make_loc (Ploc.file_name loc) (Ploc.line_nb loc + 1) bol_pos
+ (Ploc.first_pos loc, Ploc.last_pos loc) (Ploc.comment loc)
+
+(* Same as [bump_loc_line], but for the last line in location *)
+(* For an obscure reason, camlp5 does not give an easy way to set line_nb_stop,
+ so we have to resort to a hack merging two locations. *)
+(* Warning: [bump_loc_line_last] changes the end position. You may need to call
+ [set_loc_pos] to fix it. *)
+let bump_loc_line_last loc bol_pos =
+ let loc' =
+ Ploc.make_loc (Ploc.file_name loc) (Ploc.line_nb_last loc + 1) bol_pos
+ (Ploc.first_pos loc + 1, Ploc.last_pos loc + 1) (Ploc.comment loc)
+ in
+ Ploc.encl loc loc'
+
+(* For some reason, the [Ploc.after] function of Camlp5 does not update line
+ numbers, so we define our own function that does it. *)
+let after loc =
+ let line_nb = Ploc.line_nb_last loc in
+ let bol_pos = Ploc.bol_pos_last loc in
+ Ploc.make_loc (Ploc.file_name loc) line_nb bol_pos
+ (Ploc.last_pos loc, Ploc.last_pos loc) (Ploc.comment loc)
+
+(** Lexer conventions on tokens *)
type token_kind =
| Utf8Token of (Unicode.status * int)
@@ -186,7 +232,7 @@ let check_keyword str =
let rec loop_symb = parser
| [< ' (' ' | '\n' | '\r' | '\t' | '"') >] -> bad_token str
| [< s >] ->
- match unlocated lookup_utf8 Compat.CompatLoc.ghost s with
+ match unlocated lookup_utf8 Ploc.dummy s with
| Utf8Token (_,n) -> njunk n s; loop_symb s
| AsciiChar -> Stream.junk s; loop_symb s
| EmptyStream -> ()
@@ -200,9 +246,9 @@ let check_ident str =
| [< ' ('0'..'9' | ''') when intail; s >] ->
loop_id true s
| [< s >] ->
- match unlocated lookup_utf8 Compat.CompatLoc.ghost s with
- | Utf8Token (Unicode.Letter, n) -> njunk n s; loop_id true s
- | Utf8Token (Unicode.IdentPart, n) when intail ->
+ match unlocated lookup_utf8 Ploc.dummy s with
+ | Utf8Token (st, n) when not intail && Unicode.is_valid_ident_initial st -> njunk n s; loop_id true s
+ | Utf8Token (st, n) when intail && Unicode.is_valid_ident_trailing st ->
njunk n s;
loop_id true s
| EmptyStream -> ()
@@ -233,25 +279,26 @@ let remove_keyword str =
let keywords () = ttree_elements !token_tree
(* Freeze and unfreeze the state of the lexer *)
-type frozen_t = ttree
+type keyword_state = ttree
-let freeze () = !token_tree
-let unfreeze tt = (token_tree := tt)
+let get_keyword_state () = !token_tree
+let set_keyword_state tt = (token_tree := tt)
(* The string buffering machinery *)
-let buff = ref (String.create 80)
+let buff = ref (Bytes.create 80)
let store len x =
- if len >= String.length !buff then
- buff := !buff ^ String.create (String.length !buff);
- !buff.[len] <- x;
+ let open Bytes in
+ if len >= length !buff then
+ buff := cat !buff (create (length !buff));
+ set !buff len x;
succ len
let rec nstore n len cs =
if n>0 then nstore (n-1) (store len (Stream.next cs)) cs else len
-let get_buff len = String.sub !buff 0 len
+let get_buff len = Bytes.sub_string !buff 0 len
(* The classical lexer: idents, numbers, quoted strings, comments *)
@@ -266,9 +313,9 @@ let rec ident_tail loc len = parser
ident_tail loc (store len c) s
| [< s >] ->
match lookup_utf8 loc s with
- | Utf8Token ((Unicode.IdentPart | Unicode.Letter), n) ->
+ | Utf8Token (st, n) when Unicode.is_valid_ident_trailing st ->
ident_tail loc (nstore n len s) s
- | Utf8Token (Unicode.Unknown, n) ->
+ | Utf8Token (st, n) when Unicode.is_unknown st ->
let id = get_buff len in
let u = String.concat "" (List.map (String.make 1) (Stream.npeek n s)) in
warn_unrecognized_unicode ~loc:!@loc (u,id); len
@@ -293,13 +340,13 @@ let rec string loc ~comm_level bp len = parser
if esc then string loc ~comm_level bp (store len '"') s else (loc, len)
| [< ''('; s >] ->
(parser
- | [< ''*'; s >] ->
- string loc
- (Option.map succ comm_level)
+ | [< ''*'; s >] ->
+ let comm_level = Option.map succ comm_level in
+ string loc ~comm_level
bp (store (store len '(') '*')
s
| [< >] ->
- string loc comm_level bp (store len '(') s) s
+ string loc ~comm_level bp (store len '(') s) s
| [< ''*'; s >] ->
(parser
| [< '')'; s >] ->
@@ -309,9 +356,9 @@ let rec string loc ~comm_level bp len = parser
| _ -> ()
in
let comm_level = Option.map pred comm_level in
- string loc comm_level bp (store (store len '*') ')') s
+ string loc ~comm_level bp (store (store len '*') ')') s
| [< >] ->
- string loc comm_level bp (store len '*') s) s
+ string loc ~comm_level bp (store len '*') s) s
| [< ''\n' as c; s >] ep ->
(* If we are parsing a comment, the string if not part of a token so we
update the first line of the location. Otherwise, we update the last
@@ -320,17 +367,14 @@ let rec string loc ~comm_level bp len = parser
if Option.has_some comm_level then bump_loc_line loc ep
else bump_loc_line_last loc ep
in
- string loc comm_level bp (store len c) s
- | [< 'c; s >] -> string loc comm_level bp (store len c) s
+ string loc ~comm_level bp (store len c) s
+ | [< 'c; s >] -> string loc ~comm_level bp (store len c) s
| [< _ = Stream.empty >] ep ->
let loc = set_loc_pos loc bp ep in
err loc Unterminated_string
-(* Hook for exporting comment into xml theory files *)
-let (f_xml_output_comment, xml_output_comment) = Hook.make ~default:ignore ()
-
(* To associate locations to a file name *)
-let current_file = ref None
+let current_file = ref Loc.ToplevelInput
(* Utilities for comments in beautify *)
let comment_begin = ref None
@@ -342,18 +386,8 @@ let comments = ref []
let current_comment = Buffer.create 8192
let between_commands = ref true
-let rec split_comments comacc acc pos = function
- [] -> comments := List.rev acc; comacc
- | ((b,e),c as com)::coms ->
- (* Take all comments that terminates before pos, or begin exactly
- at pos (used to print comments attached after an expression) *)
- if e<=pos || pos=b then split_comments (c::comacc) acc pos coms
- else split_comments comacc (com::acc) pos coms
-
-let extract_comments pos = split_comments [] [] pos !comments
-
(* The state of the lexer visible from outside *)
-type lexer_state = int option * string * bool * ((int * int) * string) list * string option
+type lexer_state = int option * string * bool * ((int * int) * string) list * Loc.source
let init_lexer_state f = (None,"",true,[],f)
let set_lexer_state (o,s,b,c,f) =
@@ -362,10 +396,13 @@ let set_lexer_state (o,s,b,c,f) =
between_commands := b;
comments := c;
current_file := f
-let release_lexer_state () =
+let get_lexer_state () =
(!comment_begin, Buffer.contents current_comment, !between_commands, !comments, !current_file)
+let release_lexer_state = get_lexer_state
let drop_lexer_state () =
- set_lexer_state (init_lexer_state None)
+ set_lexer_state (init_lexer_state Loc.ToplevelInput)
+
+let get_comment_state (_,_,_,c,_) = c
let real_push_char c = Buffer.add_char current_comment c
@@ -390,9 +427,6 @@ let null_comment s =
let comment_stop ep =
let current_s = Buffer.contents current_comment in
- if !Flags.xml_export && Buffer.length current_comment > 0 &&
- (!between_commands || not(null_comment current_s)) then
- Hook.get f_xml_output_comment current_s;
(if !Flags.beautify && Buffer.length current_comment > 0 &&
(!between_commands || not(null_comment current_s)) then
let bp = match !comment_begin with
@@ -500,7 +534,7 @@ let parse_after_dot loc c bp =
(try find_keyword loc ("."^field) s with Not_found -> FIELD field)
| [< s >] ->
match lookup_utf8 loc s with
- | Utf8Token (Unicode.Letter, n) ->
+ | Utf8Token (st, n) when Unicode.is_valid_ident_initial st ->
let len = ident_tail loc (nstore n 0 s) s in
let field = get_buff len in
(try find_keyword loc ("."^field) s with Not_found -> FIELD field)
@@ -514,7 +548,7 @@ let parse_after_qmark loc bp s =
| None -> KEYWORD "?"
| _ ->
match lookup_utf8 loc s with
- | Utf8Token (Unicode.Letter, _) -> LEFTQMARK
+ | Utf8Token (st, _) when Unicode.is_valid_ident_initial st -> LEFTQMARK
| AsciiChar | Utf8Token _ | EmptyStream ->
fst (process_chars loc bp '?' s)
@@ -560,7 +594,7 @@ let rec next_token loc = parser bp
| [< ' ('0'..'9' as c); len = number (store 0 c) >] ep ->
comment_stop bp;
(INT (get_buff len), set_loc_pos loc bp ep)
- | [< ''\"'; (loc,len) = string loc None bp 0 >] ep ->
+ | [< ''\"'; (loc,len) = string loc ~comm_level:None bp 0 >] ep ->
comment_stop bp;
(STRING (get_buff len), set_loc_pos loc bp ep)
| [< ' ('(' as c);
@@ -579,13 +613,13 @@ let rec next_token loc = parser bp
comment_stop bp; between_commands := new_between_commands; t
| [< s >] ->
match lookup_utf8 loc s with
- | Utf8Token (Unicode.Letter, n) ->
+ | Utf8Token (st, n) when Unicode.is_valid_ident_initial st ->
let len = ident_tail loc (nstore n 0 s) s in
let id = get_buff len in
let ep = Stream.count s in
comment_stop bp;
(try find_keyword loc id s with Not_found -> IDENT id), set_loc_pos loc bp ep
- | AsciiChar | Utf8Token ((Unicode.Symbol | Unicode.IdentPart | Unicode.Unknown), _) ->
+ | AsciiChar | Utf8Token _ ->
let t = process_chars loc bp (Stream.next s) s in
comment_stop bp; t
| EmptyStream ->
@@ -619,8 +653,6 @@ let loct_add loct i loc = Hashtbl.add loct i loc
we unfreeze the state of the lexer. This restores the behaviour of the
lexer. B.B. *)
-IFDEF CAMLP5 THEN
-
type te = Tok.t
(** Names of tokens, for this lexer, used in Grammar error messages *)
@@ -638,12 +670,12 @@ let token_text = function
let func cs =
let loct = loct_create () in
- let cur_loc = ref (Compat.make_loc !current_file 1 0 0 0) in
+ let cur_loc = ref (from_coqloc !current_file 1 0 0 0) in
let ts =
Stream.from
(fun i ->
let (tok, loc) = next_token !cur_loc cs in
- cur_loc := Compat.after loc;
+ cur_loc := after loc;
loct_add loct i loc; Some tok)
in
(ts, loct_func loct)
@@ -659,41 +691,6 @@ let lexer = {
Token.tok_comm = None;
Token.tok_text = token_text }
-ELSE (* official camlp4 for ocaml >= 3.10 *)
-
-module M_ = Camlp4.ErrorHandler.Register (Error)
-
-module Loc = Compat.CompatLoc
-module Token = struct
- include Tok (* Cf. tok.ml *)
- module Loc = Compat.CompatLoc
- module Error = Camlp4.Struct.EmptyError
- module Filter = struct
- type token_filter = (Tok.t * Loc.t) Stream.t -> (Tok.t * Loc.t) Stream.t
- type t = unit
- let mk _is_kwd = ()
- let keyword_added () kwd _ = add_keyword kwd
- let keyword_removed () _ = ()
- let filter () x = x
- let define_filter () _ = ()
- end
-end
-
-let mk () =
- let loct = loct_create () in
- let cur_loc = ref (Compat.make_loc !current_file 1 0 0 0) in
- let rec self init_loc (* FIXME *) =
- parser i
- [< (tok, loc) = next_token !cur_loc; s >] ->
- cur_loc := Compat.set_loc_file loc !current_file;
- loct_add loct i loc;
- [< '(tok, loc); self init_loc s >]
- | [< >] -> [< >]
- in
- self
-
-END
-
(** Terminal symbols interpretation *)
let is_ident_not_keyword s =
@@ -716,13 +713,13 @@ let strip s =
in
if len == String.length s then s
else
- let s' = String.create len in
+ let s' = Bytes.create len in
let rec loop i i' =
if i == String.length s then s'
else if s.[i] == ' ' then loop (i + 1) i'
- else begin s'.[i'] <- s.[i]; loop (i + 1) (i' + 1) end
+ else begin Bytes.set s' i' s.[i]; loop (i + 1) (i' + 1) end
in
- loop 0 0
+ Bytes.to_string (loop 0 0)
let terminal s =
let s = strip s in
diff --git a/parsing/cLexer.mli b/parsing/cLexer.mli
index e0fdf8cb..a14f08d9 100644
--- a/parsing/cLexer.mli
+++ b/parsing/cLexer.mli
@@ -1,32 +1,60 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+(** This should be functional but it is not due to the interface *)
val add_keyword : string -> unit
val remove_keyword : string -> unit
val is_keyword : string -> bool
val keywords : unit -> CString.Set.t
+type keyword_state
+val set_keyword_state : keyword_state -> unit
+val get_keyword_state : unit -> keyword_state
+
val check_ident : string -> unit
val is_ident : string -> bool
val check_keyword : string -> unit
-
-type frozen_t
-val freeze : unit -> frozen_t
-val unfreeze : frozen_t -> unit
-
-val xml_output_comment : (string -> unit) Hook.t
-
-(* Retrieve the comments lexed at a given location of the stream
- currently being processeed *)
-val extract_comments : int -> string list
-
val terminal : string -> Tok.t
(** The lexer of Coq: *)
-include Compat.LexerSig
+(* modtype Grammar.GLexerType: sig
+ type te val
+ lexer : te Plexing.lexer
+ end
+
+where
+
+ type lexer 'te =
+ { tok_func : lexer_func 'te;
+ tok_using : pattern -> unit;
+ tok_removing : pattern -> unit;
+ tok_match : pattern -> 'te -> string;
+ tok_text : pattern -> string;
+ tok_comm : mutable option (list location) }
+ *)
+include Grammar.GLexerType with type te = Tok.t
+
+module Error : sig
+ type t
+ exception E of t
+ val to_string : t -> string
+end
+
+(* Mainly for comments state, etc... *)
+type lexer_state
+
+val init_lexer_state : Loc.source -> lexer_state
+val set_lexer_state : lexer_state -> unit
+val get_lexer_state : unit -> lexer_state
+val release_lexer_state : unit -> lexer_state
+[@@ocaml.deprecated "Use get_lexer_state"]
+val drop_lexer_state : unit -> unit
+val get_comment_state : lexer_state -> ((int * int) * string) list
diff --git a/parsing/compat.ml4 b/parsing/compat.ml4
deleted file mode 100644
index befa0d01..00000000
--- a/parsing/compat.ml4
+++ /dev/null
@@ -1,421 +0,0 @@
-(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(************************************************************************)
-
-(** Compatibility file depending on ocaml/camlp4 version *)
-
-(** Locations *)
-
-let file_loc_of_file = function
-| None -> ""
-| Some f -> f
-
-IFDEF CAMLP5 THEN
-
-module CompatLoc = struct
- include Ploc
- let ghost = dummy
- let merge = encl
-end
-
-exception Exc_located = Ploc.Exc
-
-let to_coqloc loc =
- { Loc.fname = Ploc.file_name loc;
- Loc.line_nb = Ploc.line_nb loc;
- Loc.bol_pos = Ploc.bol_pos loc;
- Loc.bp = Ploc.first_pos loc;
- Loc.ep = Ploc.last_pos loc;
- Loc.line_nb_last = Ploc.line_nb_last loc;
- Loc.bol_pos_last = Ploc.bol_pos_last loc; }
-
-let make_loc fname line_nb bol_pos bp ep = Ploc.make_loc (file_loc_of_file fname) line_nb bol_pos (bp, ep) ""
-
-(* Update a loc without allocating an intermediate pair *)
-let set_loc_pos loc bp ep =
- Ploc.sub loc (bp - Ploc.first_pos loc) (ep - bp)
-
-(* Increase line number by 1 and update position of beginning of line *)
-let bump_loc_line loc bol_pos =
- Ploc.make_loc (Ploc.file_name loc) (Ploc.line_nb loc + 1) bol_pos
- (Ploc.first_pos loc, Ploc.last_pos loc) (Ploc.comment loc)
-
-(* Same as [bump_loc_line], but for the last line in location *)
-(* For an obscure reason, camlp5 does not give an easy way to set line_nb_stop,
- so we have to resort to a hack merging two locations. *)
-(* Warning: [bump_loc_line_last] changes the end position. You may need to call
- [set_loc_pos] to fix it. *)
-let bump_loc_line_last loc bol_pos =
- let loc' =
- Ploc.make_loc (Ploc.file_name loc) (Ploc.line_nb_last loc + 1) bol_pos
- (Ploc.first_pos loc + 1, Ploc.last_pos loc + 1) (Ploc.comment loc)
- in
- Ploc.encl loc loc'
-
-let set_loc_file loc fname =
- Ploc.make_loc fname (Ploc.line_nb loc) (Ploc.bol_pos loc)
- (Ploc.first_pos loc, Ploc.last_pos loc) (Ploc.comment loc)
-
-(* For some reason, the [Ploc.after] function of Camlp5 does not update line
- numbers, so we define our own function that does it. *)
-let after loc =
- let line_nb = Ploc.line_nb_last loc in
- let bol_pos = Ploc.bol_pos_last loc in
- Ploc.make_loc (Ploc.file_name loc) line_nb bol_pos
- (Ploc.last_pos loc, Ploc.last_pos loc) (Ploc.comment loc)
-
-ELSE
-
-module CompatLoc = Camlp4.PreCast.Loc
-
-exception Exc_located = CompatLoc.Exc_located
-
-let to_coqloc loc =
- { Loc.fname = CompatLoc.file_name loc;
- Loc.line_nb = CompatLoc.start_line loc;
- Loc.bol_pos = CompatLoc.start_bol loc;
- Loc.bp = CompatLoc.start_off loc;
- Loc.ep = CompatLoc.stop_off loc;
- Loc.line_nb_last = CompatLoc.stop_line loc;
- Loc.bol_pos_last = CompatLoc.stop_bol loc; }
-
-let make_loc fname line_nb bol_pos start stop =
- CompatLoc.of_tuple (file_loc_of_file fname, line_nb, bol_pos, start, line_nb, bol_pos, stop, false)
-
-open CompatLoc
-
-let set_loc_pos loc bp ep =
- of_tuple (file_name loc, start_line loc, start_bol loc, bp,
- stop_line loc, stop_bol loc, ep, is_ghost loc)
-
-let bump_loc_line loc bol_pos =
- of_tuple (file_name loc, start_line loc + 1, bol_pos, start_off loc,
- start_line loc + 1, bol_pos, stop_off loc, is_ghost loc)
-
-let bump_loc_line_last loc bol_pos =
- of_tuple (file_name loc, start_line loc, start_bol loc, start_off loc,
- stop_line loc + 1, bol_pos, stop_off loc, is_ghost loc)
-
-let set_loc_file loc fname =
- of_tuple (file_loc_of_file fname, start_line loc, start_bol loc, start_off loc,
- stop_line loc, stop_bol loc, stop_off loc, is_ghost loc)
-
-let after loc =
- of_tuple (file_name loc, stop_line loc, stop_bol loc, stop_off loc,
- stop_line loc, stop_bol loc, stop_off loc, is_ghost loc)
-
-END
-
-let (!@) = to_coqloc
-
-(** Misc module emulation *)
-
-IFDEF CAMLP5 THEN
-
-module PcamlSig = struct end
-module Token = Token
-module CompatGramext = struct include Gramext type assoc = g_assoc end
-
-ELSE
-
-module PcamlSig = Camlp4.Sig
-module Ast = Camlp4.PreCast.Ast
-module Pcaml = Camlp4.PreCast.Syntax
-module MLast = Ast
-module Token = struct exception Error of string end
-module CompatGramext = Camlp4.Sig.Grammar
-
-END
-
-(** Signature of CLexer *)
-
-IFDEF CAMLP5 THEN
-
-module type LexerSig = sig
- include Grammar.GLexerType with type te = Tok.t
- module Error : sig
- type t
- exception E of t
- val to_string : t -> string
- end
- type lexer_state
- val init_lexer_state : string option -> lexer_state
- val set_lexer_state : lexer_state -> unit
- val release_lexer_state : unit -> lexer_state
- val drop_lexer_state : unit -> unit
-end
-
-ELSE
-
-module type LexerSig = sig
- include Camlp4.Sig.Lexer with module Loc = CompatLoc and type Token.t = Tok.t
- type lexer_state
- val init_lexer_state : string option -> lexer_state
- val set_lexer_state : lexer_state -> unit
- val release_lexer_state : unit -> lexer_state
- val drop_lexer_state : unit -> unit
-end
-
-END
-
-(** Signature and implementation of grammars *)
-
-IFDEF CAMLP5 THEN
-
-module type GrammarSig = sig
- include Grammar.S with type te = Tok.t
- type 'a entry = 'a Entry.e
- type internal_entry = Tok.t Gramext.g_entry
- type symbol = Tok.t Gramext.g_symbol
- type action = Gramext.g_action
- type production_rule = symbol list * action
- type single_extend_statment =
- string option * Gramext.g_assoc option * production_rule list
- type extend_statment =
- Gramext.position option * single_extend_statment list
- type coq_parsable
- val parsable : ?file:string -> char Stream.t -> coq_parsable
- val action : 'a -> action
- val entry_create : string -> 'a entry
- val entry_parse : 'a entry -> coq_parsable -> 'a
- val entry_print : Format.formatter -> 'a entry -> unit
- val with_parsable : coq_parsable -> ('a -> 'b) -> 'a -> 'b
- val srules' : production_rule list -> symbol
- val parse_tokens_after_filter : 'a entry -> Tok.t Stream.t -> 'a
-end
-
-module GrammarMake (L:LexerSig) : GrammarSig = struct
- include Grammar.GMake (L)
- type 'a entry = 'a Entry.e
- type internal_entry = Tok.t Gramext.g_entry
- type symbol = Tok.t Gramext.g_symbol
- type action = Gramext.g_action
- type production_rule = symbol list * action
- type single_extend_statment =
- string option * Gramext.g_assoc option * production_rule list
- type extend_statment =
- Gramext.position option * single_extend_statment list
- type coq_parsable = parsable * L.lexer_state ref
- let parsable ?file c =
- let state = ref (L.init_lexer_state file) in
- L.set_lexer_state !state;
- let a = parsable c in
- state := L.release_lexer_state ();
- (a,state)
- let action = Gramext.action
- let entry_create = Entry.create
- let entry_parse e (p,state) =
- L.set_lexer_state !state;
- try
- let c = Entry.parse e p in
- state := L.release_lexer_state ();
- c
- with Exc_located (loc,e) ->
- L.drop_lexer_state ();
- let loc' = Loc.get_loc (Exninfo.info e) in
- let loc = match loc' with None -> to_coqloc loc | Some loc -> loc in
- Loc.raise loc e
- let with_parsable (p,state) f x =
- L.set_lexer_state !state;
- try
- let a = f x in
- state := L.release_lexer_state ();
- a
- with e ->
- L.drop_lexer_state ();
- raise e
-
- let entry_print ft x = Entry.print ft x
- let srules' = Gramext.srules
- let parse_tokens_after_filter = Entry.parse_token
-end
-
-ELSE
-
-module type GrammarSig = sig
- include Camlp4.Sig.Grammar.Static
- with module Loc = CompatLoc and type Token.t = Tok.t
- type 'a entry = 'a Entry.t
- type action = Action.t
- type coq_parsable
- val parsable : ?file:string -> char Stream.t -> coq_parsable
- val action : 'a -> action
- val entry_create : string -> 'a entry
- val entry_parse : 'a entry -> coq_parsable -> 'a
- val entry_print : Format.formatter -> 'a entry -> unit
- val with_parsable : coq_parsable -> ('a -> 'b) -> 'a -> 'b
- val srules' : production_rule list -> symbol
-end
-
-module GrammarMake (L:LexerSig) : GrammarSig = struct
- (* We need to refer to Coq's module Loc before it is hidden by include *)
- let raise_coq_loc loc e = Loc.raise (to_coqloc loc) e
- include Camlp4.Struct.Grammar.Static.Make (L)
- type 'a entry = 'a Entry.t
- type action = Action.t
- type coq_parsable = char Stream.t * L.lexer_state ref
- let parsable ?file s = let state = ref (L.init_lexer_state file) in (s, state)
- let action = Action.mk
- let entry_create = Entry.mk
- let entry_parse e (s,state) =
- L.set_lexer_state !state;
- try
- let c = parse e (*FIXME*)CompatLoc.ghost s in
- state := L.release_lexer_state ();
- c
- with Exc_located (loc,e) ->
- L.drop_lexer_state ();
- raise_coq_loc loc e;;
- let with_parsable (p,state) f x =
- L.set_lexer_state !state;
- try
- let a = f x in
- state := L.release_lexer_state ();
- a
- with e ->
- L.drop_lexer_state ();
- Pervasives.raise e;;
- let entry_print ft x = Entry.print ft x
- let srules' = srules (entry_create "dummy")
-end
-
-END
-
-(** Some definitions are grammar-specific in Camlp4, so we use a functor to
- depend on it while taking a dummy argument in Camlp5. *)
-
-module GramextMake (G : GrammarSig) :
-sig
- val stoken : Tok.t -> G.symbol
- val sself : G.symbol
- val snext : G.symbol
- val slist0 : G.symbol -> G.symbol
- val slist0sep : G.symbol * G.symbol -> G.symbol
- val slist1 : G.symbol -> G.symbol
- val slist1sep : G.symbol * G.symbol -> G.symbol
- val sopt : G.symbol -> G.symbol
- val snterml : G.internal_entry * string -> G.symbol
- val snterm : G.internal_entry -> G.symbol
- val snterml_level : G.symbol -> string
-end =
-struct
-
-IFDEF CAMLP5 THEN
- let stoken tok =
- let pattern = match tok with
- | Tok.KEYWORD s -> "", s
- | Tok.IDENT s -> "IDENT", s
- | Tok.PATTERNIDENT s -> "PATTERNIDENT", s
- | Tok.FIELD s -> "FIELD", s
- | Tok.INT s -> "INT", s
- | Tok.STRING s -> "STRING", s
- | Tok.LEFTQMARK -> "LEFTQMARK", ""
- | Tok.BULLET s -> "BULLET", s
- | Tok.EOI -> "EOI", ""
- in
- Gramext.Stoken pattern
-ELSE
- module Gramext = G
- let stoken tok = match tok with
- | Tok.KEYWORD s -> Gramext.Skeyword s
- | tok -> Gramext.Stoken (Tok.equal tok, G.Token.to_string tok)
-END
-
- IFDEF CAMLP5 THEN
- let slist0sep (x, y) = Gramext.Slist0sep (x, y, false)
- let slist1sep (x, y) = Gramext.Slist1sep (x, y, false)
- ELSE
- let slist0sep (x, y) = Gramext.Slist0sep (x, y)
- let slist1sep (x, y) = Gramext.Slist1sep (x, y)
- END
-
- let snterml (x, y) = Gramext.Snterml (x, y)
- let snterm x = Gramext.Snterm x
- let sself = Gramext.Sself
- let snext = Gramext.Snext
- let slist0 x = Gramext.Slist0 x
- let slist1 x = Gramext.Slist1 x
- let sopt x = Gramext.Sopt x
-
- let snterml_level = function
- | Gramext.Snterml (_, l) -> l
- | _ -> failwith "snterml_level"
-
-end
-
-
-(** Misc functional adjustments *)
-
-(** - The lexer produces streams made of pairs in camlp4 *)
-
-let get_tok = IFDEF CAMLP5 THEN fun x -> x ELSE fst END
-
-(** - Gram.extend is more currified in camlp5 than in camlp4 *)
-
-IFDEF CAMLP5 THEN
-let maybe_curry f x y = f (x,y)
-let maybe_uncurry f (x,y) = f x y
-ELSE
-let maybe_curry f = f
-let maybe_uncurry f = f
-END
-
-(** Compatibility with camlp5 strict mode *)
-IFDEF CAMLP5 THEN
- IFDEF STRICT THEN
- let vala x = Ploc.VaVal x
- ELSE
- let vala x = x
- END
-ELSE
- let vala x = x
-END
-
-(** Fix a quotation difference in [str_item] *)
-
-let declare_str_items loc l =
-IFDEF CAMLP5 THEN
- MLast.StDcl (loc, vala l) (* correspond to <:str_item< declare $list:l'$ end >> *)
-ELSE
- Ast.stSem_of_list l
-END
-
-(** Quotation difference for match clauses *)
-
-let default_patt loc =
- (<:patt< _ >>, vala None, <:expr< failwith "Extension: cannot occur" >>)
-
-IFDEF CAMLP5 THEN
-
-let make_fun loc cl =
- let l = cl @ [default_patt loc] in
- MLast.ExFun (loc, vala l) (* correspond to <:expr< fun [ $list:l$ ] >> *)
-
-ELSE
-
-let make_fun loc cl =
- let mk_when = function
- | Some w -> w
- | None -> Ast.ExNil loc
- in
- let mk_clause (patt,optwhen,expr) =
- (* correspond to <:match_case< ... when ... -> ... >> *)
- Ast.McArr (loc, patt, mk_when optwhen, expr) in
- let init = mk_clause (default_patt loc) in
- let add_clause x acc = Ast.McOr (loc, mk_clause x, acc) in
- let l = List.fold_right add_clause cl init in
- Ast.ExFun (loc,l) (* correspond to <:expr< fun [ $l$ ] >> *)
-
-END
-
-IFDEF CAMLP5 THEN
-let warning_verbose = Gramext.warning_verbose
-ELSE
-(* TODO: this is a workaround, since there isn't such
- [warning_verbose] in new camlp4. *)
-let warning_verbose = ref true
-END
diff --git a/parsing/doc.tex b/parsing/doc.tex
deleted file mode 100644
index 68ab601c..00000000
--- a/parsing/doc.tex
+++ /dev/null
@@ -1,9 +0,0 @@
-
-\newpage
-\section*{The Coq parsers and printers}
-
-\ocwsection \label{parsing}
-This chapter describes the implementation of the \Coq\ parsers and printers.
-
-\bigskip
-\begin{center}\epsfig{file=parsing.dep.ps}\end{center}
diff --git a/parsing/egramcoq.ml b/parsing/egramcoq.ml
index a292c746..5f63d21c 100644
--- a/parsing/egramcoq.ml
+++ b/parsing/egramcoq.ml
@@ -1,16 +1,17 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
open CErrors
open Util
open Pcoq
open Constrexpr
-open Notation
open Notation_term
open Extend
open Libnames
@@ -22,11 +23,11 @@ open Names
a reference to the current level (to be translated into "SELF" on the
left border and into "constr LEVEL n" elsewhere), to the level below
(to be translated into "NEXT") or to an below wrt associativity (to be
- translated in camlp4 into "constr" without level) or to another level
+ translated in camlp5 into "constr" without level) or to another level
(to be translated into "constr LEVEL n")
The boolean is true if the entry was existing _and_ empty; this to
- circumvent a weakness of camlp4/camlp5 whose undo mechanism is not the
+ circumvent a weakness of camlp5 whose undo mechanism is not the
converse of the extension mechanism *)
let constr_level = string_of_int
@@ -35,7 +36,8 @@ let default_levels =
[200,Extend.RightA,false;
100,Extend.RightA,false;
99,Extend.RightA,true;
- 10,Extend.RightA,false;
+ 90,Extend.RightA,true;
+ 10,Extend.LeftA,false;
9,Extend.RightA,false;
8,Extend.RightA,true;
1,Extend.LeftA,false;
@@ -45,8 +47,8 @@ let default_pattern_levels =
[200,Extend.RightA,true;
100,Extend.RightA,false;
99,Extend.RightA,true;
- 11,Extend.LeftA,false;
- 10,Extend.RightA,false;
+ 90,Extend.RightA,true;
+ 10,Extend.LeftA,false;
1,Extend.LeftA,false;
0,Extend.RightA,false]
@@ -71,7 +73,7 @@ let error_level_assoc p current expected =
| Extend.LeftA -> str "left"
| Extend.RightA -> str "right"
| Extend.NonA -> str "non" in
- errorlabstrm ""
+ user_err
(str "Level " ++ int p ++ str " is already declared " ++
pr_assoc current ++ str " associative while it is now expected to be " ++
pr_assoc expected ++ str " associative.")
@@ -80,10 +82,6 @@ let create_pos = function
| None -> Extend.First
| Some lev -> Extend.After (constr_level lev)
-type gram_level =
- gram_position option * gram_assoc option * string option *
- (** for reinitialization: *) gram_reinit option
-
let find_position_gen current ensure assoc lev =
match lev with
| None ->
@@ -148,11 +146,11 @@ let find_position accu forpat assoc level =
(**************************************************************************)
(*
- * --- Note on the mapping of grammar productions to camlp4 actions ---
+ * --- Note on the mapping of grammar productions to camlp5 actions ---
*
* Translation of environments: a production
* [ nt1(x1) ... nti(xi) ] -> act(x1..xi)
- * is written (with camlp4 conventions):
+ * is written (with camlp5 conventions):
* (fun vi -> .... (fun v1 -> act(v1 .. vi) )..)
* where v1..vi are the values generated by non-terminals nt1..nti.
* Since the actions are executed by substituting an environment,
@@ -176,8 +174,8 @@ let find_position accu forpat assoc level =
(**********************************************************************)
(* Binding constr entry keys to entries *)
-(* Camlp4 levels do not treat NonA: use RightA with a NEXT on the left *)
-let camlp4_assoc = function
+(* Camlp5 levels do not treat NonA: use RightA with a NEXT on the left *)
+let camlp5_assoc = function
| Some NonA | Some RightA -> RightA
| None | Some LeftA -> LeftA
@@ -209,7 +207,7 @@ let adjust_level assoc from = function
(* If NonA on the left-hand side, adopt the current assoc ?? *)
| (NumLevel n,BorderProd (Left,Some NonA)) -> None
(* If the expected assoc is the current one, set to SELF *)
- | (NumLevel n,BorderProd (Left,Some a)) when assoc_eq a (camlp4_assoc assoc) ->
+ | (NumLevel n,BorderProd (Left,Some a)) when assoc_eq a (camlp5_assoc assoc) ->
None
(* Otherwise, force the level, n or n-1, according to expected assoc *)
| (NumLevel n,BorderProd (Left,Some a)) ->
@@ -230,14 +228,14 @@ type _ target =
type prod_info = production_level * production_position
type (_, _) entry =
-| TTName : ('self, Name.t Loc.located) entry
+| TTName : ('self, Misctypes.lname) entry
| TTReference : ('self, reference) entry
-| TTBigint : ('self, Bigint.bigint) entry
-| TTBinder : ('self, local_binder list) entry
+| TTBigint : ('self, Constrexpr.raw_natural_number) entry
| TTConstr : prod_info * 'r target -> ('r, 'r) entry
| TTConstrList : prod_info * Tok.t list * 'r target -> ('r, 'r list) entry
-| TTBinderListT : ('self, local_binder list) entry
-| TTBinderListF : Tok.t list -> ('self, local_binder list list) entry
+| TTPattern : int -> ('self, cases_pattern_expr) entry
+| TTOpenBinderList : ('self, local_binder_expr list) entry
+| TTClosedBinderList : Tok.t list -> ('self, local_binder_expr list list) entry
type _ any_entry = TTAny : ('s, 'r) entry -> 's any_entry
@@ -262,9 +260,11 @@ let is_binder_level from e = match e with
| (NumLevel 200, (BorderProd (Right, _) | InternalProd)) -> from = 200
| _ -> false
-let make_sep_rules tkl =
- let rec mkrule : Tok.t list -> unit rules = function
- | [] -> Rules ({ norec_rule = Stop }, ignore)
+let make_sep_rules = function
+ | [tk] -> Atoken tk
+ | tkl ->
+ let rec mkrule : Tok.t list -> string rules = function
+ | [] -> Rules ({ norec_rule = Stop }, fun _ -> (* dropped anyway: *) "")
| tkn :: rem ->
let Rules ({ norec_rule = r }, f) = mkrule rem in
let r = { norec_rule = Next (r, Atoken tkn) } in
@@ -291,40 +291,34 @@ let symbol_of_entry : type s r. _ -> _ -> (s, r) entry -> (s, r) symbol = fun as
Alist1 (symbol_of_target typ' assoc from forpat)
| TTConstrList (typ', tkl, forpat) ->
Alist1sep (symbol_of_target typ' assoc from forpat, make_sep_rules tkl)
-| TTBinderListF [] -> Alist1 (Aentry Constr.binder)
-| TTBinderListF tkl -> Alist1sep (Aentry Constr.binder, make_sep_rules tkl)
+| TTPattern p -> Aentryl (Constr.pattern, p)
+| TTClosedBinderList [] -> Alist1 (Aentry Constr.binder)
+| TTClosedBinderList tkl -> Alist1sep (Aentry Constr.binder, make_sep_rules tkl)
| TTName -> Aentry Prim.name
-| TTBinder -> Aentry Constr.binder
-| TTBinderListT -> Aentry Constr.open_binders
+| TTOpenBinderList -> Aentry Constr.open_binders
| TTBigint -> Aentry Prim.bigint
| TTReference -> Aentry Constr.global
let interp_entry forpat e = match e with
-| ETName -> TTAny TTName
-| ETReference -> TTAny TTReference
-| ETBigint -> TTAny TTBigint
-| ETBinder true -> anomaly (Pp.str "Should occur only as part of BinderList")
-| ETBinder false -> TTAny TTBinder
-| ETConstr p -> TTAny (TTConstr (p, forpat))
-| ETPattern -> assert false (** not used *)
-| ETOther _ -> assert false (** not used *)
-| ETConstrList (p, tkl) -> TTAny (TTConstrList (p, tkl, forpat))
-| ETBinderList (true, []) -> TTAny TTBinderListT
-| ETBinderList (true, _) -> assert false
-| ETBinderList (false, tkl) -> TTAny (TTBinderListF tkl)
-
-let constr_expr_of_name (loc,na) = match na with
- | Anonymous -> CHole (loc,None,Misctypes.IntroAnonymous,None)
- | Name id -> CRef (Ident (loc,id), None)
-
-let cases_pattern_expr_of_name (loc,na) = match na with
- | Anonymous -> CPatAtom (loc,None)
- | Name id -> CPatAtom (loc,Some (Ident (loc,id)))
+| ETProdName -> TTAny TTName
+| ETProdReference -> TTAny TTReference
+| ETProdBigint -> TTAny TTBigint
+| ETProdConstr p -> TTAny (TTConstr (p, forpat))
+| ETProdPattern p -> TTAny (TTPattern p)
+| ETProdOther _ -> assert false (** not used *)
+| ETProdConstrList (p, tkl) -> TTAny (TTConstrList (p, tkl, forpat))
+| ETProdBinderList ETBinderOpen -> TTAny TTOpenBinderList
+| ETProdBinderList (ETBinderClosed tkl) -> TTAny (TTClosedBinderList tkl)
+
+let cases_pattern_expr_of_name { CAst.loc; v = na } = CAst.make ?loc @@ match na with
+ | Anonymous -> CPatAtom None
+ | Name id -> CPatAtom (Some (CAst.make ?loc @@ Ident id))
type 'r env = {
constrs : 'r list;
constrlists : 'r list list;
- binders : (local_binder list * bool) list;
+ binders : cases_pattern_expr list;
+ binderlists : local_binder_expr list list;
}
let push_constr subst v = { subst with constrs = v :: subst.constrs }
@@ -334,21 +328,25 @@ match e with
| TTConstr _ -> push_constr subst v
| TTName ->
begin match forpat with
- | ForConstr -> push_constr subst (constr_expr_of_name v)
+ | ForConstr -> { subst with binders = cases_pattern_expr_of_name v :: subst.binders }
| ForPattern -> push_constr subst (cases_pattern_expr_of_name v)
end
-| TTBinder -> { subst with binders = (v, true) :: subst.binders }
-| TTBinderListT -> { subst with binders = (v, true) :: subst.binders }
-| TTBinderListF _ -> { subst with binders = (List.flatten v, false) :: subst.binders }
+| TTPattern _ ->
+ begin match forpat with
+ | ForConstr -> { subst with binders = v :: subst.binders }
+ | ForPattern -> push_constr subst v
+ end
+| TTOpenBinderList -> { subst with binderlists = v :: subst.binderlists }
+| TTClosedBinderList _ -> { subst with binderlists = List.flatten v :: subst.binderlists }
| TTBigint ->
begin match forpat with
- | ForConstr -> push_constr subst (CPrim (Loc.ghost, Numeral v))
- | ForPattern -> push_constr subst (CPatPrim (Loc.ghost, Numeral v))
+ | ForConstr -> push_constr subst (CAst.make @@ CPrim (Numeral (v,true)))
+ | ForPattern -> push_constr subst (CAst.make @@ CPatPrim (Numeral (v,true)))
end
| TTReference ->
begin match forpat with
- | ForConstr -> push_constr subst (CRef (v, None))
- | ForPattern -> push_constr subst (CPatAtom (Loc.ghost, Some v))
+ | ForConstr -> push_constr subst (CAst.make @@ CRef (v, None))
+ | ForPattern -> push_constr subst (CAst.make @@ CPatAtom (Some v))
end
| TTConstrList _ -> { subst with constrlists = v :: subst.constrlists }
@@ -359,7 +357,7 @@ type (_, _) ty_symbol =
type ('self, _, 'r) ty_rule =
| TyStop : ('self, 'r, 'r) ty_rule
| TyNext : ('self, 'a, 'r) ty_rule * ('self, 'b) ty_symbol -> ('self, 'b -> 'a, 'r) ty_rule
-| TyMark : int * bool * ('self, 'a, 'r) ty_rule -> ('self, 'a, 'r) ty_rule
+| TyMark : int * bool * int * ('self, 'a, 'r) ty_rule -> ('self, 'a, 'r) ty_rule
type 'r gen_eval = Loc.t -> 'r env -> 'r
@@ -373,18 +371,27 @@ let rec ty_eval : type s a. (s, a, Loc.t -> s) ty_rule -> s gen_eval -> s env ->
| TyNext (rem, TyNonTerm (forpat, e, _, true)) ->
fun f env v ->
ty_eval rem f (push_item forpat e env v)
-| TyMark (n, b, rem) ->
+| TyMark (n, b, p, rem) ->
fun f env ->
let heads, constrs = List.chop n env.constrs in
- let constrlists =
- if b then (heads @ List.hd env.constrlists) :: List.tl env.constrlists
- else heads :: env.constrlists
+ let constrlists, constrs =
+ if b then
+ (* We rearrange constrs = c1..cn rem and constrlists = [d1..dr e1..ep] rem' into
+ constrs = e1..ep rem and constrlists [c1..cn d1..dr] rem' *)
+ let constrlist = List.hd env.constrlists in
+ let constrlist, tail = List.chop (List.length constrlist - p) constrlist in
+ (heads @ constrlist) :: List.tl env.constrlists, tail @ constrs
+ else
+ (* We rearrange constrs = c1..cn e1..ep rem into
+ constrs = e1..ep rem and add a constr list [c1..cn] *)
+ let constrlist, tail = List.chop (n - p) heads in
+ constrlist :: env.constrlists, tail @ constrs
in
ty_eval rem f { env with constrs; constrlists; }
let rec ty_erase : type s a r. (s, a, r) ty_rule -> (s, a, r) Extend.rule = function
| TyStop -> Stop
-| TyMark (_, _, r) -> ty_erase r
+| TyMark (_, _, _, r) -> ty_erase r
| TyNext (rem, TyTerm tok) -> Next (ty_erase rem, Atoken tok)
| TyNext (rem, TyNonTerm (_, _, s, _)) -> Next (ty_erase rem, s)
@@ -403,9 +410,9 @@ let make_ty_rule assoc from forpat prods =
let s = symbol_of_entry assoc from e in
let bind = match var with None -> false | Some _ -> true in
AnyTyRule (TyNext (r, TyNonTerm (forpat, e, s, bind)))
- | GramConstrListMark (n, b) :: rem ->
+ | GramConstrListMark (n, b, p) :: rem ->
let AnyTyRule r = make_ty_rule rem in
- AnyTyRule (TyMark (n, b, r))
+ AnyTyRule (TyMark (n, b, p, r))
in
make_ty_rule (List.rev prods)
@@ -430,16 +437,14 @@ let rec pure_sublevels : type a b c. int option -> (a, b, c) rule -> int list =
let make_act : type r. r target -> _ -> r gen_eval = function
| ForConstr -> fun notation loc env ->
- let env = (env.constrs, env.constrlists, List.map fst env.binders) in
- CNotation (loc, notation , env)
+ let env = (env.constrs, env.constrlists, env.binders, env.binderlists) in
+ CAst.make ~loc @@ CNotation (notation, env)
| ForPattern -> fun notation loc env ->
- let invalid = List.exists (fun (_, b) -> not b) env.binders in
- let () = if invalid then Topconstr.error_invalid_pattern_notation loc in
let env = (env.constrs, env.constrlists) in
- CPatNotation (loc, notation, env, [])
+ CAst.make ~loc @@ CPatNotation (notation, env, [])
let extend_constr state forpat ng =
- let n = ng.notgram_level in
+ let n,_,_ = ng.notgram_level in
let assoc = ng.notgram_assoc in
let (entry, level) = interp_constr_entry_key forpat n in
let fold (accu, state) pt =
@@ -450,7 +455,7 @@ let extend_constr state forpat ng =
let needed_levels, state = register_empty_levels state isforpat pure_sublevels in
let (pos,p4assoc,name,reinit), state = find_position state isforpat assoc level in
let empty_rules = List.map (prepare_empty_levels isforpat) needed_levels in
- let empty = { constrs = []; constrlists = []; binders = [] } in
+ let empty = { constrs = []; constrlists = []; binders = []; binderlists = [] } in
let act = ty_eval r (make_act forpat ng.notgram_notation) empty in
let rule = (name, p4assoc, [Rule (symbs, act)]) in
let r = ExtendRule (entry, reinit, (pos, [rule])) in
@@ -460,7 +465,7 @@ let extend_constr state forpat ng =
let constr_levels = GramState.field ()
-let extend_constr_notation (_, ng) state =
+let extend_constr_notation ng state =
let levels = match GramState.get state constr_levels with
| None -> default_constr_levels
| Some lev -> lev
@@ -472,7 +477,7 @@ let extend_constr_notation (_, ng) state =
let state = GramState.set state constr_levels levels in
(r @ r', state)
-let constr_grammar : (Notation.level * notation_grammar) grammar_command =
+let constr_grammar : one_notation_grammar grammar_command =
create_grammar_command "Notation" extend_constr_notation
-let extend_constr_grammar pr ntn = extend_grammar_command constr_grammar (pr, ntn)
+let extend_constr_grammar ntn = extend_grammar_command constr_grammar ntn
diff --git a/parsing/egramcoq.mli b/parsing/egramcoq.mli
index 6dda3817..e15add10 100644
--- a/parsing/egramcoq.mli
+++ b/parsing/egramcoq.mli
@@ -1,25 +1,19 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
-open Names
-open Constrexpr
-open Notation_term
-open Pcoq
-open Extend
-open Genarg
-open Egramml
-
-(** Mapping of grammar productions to camlp4 actions *)
+(** Mapping of grammar productions to camlp5 actions *)
(** This is the part specific to Coq-level Notation and Tactic Notation.
For the ML-level tactic and vernac extensions, see Egramml. *)
(** {5 Adding notations} *)
-val extend_constr_grammar : Notation.level -> Notation_term.notation_grammar -> unit
+val extend_constr_grammar : Notation_term.one_notation_grammar -> unit
(** Add a term notation rule to the parsing system. *)
diff --git a/parsing/egramml.ml b/parsing/egramml.ml
index 97a3e89a..90cd7d10 100644
--- a/parsing/egramml.ml
+++ b/parsing/egramml.ml
@@ -1,9 +1,11 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
open Util
@@ -17,7 +19,7 @@ open Vernacexpr
type 's grammar_prod_item =
| GramTerminal of string
| GramNonTerminal :
- Loc.t * 'a raw_abstract_argument_type * ('s, 'a) symbol -> 's grammar_prod_item
+ ('a raw_abstract_argument_type option * ('s, 'a) symbol) Loc.located -> 's grammar_prod_item
type 'a ty_arg = ('a -> raw_generic_argument)
@@ -36,9 +38,9 @@ let rec ty_rule_of_gram = function
let tok = Atoken (CLexer.terminal s) in
let r = TyNext (rem, tok, None) in
AnyTyRule r
-| GramNonTerminal (_, t, tok) :: rem ->
+| GramNonTerminal (_, (t, tok)) :: rem ->
let AnyTyRule rem = ty_rule_of_gram rem in
- let inj = Some (fun obj -> Genarg.in_gen t obj) in
+ let inj = Option.map (fun t obj -> Genarg.in_gen t obj) t in
let r = TyNext (rem, tok, inj) in
AnyTyRule r
diff --git a/parsing/egramml.mli b/parsing/egramml.mli
index 1ad94720..31aa1a98 100644
--- a/parsing/egramml.mli
+++ b/parsing/egramml.mli
@@ -1,22 +1,24 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
open Vernacexpr
-(** Mapping of grammar productions to camlp4 actions. *)
+(** Mapping of grammar productions to camlp5 actions. *)
(** This is the part specific to vernac extensions.
For the Coq-level Notation and Tactic Notation, see Egramcoq. *)
type 's grammar_prod_item =
| GramTerminal of string
- | GramNonTerminal : Loc.t * 'a Genarg.raw_abstract_argument_type *
- ('s, 'a) Extend.symbol -> 's grammar_prod_item
+ | GramNonTerminal : ('a Genarg.raw_abstract_argument_type option *
+ ('s, 'a) Extend.symbol) Loc.located -> 's grammar_prod_item
val extend_vernac_command_grammar :
Vernacexpr.extend_name -> vernac_expr Pcoq.Gram.entry option ->
diff --git a/parsing/g_constr.ml4 b/parsing/g_constr.ml4
index 7f3a3d10..59b74545 100644
--- a/parsing/g_constr.ml4
+++ b/parsing/g_constr.ml4
@@ -1,9 +1,11 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
open Names
@@ -12,7 +14,6 @@ open Constrexpr
open Constrexpr_ops
open Util
open Tok
-open Compat
open Misctypes
open Decl_kinds
@@ -34,42 +35,39 @@ let _ = List.iter CLexer.add_keyword constr_kw
let mk_cast = function
(c,(_,None)) -> c
| (c,(_,Some ty)) ->
- let loc = Loc.merge (constr_loc c) (constr_loc ty)
- in CCast(loc, c, CastConv ty)
+ let loc = Loc.merge_opt (constr_loc c) (constr_loc ty)
+ in CAst.make ?loc @@ CCast(c, CastConv ty)
-let binder_of_name expl (loc,na) =
- LocalRawAssum ([loc, na], Default expl,
- CHole (loc, Some (Evar_kinds.BinderType na), IntroAnonymous, None))
+let binder_of_name expl { CAst.loc = loc; v = na } =
+ CLocalAssum ([CAst.make ?loc na], Default expl,
+ CAst.make ?loc @@ CHole (Some (Evar_kinds.BinderType na), IntroAnonymous, None))
let binders_of_names l =
List.map (binder_of_name Explicit) l
-let binders_of_lidents l =
- List.map (fun (loc, id) -> binder_of_name Explicit (loc, Name id)) l
-
-let mk_fixb (id,bl,ann,body,(loc,tyc)) =
+let mk_fixb (id,bl,ann,body,(loc,tyc)) : fix_expr =
let ty = match tyc with
Some ty -> ty
- | None -> CHole (loc, None, IntroAnonymous, None) in
+ | None -> CAst.make @@ CHole (None, IntroAnonymous, None) in
(id,ann,bl,ty,body)
-let mk_cofixb (id,bl,ann,body,(loc,tyc)) =
- let _ = Option.map (fun (aloc,_) ->
- CErrors.user_err_loc
- (aloc,"Constr:mk_cofixb",
- Pp.str"Annotation forbidden in cofix expression.")) (fst ann) in
+let mk_cofixb (id,bl,ann,body,(loc,tyc)) : cofix_expr =
+ let _ = Option.map (fun { CAst.loc = aloc } ->
+ CErrors.user_err ?loc:aloc
+ ~hdr:"Constr:mk_cofixb"
+ (Pp.str"Annotation forbidden in cofix expression.")) (fst ann) in
let ty = match tyc with
Some ty -> ty
- | None -> CHole (loc, None, IntroAnonymous, None) in
+ | None -> CAst.make @@ CHole (None, IntroAnonymous, None) in
(id,bl,ty,body)
let mk_fix(loc,kw,id,dcls) =
if kw then
- let fb = List.map mk_fixb dcls in
- CFix(loc,id,fb)
+ let fb : fix_expr list = List.map mk_fixb dcls in
+ CAst.make ~loc @@ CFix(id,fb)
else
- let fb = List.map mk_cofixb dcls in
- CCoFix(loc,id,fb)
+ let fb : cofix_expr list = List.map mk_cofixb dcls in
+ CAst.make ~loc @@ CCoFix(id,fb)
let mk_single_fix (loc,kw,dcl) =
let (id,_,_,_,_) = dcl in mk_fix(loc,kw,id,[dcl])
@@ -81,11 +79,11 @@ let err () = raise Stream.Failure
let lpar_id_coloneq =
Gram.Entry.of_parser "test_lpar_id_coloneq"
(fun strm ->
- match get_tok (stream_nth 0 strm) with
+ match stream_nth 0 strm with
| KEYWORD "(" ->
- (match get_tok (stream_nth 1 strm) with
+ (match stream_nth 1 strm with
| IDENT s ->
- (match get_tok (stream_nth 2 strm) with
+ (match stream_nth 2 strm with
| KEYWORD ":=" ->
stream_njunk 3 strm;
Names.Id.of_string s
@@ -96,9 +94,9 @@ let lpar_id_coloneq =
let impl_ident_head =
Gram.Entry.of_parser "impl_ident_head"
(fun strm ->
- match get_tok (stream_nth 0 strm) with
+ match stream_nth 0 strm with
| KEYWORD "{" ->
- (match get_tok (stream_nth 1 strm) with
+ (match stream_nth 1 strm with
| IDENT ("wf"|"struct"|"measure") -> err ()
| IDENT s ->
stream_njunk 2 strm;
@@ -109,33 +107,33 @@ let impl_ident_head =
let name_colon =
Gram.Entry.of_parser "name_colon"
(fun strm ->
- match get_tok (stream_nth 0 strm) with
+ match stream_nth 0 strm with
| IDENT s ->
- (match get_tok (stream_nth 1 strm) with
+ (match stream_nth 1 strm with
| KEYWORD ":" ->
stream_njunk 2 strm;
Name (Names.Id.of_string s)
| _ -> err ())
| KEYWORD "_" ->
- (match get_tok (stream_nth 1 strm) with
+ (match stream_nth 1 strm with
| KEYWORD ":" ->
stream_njunk 2 strm;
Anonymous
| _ -> err ())
| _ -> err ())
-let aliasvar = function CPatAlias (loc, _, id) -> Some (loc,Name id) | _ -> None
+let aliasvar = function { CAst.v = CPatAlias (_, na) } -> Some na | _ -> None
GEXTEND Gram
- GLOBAL: binder_constr lconstr constr operconstr universe_level sort global
- constr_pattern lconstr_pattern Constr.ident
+ GLOBAL: binder_constr lconstr constr operconstr universe_level sort sort_family
+ global constr_pattern lconstr_pattern Constr.ident
closed_binder open_binders binder binders binders_fixannot
record_declaration typeclass_constraint pattern appl_arg;
Constr.ident:
[ [ id = Prim.ident -> id ] ]
;
Prim.name:
- [ [ "_" -> (!@loc, Anonymous) ] ]
+ [ [ "_" -> CAst.make ~loc:!@loc Anonymous ] ]
;
global:
[ [ r = Prim.reference -> r ] ]
@@ -150,12 +148,24 @@ GEXTEND Gram
[ [ "Set" -> GSet
| "Prop" -> GProp
| "Type" -> GType []
- | "Type"; "@{"; u = universe; "}" -> GType (List.map (fun (loc,x) -> (loc, Id.to_string x)) u)
+ | "Type"; "@{"; u = universe; "}" -> GType u
+ ] ]
+ ;
+ sort_family:
+ [ [ "Set" -> Sorts.InSet
+ | "Prop" -> Sorts.InProp
+ | "Type" -> Sorts.InType
] ]
;
+ universe_expr:
+ [ [ id = global; "+"; n = natural -> Some (id,n)
+ | id = global -> Some (id,0)
+ | "_" -> None
+ ] ]
+ ;
universe:
- [ [ IDENT "max"; "("; ids = LIST1 identref SEP ","; ")" -> ids
- | id = identref -> [id]
+ [ [ IDENT "max"; "("; ids = LIST1 universe_expr SEP ","; ")" -> ids
+ | u = universe_expr -> [u]
] ]
;
lconstr:
@@ -163,132 +173,136 @@ GEXTEND Gram
;
constr:
[ [ c = operconstr LEVEL "8" -> c
- | "@"; f=global; i = instance -> CAppExpl(!@loc,(None,f,i),[]) ] ]
+ | "@"; f=global; i = instance -> CAst.make ~loc:!@loc @@ CAppExpl((None,f,i),[]) ] ]
;
operconstr:
[ "200" RIGHTA
[ c = binder_constr -> c ]
| "100" RIGHTA
[ c1 = operconstr; "<:"; c2 = binder_constr ->
- CCast(!@loc,c1, CastVM c2)
+ CAst.make ~loc:(!@loc) @@ CCast(c1, CastVM c2)
| c1 = operconstr; "<:"; c2 = SELF ->
- CCast(!@loc,c1, CastVM c2)
+ CAst.make ~loc:(!@loc) @@ CCast(c1, CastVM c2)
| c1 = operconstr; "<<:"; c2 = binder_constr ->
- CCast(!@loc,c1, CastNative c2)
+ CAst.make ~loc:(!@loc) @@ CCast(c1, CastNative c2)
| c1 = operconstr; "<<:"; c2 = SELF ->
- CCast(!@loc,c1, CastNative c2)
+ CAst.make ~loc:(!@loc) @@ CCast(c1, CastNative c2)
| c1 = operconstr; ":";c2 = binder_constr ->
- CCast(!@loc,c1, CastConv c2)
+ CAst.make ~loc:(!@loc) @@ CCast(c1, CastConv c2)
| c1 = operconstr; ":"; c2 = SELF ->
- CCast(!@loc,c1, CastConv c2)
+ CAst.make ~loc:(!@loc) @@ CCast(c1, CastConv c2)
| c1 = operconstr; ":>" ->
- CCast(!@loc,c1, CastCoerce) ]
+ CAst.make ~loc:(!@loc) @@ CCast(c1, CastCoerce) ]
| "99" RIGHTA [ ]
| "90" RIGHTA [ ]
| "10" LEFTA
- [ f=operconstr; args=LIST1 appl_arg -> CApp(!@loc,(None,f),args)
- | "@"; f=global; i = instance; args=LIST0 NEXT -> CAppExpl(!@loc,(None,f,i),args)
- | "@"; (locid,id) = pattern_identref; args=LIST1 identref ->
- let args = List.map (fun x -> CRef (Ident x,None), None) args in
- CApp(!@loc,(None,CPatVar(locid,id)),args) ]
+ [ f=operconstr; args=LIST1 appl_arg -> CAst.make ~loc:(!@loc) @@ CApp((None,f),args)
+ | "@"; f=global; i = instance; args=LIST0 NEXT -> CAst.make ~loc:!@loc @@ CAppExpl((None,f,i),args)
+ | "@"; lid = pattern_identref; args=LIST1 identref ->
+ let { CAst.loc = locid; v = id } = lid in
+ let args = List.map (fun x -> CAst.make @@ CRef (CAst.make ?loc:x.CAst.loc @@ Ident x.CAst.v, None), None) args in
+ CAst.make ~loc:(!@loc) @@ CApp((None, CAst.make ?loc:locid @@ CPatVar id),args) ]
| "9"
[ ".."; c = operconstr LEVEL "0"; ".." ->
- CAppExpl (!@loc,(None,Ident (!@loc,ldots_var),None),[c]) ]
+ CAst.make ~loc:!@loc @@ CAppExpl ((None, CAst.make ~loc:!@loc @@ Ident ldots_var, None),[c]) ]
| "8" [ ]
| "1" LEFTA
[ c=operconstr; ".("; f=global; args=LIST0 appl_arg; ")" ->
- CApp(!@loc,(Some (List.length args+1),CRef (f,None)),args@[c,None])
+ CAst.make ~loc:(!@loc) @@ CApp((Some (List.length args+1), CAst.make @@ CRef (f,None)),args@[c,None])
| c=operconstr; ".("; "@"; f=global;
args=LIST0 (operconstr LEVEL "9"); ")" ->
- CAppExpl(!@loc,(Some (List.length args+1),f,None),args@[c])
- | c=operconstr; "%"; key=IDENT -> CDelimiters (!@loc,key,c) ]
+ CAst.make ~loc:(!@loc) @@ CAppExpl((Some (List.length args+1),f,None),args@[c])
+ | c=operconstr; "%"; key=IDENT -> CAst.make ~loc:(!@loc) @@ CDelimiters (key,c) ]
| "0"
[ c=atomic_constr -> c
| c=match_constr -> c
| "("; c = operconstr LEVEL "200"; ")" ->
- (match c with
- CPrim (_,Numeral z) when Bigint.is_pos_or_zero z ->
- CNotation(!@loc,"( _ )",([c],[],[]))
+ (match c.CAst.v with
+ | CPrim (Numeral (n,true)) ->
+ CAst.make ~loc:(!@loc) @@ CNotation("( _ )",([c],[],[],[]))
| _ -> c)
| "{|"; c = record_declaration; "|}" -> c
+ | "{"; c = binder_constr ; "}" ->
+ CAst.make ~loc:(!@loc) @@ CNotation(("{ _ }"),([c],[],[],[]))
| "`{"; c = operconstr LEVEL "200"; "}" ->
- CGeneralization (!@loc, Implicit, None, c)
+ CAst.make ~loc:(!@loc) @@ CGeneralization (Implicit, None, c)
| "`("; c = operconstr LEVEL "200"; ")" ->
- CGeneralization (!@loc, Explicit, None, c)
- | IDENT "ltac"; ":"; "("; tac = Tactic.tactic_expr; ")" ->
- let arg = Genarg.in_gen (Genarg.rawwit Constrarg.wit_tactic) tac in
- CHole (!@loc, None, IntroAnonymous, Some arg)
+ CAst.make ~loc:(!@loc) @@ CGeneralization (Explicit, None, c)
] ]
;
record_declaration:
- [ [ fs = record_fields -> CRecord (!@loc, fs) ] ]
+ [ [ fs = record_fields -> CAst.make ~loc:(!@loc) @@ CRecord fs ] ]
;
record_fields:
[ [ f = record_field_declaration; ";"; fs = record_fields -> f :: fs
- | f = record_field_declaration; ";" -> [f]
| f = record_field_declaration -> [f]
| -> []
] ]
;
record_field_declaration:
- [ [ id = global; params = LIST0 identref; ":="; c = lconstr ->
- (id, abstract_constr_expr c (binders_of_lidents params)) ] ]
+ [ [ id = global; bl = binders; ":="; c = lconstr ->
+ (id, if bl = [] then c else mkCLambdaN ~loc:!@loc bl c) ] ]
;
binder_constr:
[ [ "forall"; bl = open_binders; ","; c = operconstr LEVEL "200" ->
- mkCProdN (!@loc) bl c
+ mkCProdN ~loc:!@loc bl c
| "fun"; bl = open_binders; "=>"; c = operconstr LEVEL "200" ->
- mkCLambdaN (!@loc) bl c
+ mkCLambdaN ~loc:!@loc bl c
| "let"; id=name; bl = binders; ty = type_cstr; ":=";
c1 = operconstr LEVEL "200"; "in"; c2 = operconstr LEVEL "200" ->
- let loc1 =
- Loc.merge (local_binders_loc bl) (constr_loc c1)
- in
- CLetIn(!@loc,id,mkCLambdaN loc1 bl (mk_cast(c1,ty)),c2)
+ let ty,c1 = match ty, c1 with
+ | (_,None), { CAst.v = CCast(c, CastConv t) } -> (Loc.tag ?loc:(constr_loc t) @@ Some t), c (* Tolerance, see G_vernac.def_body *)
+ | _, _ -> ty, c1 in
+ CAst.make ~loc:!@loc @@ CLetIn(id,mkCLambdaN ?loc:(constr_loc c1) bl c1,
+ Option.map (mkCProdN ?loc:(fst ty) bl) (snd ty), c2)
| "let"; fx = single_fix; "in"; c = operconstr LEVEL "200" ->
let fixp = mk_single_fix fx in
- let (li,id) = match fixp with
- CFix(_,id,_) -> id
- | CCoFix(_,id,_) -> id
+ let { CAst.loc = li; v = id } = match fixp.CAst.v with
+ CFix(id,_) -> id
+ | CCoFix(id,_) -> id
| _ -> assert false in
- CLetIn(!@loc,(li,Name id),fixp,c)
+ CAst.make ~loc:!@loc @@ CLetIn( CAst.make ?loc:li @@ Name id,fixp,None,c)
| "let"; lb = ["("; l=LIST0 name SEP ","; ")" -> l | "()" -> []];
po = return_type;
":="; c1 = operconstr LEVEL "200"; "in";
c2 = operconstr LEVEL "200" ->
- CLetTuple (!@loc,lb,po,c1,c2)
+ CAst.make ~loc:!@loc @@ CLetTuple (lb,po,c1,c2)
| "let"; "'"; p=pattern; ":="; c1 = operconstr LEVEL "200";
"in"; c2 = operconstr LEVEL "200" ->
- CCases (!@loc, LetPatternStyle, None, [c1, None, None], [(!@loc, [(!@loc,[p])], c2)])
+ CAst.make ~loc:!@loc @@
+ CCases (LetPatternStyle, None, [c1, None, None], [CAst.make ~loc:!@loc ([[p]], c2)])
| "let"; "'"; p=pattern; ":="; c1 = operconstr LEVEL "200";
rt = case_type; "in"; c2 = operconstr LEVEL "200" ->
- CCases (!@loc, LetPatternStyle, Some rt, [c1, aliasvar p, None], [(!@loc, [(!@loc, [p])], c2)])
+ CAst.make ~loc:!@loc @@
+ CCases (LetPatternStyle, Some rt, [c1, aliasvar p, None], [CAst.make ~loc:!@loc ([[p]], c2)])
+
| "let"; "'"; p=pattern; "in"; t = pattern LEVEL "200";
":="; c1 = operconstr LEVEL "200"; rt = case_type;
"in"; c2 = operconstr LEVEL "200" ->
- CCases (!@loc, LetPatternStyle, Some rt, [c1, aliasvar p, Some t], [(!@loc, [(!@loc, [p])], c2)])
+ CAst.make ~loc:!@loc @@
+ CCases (LetPatternStyle, Some rt, [c1, aliasvar p, Some t], [CAst.make ~loc:!@loc ([[p]], c2)])
| "if"; c=operconstr LEVEL "200"; po = return_type;
"then"; b1=operconstr LEVEL "200";
"else"; b2=operconstr LEVEL "200" ->
- CIf (!@loc, c, po, b1, b2)
+ CAst.make ~loc:(!@loc) @@ CIf (c, po, b1, b2)
| c=fix_constr -> c ] ]
;
appl_arg:
[ [ id = lpar_id_coloneq; c=lconstr; ")" ->
- (c,Some (!@loc,ExplByName id))
+ (c,Some (CAst.make ~loc:!@loc @@ ExplByName id))
| c=operconstr LEVEL "9" -> (c,None) ] ]
;
atomic_constr:
- [ [ g=global; i=instance -> CRef (g,i)
- | s=sort -> CSort (!@loc,s)
- | n=INT -> CPrim (!@loc, Numeral (Bigint.of_string n))
- | s=string -> CPrim (!@loc, String s)
- | "_" -> CHole (!@loc, None, IntroAnonymous, None)
- | "?"; "["; id=ident; "]" -> CHole (!@loc, None, IntroIdentifier id, None)
- | "?"; "["; id=pattern_ident; "]" -> CHole (!@loc, None, IntroFresh id, None)
- | id=pattern_ident; inst = evar_instance -> CEvar(!@loc,id,inst) ] ]
+ [ [ g=global; i=instance -> CAst.make ~loc:!@loc @@ CRef (g,i)
+ | s=sort -> CAst.make ~loc:!@loc @@ CSort s
+ | n=INT -> CAst.make ~loc:!@loc @@ CPrim (Numeral (n,true))
+ | s=string -> CAst.make ~loc:!@loc @@ CPrim (String s)
+ | "_" -> CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None)
+ | "?"; "["; id=ident; "]" -> CAst.make ~loc:!@loc @@ CHole (None, IntroIdentifier id, None)
+ | "?"; "["; id=pattern_ident; "]" -> CAst.make ~loc:!@loc @@ CHole (None, IntroFresh id, None)
+ | id=pattern_ident; inst = evar_instance -> CAst.make ~loc:!@loc @@ CEvar(id,inst) ] ]
;
inst:
[ [ id = ident; ":="; c = lconstr -> (id,c) ] ]
@@ -298,14 +312,15 @@ GEXTEND Gram
| -> [] ] ]
;
instance:
- [ [ "@{"; l = LIST1 universe_level; "}" -> Some l
+ [ [ "@{"; l = LIST0 universe_level; "}" -> Some l
| -> None ] ]
;
universe_level:
[ [ "Set" -> GSet
| "Prop" -> GProp
- | "Type" -> GType None
- | id = identref -> GType (Some (fst id, Id.to_string (snd id)))
+ | "Type" -> GType UUnknown
+ | "_" -> GType UAnonymous
+ | id = global -> GType (UNamed id)
] ]
;
fix_constr:
@@ -329,7 +344,7 @@ GEXTEND Gram
;
match_constr:
[ [ "match"; ci=LIST1 case_item SEP ","; ty=OPT case_type; "with";
- br=branches; "end" -> CCases(!@loc,RegularStyle,ty,ci,br) ] ]
+ br=branches; "end" -> CAst.make ~loc:!@loc @@ CCases(RegularStyle,ty,ci,br) ] ]
;
case_item:
[ [ c=operconstr LEVEL "100";
@@ -352,11 +367,11 @@ GEXTEND Gram
[ [ OPT"|"; br=LIST0 eqn SEP "|" -> br ] ]
;
mult_pattern:
- [ [ pl = LIST1 pattern LEVEL "99" SEP "," -> (!@loc,pl) ] ]
+ [ [ pl = LIST1 pattern LEVEL "99" SEP "," -> pl ] ]
;
eqn:
[ [ pll = LIST1 mult_pattern SEP "|";
- "=>"; rhs = lconstr -> (!@loc,pll,rhs) ] ]
+ "=>"; rhs = lconstr -> (CAst.make ~loc:!@loc (pll,rhs)) ] ]
;
record_pattern:
[ [ id = global; ":="; pat = pattern -> (id, pat) ] ]
@@ -371,55 +386,47 @@ GEXTEND Gram
pattern:
[ "200" RIGHTA [ ]
| "100" RIGHTA
- [ p = pattern; "|"; pl = LIST1 pattern SEP "|" -> CPatOr (!@loc,p::pl) ]
+ [ p = pattern; "|"; pl = LIST1 pattern SEP "|" -> CAst.make ~loc:!@loc @@ CPatOr (p::pl) ]
| "99" RIGHTA [ ]
- | "11" LEFTA
- [ p = pattern; "as"; id = ident ->
- CPatAlias (!@loc, p, id) ]
- | "10" RIGHTA
- [ p = pattern; lp = LIST1 NEXT ->
- (match p with
- | CPatAtom (_, Some r) -> CPatCstr (!@loc, r, None, lp)
- | CPatCstr (_, r, None, l2) -> CErrors.user_err_loc
- (cases_pattern_expr_loc p, "compound_pattern",
- Pp.str "Nested applications not supported.")
- | CPatCstr (_, r, l1, l2) -> CPatCstr (!@loc, r, l1 , l2@lp)
- | CPatNotation (_, n, s, l) -> CPatNotation (!@loc, n , s, l@lp)
- | _ -> CErrors.user_err_loc
- (cases_pattern_expr_loc p, "compound_pattern",
- Pp.str "Such pattern cannot have arguments."))
- |"@"; r = Prim.reference; lp = LIST0 NEXT ->
- CPatCstr (!@loc, r, Some lp, []) ]
+ | "90" RIGHTA [ ]
+ | "10" LEFTA
+ [ p = pattern; "as"; na = name ->
+ CAst.make ~loc:!@loc @@ CPatAlias (p, na)
+ | p = pattern; lp = LIST1 NEXT -> mkAppPattern ~loc:!@loc p lp
+ | "@"; r = Prim.reference; lp = LIST0 NEXT ->
+ CAst.make ~loc:!@loc @@ CPatCstr (r, Some lp, []) ]
| "1" LEFTA
- [ c = pattern; "%"; key=IDENT -> CPatDelimiters (!@loc,key,c) ]
+ [ c = pattern; "%"; key=IDENT -> CAst.make ~loc:!@loc @@ CPatDelimiters (key,c) ]
| "0"
- [ r = Prim.reference -> CPatAtom (!@loc,Some r)
- | "{|"; pat = record_patterns; "|}" -> CPatRecord (!@loc, pat)
- | "_" -> CPatAtom (!@loc,None)
+ [ r = Prim.reference -> CAst.make ~loc:!@loc @@ CPatAtom (Some r)
+ | "{|"; pat = record_patterns; "|}" -> CAst.make ~loc:!@loc @@ CPatRecord pat
+ | "_" -> CAst.make ~loc:!@loc @@ CPatAtom None
| "("; p = pattern LEVEL "200"; ")" ->
- (match p with
- CPatPrim (_,Numeral z) when Bigint.is_pos_or_zero z ->
- CPatNotation(!@loc,"( _ )",([p],[]),[])
+ (match p.CAst.v with
+ | CPatPrim (Numeral (n,true)) ->
+ CAst.make ~loc:!@loc @@ CPatNotation("( _ )",([p],[]),[])
| _ -> p)
| "("; p = pattern LEVEL "200"; ":"; ty = lconstr; ")" ->
let p =
match p with
- CPatPrim (_,Numeral z) when Bigint.is_pos_or_zero z ->
- CPatNotation(!@loc,"( _ )",([p],[]),[])
+ | { CAst.v = CPatPrim (Numeral (n,true)) } ->
+ CAst.make ~loc:!@loc @@ CPatNotation("( _ )",([p],[]),[])
| _ -> p
in
- CPatCast (!@loc, p, ty)
- | n = INT -> CPatPrim (!@loc, Numeral (Bigint.of_string n))
- | s = string -> CPatPrim (!@loc, String s) ] ]
+ CAst.make ~loc:!@loc @@ CPatCast (p, ty)
+ | n = INT -> CAst.make ~loc:!@loc @@ CPatPrim (Numeral (n,true))
+ | s = string -> CAst.make ~loc:!@loc @@ CPatPrim (String s) ] ]
;
impl_ident_tail:
[ [ "}" -> binder_of_name Implicit
| nal=LIST1 name; ":"; c=lconstr; "}" ->
- (fun na -> LocalRawAssum (na::nal,Default Implicit,c))
+ (fun na -> CLocalAssum (na::nal,Default Implicit,c))
| nal=LIST1 name; "}" ->
- (fun na -> LocalRawAssum (na::nal,Default Implicit,CHole (Loc.join_loc (fst na) !@loc, Some (Evar_kinds.BinderType (snd na)), IntroAnonymous, None)))
+ (fun na -> CLocalAssum (na::nal,Default Implicit,
+ CAst.make ?loc:(Loc.merge_opt na.CAst.loc (Some !@loc)) @@
+ CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None)))
| ":"; c=lconstr; "}" ->
- (fun na -> LocalRawAssum ([na],Default Implicit,c))
+ (fun na -> CLocalAssum ([na],Default Implicit,c))
] ]
;
fixannot:
@@ -430,7 +437,7 @@ GEXTEND Gram
] ]
;
impl_name_head:
- [ [ id = impl_ident_head -> (!@loc,Name id) ] ]
+ [ [ id = impl_ident_head -> (CAst.make ~loc:!@loc @@ Name id) ] ]
;
binders_fixannot:
[ [ na = impl_name_head; assum = impl_ident_tail; bl = binders_fixannot ->
@@ -445,13 +452,13 @@ GEXTEND Gram
the latter is unique *)
[ [ (* open binder *)
id = name; idl = LIST0 name; ":"; c = lconstr ->
- [LocalRawAssum (id::idl,Default Explicit,c)]
+ [CLocalAssum (id::idl,Default Explicit,c)]
(* binders factorized with open binder *)
| id = name; idl = LIST0 name; bl = binders ->
binders_of_names (id::idl) @ bl
| id1 = name; ".."; id2 = name ->
- [LocalRawAssum ([id1;(!@loc,Name ldots_var);id2],
- Default Explicit,CHole (!@loc, None, IntroAnonymous, None))]
+ [CLocalAssum ([id1;(CAst.make ~loc:!@loc (Name ldots_var));id2],
+ Default Explicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
| bl = closed_binder; bl' = binders ->
bl@bl'
] ]
@@ -460,51 +467,53 @@ GEXTEND Gram
[ [ l = LIST0 binder -> List.flatten l ] ]
;
binder:
- [ [ id = name -> [LocalRawAssum ([id],Default Explicit,CHole (!@loc, None, IntroAnonymous, None))]
+ [ [ id = name -> [CLocalAssum ([id],Default Explicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
| bl = closed_binder -> bl ] ]
;
closed_binder:
[ [ "("; id=name; idl=LIST1 name; ":"; c=lconstr; ")" ->
- [LocalRawAssum (id::idl,Default Explicit,c)]
+ [CLocalAssum (id::idl,Default Explicit,c)]
| "("; id=name; ":"; c=lconstr; ")" ->
- [LocalRawAssum ([id],Default Explicit,c)]
+ [CLocalAssum ([id],Default Explicit,c)]
| "("; id=name; ":="; c=lconstr; ")" ->
- [LocalRawDef (id,c)]
+ (match c.CAst.v with
+ | CCast(c, CastConv t) -> [CLocalDef (id,c,Some t)]
+ | _ -> [CLocalDef (id,c,None)])
| "("; id=name; ":"; t=lconstr; ":="; c=lconstr; ")" ->
- [LocalRawDef (id,CCast (Loc.merge (constr_loc t) (!@loc),c, CastConv t))]
+ [CLocalDef (id,c,Some t)]
| "{"; id=name; "}" ->
- [LocalRawAssum ([id],Default Implicit,CHole (!@loc, None, IntroAnonymous, None))]
+ [CLocalAssum ([id],Default Implicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
| "{"; id=name; idl=LIST1 name; ":"; c=lconstr; "}" ->
- [LocalRawAssum (id::idl,Default Implicit,c)]
+ [CLocalAssum (id::idl,Default Implicit,c)]
| "{"; id=name; ":"; c=lconstr; "}" ->
- [LocalRawAssum ([id],Default Implicit,c)]
+ [CLocalAssum ([id],Default Implicit,c)]
| "{"; id=name; idl=LIST1 name; "}" ->
- List.map (fun id -> LocalRawAssum ([id],Default Implicit,CHole (!@loc, None, IntroAnonymous, None))) (id::idl)
+ List.map (fun id -> CLocalAssum ([id],Default Implicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))) (id::idl)
| "`("; tc = LIST1 typeclass_constraint SEP "," ; ")" ->
- List.map (fun (n, b, t) -> LocalRawAssum ([n], Generalized (Implicit, Explicit, b), t)) tc
+ List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Explicit, b), t)) tc
| "`{"; tc = LIST1 typeclass_constraint SEP "," ; "}" ->
- List.map (fun (n, b, t) -> LocalRawAssum ([n], Generalized (Implicit, Implicit, b), t)) tc
+ List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Implicit, b), t)) tc
| "'"; p = pattern LEVEL "0" ->
let (p, ty) =
- match p with
- | CPatCast (_, p, ty) -> (p, Some ty)
+ match p.CAst.v with
+ | CPatCast (p, ty) -> (p, Some ty)
| _ -> (p, None)
in
- [LocalPattern (!@loc, p, ty)]
+ [CLocalPattern (CAst.make ~loc:!@loc (p, ty))]
] ]
;
typeclass_constraint:
- [ [ "!" ; c = operconstr LEVEL "200" -> (!@loc, Anonymous), true, c
+ [ [ "!" ; c = operconstr LEVEL "200" -> (CAst.make ~loc:!@loc Anonymous), true, c
| "{"; id = name; "}"; ":" ; expl = [ "!" -> true | -> false ] ; c = operconstr LEVEL "200" ->
id, expl, c
| iid=name_colon ; expl = [ "!" -> true | -> false ] ; c = operconstr LEVEL "200" ->
- (!@loc, iid), expl, c
+ (CAst.make ~loc:!@loc iid), expl, c
| c = operconstr LEVEL "200" ->
- (!@loc, Anonymous), false, c
+ (CAst.make ~loc:!@loc Anonymous), false, c
] ]
;
type_cstr:
- [ [ c=OPT [":"; c=lconstr -> c] -> (!@loc,c) ] ]
+ [ [ c=OPT [":"; c=lconstr -> c] -> Loc.tag ~loc:!@loc c ] ]
;
END;;
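
Most of the churn in the g_constr.ml4 hunks above is mechanical: constructors that used to carry their own location, such as CHole (!@loc, None, IntroAnonymous, None), now take a location-free payload wrapped with CAst.make ~loc:!@loc @@ .... The sketch below is a minimal, self-contained model of that wrapper pattern in plain OCaml; the loc record, the located type and the toy expr type are illustrative stand-ins, not Coq's actual Loc and CAst modules.

(* Minimal model of the located-AST wrapper: the payload no longer stores
   its own location; a generic record carries an optional location. *)
type loc = { line : int; start_char : int; stop_char : int }

type 'a located = { loc : loc option; v : 'a }

let make ?loc v = { loc; v }              (* cf. CAst.make ~loc @@ ... *)
let map f { loc; v } = { loc; v = f v }   (* cf. CAst.map *)

(* A toy expression type in the "new" style: constructors take no loc. *)
type expr =
  | Hole
  | Ref of string
  | App of expr located * expr located list

(* Building a node means wrapping the payload, as the grammar actions
   above do with CAst.make ~loc:!@loc @@ ... *)
let example =
  let f = make ~loc:{ line = 1; start_char = 0; stop_char = 1 } (Ref "f") in
  let x = make Hole in
  make (App (f, [x]))

let () =
  (* Consumers match through the .v field, as the rewritten aliasvar and
     let-in rules above do with { CAst.v = ... } patterns. *)
  match example.v with
  | App ({ v = Ref name; _ }, args) ->
      Printf.printf "application of %s to %d argument(s)\n" name (List.length args)
  | _ -> print_endline "other"
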
diff --git a/parsing/g_prim.ml4 b/parsing/g_prim.ml4
index b90e06cd..b25ea766 100644
--- a/parsing/g_prim.ml4
+++ b/parsing/g_prim.ml4
@@ -1,15 +1,15 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
-open Compat
open Names
open Libnames
-open Tok (* necessary for camlp4 *)
open Pcoq
open Pcoq.Prim
@@ -28,13 +28,13 @@ let my_int_of_string loc s =
if n > 1024 * 2048 then raise Exit;
n
with Failure _ | Exit ->
- CErrors.user_err_loc (loc,"",Pp.str "Cannot support a so large number.")
+ CErrors.user_err ~loc (Pp.str "Cannot support a so large number.")
GEXTEND Gram
GLOBAL:
bigint natural integer identref name ident var preident
fullyqualid qualid reference dirpath ne_lstring
- ne_string string pattern_ident pattern_identref by_notation smart_global;
+ ne_string string lstring pattern_ident pattern_identref by_notation smart_global;
preident:
[ [ s = IDENT -> s ] ]
;
@@ -45,13 +45,13 @@ GEXTEND Gram
[ [ LEFTQMARK; id = ident -> id ] ]
;
pattern_identref:
- [ [ id = pattern_ident -> (!@loc, id) ] ]
+ [ [ id = pattern_ident -> CAst.make ~loc:!@loc id ] ]
;
var: (* as identref, but interpret as a term identifier in ltac *)
- [ [ id = ident -> (!@loc, id) ] ]
+ [ [ id = ident -> CAst.make ~loc:!@loc id ] ]
;
identref:
- [ [ id = ident -> (!@loc, id) ] ]
+ [ [ id = ident -> CAst.make ~loc:!@loc id ] ]
;
field:
[ [ s = FIELD -> Id.of_string s ] ]
@@ -62,8 +62,8 @@ GEXTEND Gram
] ]
;
fullyqualid:
- [ [ id = ident; (l,id')=fields -> !@loc,id::List.rev (id'::l)
- | id = ident -> !@loc,[id]
+ [ [ id = ident; (l,id')=fields -> CAst.make ~loc:!@loc @@ id::List.rev (id'::l)
+ | id = ident -> CAst.make ~loc:!@loc [id]
] ]
;
basequalid:
@@ -72,32 +72,32 @@ GEXTEND Gram
] ]
;
name:
- [ [ IDENT "_" -> (!@loc, Anonymous)
- | id = ident -> (!@loc, Name id) ] ]
+ [ [ IDENT "_" -> CAst.make ~loc:!@loc Anonymous
+ | id = ident -> CAst.make ~loc:!@loc @@ Name id ] ]
;
reference:
[ [ id = ident; (l,id') = fields ->
- Qualid (!@loc, local_make_qualid (l@[id]) id')
- | id = ident -> Ident (!@loc,id)
+ CAst.make ~loc:!@loc @@ Qualid (local_make_qualid (l@[id]) id')
+ | id = ident -> CAst.make ~loc:!@loc @@ Ident id
] ]
;
by_notation:
- [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> key ] -> (!@loc, s, sc) ] ]
+ [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> key ] -> (s, sc) ] ]
;
smart_global:
- [ [ c = reference -> Misctypes.AN c
- | ntn = by_notation -> Misctypes.ByNotation ntn ] ]
+ [ [ c = reference -> CAst.make ~loc:!@loc @@ Misctypes.AN c
+ | ntn = by_notation -> CAst.make ~loc:!@loc @@ Misctypes.ByNotation ntn ] ]
;
qualid:
- [ [ qid = basequalid -> !@loc, qid ] ]
+ [ [ qid = basequalid -> CAst.make ~loc:!@loc qid ] ]
;
ne_string:
[ [ s = STRING ->
- if s="" then CErrors.user_err_loc(!@loc, "", Pp.str"Empty string."); s
+ if s="" then CErrors.user_err ~loc:!@loc (Pp.str"Empty string."); s
] ]
;
ne_lstring:
- [ [ s = ne_string -> (!@loc, s) ] ]
+ [ [ s = ne_string -> CAst.make ~loc:!@loc s ] ]
;
dirpath:
[ [ id = ident; l = LIST0 field ->
@@ -106,6 +106,9 @@ GEXTEND Gram
string:
[ [ s = STRING -> s ] ]
;
+ lstring:
+ [ [ s = string -> (CAst.make ~loc:!@loc s) ] ]
+ ;
integer:
[ [ i = INT -> my_int_of_string (!@loc) i
| "-"; i = INT -> - my_int_of_string (!@loc) i ] ]
@@ -113,7 +116,7 @@ GEXTEND Gram
natural:
[ [ i = INT -> my_int_of_string (!@loc) i ] ]
;
- bigint: (* Negative numbers are dealt with specially *)
- [ [ i = INT -> (Bigint.of_string i) ] ]
+ bigint: (* Negative numbers are dealt with elsewhere *)
+ [ [ i = INT -> i ] ]
;
END
diff --git a/parsing/g_proofs.ml4 b/parsing/g_proofs.ml4
index 70c5d5d8..e393c2bb 100644
--- a/parsing/g_proofs.ml4
+++ b/parsing/g_proofs.ml4
@@ -1,50 +1,57 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
-open Compat
open Constrexpr
open Vernacexpr
open Misctypes
-open Tok
open Pcoq
-open Pcoq.Tactic
open Pcoq.Prim
open Pcoq.Constr
open Pcoq.Vernac_
let thm_token = G_vernac.thm_token
-let hint_proof_using e = function
- | Some _ as x -> x
- | None -> match Proof_using.get_default_proof_using () with
- | None -> None
- | Some s -> Some (Gram.entry_parse e (Gram.parsable (Stream.of_string s)))
+let hint = Gram.entry_create "hint"
+
+let warn_deprecated_focus =
+ CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
+ (fun () ->
+ Pp.strbrk
+ "The Focus command is deprecated; use bullets or focusing brackets instead"
+ )
+
+let warn_deprecated_focus_n n =
+ CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
+ (fun () ->
+ Pp.(str "The Focus command is deprecated;" ++ spc ()
+ ++ str "use '" ++ int n ++ str ": {' instead")
+ )
+
+let warn_deprecated_unfocus =
+ CWarnings.create ~name:"deprecated-unfocus" ~category:"deprecated"
+ (fun () -> Pp.strbrk "The Unfocus command is deprecated")
(* Proof commands *)
GEXTEND Gram
- GLOBAL: command;
+ GLOBAL: hint command;
opt_hintbases:
[ [ -> []
| ":"; l = LIST1 [id = IDENT -> id ] -> l ] ]
;
command:
- [ [ IDENT "Goal"; c = lconstr -> VernacGoal c
- | IDENT "Proof" ->
- VernacProof (None,hint_proof_using G_vernac.section_subset_expr None)
+ [ [ IDENT "Goal"; c = lconstr ->
+ VernacDefinition (Decl_kinds.(NoDischarge, Definition), ((CAst.make ~loc:!@loc Names.Anonymous), None), ProveBody ([], c))
+ | IDENT "Proof" -> VernacProof (None,None)
| IDENT "Proof" ; IDENT "Mode" ; mn = string -> VernacProofMode mn
- | IDENT "Proof"; "with"; ta = tactic;
- l = OPT [ "using"; l = G_vernac.section_subset_expr -> l ] ->
- VernacProof (Some ta,hint_proof_using G_vernac.section_subset_expr l)
- | IDENT "Proof"; "using"; l = G_vernac.section_subset_expr;
- ta = OPT [ "with"; ta = tactic -> ta ] ->
- VernacProof (ta,Some l)
| IDENT "Proof"; c = lconstr -> VernacExactProof c
| IDENT "Abort" -> VernacAbort None
| IDENT "Abort"; IDENT "All" -> VernacAbortAll
@@ -52,44 +59,37 @@ GEXTEND Gram
| IDENT "Existential"; n = natural; c = constr_body ->
VernacSolveExistential (n,c)
| IDENT "Admitted" -> VernacEndProof Admitted
- | IDENT "Qed" -> VernacEndProof (Proved (Opaque None,None))
- | IDENT "Qed"; IDENT "exporting"; l = LIST0 identref SEP "," ->
- VernacEndProof (Proved (Opaque (Some l),None))
- | IDENT "Save" -> VernacEndProof (Proved (Opaque None,None))
- | IDENT "Save"; tok = thm_token; id = identref ->
- VernacEndProof (Proved (Opaque None,Some (id,Some tok)))
+ | IDENT "Qed" -> VernacEndProof (Proved (Opaque,None))
| IDENT "Save"; id = identref ->
- VernacEndProof (Proved (Opaque None,Some (id,None)))
+ VernacEndProof (Proved (Opaque, Some id))
| IDENT "Defined" -> VernacEndProof (Proved (Transparent,None))
| IDENT "Defined"; id=identref ->
- VernacEndProof (Proved (Transparent,Some (id,None)))
+ VernacEndProof (Proved (Transparent,Some id))
| IDENT "Restart" -> VernacRestart
| IDENT "Undo" -> VernacUndo 1
| IDENT "Undo"; n = natural -> VernacUndo n
| IDENT "Undo"; IDENT "To"; n = natural -> VernacUndoTo n
- | IDENT "Focus" -> VernacFocus None
- | IDENT "Focus"; n = natural -> VernacFocus (Some n)
- | IDENT "Unfocus" -> VernacUnfocus
+ | IDENT "Focus" ->
+ warn_deprecated_focus ~loc:!@loc ();
+ VernacFocus None
+ | IDENT "Focus"; n = natural ->
+ warn_deprecated_focus_n n ~loc:!@loc ();
+ VernacFocus (Some n)
+ | IDENT "Unfocus" ->
+ warn_deprecated_unfocus ~loc:!@loc ();
+ VernacUnfocus
| IDENT "Unfocused" -> VernacUnfocused
| IDENT "Show" -> VernacShow (ShowGoal OpenSubgoals)
| IDENT "Show"; n = natural -> VernacShow (ShowGoal (NthGoal n))
| IDENT "Show"; id = ident -> VernacShow (ShowGoal (GoalId id))
- | IDENT "Show"; IDENT "Goal" -> VernacShow (ShowGoal (GoalId (Names.Id.of_string "Goal")))
- | IDENT "Show"; IDENT "Goal"; n = string ->
- VernacShow (ShowGoal (GoalUid n))
- | IDENT "Show"; IDENT "Implicit"; IDENT "Arguments"; n = OPT natural ->
- VernacShow (ShowGoalImplicitly n)
- | IDENT "Show"; IDENT "Node" -> VernacShow ShowNode
| IDENT "Show"; IDENT "Script" -> VernacShow ShowScript
| IDENT "Show"; IDENT "Existentials" -> VernacShow ShowExistentials
| IDENT "Show"; IDENT "Universes" -> VernacShow ShowUniverses
- | IDENT "Show"; IDENT "Tree" -> VernacShow ShowTree
| IDENT "Show"; IDENT "Conjectures" -> VernacShow ShowProofNames
| IDENT "Show"; IDENT "Proof" -> VernacShow ShowProof
| IDENT "Show"; IDENT "Intro" -> VernacShow (ShowIntros false)
| IDENT "Show"; IDENT "Intros" -> VernacShow (ShowIntros true)
| IDENT "Show"; IDENT "Match"; id = reference -> VernacShow (ShowMatch id)
- | IDENT "Show"; IDENT "Thesis" -> VernacShow ShowThesis
| IDENT "Guarded" -> VernacCheckGuard
(* Hints for Auto and EAuto *)
| IDENT "Create"; IDENT "HintDb" ;
@@ -97,19 +97,16 @@ GEXTEND Gram
VernacCreateHintDb (id, b)
| IDENT "Remove"; IDENT "Hints"; ids = LIST1 global; dbnames = opt_hintbases ->
VernacRemoveHints (dbnames, ids)
- | IDENT "Hint"; local = obsolete_locality; h = hint;
+ | IDENT "Hint"; h = hint;
dbnames = opt_hintbases ->
- VernacHints (local,dbnames, h)
+ VernacHints (dbnames, h)
(* Declare "Resolve" explicitly so as to be able to later extend with
"Resolve ->" and "Resolve <-" *)
| IDENT "Hint"; IDENT "Resolve"; lc = LIST1 reference_or_constr;
info = hint_info; dbnames = opt_hintbases ->
- VernacHints (false,dbnames,
+ VernacHints (dbnames,
HintsResolve (List.map (fun x -> (info, true, x)) lc))
] ];
- obsolete_locality:
- [ [ IDENT "Local" -> true | -> false ] ]
- ;
reference_or_constr:
[ [ r = global -> HintsReference r
| c = constr -> HintsConstr c ] ]
@@ -122,14 +119,11 @@ GEXTEND Gram
| IDENT "Opaque"; lc = LIST1 global -> HintsTransparency (lc, false)
| IDENT "Mode"; l = global; m = mode -> HintsMode (l, m)
| IDENT "Unfold"; lqid = LIST1 global -> HintsUnfold lqid
- | IDENT "Constructors"; lc = LIST1 global -> HintsConstructors lc
- | IDENT "Extern"; n = natural; c = OPT constr_pattern ; "=>";
- tac = tactic ->
- HintsExtern (n,c,tac) ] ]
+ | IDENT "Constructors"; lc = LIST1 global -> HintsConstructors lc ] ]
;
constr_body:
[ [ ":="; c = lconstr -> c
- | ":"; t = lconstr; ":="; c = lconstr -> CCast(!@loc,c,CastConv t) ] ]
+ | ":"; t = lconstr; ":="; c = lconstr -> CAst.make ~loc:!@loc @@ CCast(c,CastConv t) ] ]
;
mode:
[ [ l = LIST1 [ "+" -> ModeInput
diff --git a/parsing/g_tactic.ml4 b/parsing/g_tactic.ml4
deleted file mode 100644
index 3152afb2..00000000
--- a/parsing/g_tactic.ml4
+++ /dev/null
@@ -1,663 +0,0 @@
-(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(************************************************************************)
-
-open Pp
-open CErrors
-open Util
-open Tacexpr
-open Genredexpr
-open Constrexpr
-open Libnames
-open Tok
-open Compat
-open Misctypes
-open Locus
-open Decl_kinds
-
-open Pcoq
-
-
-let all_with delta = Redops.make_red_flag [FBeta;FMatch;FFix;FCofix;FZeta;delta]
-
-let tactic_kw = [ "->"; "<-" ; "by" ]
-let _ = List.iter CLexer.add_keyword tactic_kw
-
-let err () = raise Stream.Failure
-
-(* Hack to parse "(x:=t)" as an explicit argument without conflicts with the *)
-(* admissible notation "(x t)" *)
-let test_lpar_id_coloneq =
- Gram.Entry.of_parser "lpar_id_coloneq"
- (fun strm ->
- match get_tok (stream_nth 0 strm) with
- | KEYWORD "(" ->
- (match get_tok (stream_nth 1 strm) with
- | IDENT _ ->
- (match get_tok (stream_nth 2 strm) with
- | KEYWORD ":=" -> ()
- | _ -> err ())
- | _ -> err ())
- | _ -> err ())
-
-(* Hack to recognize "(x)" *)
-let test_lpar_id_rpar =
- Gram.Entry.of_parser "lpar_id_coloneq"
- (fun strm ->
- match get_tok (stream_nth 0 strm) with
- | KEYWORD "(" ->
- (match get_tok (stream_nth 1 strm) with
- | IDENT _ ->
- (match get_tok (stream_nth 2 strm) with
- | KEYWORD ")" -> ()
- | _ -> err ())
- | _ -> err ())
- | _ -> err ())
-
-(* idem for (x:=t) and (1:=t) *)
-let test_lpar_idnum_coloneq =
- Gram.Entry.of_parser "test_lpar_idnum_coloneq"
- (fun strm ->
- match get_tok (stream_nth 0 strm) with
- | KEYWORD "(" ->
- (match get_tok (stream_nth 1 strm) with
- | IDENT _ | INT _ ->
- (match get_tok (stream_nth 2 strm) with
- | KEYWORD ":=" -> ()
- | _ -> err ())
- | _ -> err ())
- | _ -> err ())
-
-(* idem for (x:t) *)
-let test_lpar_id_colon =
- Gram.Entry.of_parser "lpar_id_colon"
- (fun strm ->
- match get_tok (stream_nth 0 strm) with
- | KEYWORD "(" ->
- (match get_tok (stream_nth 1 strm) with
- | IDENT _ ->
- (match get_tok (stream_nth 2 strm) with
- | KEYWORD ":" -> ()
- | _ -> err ())
- | _ -> err ())
- | _ -> err ())
-
-(* idem for (x1..xn:t) [n^2 complexity but exceptional use] *)
-let check_for_coloneq =
- Gram.Entry.of_parser "lpar_id_colon"
- (fun strm ->
- let rec skip_to_rpar p n =
- match get_tok (List.last (Stream.npeek n strm)) with
- | KEYWORD "(" -> skip_to_rpar (p+1) (n+1)
- | KEYWORD ")" -> if Int.equal p 0 then n+1 else skip_to_rpar (p-1) (n+1)
- | KEYWORD "." -> err ()
- | _ -> skip_to_rpar p (n+1) in
- let rec skip_names n =
- match get_tok (List.last (Stream.npeek n strm)) with
- | IDENT _ | KEYWORD "_" -> skip_names (n+1)
- | KEYWORD ":" -> skip_to_rpar 0 (n+1) (* skip a constr *)
- | _ -> err () in
- let rec skip_binders n =
- match get_tok (List.last (Stream.npeek n strm)) with
- | KEYWORD "(" -> skip_binders (skip_names (n+1))
- | IDENT _ | KEYWORD "_" -> skip_binders (n+1)
- | KEYWORD ":=" -> ()
- | _ -> err () in
- match get_tok (stream_nth 0 strm) with
- | KEYWORD "(" -> skip_binders 2
- | _ -> err ())
-
-let lookup_at_as_comma =
- Gram.Entry.of_parser "lookup_at_as_comma"
- (fun strm ->
- match get_tok (stream_nth 0 strm) with
- | KEYWORD (","|"at"|"as") -> ()
- | _ -> err ())
-
-open Constr
-open Prim
-open Tactic
-
-let mk_fix_tac (loc,id,bl,ann,ty) =
- let n =
- match bl,ann with
- [([_],_,_)], None -> 1
- | _, Some x ->
- let ids = List.map snd (List.flatten (List.map pi1 bl)) in
- (try List.index Names.Name.equal (snd x) ids
- with Not_found -> error "No such fix variable.")
- | _ -> error "Cannot guess decreasing argument of fix." in
- (id,n,CProdN(loc,bl,ty))
-
-let mk_cofix_tac (loc,id,bl,ann,ty) =
- let _ = Option.map (fun (aloc,_) ->
- user_err_loc
- (aloc,"Constr:mk_cofix_tac",
- Pp.str"Annotation forbidden in cofix expression.")) ann in
- (id,CProdN(loc,bl,ty))
-
-(* Functions overloaded by quotifier *)
-let destruction_arg_of_constr (c,lbind as clbind) = match lbind with
- | NoBindings ->
- begin
- try ElimOnIdent (Constrexpr_ops.constr_loc c,snd(Constrexpr_ops.coerce_to_id c))
- with e when CErrors.noncritical e -> ElimOnConstr clbind
- end
- | _ -> ElimOnConstr clbind
-
-let mkTacCase with_evar = function
- | [(clear,ElimOnConstr cl),(None,None),None],None ->
- TacCase (with_evar,(clear,cl))
- (* Reinterpret numbers as a notation for terms *)
- | [(clear,ElimOnAnonHyp n),(None,None),None],None ->
- TacCase (with_evar,
- (clear,(CPrim (Loc.ghost, Numeral (Bigint.of_int n)),
- NoBindings)))
- (* Reinterpret ident as notations for variables in the context *)
- (* because we don't know if they are quantified or not *)
- | [(clear,ElimOnIdent id),(None,None),None],None ->
- TacCase (with_evar,(clear,(CRef (Ident id,None),NoBindings)))
- | ic ->
- if List.exists (function ((_, ElimOnAnonHyp _),_,_) -> true | _ -> false) (fst ic)
- then
- error "Use of numbers as direct arguments of 'case' is not supported.";
- TacInductionDestruct (false,with_evar,ic)
-
-let rec mkCLambdaN_simple_loc loc bll c =
- match bll with
- | ((loc1,_)::_ as idl,bk,t) :: bll ->
- CLambdaN (loc,[idl,bk,t],mkCLambdaN_simple_loc (Loc.merge loc1 loc) bll c)
- | ([],_,_) :: bll -> mkCLambdaN_simple_loc loc bll c
- | [] -> c
-
-let mkCLambdaN_simple bl c = match bl with
- | [] -> c
- | h :: _ ->
- let loc = Loc.merge (fst (List.hd (pi1 h))) (Constrexpr_ops.constr_loc c) in
- mkCLambdaN_simple_loc loc bl c
-
-let loc_of_ne_list l = Loc.merge (fst (List.hd l)) (fst (List.last l))
-
-let map_int_or_var f = function
- | ArgArg x -> ArgArg (f x)
- | ArgVar _ as y -> y
-
-let all_concl_occs_clause = { onhyps=Some[]; concl_occs=AllOccurrences }
-
-let merge_occurrences loc cl = function
- | None ->
- if Locusops.clause_with_generic_occurrences cl then (None, cl)
- else
- user_err_loc (loc,"",str "Found an \"at\" clause without \"with\" clause.")
- | Some (occs, p) ->
- let ans = match occs with
- | AllOccurrences -> cl
- | _ ->
- begin match cl with
- | { onhyps = Some []; concl_occs = AllOccurrences } ->
- { onhyps = Some []; concl_occs = occs }
- | { onhyps = Some [(AllOccurrences, id), l]; concl_occs = NoOccurrences } ->
- { cl with onhyps = Some [(occs, id), l] }
- | _ ->
- if Locusops.clause_with_generic_occurrences cl then
- user_err_loc (loc,"",str "Unable to interpret the \"at\" clause; move it in the \"in\" clause.")
- else
- user_err_loc (loc,"",str "Cannot use clause \"at\" twice.")
- end
- in
- (Some p, ans)
-
-let warn_deprecated_eqn_syntax =
- CWarnings.create ~name:"deprecated-eqn-syntax" ~category:"deprecated"
- (fun arg -> strbrk (Printf.sprintf "Syntax \"_eqn:%s\" is deprecated. Please use \"eqn:%s\" instead." arg arg))
-
-(* Auxiliary grammar rules *)
-
-GEXTEND Gram
- GLOBAL: simple_tactic constr_with_bindings quantified_hypothesis
- bindings red_expr int_or_var open_constr uconstr
- simple_intropattern in_clause clause_dft_concl hypident destruction_arg;
-
- int_or_var:
- [ [ n = integer -> ArgArg n
- | id = identref -> ArgVar id ] ]
- ;
- nat_or_var:
- [ [ n = natural -> ArgArg n
- | id = identref -> ArgVar id ] ]
- ;
- (* An identifier or a quotation meta-variable *)
- id_or_meta:
- [ [ id = identref -> id ] ]
- ;
- open_constr:
- [ [ c = constr -> c ] ]
- ;
- uconstr:
- [ [ c = constr -> c ] ]
- ;
- destruction_arg:
- [ [ n = natural -> (None,ElimOnAnonHyp n)
- | test_lpar_id_rpar; c = constr_with_bindings ->
- (Some false,destruction_arg_of_constr c)
- | c = constr_with_bindings_arg -> on_snd destruction_arg_of_constr c
- ] ]
- ;
- constr_with_bindings_arg:
- [ [ ">"; c = constr_with_bindings -> (Some true,c)
- | c = constr_with_bindings -> (None,c) ] ]
- ;
- quantified_hypothesis:
- [ [ id = ident -> NamedHyp id
- | n = natural -> AnonHyp n ] ]
- ;
- conversion:
- [ [ c = constr -> (None, c)
- | c1 = constr; "with"; c2 = constr -> (Some (AllOccurrences,c1),c2)
- | c1 = constr; "at"; occs = occs_nums; "with"; c2 = constr ->
- (Some (occs,c1), c2) ] ]
- ;
- occs_nums:
- [ [ nl = LIST1 nat_or_var -> OnlyOccurrences nl
- | "-"; n = nat_or_var; nl = LIST0 int_or_var ->
- (* have used int_or_var instead of nat_or_var for compatibility *)
- AllOccurrencesBut (List.map (map_int_or_var abs) (n::nl)) ] ]
- ;
- occs:
- [ [ "at"; occs = occs_nums -> occs | -> AllOccurrences ] ]
- ;
- pattern_occ:
- [ [ c = constr; nl = occs -> (nl,c) ] ]
- ;
- ref_or_pattern_occ:
- (* If a string, it is interpreted as a ref
- (anyway a Coq string does not reduce) *)
- [ [ c = smart_global; nl = occs -> nl,Inl c
- | c = constr; nl = occs -> nl,Inr c ] ]
- ;
- unfold_occ:
- [ [ c = smart_global; nl = occs -> (nl,c) ] ]
- ;
- intropatterns:
- [ [ l = LIST0 nonsimple_intropattern -> l ]]
- ;
- ne_intropatterns:
- [ [ l = LIST1 nonsimple_intropattern -> l ]]
- ;
- or_and_intropattern:
- [ [ "["; tc = LIST1 intropatterns SEP "|"; "]" -> IntroOrPattern tc
- | "()" -> IntroAndPattern []
- | "("; si = simple_intropattern; ")" -> IntroAndPattern [si]
- | "("; si = simple_intropattern; ",";
- tc = LIST1 simple_intropattern SEP "," ; ")" ->
- IntroAndPattern (si::tc)
- | "("; si = simple_intropattern; "&";
- tc = LIST1 simple_intropattern SEP "&" ; ")" ->
- (* (A & B & C) is translated into (A,(B,C)) *)
- let rec pairify = function
- | ([]|[_]|[_;_]) as l -> l
- | t::q -> [t;(loc_of_ne_list q,IntroAction (IntroOrAndPattern (IntroAndPattern (pairify q))))]
- in IntroAndPattern (pairify (si::tc)) ] ]
- ;
- equality_intropattern:
- [ [ "->" -> IntroRewrite true
- | "<-" -> IntroRewrite false
- | "[="; tc = intropatterns; "]" -> IntroInjection tc ] ]
- ;
- naming_intropattern:
- [ [ prefix = pattern_ident -> IntroFresh prefix
- | "?" -> IntroAnonymous
- | id = ident -> IntroIdentifier id ] ]
- ;
- nonsimple_intropattern:
- [ [ l = simple_intropattern -> l
- | "*" -> !@loc, IntroForthcoming true
- | "**" -> !@loc, IntroForthcoming false ]]
- ;
- simple_intropattern:
- [ [ pat = simple_intropattern_closed;
- l = LIST0 ["%"; c = operconstr LEVEL "0" -> c] ->
- let loc0,pat = pat in
- let f c pat =
- let loc = Loc.merge loc0 (Constrexpr_ops.constr_loc c) in
- IntroAction (IntroApplyOn (c,(loc,pat))) in
- !@loc, List.fold_right f l pat ] ]
- ;
- simple_intropattern_closed:
- [ [ pat = or_and_intropattern -> !@loc, IntroAction (IntroOrAndPattern pat)
- | pat = equality_intropattern -> !@loc, IntroAction pat
- | "_" -> !@loc, IntroAction IntroWildcard
- | pat = naming_intropattern -> !@loc, IntroNaming pat ] ]
- ;
- simple_binding:
- [ [ "("; id = ident; ":="; c = lconstr; ")" -> (!@loc, NamedHyp id, c)
- | "("; n = natural; ":="; c = lconstr; ")" -> (!@loc, AnonHyp n, c) ] ]
- ;
- bindings:
- [ [ test_lpar_idnum_coloneq; bl = LIST1 simple_binding ->
- ExplicitBindings bl
- | bl = LIST1 constr -> ImplicitBindings bl ] ]
- ;
- constr_with_bindings:
- [ [ c = constr; l = with_bindings -> (c, l) ] ]
- ;
- with_bindings:
- [ [ "with"; bl = bindings -> bl | -> NoBindings ] ]
- ;
- red_flags:
- [ [ IDENT "beta" -> [FBeta]
- | IDENT "iota" -> [FMatch;FFix;FCofix]
- | IDENT "match" -> [FMatch]
- | IDENT "fix" -> [FFix]
- | IDENT "cofix" -> [FCofix]
- | IDENT "zeta" -> [FZeta]
- | IDENT "delta"; d = delta_flag -> [d]
- ] ]
- ;
- delta_flag:
- [ [ "-"; "["; idl = LIST1 smart_global; "]" -> FDeltaBut idl
- | "["; idl = LIST1 smart_global; "]" -> FConst idl
- | -> FDeltaBut []
- ] ]
- ;
- strategy_flag:
- [ [ s = LIST1 red_flags -> Redops.make_red_flag (List.flatten s)
- | d = delta_flag -> all_with d
- ] ]
- ;
- red_expr:
- [ [ IDENT "red" -> Red false
- | IDENT "hnf" -> Hnf
- | IDENT "simpl"; d = delta_flag; po = OPT ref_or_pattern_occ -> Simpl (all_with d,po)
- | IDENT "cbv"; s = strategy_flag -> Cbv s
- | IDENT "cbn"; s = strategy_flag -> Cbn s
- | IDENT "lazy"; s = strategy_flag -> Lazy s
- | IDENT "compute"; delta = delta_flag -> Cbv (all_with delta)
- | IDENT "vm_compute"; po = OPT ref_or_pattern_occ -> CbvVm po
- | IDENT "native_compute"; po = OPT ref_or_pattern_occ -> CbvNative po
- | IDENT "unfold"; ul = LIST1 unfold_occ SEP "," -> Unfold ul
- | IDENT "fold"; cl = LIST1 constr -> Fold cl
- | IDENT "pattern"; pl = LIST1 pattern_occ SEP"," -> Pattern pl
- | s = IDENT -> ExtraRedExpr s ] ]
- ;
- hypident:
- [ [ id = id_or_meta ->
- id,InHyp
- | "("; IDENT "type"; IDENT "of"; id = id_or_meta; ")" ->
- id,InHypTypeOnly
- | "("; IDENT "value"; IDENT "of"; id = id_or_meta; ")" ->
- id,InHypValueOnly
- ] ]
- ;
- hypident_occ:
- [ [ (id,l)=hypident; occs=occs -> ((occs,id),l) ] ]
- ;
- in_clause:
- [ [ "*"; occs=occs ->
- {onhyps=None; concl_occs=occs}
- | "*"; "|-"; occs=concl_occ ->
- {onhyps=None; concl_occs=occs}
- | hl=LIST0 hypident_occ SEP","; "|-"; occs=concl_occ ->
- {onhyps=Some hl; concl_occs=occs}
- | hl=LIST0 hypident_occ SEP"," ->
- {onhyps=Some hl; concl_occs=NoOccurrences} ] ]
- ;
- clause_dft_concl:
- [ [ "in"; cl = in_clause -> cl
- | occs=occs -> {onhyps=Some[]; concl_occs=occs}
- | -> all_concl_occs_clause ] ]
- ;
- clause_dft_all:
- [ [ "in"; cl = in_clause -> cl
- | -> {onhyps=None; concl_occs=AllOccurrences} ] ]
- ;
- opt_clause:
- [ [ "in"; cl = in_clause -> Some cl
- | "at"; occs = occs_nums -> Some {onhyps=Some[]; concl_occs=occs}
- | -> None ] ]
- ;
- concl_occ:
- [ [ "*"; occs = occs -> occs
- | -> NoOccurrences ] ]
- ;
- in_hyp_list:
- [ [ "in"; idl = LIST1 id_or_meta -> idl
- | -> [] ] ]
- ;
- in_hyp_as:
- [ [ "in"; id = id_or_meta; ipat = as_ipat -> Some (id,ipat)
- | -> None ] ]
- ;
- orient:
- [ [ "->" -> true
- | "<-" -> false
- | -> true ]]
- ;
- simple_binder:
- [ [ na=name -> ([na],Default Explicit,CHole (!@loc, Some (Evar_kinds.BinderType (snd na)), IntroAnonymous, None))
- | "("; nal=LIST1 name; ":"; c=lconstr; ")" -> (nal,Default Explicit,c)
- ] ]
- ;
- fixdecl:
- [ [ "("; id = ident; bl=LIST0 simple_binder; ann=fixannot;
- ":"; ty=lconstr; ")" -> (!@loc, id, bl, ann, ty) ] ]
- ;
- fixannot:
- [ [ "{"; IDENT "struct"; id=name; "}" -> Some id
- | -> None ] ]
- ;
- cofixdecl:
- [ [ "("; id = ident; bl=LIST0 simple_binder; ":"; ty=lconstr; ")" ->
- (!@loc, id, bl, None, ty) ] ]
- ;
- bindings_with_parameters:
- [ [ check_for_coloneq; "("; id = ident; bl = LIST0 simple_binder;
- ":="; c = lconstr; ")" -> (id, mkCLambdaN_simple bl c) ] ]
- ;
- eliminator:
- [ [ "using"; el = constr_with_bindings -> el ] ]
- ;
- as_ipat:
- [ [ "as"; ipat = simple_intropattern -> Some ipat
- | -> None ] ]
- ;
- or_and_intropattern_loc:
- [ [ ipat = or_and_intropattern -> ArgArg (!@loc,ipat)
- | locid = identref -> ArgVar locid ] ]
- ;
- as_or_and_ipat:
- [ [ "as"; ipat = or_and_intropattern_loc -> Some ipat
- | -> None ] ]
- ;
- eqn_ipat:
- [ [ IDENT "eqn"; ":"; pat = naming_intropattern -> Some (!@loc, pat)
- | IDENT "_eqn"; ":"; pat = naming_intropattern ->
- let loc = !@loc in
- warn_deprecated_eqn_syntax ~loc "H"; Some (loc, pat)
- | IDENT "_eqn" ->
- let loc = !@loc in
- warn_deprecated_eqn_syntax ~loc "?"; Some (loc, IntroAnonymous)
- | -> None ] ]
- ;
- as_name:
- [ [ "as"; id = ident -> Names.Name id | -> Names.Anonymous ] ]
- ;
- by_tactic:
- [ [ "by"; tac = tactic_expr LEVEL "3" -> Some tac
- | -> None ] ]
- ;
- rewriter :
- [ [ "!"; c = constr_with_bindings_arg -> (RepeatPlus,c)
- | ["?"| LEFTQMARK]; c = constr_with_bindings_arg -> (RepeatStar,c)
- | n = natural; "!"; c = constr_with_bindings_arg -> (Precisely n,c)
- | n = natural; ["?" | LEFTQMARK]; c = constr_with_bindings_arg -> (UpTo n,c)
- | n = natural; c = constr_with_bindings_arg -> (Precisely n,c)
- | c = constr_with_bindings_arg -> (Precisely 1, c)
- ] ]
- ;
- oriented_rewriter :
- [ [ b = orient; p = rewriter -> let (m,c) = p in (b,m,c) ] ]
- ;
- induction_clause:
- [ [ c = destruction_arg; pat = as_or_and_ipat; eq = eqn_ipat;
- cl = opt_clause -> (c,(eq,pat),cl) ] ]
- ;
- induction_clause_list:
- [ [ ic = LIST1 induction_clause SEP ","; el = OPT eliminator;
- cl_tolerance = opt_clause ->
- (* Condition for accepting "in" at the end by compatibility *)
- match ic,el,cl_tolerance with
- | [c,pat,None],Some _,Some _ -> ([c,pat,cl_tolerance],el)
- | _,_,Some _ -> err ()
- | _,_,None -> (ic,el) ]]
- ;
- simple_tactic:
- [ [
- (* Basic tactics *)
- IDENT "intros"; pl = ne_intropatterns ->
- TacAtom (!@loc, TacIntroPattern (false,pl))
- | IDENT "intros" ->
- TacAtom (!@loc, TacIntroPattern (false,[!@loc,IntroForthcoming false]))
- | IDENT "eintros"; pl = ne_intropatterns ->
- TacAtom (!@loc, TacIntroPattern (true,pl))
-
- | IDENT "apply"; cl = LIST1 constr_with_bindings_arg SEP ",";
- inhyp = in_hyp_as -> TacAtom (!@loc, TacApply (true,false,cl,inhyp))
- | IDENT "eapply"; cl = LIST1 constr_with_bindings_arg SEP ",";
- inhyp = in_hyp_as -> TacAtom (!@loc, TacApply (true,true,cl,inhyp))
- | IDENT "simple"; IDENT "apply";
- cl = LIST1 constr_with_bindings_arg SEP ",";
- inhyp = in_hyp_as -> TacAtom (!@loc, TacApply (false,false,cl,inhyp))
- | IDENT "simple"; IDENT "eapply";
- cl = LIST1 constr_with_bindings_arg SEP",";
- inhyp = in_hyp_as -> TacAtom (!@loc, TacApply (false,true,cl,inhyp))
- | IDENT "elim"; cl = constr_with_bindings_arg; el = OPT eliminator ->
- TacAtom (!@loc, TacElim (false,cl,el))
- | IDENT "eelim"; cl = constr_with_bindings_arg; el = OPT eliminator ->
- TacAtom (!@loc, TacElim (true,cl,el))
- | IDENT "case"; icl = induction_clause_list -> TacAtom (!@loc, mkTacCase false icl)
- | IDENT "ecase"; icl = induction_clause_list -> TacAtom (!@loc, mkTacCase true icl)
- | "fix"; id = ident; n = natural; "with"; fd = LIST1 fixdecl ->
- TacAtom (!@loc, TacMutualFix (id,n,List.map mk_fix_tac fd))
- | "cofix"; id = ident; "with"; fd = LIST1 cofixdecl ->
- TacAtom (!@loc, TacMutualCofix (id,List.map mk_cofix_tac fd))
-
- | IDENT "pose"; (id,b) = bindings_with_parameters ->
- TacAtom (!@loc, TacLetTac (Names.Name id,b,Locusops.nowhere,true,None))
- | IDENT "pose"; b = constr; na = as_name ->
- TacAtom (!@loc, TacLetTac (na,b,Locusops.nowhere,true,None))
- | IDENT "set"; (id,c) = bindings_with_parameters; p = clause_dft_concl ->
- TacAtom (!@loc, TacLetTac (Names.Name id,c,p,true,None))
- | IDENT "set"; c = constr; na = as_name; p = clause_dft_concl ->
- TacAtom (!@loc, TacLetTac (na,c,p,true,None))
- | IDENT "remember"; c = constr; na = as_name; e = eqn_ipat;
- p = clause_dft_all ->
- TacAtom (!@loc, TacLetTac (na,c,p,false,e))
-
- (* Alternative syntax for "pose proof c as id" *)
- | IDENT "assert"; test_lpar_id_coloneq; "("; (loc,id) = identref; ":=";
- c = lconstr; ")" ->
- TacAtom (!@loc, TacAssert (true,None,Some (!@loc,IntroNaming (IntroIdentifier id)),c))
-
- (* Alternative syntax for "assert c as id by tac" *)
- | IDENT "assert"; test_lpar_id_colon; "("; (loc,id) = identref; ":";
- c = lconstr; ")"; tac=by_tactic ->
- TacAtom (!@loc, TacAssert (true,Some tac,Some (!@loc,IntroNaming (IntroIdentifier id)),c))
-
- (* Alternative syntax for "enough c as id by tac" *)
- | IDENT "enough"; test_lpar_id_colon; "("; (loc,id) = identref; ":";
- c = lconstr; ")"; tac=by_tactic ->
- TacAtom (!@loc, TacAssert (false,Some tac,Some (!@loc,IntroNaming (IntroIdentifier id)),c))
-
- | IDENT "assert"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (!@loc, TacAssert (true,Some tac,ipat,c))
- | IDENT "pose"; IDENT "proof"; c = lconstr; ipat = as_ipat ->
- TacAtom (!@loc, TacAssert (true,None,ipat,c))
- | IDENT "enough"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (!@loc, TacAssert (false,Some tac,ipat,c))
-
- | IDENT "generalize"; c = constr ->
- TacAtom (!@loc, TacGeneralize [((AllOccurrences,c),Names.Anonymous)])
- | IDENT "generalize"; c = constr; l = LIST1 constr ->
- let gen_everywhere c = ((AllOccurrences,c),Names.Anonymous) in
- TacAtom (!@loc, TacGeneralize (List.map gen_everywhere (c::l)))
- | IDENT "generalize"; c = constr; lookup_at_as_comma; nl = occs;
- na = as_name;
- l = LIST0 [","; c = pattern_occ; na = as_name -> (c,na)] ->
- TacAtom (!@loc, TacGeneralize (((nl,c),na)::l))
-
- (* Derived basic tactics *)
- | IDENT "induction"; ic = induction_clause_list ->
- TacAtom (!@loc, TacInductionDestruct (true,false,ic))
- | IDENT "einduction"; ic = induction_clause_list ->
- TacAtom (!@loc, TacInductionDestruct(true,true,ic))
- | IDENT "destruct"; icl = induction_clause_list ->
- TacAtom (!@loc, TacInductionDestruct(false,false,icl))
- | IDENT "edestruct"; icl = induction_clause_list ->
- TacAtom (!@loc, TacInductionDestruct(false,true,icl))
-
- (* Equality and inversion *)
- | IDENT "rewrite"; l = LIST1 oriented_rewriter SEP ",";
- cl = clause_dft_concl; t=by_tactic -> TacAtom (!@loc, TacRewrite (false,l,cl,t))
- | IDENT "erewrite"; l = LIST1 oriented_rewriter SEP ",";
- cl = clause_dft_concl; t=by_tactic -> TacAtom (!@loc, TacRewrite (true,l,cl,t))
- | IDENT "dependent"; k =
- [ IDENT "simple"; IDENT "inversion" -> SimpleInversion
- | IDENT "inversion" -> FullInversion
- | IDENT "inversion_clear" -> FullInversionClear ];
- hyp = quantified_hypothesis;
- ids = as_or_and_ipat; co = OPT ["with"; c = constr -> c] ->
- TacAtom (!@loc, TacInversion (DepInversion (k,co,ids),hyp))
- | IDENT "simple"; IDENT "inversion";
- hyp = quantified_hypothesis; ids = as_or_and_ipat;
- cl = in_hyp_list ->
- TacAtom (!@loc, TacInversion (NonDepInversion (SimpleInversion, cl, ids), hyp))
- | IDENT "inversion";
- hyp = quantified_hypothesis; ids = as_or_and_ipat;
- cl = in_hyp_list ->
- TacAtom (!@loc, TacInversion (NonDepInversion (FullInversion, cl, ids), hyp))
- | IDENT "inversion_clear";
- hyp = quantified_hypothesis; ids = as_or_and_ipat;
- cl = in_hyp_list ->
- TacAtom (!@loc, TacInversion (NonDepInversion (FullInversionClear, cl, ids), hyp))
- | IDENT "inversion"; hyp = quantified_hypothesis;
- "using"; c = constr; cl = in_hyp_list ->
- TacAtom (!@loc, TacInversion (InversionUsing (c,cl), hyp))
-
- (* Conversion *)
- | IDENT "red"; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Red false, cl))
- | IDENT "hnf"; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Hnf, cl))
- | IDENT "simpl"; d = delta_flag; po = OPT ref_or_pattern_occ; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Simpl (all_with d, po), cl))
- | IDENT "cbv"; s = strategy_flag; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Cbv s, cl))
- | IDENT "cbn"; s = strategy_flag; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Cbn s, cl))
- | IDENT "lazy"; s = strategy_flag; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Lazy s, cl))
- | IDENT "compute"; delta = delta_flag; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Cbv (all_with delta), cl))
- | IDENT "vm_compute"; po = OPT ref_or_pattern_occ; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (CbvVm po, cl))
- | IDENT "native_compute"; po = OPT ref_or_pattern_occ; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (CbvNative po, cl))
- | IDENT "unfold"; ul = LIST1 unfold_occ SEP ","; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Unfold ul, cl))
- | IDENT "fold"; l = LIST1 constr; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Fold l, cl))
- | IDENT "pattern"; pl = LIST1 pattern_occ SEP","; cl = clause_dft_concl ->
- TacAtom (!@loc, TacReduce (Pattern pl, cl))
-
- (* Change must not apply inside a Definition t := Eval ... *)
- | IDENT "change"; (oc,c) = conversion; cl = clause_dft_concl ->
- let p,cl = merge_occurrences (!@loc) cl oc in
- TacAtom (!@loc, TacChange (p,c,cl))
- ] ]
- ;
-END;;
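
The deleted g_tactic.ml4 above was built largely out of hand-rolled lookahead predicates: Gram.Entry.of_parser functions peek a few tokens with stream_nth, then either junk them on success or raise Stream.Failure so the grammar can try another rule — the same trick the surviving lpar_id_coloneq, impl_ident_head and name_colon parsers earlier in this diff still use, minus the get_tok unwrapping. The sketch below reproduces that mechanism over the legacy Stream module (shipped as the camlp-streams package on OCaml 5); the token type and the helpers are illustrative, not camlp5's Tok.t and Pcoq's stream utilities.

(* Illustrative token type standing in for camlp5's Tok.t. *)
type token =
  | KEYWORD of string
  | IDENT of string
  | INT of string

let err () = raise Stream.Failure

(* Peek at the nth token without consuming anything, like stream_nth. *)
let stream_nth n strm =
  match List.nth_opt (Stream.npeek (n + 1) strm) n with
  | Some tok -> tok
  | None -> err ()

(* Consume n tokens, like stream_njunk. *)
let stream_njunk n strm =
  for _i = 1 to n do Stream.junk strm done

(* Recognise "( ident :=" and return the identifier: the three tokens are
   consumed on success; otherwise Stream.Failure lets another rule apply. *)
let lpar_id_coloneq strm =
  match stream_nth 0 strm with
  | KEYWORD "(" ->
    (match stream_nth 1 strm with
     | IDENT s ->
       (match stream_nth 2 strm with
        | KEYWORD ":=" -> stream_njunk 3 strm; s
        | _ -> err ())
     | _ -> err ())
  | _ -> err ()

let () =
  let strm = Stream.of_list [KEYWORD "("; IDENT "x"; KEYWORD ":="; INT "1"] in
  match lpar_id_coloneq strm with
  | id ->
      Printf.printf "parsed \"(%s :=\"; next token is the bound term: %b\n"
        id (Stream.npeek 1 strm = [INT "1"])
  | exception Stream.Failure -> print_endline "rule does not apply"
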
diff --git a/parsing/g_vernac.ml4 b/parsing/g_vernac.ml4
index e61be53a..61b1de82 100644
--- a/parsing/g_vernac.ml4
+++ b/parsing/g_vernac.ml4
@@ -1,23 +1,25 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
open Pp
-open Compat
open CErrors
open Util
open Names
+open Vernacexpr
open Constrexpr
open Constrexpr_ops
open Extend
-open Vernacexpr
open Decl_kinds
+open Declarations
open Misctypes
-open Tok (* necessary for camlp4 *)
+open Tok (* necessary for camlp5 *)
open Pcoq
open Pcoq.Prim
@@ -41,7 +43,6 @@ let def_body = Gram.entry_create "vernac:def_body"
let decl_notation = Gram.entry_create "vernac:decl_notation"
let record_field = Gram.entry_create "vernac:record_field"
let of_type_with_opt_coercion = Gram.entry_create "vernac:of_type_with_opt_coercion"
-let subgoal_command = Gram.entry_create "proof_mode:subgoal_command"
let instance_name = Gram.entry_create "vernac:instance_name"
let section_subset_expr = Gram.entry_create "vernac:section_subset_expr"
@@ -53,53 +54,57 @@ let make_bullet s =
| '*' -> Star n
| _ -> assert false
+let parse_compat_version ?(allow_old = true) = let open Flags in function
+ | "8.8" -> Current
+ | "8.7" -> V8_7
+ | "8.6" -> V8_6
+ | ("8.5" | "8.4" | "8.3" | "8.2" | "8.1" | "8.0") as s ->
+ CErrors.user_err ~hdr:"get_compat_version"
+ Pp.(str "Compatibility with version " ++ str s ++ str " not supported.")
+ | s ->
+ CErrors.user_err ~hdr:"get_compat_version"
+ Pp.(str "Unknown compatibility version \"" ++ str s ++ str "\".")
+
GEXTEND Gram
- GLOBAL: vernac gallina_ext noedit_mode subprf subgoal_command;
- vernac: FIRST
- [ [ IDENT "Time"; c = located_vernac -> VernacTime c
+ GLOBAL: vernac_control gallina_ext noedit_mode subprf;
+ vernac_control: FIRST
+ [ [ IDENT "Time"; c = located_vernac -> VernacTime (false,c)
| IDENT "Redirect"; s = ne_string; c = located_vernac -> VernacRedirect (s, c)
- | IDENT "Timeout"; n = natural; v = vernac -> VernacTimeout(n,v)
- | IDENT "Fail"; v = vernac -> VernacFail v
-
- | IDENT "Local"; v = vernac_poly -> VernacLocal (true, v)
- | IDENT "Global"; v = vernac_poly -> VernacLocal (false, v)
-
- (* Stm backdoor *)
- | IDENT "Stm"; IDENT "JoinDocument"; "." -> VernacStm JoinDocument
- | IDENT "Stm"; IDENT "Finish"; "." -> VernacStm Finish
- | IDENT "Stm"; IDENT "Wait"; "." -> VernacStm Wait
- | IDENT "Stm"; IDENT "PrintDag"; "." -> VernacStm PrintDag
- | IDENT "Stm"; IDENT "Observe"; id = INT; "." ->
- VernacStm (Observe (Stateid.of_int (int_of_string id)))
- | IDENT "Stm"; IDENT "Command"; v = vernac_aux -> VernacStm (Command v)
- | IDENT "Stm"; IDENT "PGLast"; v = vernac_aux -> VernacStm (PGLast v)
+ | IDENT "Timeout"; n = natural; v = vernac_control -> VernacTimeout(n,v)
+ | IDENT "Fail"; v = vernac_control -> VernacFail v
+ | (f, v) = vernac -> VernacExpr(f, v) ]
+ ]
+ ;
+ vernac:
+ [ [ IDENT "Local"; (f, v) = vernac_poly -> (VernacLocal true :: f, v)
+ | IDENT "Global"; (f, v) = vernac_poly -> (VernacLocal false :: f, v)
| v = vernac_poly -> v ]
]
;
vernac_poly:
- [ [ IDENT "Polymorphic"; v = vernac_aux -> VernacPolymorphic (true, v)
- | IDENT "Monomorphic"; v = vernac_aux -> VernacPolymorphic (false, v)
+ [ [ IDENT "Polymorphic"; (f, v) = vernac_aux -> (VernacPolymorphic true :: f, v)
+ | IDENT "Monomorphic"; (f, v) = vernac_aux -> (VernacPolymorphic false :: f, v)
| v = vernac_aux -> v ]
]
;
vernac_aux:
(* Better to parse "." here: in case of failure (e.g. in coerce_to_var), *)
(* "." is still in the stream and discard_to_dot works correctly *)
- [ [ IDENT "Program"; g = gallina; "." -> VernacProgram g
- | IDENT "Program"; g = gallina_ext; "." -> VernacProgram g
- | g = gallina; "." -> g
- | g = gallina_ext; "." -> g
- | c = command; "." -> c
- | c = syntax; "." -> c
- | c = subprf -> c
+ [ [ IDENT "Program"; g = gallina; "." -> ([VernacProgram], g)
+ | IDENT "Program"; g = gallina_ext; "." -> ([VernacProgram], g)
+ | g = gallina; "." -> ([], g)
+ | g = gallina_ext; "." -> ([], g)
+ | c = command; "." -> ([], c)
+ | c = syntax; "." -> ([], c)
+ | c = subprf -> ([], c)
] ]
;
vernac_aux: LAST
- [ [ prfcom = command_entry -> prfcom ] ]
+ [ [ prfcom = command_entry -> ([], prfcom) ] ]
;
noedit_mode:
- [ [ c = subgoal_command -> c None] ]
+ [ [ c = query_command -> c None] ]
;
subprf:
@@ -109,17 +114,8 @@ GEXTEND Gram
] ]
;
- subgoal_command:
- [ [ c = query_command; "." ->
- begin function
- | Some (SelectNth g) -> c (Some g)
- | None -> c None
- | _ ->
- VernacError (UserError ("",str"Typing and evaluation commands, cannot be used with the \"all:\" selector."))
- end ] ]
- ;
located_vernac:
- [ [ v = vernac -> !@loc, v ] ]
+ [ [ v = vernac_control -> CAst.make ~loc:!@loc v ] ]
;
END
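(* Standalone sketch (not from the Coq sources) of the shape the
   vernac_control rules above now produce: prefix keywords accumulate in
   a flag list that is finally paired with the command by VernacExpr,
   while Time/Timeout/Fail stay outside as control wrappers.  All type
   names here are local stand-ins. *)
type flag = Program | Local of bool | Polymorphic of bool
type expr = Command of string
type control =
  | Expr of flag list * expr
  | Time of control
  | Timeout of int * control
  | Fail of control

(* "Timeout 10 Local Polymorphic Definition foo ..." would yield roughly: *)
let example : control =
  Timeout (10, Expr ([Local true; Polymorphic true], Command "Definition foo"))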
@@ -137,47 +133,57 @@ let test_plural_form_types loc kwd = function
warn_plural_command ~loc:!@loc kwd
| _ -> ()
-let fresh_var env c =
- Namegen.next_ident_away (Id.of_string "pat")
- (env @ Id.Set.elements (Topconstr.free_vars_of_constr_expr c))
+let lname_of_lident : lident -> lname =
+ CAst.map (fun s -> Name s)
-let _ = Hook.set Constrexpr_ops.fresh_var_hook fresh_var
+let name_of_ident_decl : ident_decl -> name_decl =
+ on_fst lname_of_lident
(* Gallina declarations *)
GEXTEND Gram
GLOBAL: gallina gallina_ext thm_token def_body of_type_with_opt_coercion
- record_field decl_notation rec_definition pidentref;
+ record_field decl_notation rec_definition ident_decl univ_decl;
gallina:
(* Definition, Theorem, Variable, Axiom, ... *)
- [ [ thm = thm_token; id = pidentref; bl = binders; ":"; c = lconstr;
+ [ [ thm = thm_token; id = ident_decl; bl = binders; ":"; c = lconstr;
l = LIST0
- [ "with"; id = pidentref; bl = binders; ":"; c = lconstr ->
- (Some id,(bl,c,None)) ] ->
- VernacStartTheoremProof (thm, (Some id,(bl,c,None))::l, false)
+ [ "with"; id = ident_decl; bl = binders; ":"; c = lconstr ->
+ (id,(bl,c)) ] ->
+ VernacStartTheoremProof (thm, (id,(bl,c))::l)
| stre = assumption_token; nl = inline; bl = assum_list ->
VernacAssumption (stre, nl, bl)
| (kwd,stre) = assumptions_token; nl = inline; bl = assum_list ->
test_plural_form loc kwd bl;
VernacAssumption (stre, nl, bl)
- | d = def_token; id = pidentref; b = def_body ->
- VernacDefinition (d, id, b)
+ | d = def_token; id = ident_decl; b = def_body ->
+ VernacDefinition (d, name_of_ident_decl id, b)
| IDENT "Let"; id = identref; b = def_body ->
- VernacDefinition ((Some Discharge, Definition), (id, None), b)
+ VernacDefinition ((DoDischarge, Let), (lname_of_lident id, None), b)
(* Gallina inductive declarations *)
- | priv = private_token; f = finite_token;
+ | cum = cumulativity_token; priv = private_token; f = finite_token;
indl = LIST1 inductive_definition SEP "with" ->
let (k,f) = f in
- let indl=List.map (fun ((a,b,c,d),e) -> ((a,b,c,k,d),e)) indl in
- VernacInductive (priv,f,indl)
+ let indl=List.map (fun ((a,b,c,d),e) -> ((a,b,c,k,d),e)) indl in
+ let cum =
+ match cum with
+ Some true -> LocalCumulativity
+ | Some false -> LocalNonCumulativity
+ | None ->
+ if Flags.is_polymorphic_inductive_cumulativity () then
+ GlobalCumulativity
+ else
+ GlobalNonCumulativity
+ in
+ VernacInductive (cum, priv,f,indl)
| "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
- VernacFixpoint (None, recs)
+ VernacFixpoint (NoDischarge, recs)
| IDENT "Let"; "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
- VernacFixpoint (Some Discharge, recs)
+ VernacFixpoint (DoDischarge, recs)
| "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
- VernacCoFixpoint (None, corecs)
+ VernacCoFixpoint (NoDischarge, corecs)
| IDENT "Let"; "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
- VernacCoFixpoint (Some Discharge, corecs)
+ VernacCoFixpoint (DoDischarge, corecs)
| IDENT "Scheme"; l = LIST1 scheme SEP "with" -> VernacScheme l
| IDENT "Combined"; IDENT "Scheme"; id = identref; IDENT "from";
l = LIST1 identref SEP "," -> VernacCombinedScheme (id, l)
@@ -199,60 +205,82 @@ GEXTEND Gram
| IDENT "Property" -> Property ] ]
;
def_token:
- [ [ "Definition" -> (None, Definition)
- | IDENT "Example" -> (None, Example)
- | IDENT "SubClass" -> (None, SubClass) ] ]
+ [ [ "Definition" -> (NoDischarge,Definition)
+ | IDENT "Example" -> (NoDischarge,Example)
+ | IDENT "SubClass" -> (NoDischarge,SubClass) ] ]
;
assumption_token:
- [ [ "Hypothesis" -> (Some Discharge, Logical)
- | "Variable" -> (Some Discharge, Definitional)
- | "Axiom" -> (None, Logical)
- | "Parameter" -> (None, Definitional)
- | IDENT "Conjecture" -> (None, Conjectural) ] ]
+ [ [ "Hypothesis" -> (DoDischarge, Logical)
+ | "Variable" -> (DoDischarge, Definitional)
+ | "Axiom" -> (NoDischarge, Logical)
+ | "Parameter" -> (NoDischarge, Definitional)
+ | IDENT "Conjecture" -> (NoDischarge, Conjectural) ] ]
;
assumptions_token:
- [ [ IDENT "Hypotheses" -> ("Hypotheses", (Some Discharge, Logical))
- | IDENT "Variables" -> ("Variables", (Some Discharge, Definitional))
- | IDENT "Axioms" -> ("Axioms", (None, Logical))
- | IDENT "Parameters" -> ("Parameters", (None, Definitional))
- | IDENT "Conjectures" -> ("Conjectures", (None, Conjectural)) ] ]
+ [ [ IDENT "Hypotheses" -> ("Hypotheses", (DoDischarge, Logical))
+ | IDENT "Variables" -> ("Variables", (DoDischarge, Definitional))
+ | IDENT "Axioms" -> ("Axioms", (NoDischarge, Logical))
+ | IDENT "Parameters" -> ("Parameters", (NoDischarge, Definitional))
+ | IDENT "Conjectures" -> ("Conjectures", (NoDischarge, Conjectural)) ] ]
;
inline:
[ [ IDENT "Inline"; "("; i = INT; ")" -> InlineAt (int_of_string i)
| IDENT "Inline" -> DefaultInline
| -> NoInline] ]
;
- pidentref:
- [ [ i = identref; l = OPT [ "@{" ; l = LIST0 identref; "}" -> l ] -> (i,l) ] ]
- ;
univ_constraint:
[ [ l = universe_level; ord = [ "<" -> Univ.Lt | "=" -> Univ.Eq | "<=" -> Univ.Le ];
r = universe_level -> (l, ord, r) ] ]
;
+ univ_decl :
+ [ [ "@{" ; l = LIST0 identref; ext = [ "+" -> true | -> false ];
+ cs = [ "|"; l' = LIST0 univ_constraint SEP ",";
+ ext = [ "+" -> true | -> false ]; "}" -> (l',ext)
+ | ext = [ "}" -> true | "|}" -> false ] -> ([], ext) ]
+ ->
+ { univdecl_instance = l;
+ univdecl_extensible_instance = ext;
+ univdecl_constraints = fst cs;
+ univdecl_extensible_constraints = snd cs }
+ ] ]
+ ;
+ ident_decl:
+ [ [ i = identref; l = OPT univ_decl -> (i, l)
+ ] ]
+ ;
finite_token:
- [ [ "Inductive" -> (Inductive_kw,Finite)
- | "CoInductive" -> (CoInductive,CoFinite)
- | "Variant" -> (Variant,BiFinite)
+ [ [ IDENT "Inductive" -> (Inductive_kw,Finite)
+ | IDENT "CoInductive" -> (CoInductive,CoFinite)
+ | IDENT "Variant" -> (Variant,BiFinite)
| IDENT "Record" -> (Record,BiFinite)
| IDENT "Structure" -> (Structure,BiFinite)
| IDENT "Class" -> (Class true,BiFinite) ] ]
;
+ cumulativity_token:
+ [ [ IDENT "Cumulative" -> Some true | IDENT "NonCumulative" -> Some false | -> None ] ]
+ ;
private_token:
[ [ IDENT "Private" -> true | -> false ] ]
;
(* Simple definitions *)
def_body:
[ [ bl = binders; ":="; red = reduce; c = lconstr ->
- let (bl, c) = expand_pattern_binders mkCLambdaN bl c in
- (match c with
- CCast(_,c, CastConv t) -> DefineBody (bl, red, c, Some t)
+ if List.exists (function CLocalPattern _ -> true | _ -> false) bl
+ then
+ (* FIXME: "red" will be applied to types in bl and Cast will remain *)
+ let c = mkCLambdaN ~loc:!@loc bl c in
+ DefineBody ([], red, c, None)
+ else
+ (match c with
+ | { CAst.v = CCast(c, CastConv t) } -> DefineBody (bl, red, c, Some t)
| _ -> DefineBody (bl, red, c, None))
| bl = binders; ":"; t = lconstr; ":="; red = reduce; c = lconstr ->
let ((bl, c), tyo) =
- if List.exists (function LocalPattern _ -> true | _ -> false) bl
+ if List.exists (function CLocalPattern _ -> true | _ -> false) bl
then
- let c = CCast (!@loc, c, CastConv t) in
- (expand_pattern_binders mkCLambdaN bl c, None)
+ (* FIXME: "red" will be applied to types in bl and Cast will remain *)
+ let c = CAst.make ~loc:!@loc @@ CCast (c, CastConv t) in
+ (([],mkCLambdaN ~loc:!@loc bl c), None)
else ((bl, c), Some t)
in
DefineBody (bl, red, c, tyo)
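(* Standalone sketch (not the real Constrexpr types) of the record the new
   univ_decl entry builds for a declaration such as "@{i j + | i < j +}":
   the bound universe names, the constraints, and two flags recording
   whether each part is extensible. *)
type 'a udecl = {
  instance : 'a list;
  extensible_instance : bool;
  constraints : ('a * string * 'a) list;   (* (lhs, "<" | "=" | "<=", rhs) *)
  extensible_constraints : bool;
}

let example : string udecl = {
  instance = ["i"; "j"];
  extensible_instance = true;
  constraints = [("i", "<", "j")];
  extensible_constraints = true;
}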
@@ -260,7 +288,7 @@ GEXTEND Gram
ProveBody (bl, t) ] ]
;
reduce:
- [ [ IDENT "Eval"; r = Tactic.red_expr; "in" -> Some r
+ [ [ IDENT "Eval"; r = red_expr; "in" -> Some r
| -> None ] ]
;
one_decl_notation:
@@ -277,7 +305,7 @@ GEXTEND Gram
| -> RecordDecl (None, []) ] ]
;
inductive_definition:
- [ [ oc = opt_coercion; id = pidentref; indpar = binders;
+ [ [ oc = opt_coercion; id = ident_decl; indpar = binders;
c = OPT [ ":"; c = lconstr -> c ];
lc=opt_constructors_or_fields; ntn = decl_notation ->
(((oc,id),indpar,c,lc),ntn) ] ]
@@ -303,20 +331,20 @@ GEXTEND Gram
;
(* (co)-fixpoints *)
rec_definition:
- [ [ id = pidentref;
+ [ [ id = ident_decl;
bl = binders_fixannot;
ty = type_cstr;
def = OPT [":="; def = lconstr -> def]; ntn = decl_notation ->
let bl, annot = bl in ((id,annot,bl,ty,def),ntn) ] ]
;
corec_definition:
- [ [ id = pidentref; bl = binders; ty = type_cstr;
+ [ [ id = ident_decl; bl = binders; ty = type_cstr;
def = OPT [":="; def = lconstr -> def]; ntn = decl_notation ->
((id,bl,ty,def),ntn) ] ]
;
type_cstr:
[ [ ":"; c=lconstr -> c
- | -> CHole (!@loc, None, Misctypes.IntroAnonymous, None) ] ]
+ | -> CAst.make ~loc:!@loc @@ CHole (None, Misctypes.IntroAnonymous, None) ] ]
;
(* Inductive schemes *)
scheme:
@@ -325,13 +353,13 @@ GEXTEND Gram
;
scheme_kind:
[ [ IDENT "Induction"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort-> InductionScheme(true,ind,s)
+ IDENT "Sort"; s = sort_family-> InductionScheme(true,ind,s)
| IDENT "Minimality"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort-> InductionScheme(false,ind,s)
+ IDENT "Sort"; s = sort_family-> InductionScheme(false,ind,s)
| IDENT "Elimination"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort-> CaseScheme(true,ind,s)
+ IDENT "Sort"; s = sort_family-> CaseScheme(true,ind,s)
| IDENT "Case"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort-> CaseScheme(false,ind,s)
+ IDENT "Sort"; s = sort_family-> CaseScheme(false,ind,s)
| IDENT "Equality"; "for" ; ind = smart_global -> EqualityScheme(ind) ] ]
;
(* Various Binders *)
@@ -340,8 +368,8 @@ GEXTEND Gram
binder_nodef:
[ [ b = binder_let ->
(match b with
- LocalRawAssum(l,ty) -> (l,ty)
- | LocalRawDef _ ->
+ CLocalAssum(l,ty) -> (l,ty)
+ | CLocalDef _ ->
Util.user_err_loc
(loc,"fix_param",Pp.str"defined binder not allowed here.")) ] ]
;
@@ -360,19 +388,19 @@ GEXTEND Gram
;
record_binder_body:
[ [ l = binders; oc = of_type_with_opt_coercion;
- t = lconstr -> fun id -> (oc,AssumExpr (id,mkCProdN (!@loc) l t))
+ t = lconstr -> fun id -> (oc,AssumExpr (id,mkCProdN ~loc:!@loc l t))
| l = binders; oc = of_type_with_opt_coercion;
t = lconstr; ":="; b = lconstr -> fun id ->
- (oc,DefExpr (id,mkCLambdaN (!@loc) l b,Some (mkCProdN (!@loc) l t)))
+ (oc,DefExpr (id,mkCLambdaN ~loc:!@loc l b,Some (mkCProdN ~loc:!@loc l t)))
| l = binders; ":="; b = lconstr -> fun id ->
- match b with
- | CCast(_,b, (CastConv t|CastVM t|CastNative t)) ->
- (None,DefExpr(id,mkCLambdaN (!@loc) l b,Some (mkCProdN (!@loc) l t)))
+ match b.CAst.v with
+ | CCast(b', (CastConv t|CastVM t|CastNative t)) ->
+ (None,DefExpr(id,mkCLambdaN ~loc:!@loc l b',Some (mkCProdN ~loc:!@loc l t)))
| _ ->
- (None,DefExpr(id,mkCLambdaN (!@loc) l b,None)) ] ]
+ (None,DefExpr(id,mkCLambdaN ~loc:!@loc l b,None)) ] ]
;
record_binder:
- [ [ id = name -> (None,AssumExpr(id,CHole (!@loc, None, Misctypes.IntroAnonymous, None)))
+ [ [ id = name -> (None,AssumExpr(id, CAst.make ~loc:!@loc @@ CHole (None, Misctypes.IntroAnonymous, None)))
| id = name; f = record_binder_body -> f id ] ]
;
assum_list:
@@ -382,16 +410,16 @@ GEXTEND Gram
[ [ "("; a = simple_assum_coe; ")" -> a ] ]
;
simple_assum_coe:
- [ [ idl = LIST1 pidentref; oc = of_type_with_opt_coercion; c = lconstr ->
+ [ [ idl = LIST1 ident_decl; oc = of_type_with_opt_coercion; c = lconstr ->
(not (Option.is_empty oc),(idl,c)) ] ]
;
constructor_type:
[[ l = binders;
t= [ coe = of_type_with_opt_coercion; c = lconstr ->
- fun l id -> (not (Option.is_empty coe),(id,mkCProdN (!@loc) l c))
+ fun l id -> (not (Option.is_empty coe),(id,mkCProdN ~loc:!@loc l c))
| ->
- fun l id -> (false,(id,mkCProdN (!@loc) l (CHole (!@loc, None, Misctypes.IntroAnonymous, None)))) ]
+ fun l id -> (false,(id,mkCProdN ~loc:!@loc l (CAst.make ~loc:!@loc @@ CHole (None, Misctypes.IntroAnonymous, None)))) ]
-> t l
]]
;
@@ -413,7 +441,7 @@ let only_starredidentrefs =
Gram.Entry.of_parser "test_only_starredidentrefs"
(fun strm ->
let rec aux n =
- match get_tok (Util.stream_nth n strm) with
+ match Util.stream_nth n strm with
| KEYWORD "." -> ()
| KEYWORD ")" -> ()
| (IDENT _ | KEYWORD "Type" | KEYWORD "*") -> aux (n+1)
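(* Standalone sketch of the lookahead idiom used by only_starredidentrefs
   above: scan the upcoming tokens without consuming them and give up as
   soon as anything other than an identifier, "Type" or "*" appears
   before the closing "." or ")".  The token type is a local stand-in
   for Tok.t, and the real entry signals failure with Stream.Failure
   instead of returning a bool. *)
type tok = KEYWORD of string | IDENT of string

let rec only_starred_idents = function
  | (KEYWORD "." | KEYWORD ")") :: _ -> true
  | (IDENT _ | KEYWORD "Type" | KEYWORD "*") :: rest -> only_starred_idents rest
  | _ -> false

let () =
  assert (only_starred_idents [IDENT "a"; KEYWORD "*"; KEYWORD ")"]);
  assert (not (only_starred_idents [IDENT "a"; KEYWORD ","; KEYWORD ")"]))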
@@ -522,25 +550,26 @@ GEXTEND Gram
(* Module expressions *)
module_expr:
[ [ me = module_expr_atom -> me
- | me1 = module_expr; me2 = module_expr_atom -> CMapply (!@loc,me1,me2)
+ | me1 = module_expr; me2 = module_expr_atom -> CAst.make ~loc:!@loc @@ CMapply (me1,me2)
] ]
;
module_expr_atom:
- [ [ qid = qualid -> CMident qid | "("; me = module_expr; ")" -> me ] ]
+ [ [ qid = qualid -> CAst.make ~loc:!@loc @@ CMident (qid.CAst.v) | "("; me = module_expr; ")" -> me ] ]
;
with_declaration:
- [ [ "Definition"; fqid = fullyqualid; ":="; c = Constr.lconstr ->
- CWith_Definition (fqid,c)
+ [ [ "Definition"; fqid = fullyqualid; udecl = OPT univ_decl; ":="; c = Constr.lconstr ->
+ CWith_Definition (fqid,udecl,c)
| IDENT "Module"; fqid = fullyqualid; ":="; qid = qualid ->
CWith_Module (fqid,qid)
] ]
;
module_type:
- [ [ qid = qualid -> CMident qid
+ [ [ qid = qualid -> CAst.make ~loc:!@loc @@ CMident (qid.CAst.v)
| "("; mt = module_type; ")" -> mt
- | mty = module_type; me = module_expr_atom -> CMapply (!@loc,mty,me)
+ | mty = module_type; me = module_expr_atom ->
+ CAst.make ~loc:!@loc @@ CMapply (mty,me)
| mty = module_type; "with"; decl = with_declaration ->
- CMwith (!@loc,mty,decl)
+ CAst.make ~loc:!@loc @@ CMwith (mty,decl)
] ]
;
(* Proof using *)
@@ -552,8 +581,8 @@ GEXTEND Gram
starredidentref:
[ [ i = identref -> SsSingl i
| i = identref; "*" -> SsFwdClose(SsSingl i)
- | "Type" -> SsSingl (!@loc, Id.of_string "Type")
- | "Type"; "*" -> SsFwdClose (SsSingl (!@loc, Id.of_string "Type")) ]]
+ | "Type" -> SsType
+ | "Type"; "*" -> SsFwdClose SsType ]]
;
ssexpr:
[ "35"
@@ -580,12 +609,6 @@ let warn_deprecated_implicit_arguments =
CWarnings.create ~name:"deprecated-implicit-arguments" ~category:"deprecated"
(fun () -> strbrk "Implicit Arguments is deprecated; use Arguments instead")
-let warn_deprecated_arguments_syntax =
- CWarnings.create ~name:"deprecated-arguments-syntax" ~category:"deprecated"
- (fun () -> strbrk "The \"/\" and \"!\" modifiers have an effect only "
- ++ strbrk "in the first arguments list. The syntax allowing"
- ++ strbrk " them to appear in other lists is deprecated.")
-
(* Extensions: implicits, coercions, etc. *)
GEXTEND Gram
GLOBAL: gallina_ext instance_name hint_info;
@@ -601,43 +624,29 @@ GEXTEND Gram
VernacSetStrategy l
(* Canonical structure *)
| IDENT "Canonical"; IDENT "Structure"; qid = global ->
- VernacCanonical (AN qid)
+ VernacCanonical CAst.(make ~loc:!@loc @@ AN qid)
| IDENT "Canonical"; IDENT "Structure"; ntn = by_notation ->
- VernacCanonical (ByNotation ntn)
- | IDENT "Canonical"; IDENT "Structure"; qid = global;
- d = def_body ->
+ VernacCanonical CAst.(make ~loc:!@loc @@ ByNotation ntn)
+ | IDENT "Canonical"; IDENT "Structure"; qid = global; d = def_body ->
let s = coerce_reference_to_id qid in
- VernacDefinition
- ((Some Global,CanonicalStructure),((Loc.ghost,s),None),d)
+ VernacDefinition ((NoDischarge,CanonicalStructure),((CAst.make (Name s)),None),d)
(* Coercions *)
| IDENT "Coercion"; qid = global; d = def_body ->
let s = coerce_reference_to_id qid in
- VernacDefinition ((None,Coercion),((Loc.ghost,s),None),d)
- | IDENT "Coercion"; IDENT "Local"; qid = global; d = def_body ->
- let s = coerce_reference_to_id qid in
- VernacDefinition ((Some Decl_kinds.Local,Coercion),((Loc.ghost,s),None),d)
- | IDENT "Identity"; IDENT "Coercion"; IDENT "Local"; f = identref;
- ":"; s = class_rawexpr; ">->"; t = class_rawexpr ->
- VernacIdentityCoercion (true, f, s, t)
+ VernacDefinition ((NoDischarge,Coercion),((CAst.make (Name s)),None),d)
| IDENT "Identity"; IDENT "Coercion"; f = identref; ":";
s = class_rawexpr; ">->"; t = class_rawexpr ->
- VernacIdentityCoercion (false, f, s, t)
- | IDENT "Coercion"; IDENT "Local"; qid = global; ":";
- s = class_rawexpr; ">->"; t = class_rawexpr ->
- VernacCoercion (true, AN qid, s, t)
- | IDENT "Coercion"; IDENT "Local"; ntn = by_notation; ":";
- s = class_rawexpr; ">->"; t = class_rawexpr ->
- VernacCoercion (true, ByNotation ntn, s, t)
+ VernacIdentityCoercion (f, s, t)
| IDENT "Coercion"; qid = global; ":"; s = class_rawexpr; ">->";
t = class_rawexpr ->
- VernacCoercion (false, AN qid, s, t)
+ VernacCoercion (CAst.make ~loc:!@loc @@ AN qid, s, t)
| IDENT "Coercion"; ntn = by_notation; ":"; s = class_rawexpr; ">->";
t = class_rawexpr ->
- VernacCoercion (false, ByNotation ntn, s, t)
+ VernacCoercion (CAst.make ~loc:!@loc @@ ByNotation ntn, s, t)
- | IDENT "Context"; c = binders ->
- VernacContext c
+ | IDENT "Context"; c = LIST1 binder ->
+ VernacContext (List.flatten c)
| IDENT "Instance"; namesup = instance_name; ":";
expl = [ "!" -> Decl_kinds.Implicit | -> Decl_kinds.Explicit ] ; t = operconstr LEVEL "200";
@@ -663,10 +672,7 @@ GEXTEND Gram
args = LIST0 argument_spec_block;
more_implicits = OPT
[ ","; impl = LIST1
- [ impl = LIST0 more_implicits_block ->
- let warn_deprecated = List.exists fst impl in
- if warn_deprecated then warn_deprecated_arguments_syntax ~loc:!@loc ();
- List.flatten (List.map snd impl)]
+ [ impl = LIST0 more_implicits_block -> List.flatten impl]
SEP "," -> impl
];
mods = OPT [ ":"; l = LIST1 arguments_modifier SEP "," -> l ] ->
@@ -679,7 +685,7 @@ GEXTEND Gram
if Option.is_empty !slash_position then
(slash_position := Some i; parse_args i args)
else
- error "The \"/\" modifier can occur only once"
+ user_err Pp.(str "The \"/\" modifier can occur only once")
in
let args = parse_args 0 (List.flatten args) in
let more_implicits = Option.default [] more_implicits in
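(* Standalone sketch of the check performed by parse_args above: walk the
   flattened argument list once, remember where the single "/" modifier
   occurs, and report an error if it is given twice.  The names below are
   local stand-ins for the Vernacexpr argument items. *)
type arg_item = Slash | Id of string

let find_slash_position items =
  let rec go i pos = function
    | [] -> pos
    | Slash :: rest ->
        (match pos with
         | None -> go (i + 1) (Some i) rest
         | Some _ -> failwith "The \"/\" modifier can occur only once")
    | Id _ :: rest -> go (i + 1) pos rest
  in
  go 0 None items

let () =
  assert (find_slash_position [Id "x"; Slash; Id "y"] = Some 1);
  assert (try ignore (find_slash_position [Slash; Slash]); false
          with Failure _ -> true)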
@@ -739,7 +745,7 @@ GEXTEND Gram
;
argument_spec: [
[ b = OPT "!"; id = name ; s = OPT scope ->
- snd id, not (Option.is_empty b), Option.map (fun x -> !@loc, x) s
+ id.CAst.v, not (Option.is_empty b), Option.map (fun x -> CAst.make ~loc:!@loc x) s
]
];
(* List of arguments implicit status, scope, modifiers *)
@@ -752,43 +758,37 @@ GEXTEND Gram
| "/" -> [`Slash]
| "("; items = LIST1 argument_spec; ")"; sc = OPT scope ->
let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> !@loc, y) x
- | Some _, Some _ -> error "scope declared twice" in
+ | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
+ | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
List.map (fun (name,recarg_like,notation_scope) ->
`Id { name=name; recarg_like=recarg_like;
notation_scope=f notation_scope;
implicit_status = NotImplicit}) items
| "["; items = LIST1 argument_spec; "]"; sc = OPT scope ->
let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> !@loc, y) x
- | Some _, Some _ -> error "scope declared twice" in
+ | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
+ | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
List.map (fun (name,recarg_like,notation_scope) ->
`Id { name=name; recarg_like=recarg_like;
notation_scope=f notation_scope;
implicit_status = Implicit}) items
| "{"; items = LIST1 argument_spec; "}"; sc = OPT scope ->
let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> !@loc, y) x
- | Some _, Some _ -> error "scope declared twice" in
+ | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
+ | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
List.map (fun (name,recarg_like,notation_scope) ->
`Id { name=name; recarg_like=recarg_like;
notation_scope=f notation_scope;
implicit_status = MaximallyImplicit}) items
]
];
- name_or_bang: [
- [ b = OPT "!"; id = name ->
- not (Option.is_empty b), id
- ]
- ];
(* Same as [argument_spec_block], but with only implicit status and names *)
more_implicits_block: [
- [ (bang,name) = name_or_bang -> (bang, [(snd name, Vernacexpr.NotImplicit)])
- | "/" -> (true (* Should warn about deprecated syntax *), [])
- | "["; items = LIST1 name_or_bang; "]" ->
- (List.exists fst items, List.map (fun (_,(_,name)) -> (name, Vernacexpr.Implicit)) items)
- | "{"; items = LIST1 name_or_bang; "}" ->
- (List.exists fst items, List.map (fun (_,(_,name)) -> (name, Vernacexpr.MaximallyImplicit)) items)
+ [ name = name -> [(name.CAst.v, Vernacexpr.NotImplicit)]
+ | "["; items = LIST1 name; "]" ->
+ List.map (fun name -> (name.CAst.v, Vernacexpr.Implicit)) items
+ | "{"; items = LIST1 name; "}" ->
+ List.map (fun name -> (name.CAst.v, Vernacexpr.MaximallyImplicit)) items
]
];
strategy_level:
@@ -799,10 +799,10 @@ GEXTEND Gram
| IDENT "transparent" -> Conv_oracle.transparent ] ]
;
instance_name:
- [ [ name = pidentref; sup = OPT binders ->
- (let ((loc,id),l) = name in ((loc, Name id),l)),
+ [ [ name = ident_decl; sup = OPT binders ->
+ (CAst.map (fun id -> Name id) (fst name), snd name),
(Option.default [] sup)
- | -> ((!@loc, Anonymous), None), [] ] ]
+ | -> ((CAst.make ~loc:!@loc Anonymous), None), [] ] ]
;
hint_info:
[ [ "|"; i = OPT natural; pat = OPT constr_pattern ->
@@ -822,12 +822,19 @@ GEXTEND Gram
END
GEXTEND Gram
- GLOBAL: command query_command class_rawexpr;
+ GLOBAL: command query_command class_rawexpr gallina_ext;
+
+ gallina_ext:
+ [ [ IDENT "Export"; "Set"; table = option_table; v = option_value ->
+ VernacSetOption (true, table, v)
+ | IDENT "Export"; IDENT "Unset"; table = option_table ->
+ VernacUnsetOption (true, table)
+ ] ];
command:
[ [ IDENT "Comments"; l = LIST0 comment -> VernacComments l
- (* Hack! Should be in grammar_ext, but camlp4 factorize badly *)
+ (* Hack! Should be in grammar_ext, but camlp5 factorizes badly *)
| IDENT "Declare"; IDENT "Instance"; namesup = instance_name; ":";
expl = [ "!" -> Decl_kinds.Implicit | -> Decl_kinds.Explicit ] ; t = operconstr LEVEL "200";
info = hint_info ->
@@ -871,7 +878,7 @@ GEXTEND Gram
(* Printing (careful factorization of entries) *)
| IDENT "Print"; p = printable -> VernacPrint p
- | IDENT "Print"; qid = smart_global -> VernacPrint (PrintName qid)
+ | IDENT "Print"; qid = smart_global; l = OPT univ_name_list -> VernacPrint (PrintName (qid,l))
| IDENT "Print"; IDENT "Module"; "Type"; qid = global ->
VernacPrint (PrintModuleType qid)
| IDENT "Print"; IDENT "Module"; qid = global ->
@@ -887,24 +894,9 @@ GEXTEND Gram
(* For acting on parameter tables *)
| "Set"; table = option_table; v = option_value ->
- begin match v with
- | StringValue s ->
- (* We make a special case for warnings because appending is their
- natural semantics *)
- if CString.List.equal table ["Warnings"] then
- VernacSetAppendOption (table, s)
- else
- let (last, prefix) = List.sep_last table in
- if String.equal last "Append" && not (List.is_empty prefix) then
- VernacSetAppendOption (prefix, s)
- else
- VernacSetOption (table, v)
- | _ -> VernacSetOption (table, v)
- end
- | "Set"; table = option_table ->
- VernacSetOption (table,BoolValue true)
+ VernacSetOption (false, table, v)
| IDENT "Unset"; table = option_table ->
- VernacUnsetOption table
+ VernacUnsetOption (false, table)
| IDENT "Print"; IDENT "Table"; table = option_table ->
VernacPrintOption table
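(* Standalone sketch of the new shape of Set/Unset: every option command
   now carries an export flag (true for the "Export Set"/"Export Unset"
   forms added to gallina_ext above), and a bare "Set Foo" obtains the
   value BoolValue true through the new empty production of option_value.
   Types below are local stand-ins for the Vernacexpr ones. *)
type option_value = BoolValue of bool | IntValue of int option | StringValue of string
type option_cmd =
  | SetOption of bool * string list * option_value   (* export?, table, value *)
  | UnsetOption of bool * string list

let examples : option_cmd list = [
  SetOption (false, ["Printing"; "All"], BoolValue true);       (* Set Printing All. *)
  SetOption (true, ["Printing"; "Width"], IntValue (Some 80));  (* Export Set Printing Width 80. *)
  UnsetOption (false, ["Printing"; "All"]);                     (* Unset Printing All. *)
]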
@@ -929,33 +921,34 @@ GEXTEND Gram
VernacRemoveOption ([table], v) ]]
;
query_command: (* TODO: rapprocher Eval et Check *)
- [ [ IDENT "Eval"; r = Tactic.red_expr; "in"; c = lconstr ->
+ [ [ IDENT "Eval"; r = red_expr; "in"; c = lconstr; "." ->
fun g -> VernacCheckMayEval (Some r, g, c)
- | IDENT "Compute"; c = lconstr ->
+ | IDENT "Compute"; c = lconstr; "." ->
fun g -> VernacCheckMayEval (Some (Genredexpr.CbvVm None), g, c)
- | IDENT "Check"; c = lconstr ->
+ | IDENT "Check"; c = lconstr; "." ->
fun g -> VernacCheckMayEval (None, g, c)
(* Searching the environment *)
- | IDENT "About"; qid = smart_global ->
- fun g -> VernacPrint (PrintAbout (qid,g))
- | IDENT "SearchHead"; c = constr_pattern; l = in_or_out_modules ->
+ | IDENT "About"; qid = smart_global; l = OPT univ_name_list; "." ->
+ fun g -> VernacPrint (PrintAbout (qid,l,g))
+ | IDENT "SearchHead"; c = constr_pattern; l = in_or_out_modules; "." ->
fun g -> VernacSearch (SearchHead c,g, l)
- | IDENT "SearchPattern"; c = constr_pattern; l = in_or_out_modules ->
+ | IDENT "SearchPattern"; c = constr_pattern; l = in_or_out_modules; "." ->
fun g -> VernacSearch (SearchPattern c,g, l)
- | IDENT "SearchRewrite"; c = constr_pattern; l = in_or_out_modules ->
+ | IDENT "SearchRewrite"; c = constr_pattern; l = in_or_out_modules; "." ->
fun g -> VernacSearch (SearchRewrite c,g, l)
- | IDENT "Search"; s = searchabout_query; l = searchabout_queries ->
+ | IDENT "Search"; s = searchabout_query; l = searchabout_queries; "." ->
let (sl,m) = l in fun g -> VernacSearch (SearchAbout (s::sl),g, m)
(* compatibility: SearchAbout *)
- | IDENT "SearchAbout"; s = searchabout_query; l = searchabout_queries ->
+ | IDENT "SearchAbout"; s = searchabout_query; l = searchabout_queries; "." ->
fun g -> let (sl,m) = l in VernacSearch (SearchAbout (s::sl),g, m)
(* compatibility: SearchAbout with "[ ... ]" *)
| IDENT "SearchAbout"; "["; sl = LIST1 searchabout_query; "]";
- l = in_or_out_modules -> fun g -> VernacSearch (SearchAbout sl,g, l)
+ l = in_or_out_modules; "." ->
+ fun g -> VernacSearch (SearchAbout sl,g, l)
] ]
;
printable:
- [ [ IDENT "Term"; qid = smart_global -> PrintName qid
+ [ [ IDENT "Term"; qid = smart_global; l = OPT univ_name_list -> PrintName (qid,l)
| IDENT "All" -> PrintFullContext
| IDENT "Section"; s = global -> PrintSectionContext s
| IDENT "Grammar"; ent = IDENT ->
@@ -963,7 +956,7 @@ GEXTEND Gram
PrintGrammar ent
| IDENT "LoadPath"; dir = OPT dirpath -> PrintLoadPath dir
| IDENT "Modules" ->
- error "Print Modules is obsolete; use Print Libraries instead"
+ user_err Pp.(str "Print Modules is obsolete; use Print Libraries instead")
| IDENT "Libraries" -> PrintModules
| IDENT "ML"; IDENT "Path" -> PrintMLLoadPath
@@ -1006,11 +999,11 @@ GEXTEND Gram
| IDENT "Term"; qid = smart_global -> LocateTerm qid
| IDENT "File"; f = ne_string -> LocateFile f
| IDENT "Library"; qid = global -> LocateLibrary qid
- | IDENT "Module"; qid = global -> LocateModule qid
- | IDENT "Ltac"; qid = global -> LocateTactic qid ] ]
+ | IDENT "Module"; qid = global -> LocateModule qid ] ]
;
option_value:
- [ [ n = integer -> IntValue (Some n)
+ [ [ -> BoolValue true
+ | n = integer -> IntValue (Some n)
| s = STRING -> StringValue s ] ]
;
option_ref_value:
@@ -1056,6 +1049,9 @@ GEXTEND Gram
| -> ([],SearchOutside [])
] ]
;
+ univ_name_list:
+ [ [ "@{" ; l = LIST0 name; "}" -> l ] ]
+ ;
END;
GEXTEND Gram
@@ -1078,15 +1074,15 @@ GEXTEND Gram
(* Tactic Debugger *)
| IDENT "Debug"; IDENT "On" ->
- VernacSetOption (["Ltac";"Debug"], BoolValue true)
+ VernacSetOption (false, ["Ltac";"Debug"], BoolValue true)
| IDENT "Debug"; IDENT "Off" ->
- VernacSetOption (["Ltac";"Debug"], BoolValue false)
+ VernacSetOption (false, ["Ltac";"Debug"], BoolValue false)
(* registration of a custom reduction *)
| IDENT "Declare"; IDENT "Reduction"; s = IDENT; ":=";
- r = Tactic.red_expr ->
+ r = red_expr ->
VernacDeclareReduction (s,r)
] ];
@@ -1099,11 +1095,11 @@ GEXTEND Gram
GLOBAL: syntax;
syntax:
- [ [ IDENT "Open"; local = obsolete_locality; IDENT "Scope"; sc = IDENT ->
- VernacOpenCloseScope (local,(true,sc))
+ [ [ IDENT "Open"; IDENT "Scope"; sc = IDENT ->
+ VernacOpenCloseScope (true,sc)
- | IDENT "Close"; local = obsolete_locality; IDENT "Scope"; sc = IDENT ->
- VernacOpenCloseScope (local,(false,sc))
+ | IDENT "Close"; IDENT "Scope"; sc = IDENT ->
+ VernacOpenCloseScope (false,sc)
| IDENT "Delimit"; IDENT "Scope"; sc = IDENT; "with"; key = IDENT ->
VernacDelimiters (sc, Some key)
@@ -1113,33 +1109,31 @@ GEXTEND Gram
| IDENT "Bind"; IDENT "Scope"; sc = IDENT; "with";
refl = LIST1 class_rawexpr -> VernacBindScope (sc,refl)
- | IDENT "Infix"; local = obsolete_locality;
- op = ne_lstring; ":="; p = constr;
+ | IDENT "Infix"; op = ne_lstring; ":="; p = constr;
modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ];
sc = OPT [ ":"; sc = IDENT -> sc ] ->
- VernacInfix (local,(op,modl),p,sc)
- | IDENT "Notation"; local = obsolete_locality; id = identref;
+ VernacInfix ((op,modl),p,sc)
+ | IDENT "Notation"; id = identref;
idl = LIST0 ident; ":="; c = constr; b = only_parsing ->
VernacSyntacticDefinition
- (id,(idl,c),local,b)
- | IDENT "Notation"; local = obsolete_locality; s = ne_lstring; ":=";
+ (id,(idl,c),b)
+ | IDENT "Notation"; s = lstring; ":=";
c = constr;
modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ];
sc = OPT [ ":"; sc = IDENT -> sc ] ->
- VernacNotation (local,c,(s,modl),sc)
+ VernacNotation (c,(s,modl),sc)
| IDENT "Format"; IDENT "Notation"; n = STRING; s = STRING; fmt = STRING ->
VernacNotationAddFormat (n,s,fmt)
| IDENT "Reserved"; IDENT "Infix"; s = ne_lstring;
l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ] ->
- Metasyntax.check_infix_modifiers l;
- let (loc,s) = s in
- VernacSyntaxExtension (false,((loc,"x '"^s^"' y"),l))
+ let s = CAst.map (fun s -> "x '"^s^"' y") s in
+ VernacSyntaxExtension (true,(s,l))
- | IDENT "Reserved"; IDENT "Notation"; local = obsolete_locality;
+ | IDENT "Reserved"; IDENT "Notation";
s = ne_lstring;
l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ]
- -> VernacSyntaxExtension (local,(s,l))
+ -> VernacSyntaxExtension (false, (s,l))
(* "Print" "Grammar" should be here but is in "command" entry in order
to factorize with other "Print"-based vernac entries *)
@@ -1149,12 +1143,9 @@ GEXTEND Gram
[ [ "("; IDENT "only"; IDENT "parsing"; ")" ->
Some Flags.Current
| "("; IDENT "compat"; s = STRING; ")" ->
- Some (Coqinit.get_compat_version s)
+ Some (parse_compat_version s)
| -> None ] ]
;
- obsolete_locality:
- [ [ IDENT "Local" -> true | -> false ] ]
- ;
level:
[ [ IDENT "level"; n = natural -> NumLevel n
| IDENT "next"; IDENT "level" -> NextLevel ] ]
@@ -1167,15 +1158,17 @@ GEXTEND Gram
| IDENT "only"; IDENT "printing" -> SetOnlyPrinting
| IDENT "only"; IDENT "parsing" -> SetOnlyParsing
| IDENT "compat"; s = STRING ->
- SetCompatVersion (Coqinit.get_compat_version s)
- | IDENT "format"; s1 = [s = STRING -> (!@loc,s)];
- s2 = OPT [s = STRING -> (!@loc,s)] ->
+ SetCompatVersion (parse_compat_version s)
+ | IDENT "format"; s1 = [s = STRING -> CAst.make ~loc:!@loc s];
+ s2 = OPT [s = STRING -> CAst.make ~loc:!@loc s] ->
begin match s1, s2 with
- | (_,k), Some s -> SetFormat(k,s)
+ | { CAst.v = k }, Some s -> SetFormat(k,s)
| s, None -> SetFormat ("text",s) end
| x = IDENT; ","; l = LIST1 [id = IDENT -> id ] SEP ","; "at";
lev = level -> SetItemLevel (x::l,lev)
| x = IDENT; "at"; lev = level -> SetItemLevel ([x],lev)
+ | x = IDENT; "at"; lev = level; b = constr_as_binder_kind -> SetItemLevelAsBinder ([x],b,Some lev)
+ | x = IDENT; b = constr_as_binder_kind -> SetItemLevelAsBinder ([x],b,None)
| x = IDENT; typ = syntax_extension_type -> SetEntryType (x,typ)
] ]
;
@@ -1183,7 +1176,20 @@ GEXTEND Gram
[ [ IDENT "ident" -> ETName | IDENT "global" -> ETReference
| IDENT "bigint" -> ETBigint
| IDENT "binder" -> ETBinder true
+ | IDENT "constr"; n = OPT at_level; b = constr_as_binder_kind -> ETConstrAsBinder (b,n)
+ | IDENT "pattern" -> ETPattern (false,None)
+ | IDENT "pattern"; "at"; IDENT "level"; n = natural -> ETPattern (false,Some n)
+ | IDENT "strict"; IDENT "pattern" -> ETPattern (true,None)
+ | IDENT "strict"; IDENT "pattern"; "at"; IDENT "level"; n = natural -> ETPattern (true,Some n)
| IDENT "closed"; IDENT "binder" -> ETBinder false
] ]
;
+ at_level:
+ [ [ "at"; n = level -> n ] ]
+ ;
+ constr_as_binder_kind:
+ [ [ "as"; IDENT "ident" -> AsIdent
+ | "as"; IDENT "pattern" -> AsIdentOrPattern
+ | "as"; IDENT "strict"; IDENT "pattern" -> AsStrictPattern ] ]
+ ;
END
diff --git a/parsing/highparsing.mllib b/parsing/highparsing.mllib
deleted file mode 100644
index 8df519b5..00000000
--- a/parsing/highparsing.mllib
+++ /dev/null
@@ -1,5 +0,0 @@
-G_constr
-G_vernac
-G_prim
-G_proofs
-G_tactic
diff --git a/parsing/parsing.mllib b/parsing/parsing.mllib
index 0e1c79c9..1f29636b 100644
--- a/parsing/parsing.mllib
+++ b/parsing/parsing.mllib
@@ -1,6 +1,9 @@
Tok
-Compat
CLexer
Pcoq
Egramml
Egramcoq
+G_constr
+G_vernac
+G_prim
+G_proofs
diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml
index 7dc02190..258c4bb1 100644
--- a/parsing/pcoq.ml
+++ b/parsing/pcoq.ml
@@ -1,46 +1,211 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
-open Pp
-open Compat
open CErrors
open Util
open Extend
open Genarg
+let curry f x y = f (x, y)
+let uncurry f (x,y) = f x y
+
+(** Location Utils *)
+let coq_file_of_ploc_file s =
+ if s = "" then Loc.ToplevelInput else Loc.InFile s
+
+let to_coqloc loc =
+ { Loc.fname = coq_file_of_ploc_file (Ploc.file_name loc);
+ Loc.line_nb = Ploc.line_nb loc;
+ Loc.bol_pos = Ploc.bol_pos loc;
+ Loc.bp = Ploc.first_pos loc;
+ Loc.ep = Ploc.last_pos loc;
+ Loc.line_nb_last = Ploc.line_nb_last loc;
+ Loc.bol_pos_last = Ploc.bol_pos_last loc; }
+
+let (!@) = to_coqloc
+
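(* Standalone sketch of the file-name convention used by the conversion
   functions above: camlp5 locations store the file as a plain string,
   with "" standing for toplevel input, while Coq locations use an
   explicit source type.  The [source] type below is a local stand-in. *)
type source = ToplevelInput | InFile of string

let ploc_file_of_source = function
  | ToplevelInput -> ""
  | InFile f -> f

let source_of_ploc_file s =
  if s = "" then ToplevelInput else InFile s

let () =
  (* the two directions are mutually inverse on these inputs *)
  assert (source_of_ploc_file (ploc_file_of_source (InFile "foo.v")) = InFile "foo.v");
  assert (source_of_ploc_file (ploc_file_of_source ToplevelInput) = ToplevelInput)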
(** The parser of Coq *)
+module G : sig
+
+ include Grammar.S with type te = Tok.t
+
+(* where Grammar.S
+
+module type S =
+ sig
+ type te = 'x;
+ type parsable = 'x;
+ value parsable : Stream.t char -> parsable;
+ value tokens : string -> list (string * int);
+ value glexer : Plexing.lexer te;
+ value set_algorithm : parse_algorithm -> unit;
+ module Entry :
+ sig
+ type e 'a = 'y;
+ value create : string -> e 'a;
+ value parse : e 'a -> parsable -> 'a;
+ value parse_token : e 'a -> Stream.t te -> 'a;
+ value name : e 'a -> string;
+ value of_parser : string -> (Stream.t te -> 'a) -> e 'a;
+ value print : Format.formatter -> e 'a -> unit;
+ external obj : e 'a -> Gramext.g_entry te = "%identity";
+ end
+ ;
+ module Unsafe :
+ sig
+ value gram_reinit : Plexing.lexer te -> unit;
+ value clear_entry : Entry.e 'a -> unit;
+ end
+ ;
+ value extend :
+ Entry.e 'a -> option Gramext.position ->
+ list
+ (option string * option Gramext.g_assoc *
+ list (list (Gramext.g_symbol te) * Gramext.g_action)) ->
+ unit;
+ value delete_rule : Entry.e 'a -> list (Gramext.g_symbol te) -> unit;
+ end
+ *)
+
+ type 'a entry = 'a Entry.e
+ type internal_entry = Tok.t Gramext.g_entry
+ type symbol = Tok.t Gramext.g_symbol
+ type action = Gramext.g_action
+ type production_rule = symbol list * action
+ type single_extend_statment =
+ string option * Gramext.g_assoc option * production_rule list
+ type extend_statment =
+ Gramext.position option * single_extend_statment list
+ type coq_parsable
+
+ val parsable : ?file:Loc.source -> char Stream.t -> coq_parsable
+ val action : 'a -> action
+ val entry_create : string -> 'a entry
+ val entry_parse : 'a entry -> coq_parsable -> 'a
+ val entry_print : Format.formatter -> 'a entry -> unit
+
+ val comment_state : coq_parsable -> ((int * int) * string) list
+
+ val srules' : production_rule list -> symbol
+ val parse_tokens_after_filter : 'a entry -> Tok.t Stream.t -> 'a
+
+end with type 'a Entry.e = 'a Grammar.GMake(CLexer).Entry.e = struct
+
+ include Grammar.GMake(CLexer)
+
+ type 'a entry = 'a Entry.e
+ type internal_entry = Tok.t Gramext.g_entry
+ type symbol = Tok.t Gramext.g_symbol
+ type action = Gramext.g_action
+ type production_rule = symbol list * action
+ type single_extend_statment =
+ string option * Gramext.g_assoc option * production_rule list
+ type extend_statment =
+ Gramext.position option * single_extend_statment list
+
+ type coq_parsable = parsable * CLexer.lexer_state ref
+
+ let parsable ?(file=Loc.ToplevelInput) c =
+ let state = ref (CLexer.init_lexer_state file) in
+ CLexer.set_lexer_state !state;
+ let a = parsable c in
+ state := CLexer.get_lexer_state ();
+ (a,state)
+
+ let action = Gramext.action
+ let entry_create = Entry.create
+
+ let entry_parse e (p,state) =
+ CLexer.set_lexer_state !state;
+ try
+ let c = Entry.parse e p in
+ state := CLexer.get_lexer_state ();
+ c
+ with Ploc.Exc (loc,e) ->
+ CLexer.drop_lexer_state ();
+ let loc' = Loc.get_loc (Exninfo.info e) in
+ let loc = match loc' with None -> to_coqloc loc | Some loc -> loc in
+ Loc.raise ~loc e
+
+ let comment_state (p,state) =
+ CLexer.get_comment_state !state
+
+ let entry_print ft x = Entry.print ft x
+
+ (* Not used *)
+ let srules' = Gramext.srules
+ let parse_tokens_after_filter = Entry.parse_token
+
+end
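(* A standalone sketch (not the real CLexer API) of the discipline used by
   entry_parse above: each coq_parsable carries its own lexer state in a
   ref; parsing installs that state into the global, mutable lexer, runs,
   and writes the possibly-updated state back, dropping it instead when
   parsing fails.  A plain int ref stands in for the lexer state here. *)
let global_lexer_state = ref 0

type parsable = { text : string; saved_state : int ref }

let make_parsable text = { text; saved_state = ref !global_lexer_state }

let parse_with (run : string -> 'a) (p : parsable) : 'a =
  global_lexer_state := !(p.saved_state);        (* install the saved state *)
  match run p.text with
  | result ->
      p.saved_state := !global_lexer_state;      (* keep the updated state *)
      result
  | exception e ->
      (* on failure the updated state is simply dropped *)
      raise e

let () =
  let p = make_parsable "Lemma foo." in
  let n = parse_with (fun s -> incr global_lexer_state; String.length s) p in
  assert (n = 10 && !(p.saved_state) = 1)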
-module G = GrammarMake (CLexer)
-let warning_verbose = Compat.warning_verbose
+let warning_verbose = Gramext.warning_verbose
let of_coq_assoc = function
-| Extend.RightA -> CompatGramext.RightA
-| Extend.LeftA -> CompatGramext.LeftA
-| Extend.NonA -> CompatGramext.NonA
+| Extend.RightA -> Gramext.RightA
+| Extend.LeftA -> Gramext.LeftA
+| Extend.NonA -> Gramext.NonA
let of_coq_position = function
-| Extend.First -> CompatGramext.First
-| Extend.Last -> CompatGramext.Last
-| Extend.Before s -> CompatGramext.Before s
-| Extend.After s -> CompatGramext.After s
-| Extend.Level s -> CompatGramext.Level s
+| Extend.First -> Gramext.First
+| Extend.Last -> Gramext.Last
+| Extend.Before s -> Gramext.Before s
+| Extend.After s -> Gramext.After s
+| Extend.Level s -> Gramext.Level s
+
+module Symbols : sig
+ val stoken : Tok.t -> G.symbol
+ val sself : G.symbol
+ val snext : G.symbol
+ val slist0 : G.symbol -> G.symbol
+ val slist0sep : G.symbol * G.symbol -> G.symbol
+ val slist1 : G.symbol -> G.symbol
+ val slist1sep : G.symbol * G.symbol -> G.symbol
+ val sopt : G.symbol -> G.symbol
+ val snterml : G.internal_entry * string -> G.symbol
+ val snterm : G.internal_entry -> G.symbol
+end = struct
+
+ let stoken tok =
+ let pattern = match tok with
+ | Tok.KEYWORD s -> "", s
+ | Tok.IDENT s -> "IDENT", s
+ | Tok.PATTERNIDENT s -> "PATTERNIDENT", s
+ | Tok.FIELD s -> "FIELD", s
+ | Tok.INT s -> "INT", s
+ | Tok.STRING s -> "STRING", s
+ | Tok.LEFTQMARK -> "LEFTQMARK", ""
+ | Tok.BULLET s -> "BULLET", s
+ | Tok.EOI -> "EOI", ""
+ in
+ Gramext.Stoken pattern
-module Symbols = GramextMake(G)
+ let slist0sep (x, y) = Gramext.Slist0sep (x, y, false)
+ let slist1sep (x, y) = Gramext.Slist1sep (x, y, false)
-let camlp4_verbosity silent f x =
+ let snterml (x, y) = Gramext.Snterml (x, y)
+ let snterm x = Gramext.Snterm x
+ let sself = Gramext.Sself
+ let snext = Gramext.Snext
+ let slist0 x = Gramext.Slist0 x
+ let slist1 x = Gramext.Slist1 x
+ let sopt x = Gramext.Sopt x
+
+end
+
+let camlp5_verbosity silent f x =
let a = !warning_verbose in
warning_verbose := silent;
f x;
warning_verbose := a
-let camlp4_verbose f x = camlp4_verbosity (Flags.is_verbose ()) f x
-
(** Grammar extensions *)
(** NB: [extend_statment =
@@ -55,7 +220,7 @@ let camlp4_verbose f x = camlp4_verbosity (Flags.is_verbose ()) f x
(** Binding general entry keys to symbol *)
let rec of_coq_action : type a r. (r, a, Loc.t -> r) Extend.rule -> a -> G.action = function
-| Stop -> fun f -> G.action (fun loc -> f (to_coqloc loc))
+| Stop -> fun f -> G.action (fun loc -> f (!@ loc))
| Next (r, _) -> fun f -> G.action (fun x -> of_coq_action r (f x))
let rec symbol_of_prod_entry_key : type s a. (s, a) symbol -> _ = function
@@ -107,7 +272,7 @@ type ext_kind =
(** The list of extensions *)
-let camlp4_state = ref []
+let camlp5_state = ref []
(** Deletion *)
@@ -121,10 +286,10 @@ let grammar_delete e reinit (pos,rls) =
let a = of_coq_assoc a in
let ext = of_coq_position ext in
let lev = match pos with
- | Some (CompatGramext.Level n) -> n
+ | Some (Gramext.Level n) -> n
| _ -> assert false
in
- maybe_uncurry (G.extend e) (Some ext, [Some lev,Some a,[]])
+ (G.extend e) (Some ext) [Some lev,Some a,[]]
| None -> ()
(** Extension *)
@@ -132,13 +297,13 @@ let grammar_delete e reinit (pos,rls) =
let grammar_extend e reinit ext =
let ext = of_coq_extend_statement ext in
let undo () = grammar_delete e reinit ext in
- let redo () = camlp4_verbosity false (maybe_uncurry (G.extend e)) ext in
- camlp4_state := ByEXTEND (undo, redo) :: !camlp4_state;
+ let redo () = camlp5_verbosity false (uncurry (G.extend e)) ext in
+ camlp5_state := ByEXTEND (undo, redo) :: !camlp5_state;
redo ()
let grammar_extend_sync e reinit ext =
- camlp4_state := ByGrammar (ExtendRule (e, reinit, ext)) :: !camlp4_state;
- camlp4_verbosity false (maybe_uncurry (G.extend e)) (of_coq_extend_statement ext)
+ camlp5_state := ByGrammar (ExtendRule (e, reinit, ext)) :: !camlp5_state;
+ camlp5_verbosity false (uncurry (G.extend e)) (of_coq_extend_statement ext)
(** The apparent parser of Coq; encapsulate G to keep track
of the extensions. *)
@@ -147,21 +312,21 @@ module Gram =
struct
include G
let extend e =
- maybe_curry
- (fun ext ->
- camlp4_state :=
- (ByEXTEND ((fun () -> grammar_delete e None ext),
- (fun () -> maybe_uncurry (G.extend e) ext)))
- :: !camlp4_state;
- maybe_uncurry (G.extend e) ext)
+ curry
+ (fun ext ->
+ camlp5_state :=
+ (ByEXTEND ((fun () -> grammar_delete e None ext),
+ (fun () -> uncurry (G.extend e) ext)))
+ :: !camlp5_state;
+ uncurry (G.extend e) ext)
let delete_rule e pil =
(* spiwack: if you load an ML module which contains GDELETE_RULE
- in a section, God kills a kitty. As it would corrupt remove_grammars.
+ in a section, God kills a kitty. As it would corrupt remove_grammars.
There does not seem to be a good way to undo a delete rule. As deleting
- takes fewer arguments than extending. The production rule isn't returned
- by delete_rule. If we could retrieve the necessary information, then
- ByEXTEND provides just the framework we need to allow this in section.
- I'm not entirely sure it makes sense, but at least it would be more correct.
+ takes fewer arguments than extending. The production rule isn't returned
+ by delete_rule. If we could retrieve the necessary information, then
+ ByEXTEND provides just the framework we need to allow this in section.
+ I'm not entirely sure it makes sense, but at least it would be more correct.
*)
G.delete_rule e pil
end
@@ -173,18 +338,18 @@ module Gram =
let rec remove_grammars n =
if n>0 then
- (match !camlp4_state with
- | [] -> anomaly ~label:"Pcoq.remove_grammars" (Pp.str "too many rules to remove")
+ (match !camlp5_state with
+ | [] -> anomaly ~label:"Pcoq.remove_grammars" (Pp.str "too many rules to remove.")
| ByGrammar (ExtendRule (g, reinit, ext)) :: t ->
grammar_delete g reinit (of_coq_extend_statement ext);
- camlp4_state := t;
+ camlp5_state := t;
remove_grammars (n-1)
| ByEXTEND (undo,redo)::t ->
undo();
- camlp4_state := t;
+ camlp5_state := t;
remove_grammars n;
redo();
- camlp4_state := ByEXTEND (undo,redo) :: !camlp4_state)
+ camlp5_state := ByEXTEND (undo,redo) :: !camlp5_state)
let make_rule r = [None, None, r]
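(* Standalone sketch of the bookkeeping behind camlp5_state and
   remove_grammars above: synchronized extensions (ByGrammar) are popped
   and undone one by one, while unsynchronized ones (ByEXTEND) met on the
   way are undone, skipped, and redone afterwards, so that exactly the n
   requested grammar rules disappear.  Everything here is a local model,
   not the real Pcoq code. *)
type ext =
  | ByGrammar of (unit -> unit)                   (* how to undo this rule *)
  | ByEXTEND of (unit -> unit) * (unit -> unit)   (* undo, redo *)

let state : ext list ref = ref []

let rec remove_grammars n =
  if n > 0 then
    match !state with
    | [] -> failwith "too many rules to remove"
    | ByGrammar undo :: t ->
        undo (); state := t; remove_grammars (n - 1)
    | ByEXTEND (undo, redo) :: t ->
        undo (); state := t;
        remove_grammars n;                (* keep looking for ByGrammar entries *)
        redo (); state := ByEXTEND (undo, redo) :: !state

let () =
  let log = ref [] in
  let push s () = log := s :: !log in
  state := [ByEXTEND (push "undo-e", push "redo-e"); ByGrammar (push "undo-g")];
  remove_grammars 1;
  assert (List.rev !log = ["undo-e"; "undo-g"; "redo-e"])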
@@ -194,14 +359,14 @@ let eoi_entry en =
let e = Gram.entry_create ((Gram.Entry.name en) ^ "_eoi") in
let symbs = [Symbols.snterm (Gram.Entry.obj en); Symbols.stoken Tok.EOI] in
let act = Gram.action (fun _ x loc -> x) in
- maybe_uncurry (Gram.extend e) (None, make_rule [symbs, act]);
+ uncurry (Gram.extend e) (None, make_rule [symbs, act]);
e
let map_entry f en =
let e = Gram.entry_create ((Gram.Entry.name en) ^ "_map") in
let symbs = [Symbols.snterm (Gram.Entry.obj en)] in
let act = Gram.action (fun x loc -> f x) in
- maybe_uncurry (Gram.extend e) (None, make_rule [symbs, act]);
+ uncurry (Gram.extend e) (None, make_rule [symbs, act]);
e
(* Parse a string, does NOT check if the entire string was read
@@ -267,6 +432,7 @@ module Prim =
let integer = gec_gen "integer"
let bigint = Gram.entry_create "Prim.bigint"
let string = gec_gen "string"
+ let lstring = Gram.entry_create "Prim.lstring"
let reference = make_gen_entry uprim "reference"
let by_notation = Gram.entry_create "by_notation"
let smart_global = Gram.entry_create "smart_global"
@@ -276,7 +442,8 @@ module Prim =
let name = Gram.entry_create "Prim.name"
let identref = Gram.entry_create "Prim.identref"
- let pidentref = Gram.entry_create "Prim.pidentref"
+ let univ_decl = Gram.entry_create "Prim.univ_decl"
+ let ident_decl = Gram.entry_create "Prim.ident_decl"
let pattern_ident = Gram.entry_create "pattern_ident"
let pattern_identref = Gram.entry_create "pattern_identref"
@@ -306,6 +473,7 @@ module Constr =
let global = make_gen_entry uconstr "global"
let universe_level = make_gen_entry uconstr "universe_level"
let sort = make_gen_entry uconstr "sort"
+ let sort_family = make_gen_entry uconstr "sort_family"
let pattern = Gram.entry_create "constr:pattern"
let constr_pattern = gec_constr "constr_pattern"
let lconstr_pattern = gec_constr "lconstr_pattern"
@@ -325,48 +493,6 @@ module Module =
let module_type = Gram.entry_create "module_type"
end
-module Tactic =
- struct
- (* Main entry for extensions *)
- let simple_tactic = Gram.entry_create "tactic:simple_tactic"
-
- (* Entries that can be referred via the string -> Gram.entry table *)
- (* Typically for tactic user extensions *)
- let open_constr =
- make_gen_entry utactic "open_constr"
- let constr_with_bindings =
- make_gen_entry utactic "constr_with_bindings"
- let bindings =
- make_gen_entry utactic "bindings"
- let hypident = Gram.entry_create "hypident"
- let constr_may_eval = make_gen_entry utactic "constr_may_eval"
- let constr_eval = make_gen_entry utactic "constr_eval"
- let uconstr =
- make_gen_entry utactic "uconstr"
- let quantified_hypothesis =
- make_gen_entry utactic "quantified_hypothesis"
- let destruction_arg = make_gen_entry utactic "destruction_arg"
- let int_or_var = make_gen_entry utactic "int_or_var"
- let red_expr = make_gen_entry utactic "red_expr"
- let simple_intropattern =
- make_gen_entry utactic "simple_intropattern"
- let in_clause = make_gen_entry utactic "in_clause"
- let clause_dft_concl =
- make_gen_entry utactic "clause"
-
-
- (* Main entries for ltac *)
- let tactic_arg = Gram.entry_create "tactic:tactic_arg"
- let tactic_expr = make_gen_entry utactic "tactic_expr"
- let binder_tactic = make_gen_entry utactic "binder_tactic"
-
- let tactic = make_gen_entry utactic "tactic"
-
- (* Main entry for quotations *)
- let tactic_eoi = eoi_entry tactic
-
- end
-
module Vernac_ =
struct
let gec_vernac s = Gram.entry_create ("vernac:" ^ s)
@@ -376,22 +502,22 @@ module Vernac_ =
let gallina_ext = gec_vernac "gallina_ext"
let command = gec_vernac "command"
let syntax = gec_vernac "syntax_command"
- let vernac = gec_vernac "Vernac.vernac"
- let vernac_eoi = eoi_entry vernac
+ let vernac_control = gec_vernac "Vernac.vernac_control"
let rec_definition = gec_vernac "Vernac.rec_definition"
+ let red_expr = make_gen_entry utactic "red_expr"
let hint_info = gec_vernac "hint_info"
(* Main vernac entry *)
let main_entry = Gram.entry_create "vernac"
let noedit_mode = gec_vernac "noedit_command"
let () =
- let act_vernac = Gram.action (fun v loc -> Some (!@loc, v)) in
+ let act_vernac = Gram.action (fun v loc -> Some (to_coqloc loc, v)) in
let act_eoi = Gram.action (fun _ loc -> None) in
let rule = [
([ Symbols.stoken Tok.EOI ], act_eoi);
- ([ Symbols.snterm (Gram.Entry.obj vernac) ], act_vernac );
+ ([ Symbols.snterm (Gram.Entry.obj vernac_control) ], act_vernac );
] in
- maybe_uncurry (Gram.extend main_entry) (None, make_rule rule)
+ uncurry (Gram.extend main_entry) (None, make_rule rule)
let command_entry_ref = ref noedit_mode
let command_entry =
@@ -409,16 +535,16 @@ let epsilon_value f e =
let r = Rule (Next (Stop, e), fun x _ -> f x) in
let ext = of_coq_extend_statement (None, [None, None, [r]]) in
let entry = G.entry_create "epsilon" in
- let () = maybe_uncurry (G.extend entry) ext in
+ let () = uncurry (G.extend entry) ext in
try Some (parse_string entry "") with _ -> None
(** Synchronized grammar extensions *)
-module GramState = Store.Make(struct end)
+module GramState = Store.Make ()
type 'a grammar_extension = 'a -> GramState.t -> extend_rule list * GramState.t
-module GrammarCommand = Dyn.Make(struct end)
+module GrammarCommand = Dyn.Make ()
module GrammarInterp = struct type 'a t = 'a grammar_extension end
module GrammarInterpMap = GrammarCommand.Map(GrammarInterp)
@@ -458,9 +584,9 @@ let extend_dyn_grammar (GrammarCommand.Dyn (tag, g)) = extend_grammar_command ta
(* Summary functions: the state of the lexer is included in that of the parser.
Because the grammar affects the set of keywords when adding or removing
grammar rules. *)
-type frozen_t = (int * GrammarCommand.t * GramState.t) list * CLexer.frozen_t
+type frozen_t = (int * GrammarCommand.t * GramState.t) list * CLexer.keyword_state
-let freeze _ : frozen_t = (!grammar_stack, CLexer.freeze ())
+let freeze _ : frozen_t = (!grammar_stack, CLexer.get_keyword_state ())
(* We compare the current state of the grammar and the state to unfreeze,
by computing the longest common suffixes *)
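(* Standalone sketch of the "longest common suffix" computation mentioned
   in the comment above: comparing the current stack of extensions with
   the frozen one, the shared tail is kept, the extra head of the current
   stack is undone, and the extra head of the frozen stack is replayed.
   Plain strings stand in for grammar extensions here. *)
let longest_common_suffix l1 l2 =
  let rec go acc = function
    | x :: xs, y :: ys when x = y -> go (x :: acc) (xs, ys)
    | _ -> acc
  in
  go [] (List.rev l1, List.rev l2)

(* Split a stack into (head to undo or redo, shared suffix). *)
let split_against common stack =
  let n = List.length stack - List.length common in
  (List.filteri (fun i _ -> i < n) stack, common)

let () =
  let current = ["c"; "b"; "a"] and frozen = ["d"; "b"; "a"] in
  let common = longest_common_suffix current frozen in
  assert (common = ["b"; "a"]);
  assert (split_against common current = (["c"], ["b"; "a"]));
  assert (split_against common frozen = (["d"], ["b"; "a"]))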
@@ -475,7 +601,7 @@ let unfreeze (grams, lex) =
let n = number_of_entries undo in
remove_grammars n;
grammar_stack := common;
- CLexer.unfreeze lex;
+ CLexer.set_keyword_state lex;
List.iter extend_dyn_grammar (List.rev_map pi2 redo)
(** No need to provide an init function : the grammar state is
@@ -483,8 +609,8 @@ let unfreeze (grams, lex) =
the lexer state should not be reset, since it contains
keywords declared in g_*.ml4 *)
-let _ =
- Summary.declare_summary "GRAMMAR_LEXER"
+let parser_summary_tag =
+ Summary.declare_summary_tag "GRAMMAR_LEXER"
{ Summary.freeze_function = freeze;
Summary.unfreeze_function = unfreeze;
Summary.init_function = Summary.nop }
@@ -501,27 +627,25 @@ let with_grammar_rule_protection f x =
let () =
let open Stdarg in
- let open Constrarg in
-(* Grammar.register0 wit_unit; *)
-(* Grammar.register0 wit_bool; *)
Grammar.register0 wit_int (Prim.integer);
Grammar.register0 wit_string (Prim.string);
Grammar.register0 wit_pre_ident (Prim.preident);
- Grammar.register0 wit_int_or_var (Tactic.int_or_var);
- Grammar.register0 wit_intro_pattern (Tactic.simple_intropattern);
Grammar.register0 wit_ident (Prim.ident);
Grammar.register0 wit_var (Prim.var);
Grammar.register0 wit_ref (Prim.reference);
- Grammar.register0 wit_quant_hyp (Tactic.quantified_hypothesis);
+ Grammar.register0 wit_sort_family (Constr.sort_family);
Grammar.register0 wit_constr (Constr.constr);
- Grammar.register0 wit_uconstr (Tactic.uconstr);
- Grammar.register0 wit_open_constr (Tactic.open_constr);
- Grammar.register0 wit_constr_with_bindings (Tactic.constr_with_bindings);
- Grammar.register0 wit_bindings (Tactic.bindings);
-(* Grammar.register0 wit_hyp_location_flag; *)
- Grammar.register0 wit_red_expr (Tactic.red_expr);
- Grammar.register0 wit_tactic (Tactic.tactic);
- Grammar.register0 wit_ltac (Tactic.tactic);
- Grammar.register0 wit_clause_dft_concl (Tactic.clause_dft_concl);
- Grammar.register0 wit_destruction_arg (Tactic.destruction_arg);
+ Grammar.register0 wit_red_expr (Vernac_.red_expr);
()
+
+(** Registering extra grammar *)
+
+type any_entry = AnyEntry : 'a Gram.entry -> any_entry
+
+let grammar_names : any_entry list String.Map.t ref = ref String.Map.empty
+
+let register_grammars_by_name name grams =
+ grammar_names := String.Map.add name grams !grammar_names
+
+let find_grammars_by_name name =
+ String.Map.find name !grammar_names
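(* Standalone sketch of the registration scheme introduced at the end of
   pcoq.ml above: entries of arbitrary result type are hidden behind an
   existential wrapper and stored in a string-indexed map so they can be
   looked up by name.  ['a entry] below is a dummy stand-in for
   'a Gram.entry, and the stdlib Map replaces Coq's String.Map. *)
module StringMap = Map.Make (String)

type 'a entry = { entry_name : string }
type any_entry = AnyEntry : 'a entry -> any_entry

let grammar_names : any_entry list StringMap.t ref = ref StringMap.empty

let register_grammars_by_name name grams =
  grammar_names := StringMap.add name grams !grammar_names

let find_grammars_by_name name =
  StringMap.find name !grammar_names

let () =
  let constr : unit entry = { entry_name = "constr" } in
  register_grammars_by_name "constr" [AnyEntry constr];
  match find_grammars_by_name "constr" with
  | [AnyEntry e] -> assert (e.entry_name = "constr")
  | _ -> assert false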
diff --git a/parsing/pcoq.mli b/parsing/pcoq.mli
index 37165f6c..9f186224 100644
--- a/parsing/pcoq.mli
+++ b/parsing/pcoq.mli
@@ -1,25 +1,93 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
-open Loc
open Names
open Extend
open Vernacexpr
open Genarg
open Constrexpr
-open Tacexpr
open Libnames
open Misctypes
open Genredexpr
(** The parser of Coq *)
-module Gram : module type of Compat.GrammarMake(CLexer)
+module Gram : sig
+
+ include Grammar.S with type te = Tok.t
+
+(* Where Grammar.S is
+
+module type S =
+ sig
+ type te = 'x;
+ type parsable = 'x;
+ value parsable : Stream.t char -> parsable;
+ value tokens : string -> list (string * int);
+ value glexer : Plexing.lexer te;
+ value set_algorithm : parse_algorithm -> unit;
+ module Entry :
+ sig
+ type e 'a = 'y;
+ value create : string -> e 'a;
+ value parse : e 'a -> parsable -> 'a;
+ value parse_token : e 'a -> Stream.t te -> 'a;
+ value name : e 'a -> string;
+ value of_parser : string -> (Stream.t te -> 'a) -> e 'a;
+ value print : Format.formatter -> e 'a -> unit;
+ external obj : e 'a -> Gramext.g_entry te = "%identity";
+ end
+ ;
+ module Unsafe :
+ sig
+ value gram_reinit : Plexing.lexer te -> unit;
+ value clear_entry : Entry.e 'a -> unit;
+ end
+ ;
+ value extend :
+ Entry.e 'a -> option Gramext.position ->
+ list
+ (option string * option Gramext.g_assoc *
+ list (list (Gramext.g_symbol te) * Gramext.g_action)) ->
+ unit;
+ value delete_rule : Entry.e 'a -> list (Gramext.g_symbol te) -> unit;
+ end
+
+*)
+
+ type 'a entry = 'a Entry.e
+ type internal_entry = Tok.t Gramext.g_entry
+ type symbol = Tok.t Gramext.g_symbol
+ type action = Gramext.g_action
+ type production_rule = symbol list * action
+ type single_extend_statment =
+ string option * Gramext.g_assoc option * production_rule list
+ type extend_statment =
+ Gramext.position option * single_extend_statment list
+
+ type coq_parsable
+
+ val parsable : ?file:Loc.source -> char Stream.t -> coq_parsable
+ val action : 'a -> action
+ val entry_create : string -> 'a entry
+ val entry_parse : 'a entry -> coq_parsable -> 'a
+ val entry_print : Format.formatter -> 'a entry -> unit
+
+ (* Get comment parsing information from the Lexer *)
+ val comment_state : coq_parsable -> ((int * int) * string) list
+
+ (* Apparently not used *)
+ val srules' : production_rule list -> symbol
+ val parse_tokens_after_filter : 'a entry -> Tok.t Stream.t -> 'a
+
+end with type 'a Entry.e = 'a Grammar.GMake(CLexer).Entry.e
(** The parser of Coq is built from three kinds of rule declarations:
@@ -59,7 +127,7 @@ module Gram : module type of Compat.GrammarMake(CLexer)
|
| Egrammar.make_constr_prod_item
V
- Gramext.g_symbol list which is sent to camlp4
+ Gramext.g_symbol list which is sent to camlp5
For user level tactic notations, dynamic addition of new rules is
also done in several steps:
@@ -96,9 +164,9 @@ module Gram : module type of Compat.GrammarMake(CLexer)
*)
-(** Temporarily activate camlp4 verbosity *)
+(** Temporarily activate camlp5 verbosity *)
-val camlp4_verbosity : bool -> ('a -> unit) -> 'a -> unit
+val camlp5_verbosity : bool -> ('a -> unit) -> 'a -> unit
(** Parse a string *)
@@ -127,25 +195,27 @@ module Prim :
open Libnames
val preident : string Gram.entry
val ident : Id.t Gram.entry
- val name : Name.t located Gram.entry
- val identref : Id.t located Gram.entry
- val pidentref : (Id.t located * (Id.t located list) option) Gram.entry
+ val name : lname Gram.entry
+ val identref : lident Gram.entry
+ val univ_decl : universe_decl_expr Gram.entry
+ val ident_decl : ident_decl Gram.entry
val pattern_ident : Id.t Gram.entry
- val pattern_identref : Id.t located Gram.entry
+ val pattern_identref : lident Gram.entry
val base_ident : Id.t Gram.entry
val natural : int Gram.entry
- val bigint : Bigint.bigint Gram.entry
+ val bigint : Constrexpr.raw_natural_number Gram.entry
val integer : int Gram.entry
val string : string Gram.entry
- val qualid : qualid located Gram.entry
- val fullyqualid : Id.t list located Gram.entry
+ val lstring : lstring Gram.entry
+ val qualid : qualid CAst.t Gram.entry
+ val fullyqualid : Id.t list CAst.t Gram.entry
val reference : reference Gram.entry
- val by_notation : (Loc.t * string * string option) Gram.entry
+ val by_notation : (string * string option) Gram.entry
val smart_global : reference or_by_notation Gram.entry
val dirpath : DirPath.t Gram.entry
val ne_string : string Gram.entry
- val ne_lstring : string located Gram.entry
- val var : Id.t located Gram.entry
+ val ne_lstring : lstring Gram.entry
+ val var : lident Gram.entry
end
module Constr :
@@ -159,17 +229,18 @@ module Constr :
val global : reference Gram.entry
val universe_level : glob_level Gram.entry
val sort : glob_sort Gram.entry
+ val sort_family : Sorts.family Gram.entry
val pattern : cases_pattern_expr Gram.entry
val constr_pattern : constr_expr Gram.entry
val lconstr_pattern : constr_expr Gram.entry
- val closed_binder : local_binder list Gram.entry
- val binder : local_binder list Gram.entry (* closed_binder or variable *)
- val binders : local_binder list Gram.entry (* list of binder *)
- val open_binders : local_binder list Gram.entry
- val binders_fixannot : (local_binder list * (Id.t located option * recursion_order_expr)) Gram.entry
- val typeclass_constraint : (Name.t located * bool * constr_expr) Gram.entry
+ val closed_binder : local_binder_expr list Gram.entry
+ val binder : local_binder_expr list Gram.entry (* closed_binder or variable *)
+ val binders : local_binder_expr list Gram.entry (* list of binder *)
+ val open_binders : local_binder_expr list Gram.entry
+ val binders_fixannot : (local_binder_expr list * (lident option * recursion_order_expr)) Gram.entry
+ val typeclass_constraint : (lname * bool * constr_expr) Gram.entry
val record_declaration : constr_expr Gram.entry
- val appl_arg : (constr_expr * explicitation located option) Gram.entry
+ val appl_arg : (constr_expr * explicitation CAst.t option) Gram.entry
end
module Module :
@@ -178,46 +249,22 @@ module Module :
val module_type : module_ast Gram.entry
end
-module Tactic :
- sig
- val open_constr : constr_expr Gram.entry
- val constr_with_bindings : constr_expr with_bindings Gram.entry
- val bindings : constr_expr bindings Gram.entry
- val hypident : (Id.t located * Locus.hyp_location_flag) Gram.entry
- val constr_may_eval : (constr_expr,reference or_by_notation,constr_expr) may_eval Gram.entry
- val constr_eval : (constr_expr,reference or_by_notation,constr_expr) may_eval Gram.entry
- val uconstr : constr_expr Gram.entry
- val quantified_hypothesis : quantified_hypothesis Gram.entry
- val destruction_arg : constr_expr with_bindings destruction_arg Gram.entry
- val int_or_var : int or_var Gram.entry
- val red_expr : raw_red_expr Gram.entry
- val simple_tactic : raw_tactic_expr Gram.entry
- val simple_intropattern : constr_expr intro_pattern_expr located Gram.entry
- val in_clause : Names.Id.t Loc.located Locus.clause_expr Gram.entry
- val clause_dft_concl : Names.Id.t Loc.located Locus.clause_expr Gram.entry
- val tactic_arg : raw_tactic_arg Gram.entry
- val tactic_expr : raw_tactic_expr Gram.entry
- val binder_tactic : raw_tactic_expr Gram.entry
- val tactic : raw_tactic_expr Gram.entry
- val tactic_eoi : raw_tactic_expr Gram.entry
- end
-
module Vernac_ :
sig
val gallina : vernac_expr Gram.entry
val gallina_ext : vernac_expr Gram.entry
val command : vernac_expr Gram.entry
val syntax : vernac_expr Gram.entry
- val vernac : vernac_expr Gram.entry
+ val vernac_control : vernac_control Gram.entry
val rec_definition : (fixpoint_expr * decl_notation list) Gram.entry
- val vernac_eoi : vernac_expr Gram.entry
val noedit_mode : vernac_expr Gram.entry
val command_entry : vernac_expr Gram.entry
+ val red_expr : raw_red_expr Gram.entry
val hint_info : Vernacexpr.hint_info_expr Gram.entry
end
(** The main entry: reads an optional vernac command *)
-val main_entry : (Loc.t * vernac_expr) option Gram.entry
+val main_entry : (Loc.t * vernac_control) option Gram.entry
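
For illustration only (not part of the patch), a sketch of consuming main_entry; the command
text and the read_one_command name are invented.

    (* Sketch: read at most one command from a string buffer. *)
    let read_one_command () =
      let pa = Pcoq.Gram.parsable (Stream.of_string "Check nat.") in
      match Pcoq.Gram.entry_parse Pcoq.main_entry pa with
      | Some (_loc, cmd) -> Some cmd     (* one vernac_control was parsed *)
      | None -> None                     (* end of input: no further command *)
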
(** Handling of the proof mode entry *)
val get_command_entry : unit -> vernac_expr Gram.entry
@@ -265,3 +312,17 @@ val recover_grammar_command : 'a grammar_command -> 'a list
(** Recover the current stack of grammar extensions. *)
val with_grammar_rule_protection : ('a -> 'b) -> 'a -> 'b
+
+(** Location Utils *)
+val to_coqloc : Ploc.t -> Loc.t
+val (!@) : Ploc.t -> Loc.t
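
For illustration only (not part of the patch): grammar actions receive camlp5 Ploc.t
locations, which are converted to Coq's Loc.t with the helpers above; the ploc argument
here is an assumed value coming from such an action.

    (* Sketch: convert a camlp5 location to a Coq location. *)
    let to_coq_location (ploc : Ploc.t) : Loc.t =
      let open Pcoq in
      !@ ploc                 (* same as Pcoq.to_coqloc ploc *)
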
+
+type frozen_t
+val parser_summary_tag : frozen_t Summary.Dyn.tag
+
+(** Registering grammars by name *)
+
+type any_entry = AnyEntry : 'a Gram.entry -> any_entry
+
+val register_grammars_by_name : string -> any_entry list -> unit
+val find_grammars_by_name : string -> any_entry list
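
For illustration only (not part of the patch), a sketch of registering entries under a name
and recovering them later; "my_plugin" and wit_foo are invented.

    (* Hypothetical plugin entry registered under a lookup key. *)
    let wit_foo : Constrexpr.constr_expr Pcoq.Gram.entry =
      Pcoq.Gram.entry_create "my_plugin:foo"

    let () =
      Pcoq.register_grammars_by_name "my_plugin" [Pcoq.AnyEntry wit_foo]

    (* Another component can later recover the registered entries by name. *)
    let my_plugin_entries : Pcoq.any_entry list =
      Pcoq.find_grammars_by_name "my_plugin"
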
diff --git a/parsing/tok.ml b/parsing/tok.ml
index f4b60aee..91b4f25b 100644
--- a/parsing/tok.ml
+++ b/parsing/tok.ml
@@ -1,9 +1,11 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
(** The type of token for the Coq lexer and parser *)
@@ -60,7 +62,7 @@ let match_keyword kwd = function
| KEYWORD kwd' when kwd = kwd' -> true
| _ -> false
-(* Needed to fix Camlp4 signature.
+(* Needed to fix Camlp5 signature.
   Cannot use Pp because of silly Tok -> Compat -> Pp dependency *)
let print ppf tok = Format.pp_print_string ppf (to_string tok)
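
For illustration only (not part of the patch), the keyword matcher shown in the hunk above
can be exercised like this; is_lemma is an invented helper, and IDENT is another constructor
of Tok.t in the Coq sources.

    (* Sketch: match_keyword compares a keyword string against a token and
       only succeeds on KEYWORD tokens with the same payload. *)
    let is_lemma (tok : Tok.t) : bool = Tok.match_keyword "Lemma" tok

    let () =
      assert (is_lemma (Tok.KEYWORD "Lemma"));
      assert (not (is_lemma (Tok.IDENT "Lemma")))
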
diff --git a/parsing/tok.mli b/parsing/tok.mli
index b9286c53..9b8c0085 100644
--- a/parsing/tok.mli
+++ b/parsing/tok.mli
@@ -1,9 +1,11 @@
(************************************************************************)
-(* v * The Coq Proof Assistant / The Coq Development Team *)
-(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2016 *)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
(** The type of token for the Coq lexer and parser *)
@@ -22,7 +24,7 @@ type t =
val equal : t -> t -> bool
val extract_string : t -> string
val to_string : t -> string
-(* Needed to fit Camlp4 signature *)
+(* Needed to fit Camlp5 signature *)
val print : Format.formatter -> t -> unit
val match_keyword : string -> t -> bool
(** for camlp5 *)