author     Benjamin Barenblat <bbaren@debian.org>   2019-02-02 19:29:23 -0500
committer  Benjamin Barenblat <bbaren@debian.org>   2019-02-02 19:29:23 -0500
commit     9ebf44d84754adc5b64fcf612c6816c02c80462d (patch)
tree       bf5e06a28488e0e06a2f2011ff0d110e2e02f8fc /parsing
parent     9043add656177eeac1491a73d2f3ab92bec0013c (diff)
Imported Upstream version 8.9.0 (upstream/8.9.0, upstream)
Diffstat (limited to 'parsing')
-rw-r--r--  parsing/egramcoq.ml                                          |  483
-rw-r--r--  parsing/egramml.ml                                           |   84
-rw-r--r--  parsing/egramml.mli                                          |   33
-rw-r--r--  parsing/extend.ml                                            |  126
-rw-r--r--  parsing/g_constr.mlg (renamed from parsing/g_constr.ml4)     |  357
-rw-r--r--  parsing/g_prim.ml4                                           |  122
-rw-r--r--  parsing/g_prim.mlg                                           |  123
-rw-r--r--  parsing/g_proofs.ml4                                         |  133
-rw-r--r--  parsing/g_vernac.ml4                                         | 1195
-rw-r--r--  parsing/notation_gram.ml                                     |   43
-rw-r--r--  parsing/notgram_ops.ml                                       |   71
-rw-r--r--  parsing/notgram_ops.mli (renamed from parsing/egramcoq.mli)  |   13
-rw-r--r--  parsing/parsing.mllib                                        |    8
-rw-r--r--  parsing/pcoq.ml                                              |  299
-rw-r--r--  parsing/pcoq.mli                                             |  252
-rw-r--r--  parsing/ppextend.ml                                          |   77
-rw-r--r--  parsing/ppextend.mli                                         |   51
17 files changed, 960 insertions, 2510 deletions
diff --git a/parsing/egramcoq.ml b/parsing/egramcoq.ml
deleted file mode 100644
index 5f63d21c..00000000
--- a/parsing/egramcoq.ml
+++ /dev/null
@@ -1,483 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open CErrors
-open Util
-open Pcoq
-open Constrexpr
-open Notation_term
-open Extend
-open Libnames
-open Names
-
-(**********************************************************************)
-(* This determines (depending on the associativity of the current
- level and on the expected associativity) if a reference to constr_n is
- a reference to the current level (to be translated into "SELF" on the
- left border and into "constr LEVEL n" elsewhere), to the level below
- (to be translated into "NEXT") or to an below wrt associativity (to be
- translated in camlp5 into "constr" without level) or to another level
- (to be translated into "constr LEVEL n")
-
- The boolean is true if the entry was existing _and_ empty; this to
- circumvent a weakness of camlp5 whose undo mechanism is not the
- converse of the extension mechanism *)
-
-let constr_level = string_of_int
-
-let default_levels =
- [200,Extend.RightA,false;
- 100,Extend.RightA,false;
- 99,Extend.RightA,true;
- 90,Extend.RightA,true;
- 10,Extend.LeftA,false;
- 9,Extend.RightA,false;
- 8,Extend.RightA,true;
- 1,Extend.LeftA,false;
- 0,Extend.RightA,false]
-
-let default_pattern_levels =
- [200,Extend.RightA,true;
- 100,Extend.RightA,false;
- 99,Extend.RightA,true;
- 90,Extend.RightA,true;
- 10,Extend.LeftA,false;
- 1,Extend.LeftA,false;
- 0,Extend.RightA,false]
-
-let default_constr_levels = (default_levels, default_pattern_levels)
-
-(* At a same level, LeftA takes precedence over RightA and NoneA *)
-(* In case, several associativity exists for a level, we make two levels, *)
-(* first LeftA, then RightA and NoneA together *)
-
-let admissible_assoc = function
- | Extend.LeftA, Some (Extend.RightA | Extend.NonA) -> false
- | Extend.RightA, Some Extend.LeftA -> false
- | _ -> true
-
-let create_assoc = function
- | None -> Extend.RightA
- | Some a -> a
-
-let error_level_assoc p current expected =
- let open Pp in
- let pr_assoc = function
- | Extend.LeftA -> str "left"
- | Extend.RightA -> str "right"
- | Extend.NonA -> str "non" in
- user_err
- (str "Level " ++ int p ++ str " is already declared " ++
- pr_assoc current ++ str " associative while it is now expected to be " ++
- pr_assoc expected ++ str " associative.")
-
-let create_pos = function
- | None -> Extend.First
- | Some lev -> Extend.After (constr_level lev)
-
-let find_position_gen current ensure assoc lev =
- match lev with
- | None ->
- current, (None, None, None, None)
- | Some n ->
- let after = ref None in
- let init = ref None in
- let rec add_level q = function
- | (p,_,_ as pa)::l when p > n -> pa :: add_level (Some p) l
- | (p,a,reinit)::l when Int.equal p n ->
- if reinit then
- let a' = create_assoc assoc in
- (init := Some (a',create_pos q); (p,a',false)::l)
- else if admissible_assoc (a,assoc) then
- raise Exit
- else
- error_level_assoc p a (Option.get assoc)
- | l -> after := q; (n,create_assoc assoc,ensure)::l
- in
- try
- let updated = add_level None current in
- let assoc = create_assoc assoc in
- begin match !init with
- | None ->
- (* Create the entry *)
- updated, (Some (create_pos !after), Some assoc, Some (constr_level n), None)
- | _ ->
- (* The reinit flag has been updated *)
- updated, (Some (Extend.Level (constr_level n)), None, None, !init)
- end
- with
- (* Nothing has changed *)
- Exit ->
- (* Just inherit the existing associativity and name (None) *)
- current, (Some (Extend.Level (constr_level n)), None, None, None)
-
-let rec list_mem_assoc_triple x = function
- | [] -> false
- | (a,b,c) :: l -> Int.equal a x || list_mem_assoc_triple x l
-
-let register_empty_levels accu forpat levels =
- let rec filter accu = function
- | [] -> ([], accu)
- | n :: rem ->
- let rem, accu = filter accu rem in
- let (clev, plev) = accu in
- let levels = if forpat then plev else clev in
- if not (list_mem_assoc_triple n levels) then
- let nlev, ans = find_position_gen levels true None (Some n) in
- let nlev = if forpat then (clev, nlev) else (nlev, plev) in
- ans :: rem, nlev
- else rem, accu
- in
- filter accu levels
-
-let find_position accu forpat assoc level =
- let (clev, plev) = accu in
- let levels = if forpat then plev else clev in
- let nlev, ans = find_position_gen levels false assoc level in
- let nlev = if forpat then (clev, nlev) else (nlev, plev) in
- (ans, nlev)
-
-(**************************************************************************)
-(*
- * --- Note on the mapping of grammar productions to camlp5 actions ---
- *
- * Translation of environments: a production
- * [ nt1(x1) ... nti(xi) ] -> act(x1..xi)
- * is written (with camlp5 conventions):
- * (fun vi -> .... (fun v1 -> act(v1 .. vi) )..)
- * where v1..vi are the values generated by non-terminals nt1..nti.
- * Since the actions are executed by substituting an environment,
- * the make_*_action family build the following closure:
- *
- * ((fun env ->
- * (fun vi ->
- * (fun env -> ...
- *
- * (fun v1 ->
- * (fun env -> gram_action .. env act)
- * ((x1,v1)::env))
- * ...)
- * ((xi,vi)::env)))
- * [])
- *)
-
-(**********************************************************************)
-(** Declare Notations grammar rules *)
-
-(**********************************************************************)
-(* Binding constr entry keys to entries *)
-
-(* Camlp5 levels do not treat NonA: use RightA with a NEXT on the left *)
-let camlp5_assoc = function
- | Some NonA | Some RightA -> RightA
- | None | Some LeftA -> LeftA
-
-let assoc_eq al ar = match al, ar with
-| NonA, NonA
-| RightA, RightA
-| LeftA, LeftA -> true
-| _, _ -> false
-
-(* [adjust_level assoc from prod] where [assoc] and [from] are the name
- and associativity of the level where to add the rule; the meaning of
- the result is
-
- None = SELF
- Some None = NEXT
- Some (Some (n,cur)) = constr LEVEL n
- s.t. if [cur] is set then [n] is the same as the [from] level *)
-let adjust_level assoc from = function
-(* Associativity is None means force the level *)
- | (NumLevel n,BorderProd (_,None)) -> Some (Some (n,true))
-(* Compute production name on the right side *)
- (* If NonA or LeftA on the right-hand side, set to NEXT *)
- | (NumLevel n,BorderProd (Right,Some (NonA|LeftA))) ->
- Some None
- (* If RightA on the right-hand side, set to the explicit (current) level *)
- | (NumLevel n,BorderProd (Right,Some RightA)) ->
- Some (Some (n,true))
-(* Compute production name on the left side *)
- (* If NonA on the left-hand side, adopt the current assoc ?? *)
- | (NumLevel n,BorderProd (Left,Some NonA)) -> None
- (* If the expected assoc is the current one, set to SELF *)
- | (NumLevel n,BorderProd (Left,Some a)) when assoc_eq a (camlp5_assoc assoc) ->
- None
- (* Otherwise, force the level, n or n-1, according to expected assoc *)
- | (NumLevel n,BorderProd (Left,Some a)) ->
- begin match a with
- | LeftA -> Some (Some (n, true))
- | _ -> Some None
- end
- (* None means NEXT *)
- | (NextLevel,_) -> Some None
-(* Compute production name elsewhere *)
- | (NumLevel n,InternalProd) ->
- if from = n + 1 then Some None else Some (Some (n, Int.equal n from))
-
-type _ target =
-| ForConstr : constr_expr target
-| ForPattern : cases_pattern_expr target
-
-type prod_info = production_level * production_position
-
-type (_, _) entry =
-| TTName : ('self, Misctypes.lname) entry
-| TTReference : ('self, reference) entry
-| TTBigint : ('self, Constrexpr.raw_natural_number) entry
-| TTConstr : prod_info * 'r target -> ('r, 'r) entry
-| TTConstrList : prod_info * Tok.t list * 'r target -> ('r, 'r list) entry
-| TTPattern : int -> ('self, cases_pattern_expr) entry
-| TTOpenBinderList : ('self, local_binder_expr list) entry
-| TTClosedBinderList : Tok.t list -> ('self, local_binder_expr list list) entry
-
-type _ any_entry = TTAny : ('s, 'r) entry -> 's any_entry
-
-(* This computes the name of the level where to add a new rule *)
-let interp_constr_entry_key : type r. r target -> int -> r Gram.entry * int option =
- fun forpat level -> match forpat with
- | ForConstr ->
- if level = 200 then Constr.binder_constr, None
- else Constr.operconstr, Some level
- | ForPattern -> Constr.pattern, Some level
-
-let target_entry : type s. s target -> s Gram.entry = function
-| ForConstr -> Constr.operconstr
-| ForPattern -> Constr.pattern
-
-let is_self from e = match e with
-| (NumLevel n, BorderProd (Right, _ (* Some(NonA|LeftA) *))) -> false
-| (NumLevel n, BorderProd (Left, _)) -> Int.equal from n
-| _ -> false
-
-let is_binder_level from e = match e with
-| (NumLevel 200, (BorderProd (Right, _) | InternalProd)) -> from = 200
-| _ -> false
-
-let make_sep_rules = function
- | [tk] -> Atoken tk
- | tkl ->
- let rec mkrule : Tok.t list -> string rules = function
- | [] -> Rules ({ norec_rule = Stop }, fun _ -> (* dropped anyway: *) "")
- | tkn :: rem ->
- let Rules ({ norec_rule = r }, f) = mkrule rem in
- let r = { norec_rule = Next (r, Atoken tkn) } in
- Rules (r, fun _ -> f)
- in
- let r = mkrule (List.rev tkl) in
- Arules [r]
-
-let symbol_of_target : type s. _ -> _ -> _ -> s target -> (s, s) symbol = fun p assoc from forpat ->
- if is_binder_level from p then Aentryl (target_entry forpat, 200)
- else if is_self from p then Aself
- else
- let g = target_entry forpat in
- let lev = adjust_level assoc from p in
- begin match lev with
- | None -> Aentry g
- | Some None -> Anext
- | Some (Some (lev, cur)) -> Aentryl (g, lev)
- end
-
-let symbol_of_entry : type s r. _ -> _ -> (s, r) entry -> (s, r) symbol = fun assoc from typ -> match typ with
-| TTConstr (p, forpat) -> symbol_of_target p assoc from forpat
-| TTConstrList (typ', [], forpat) ->
- Alist1 (symbol_of_target typ' assoc from forpat)
-| TTConstrList (typ', tkl, forpat) ->
- Alist1sep (symbol_of_target typ' assoc from forpat, make_sep_rules tkl)
-| TTPattern p -> Aentryl (Constr.pattern, p)
-| TTClosedBinderList [] -> Alist1 (Aentry Constr.binder)
-| TTClosedBinderList tkl -> Alist1sep (Aentry Constr.binder, make_sep_rules tkl)
-| TTName -> Aentry Prim.name
-| TTOpenBinderList -> Aentry Constr.open_binders
-| TTBigint -> Aentry Prim.bigint
-| TTReference -> Aentry Constr.global
-
-let interp_entry forpat e = match e with
-| ETProdName -> TTAny TTName
-| ETProdReference -> TTAny TTReference
-| ETProdBigint -> TTAny TTBigint
-| ETProdConstr p -> TTAny (TTConstr (p, forpat))
-| ETProdPattern p -> TTAny (TTPattern p)
-| ETProdOther _ -> assert false (** not used *)
-| ETProdConstrList (p, tkl) -> TTAny (TTConstrList (p, tkl, forpat))
-| ETProdBinderList ETBinderOpen -> TTAny TTOpenBinderList
-| ETProdBinderList (ETBinderClosed tkl) -> TTAny (TTClosedBinderList tkl)
-
-let cases_pattern_expr_of_name { CAst.loc; v = na } = CAst.make ?loc @@ match na with
- | Anonymous -> CPatAtom None
- | Name id -> CPatAtom (Some (CAst.make ?loc @@ Ident id))
-
-type 'r env = {
- constrs : 'r list;
- constrlists : 'r list list;
- binders : cases_pattern_expr list;
- binderlists : local_binder_expr list list;
-}
-
-let push_constr subst v = { subst with constrs = v :: subst.constrs }
-
-let push_item : type s r. s target -> (s, r) entry -> s env -> r -> s env = fun forpat e subst v ->
-match e with
-| TTConstr _ -> push_constr subst v
-| TTName ->
- begin match forpat with
- | ForConstr -> { subst with binders = cases_pattern_expr_of_name v :: subst.binders }
- | ForPattern -> push_constr subst (cases_pattern_expr_of_name v)
- end
-| TTPattern _ ->
- begin match forpat with
- | ForConstr -> { subst with binders = v :: subst.binders }
- | ForPattern -> push_constr subst v
- end
-| TTOpenBinderList -> { subst with binderlists = v :: subst.binderlists }
-| TTClosedBinderList _ -> { subst with binderlists = List.flatten v :: subst.binderlists }
-| TTBigint ->
- begin match forpat with
- | ForConstr -> push_constr subst (CAst.make @@ CPrim (Numeral (v,true)))
- | ForPattern -> push_constr subst (CAst.make @@ CPatPrim (Numeral (v,true)))
- end
-| TTReference ->
- begin match forpat with
- | ForConstr -> push_constr subst (CAst.make @@ CRef (v, None))
- | ForPattern -> push_constr subst (CAst.make @@ CPatAtom (Some v))
- end
-| TTConstrList _ -> { subst with constrlists = v :: subst.constrlists }
-
-type (_, _) ty_symbol =
-| TyTerm : Tok.t -> ('s, string) ty_symbol
-| TyNonTerm : 's target * ('s, 'a) entry * ('s, 'a) symbol * bool -> ('s, 'a) ty_symbol
-
-type ('self, _, 'r) ty_rule =
-| TyStop : ('self, 'r, 'r) ty_rule
-| TyNext : ('self, 'a, 'r) ty_rule * ('self, 'b) ty_symbol -> ('self, 'b -> 'a, 'r) ty_rule
-| TyMark : int * bool * int * ('self, 'a, 'r) ty_rule -> ('self, 'a, 'r) ty_rule
-
-type 'r gen_eval = Loc.t -> 'r env -> 'r
-
-let rec ty_eval : type s a. (s, a, Loc.t -> s) ty_rule -> s gen_eval -> s env -> a = function
-| TyStop ->
- fun f env loc -> f loc env
-| TyNext (rem, TyTerm _) ->
- fun f env _ -> ty_eval rem f env
-| TyNext (rem, TyNonTerm (_, _, _, false)) ->
- fun f env _ -> ty_eval rem f env
-| TyNext (rem, TyNonTerm (forpat, e, _, true)) ->
- fun f env v ->
- ty_eval rem f (push_item forpat e env v)
-| TyMark (n, b, p, rem) ->
- fun f env ->
- let heads, constrs = List.chop n env.constrs in
- let constrlists, constrs =
- if b then
- (* We rearrange constrs = c1..cn rem and constrlists = [d1..dr e1..ep] rem' into
- constrs = e1..ep rem and constrlists [c1..cn d1..dr] rem' *)
- let constrlist = List.hd env.constrlists in
- let constrlist, tail = List.chop (List.length constrlist - p) constrlist in
- (heads @ constrlist) :: List.tl env.constrlists, tail @ constrs
- else
- (* We rearrange constrs = c1..cn e1..ep rem into
- constrs = e1..ep rem and add a constr list [c1..cn] *)
- let constrlist, tail = List.chop (n - p) heads in
- constrlist :: env.constrlists, tail @ constrs
- in
- ty_eval rem f { env with constrs; constrlists; }
-
-let rec ty_erase : type s a r. (s, a, r) ty_rule -> (s, a, r) Extend.rule = function
-| TyStop -> Stop
-| TyMark (_, _, _, r) -> ty_erase r
-| TyNext (rem, TyTerm tok) -> Next (ty_erase rem, Atoken tok)
-| TyNext (rem, TyNonTerm (_, _, s, _)) -> Next (ty_erase rem, s)
-
-type ('self, 'r) any_ty_rule =
-| AnyTyRule : ('self, 'act, Loc.t -> 'r) ty_rule -> ('self, 'r) any_ty_rule
-
-let make_ty_rule assoc from forpat prods =
- let rec make_ty_rule = function
- | [] -> AnyTyRule TyStop
- | GramConstrTerminal tok :: rem ->
- let AnyTyRule r = make_ty_rule rem in
- AnyTyRule (TyNext (r, TyTerm tok))
- | GramConstrNonTerminal (e, var) :: rem ->
- let AnyTyRule r = make_ty_rule rem in
- let TTAny e = interp_entry forpat e in
- let s = symbol_of_entry assoc from e in
- let bind = match var with None -> false | Some _ -> true in
- AnyTyRule (TyNext (r, TyNonTerm (forpat, e, s, bind)))
- | GramConstrListMark (n, b, p) :: rem ->
- let AnyTyRule r = make_ty_rule rem in
- AnyTyRule (TyMark (n, b, p, r))
- in
- make_ty_rule (List.rev prods)
-
-let target_to_bool : type r. r target -> bool = function
-| ForConstr -> false
-| ForPattern -> true
-
-let prepare_empty_levels forpat (pos,p4assoc,name,reinit) =
- let empty = (pos, [(name, p4assoc, [])]) in
- if forpat then ExtendRule (Constr.pattern, reinit, empty)
- else ExtendRule (Constr.operconstr, reinit, empty)
-
-let rec pure_sublevels : type a b c. int option -> (a, b, c) rule -> int list = fun level r -> match r with
-| Stop -> []
-| Next (rem, Aentryl (_, i)) ->
- let rem = pure_sublevels level rem in
- begin match level with
- | Some j when Int.equal i j -> rem
- | _ -> i :: rem
- end
-| Next (rem, _) -> pure_sublevels level rem
-
-let make_act : type r. r target -> _ -> r gen_eval = function
-| ForConstr -> fun notation loc env ->
- let env = (env.constrs, env.constrlists, env.binders, env.binderlists) in
- CAst.make ~loc @@ CNotation (notation, env)
-| ForPattern -> fun notation loc env ->
- let env = (env.constrs, env.constrlists) in
- CAst.make ~loc @@ CPatNotation (notation, env, [])
-
-let extend_constr state forpat ng =
- let n,_,_ = ng.notgram_level in
- let assoc = ng.notgram_assoc in
- let (entry, level) = interp_constr_entry_key forpat n in
- let fold (accu, state) pt =
- let AnyTyRule r = make_ty_rule assoc n forpat pt in
- let symbs = ty_erase r in
- let pure_sublevels = pure_sublevels level symbs in
- let isforpat = target_to_bool forpat in
- let needed_levels, state = register_empty_levels state isforpat pure_sublevels in
- let (pos,p4assoc,name,reinit), state = find_position state isforpat assoc level in
- let empty_rules = List.map (prepare_empty_levels isforpat) needed_levels in
- let empty = { constrs = []; constrlists = []; binders = []; binderlists = [] } in
- let act = ty_eval r (make_act forpat ng.notgram_notation) empty in
- let rule = (name, p4assoc, [Rule (symbs, act)]) in
- let r = ExtendRule (entry, reinit, (pos, [rule])) in
- (accu @ empty_rules @ [r], state)
- in
- List.fold_left fold ([], state) ng.notgram_prods
-
-let constr_levels = GramState.field ()
-
-let extend_constr_notation ng state =
- let levels = match GramState.get state constr_levels with
- | None -> default_constr_levels
- | Some lev -> lev
- in
- (* Add the notation in constr *)
- let (r, levels) = extend_constr levels ForConstr ng in
- (* Add the notation in cases_pattern *)
- let (r', levels) = extend_constr levels ForPattern ng in
- let state = GramState.set state constr_levels levels in
- (r @ r', state)
-
-let constr_grammar : one_notation_grammar grammar_command =
- create_grammar_command "Notation" extend_constr_notation
-
-let extend_constr_grammar ntn = extend_grammar_command constr_grammar ntn
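
The "Note on the mapping of grammar productions to camlp5 actions" comment in the deleted egramcoq.ml describes how a production's semantic action is wrapped so that each parsed value is pushed, under its variable name, onto an association-list environment before the action runs. The following toy OCaml sketch is not part of the commit (the names closure, gram_action, x1, x2 are invented for illustration); it only reproduces that closure shape in isolation for a two-nonterminal production:

(* Toy illustration of the environment-threading closure described above.
   [act] receives the final environment; camlp5 supplies v2 first, then v1. *)
let gram_action act env = act env

let closure act =
  (fun env v2 ->
     (fun env v1 ->
        gram_action act (("x1", v1) :: env))
       (("x2", v2) :: env))
    []

(* Usage: the action looks its arguments back up by name.
   closure act 2 3 calls act on [("x1", 3); ("x2", 2)], so five = 5. *)
let five = closure (fun env -> List.assoc "x1" env + List.assoc "x2" env) 2 3
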
diff --git a/parsing/egramml.ml b/parsing/egramml.ml
deleted file mode 100644
index 90cd7d10..00000000
--- a/parsing/egramml.ml
+++ /dev/null
@@ -1,84 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open Util
-open Extend
-open Pcoq
-open Genarg
-open Vernacexpr
-
-(** Grammar extensions declared at ML level *)
-
-type 's grammar_prod_item =
- | GramTerminal of string
- | GramNonTerminal :
- ('a raw_abstract_argument_type option * ('s, 'a) symbol) Loc.located -> 's grammar_prod_item
-
-type 'a ty_arg = ('a -> raw_generic_argument)
-
-type ('self, _, 'r) ty_rule =
-| TyStop : ('self, 'r, 'r) ty_rule
-| TyNext : ('self, 'a, 'r) ty_rule * ('self, 'b) Extend.symbol * 'b ty_arg option ->
- ('self, 'b -> 'a, 'r) ty_rule
-
-type ('self, 'r) any_ty_rule =
-| AnyTyRule : ('self, 'act, Loc.t -> 'r) ty_rule -> ('self, 'r) any_ty_rule
-
-let rec ty_rule_of_gram = function
-| [] -> AnyTyRule TyStop
-| GramTerminal s :: rem ->
- let AnyTyRule rem = ty_rule_of_gram rem in
- let tok = Atoken (CLexer.terminal s) in
- let r = TyNext (rem, tok, None) in
- AnyTyRule r
-| GramNonTerminal (_, (t, tok)) :: rem ->
- let AnyTyRule rem = ty_rule_of_gram rem in
- let inj = Option.map (fun t obj -> Genarg.in_gen t obj) t in
- let r = TyNext (rem, tok, inj) in
- AnyTyRule r
-
-let rec ty_erase : type s a r. (s, a, r) ty_rule -> (s, a, r) Extend.rule = function
-| TyStop -> Extend.Stop
-| TyNext (rem, tok, _) -> Extend.Next (ty_erase rem, tok)
-
-type 'r gen_eval = Loc.t -> raw_generic_argument list -> 'r
-
-let rec ty_eval : type s a. (s, a, Loc.t -> s) ty_rule -> s gen_eval -> a = function
-| TyStop -> fun f loc -> f loc []
-| TyNext (rem, tok, None) -> fun f _ -> ty_eval rem f
-| TyNext (rem, tok, Some inj) -> fun f x ->
- let f loc args = f loc (inj x :: args) in
- ty_eval rem f
-
-let make_rule f prod =
- let AnyTyRule ty_rule = ty_rule_of_gram (List.rev prod) in
- let symb = ty_erase ty_rule in
- let f loc l = f loc (List.rev l) in
- let act = ty_eval ty_rule f in
- Extend.Rule (symb, act)
-
-(** Vernac grammar extensions *)
-
-let vernac_exts = ref []
-
-let get_extend_vernac_rule (s, i) =
- try
- let find ((name, j), _) = String.equal name s && Int.equal i j in
- let (_, rules) = List.find find !vernac_exts in
- rules
- with
- | Failure _ -> raise Not_found
-
-let extend_vernac_command_grammar s nt gl =
- let nt = Option.default Vernac_.command nt in
- vernac_exts := (s,gl) :: !vernac_exts;
- let mkact loc l = VernacExtend (s, l) in
- let rules = [make_rule mkact gl] in
- grammar_extend nt None (None, [None, None, rules])
diff --git a/parsing/egramml.mli b/parsing/egramml.mli
deleted file mode 100644
index 31aa1a98..00000000
--- a/parsing/egramml.mli
+++ /dev/null
@@ -1,33 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open Vernacexpr
-
-(** Mapping of grammar productions to camlp5 actions. *)
-
-(** This is the part specific to vernac extensions.
- For the Coq-level Notation and Tactic Notation, see Egramcoq. *)
-
-type 's grammar_prod_item =
- | GramTerminal of string
- | GramNonTerminal : ('a Genarg.raw_abstract_argument_type option *
- ('s, 'a) Extend.symbol) Loc.located -> 's grammar_prod_item
-
-val extend_vernac_command_grammar :
- Vernacexpr.extend_name -> vernac_expr Pcoq.Gram.entry option ->
- vernac_expr grammar_prod_item list -> unit
-
-val get_extend_vernac_rule : Vernacexpr.extend_name -> vernac_expr grammar_prod_item list
-
-(** Utility function reused in Egramcoq : *)
-
-val make_rule :
- (Loc.t -> Genarg.raw_generic_argument list -> 'a) ->
- 'a grammar_prod_item list -> 'a Extend.production_rule
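
To make the deleted interface above concrete, here is a hedged usage sketch, not code from the commit: a hypothetical "HelloWorld" command with no arguments registered through this API. It relies only on the signatures shown in egramml.mli and on the fact, visible in egramml.ml above, that Vernacexpr.extend_name is a (string, int) pair; passing None for the entry makes the function default to Pcoq.Vernac_.command, as in the deleted implementation.

(* Hypothetical registration of a zero-argument vernacular command. *)
open Egramml

let hello_rule : Vernacexpr.vernac_expr grammar_prod_item list =
  [ GramTerminal "HelloWorld" ]

let () = extend_vernac_command_grammar ("HelloWorld", 0) None hello_rule
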
diff --git a/parsing/extend.ml b/parsing/extend.ml
new file mode 100644
index 00000000..6fe29566
--- /dev/null
+++ b/parsing/extend.ml
@@ -0,0 +1,126 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+(** Entry keys for constr notations *)
+
+type 'a entry = 'a Grammar.GMake(CLexer).Entry.e
+
+type side = Left | Right
+
+type gram_assoc = NonA | RightA | LeftA
+
+type gram_position =
+ | First
+ | Last
+ | Before of string
+ | After of string
+ | Level of string
+
+type production_position =
+ | BorderProd of side * gram_assoc option
+ | InternalProd
+
+type production_level =
+ | NextLevel
+ | NumLevel of int
+
+(** User-level types used to tell how to parse or interpret of the non-terminal *)
+
+type 'a constr_entry_key_gen =
+ | ETIdent
+ | ETGlobal
+ | ETBigint
+ | ETBinder of bool (* open list of binders if true, closed list of binders otherwise *)
+ | ETConstr of Constrexpr.notation_entry * Notation_term.constr_as_binder_kind option * 'a
+ | ETPattern of bool * int option (* true = strict pattern, i.e. not a single variable *)
+
+(** Entries level (left-hand side of grammar rules) *)
+
+type constr_entry_key =
+ (production_level * production_position) constr_entry_key_gen
+
+(** Entries used in productions, vernac side (e.g. "x bigint" or "x ident") *)
+
+type simple_constr_prod_entry_key =
+ production_level option constr_entry_key_gen
+
+(** Entries used in productions (in right-hand-side of grammar rules), to parse non-terminals *)
+
+type binder_entry_kind = ETBinderOpen | ETBinderClosed of Tok.t list
+
+type binder_target = ForBinder | ForTerm
+
+type constr_prod_entry_key =
+ | ETProdName (* Parsed as a name (ident or _) *)
+ | ETProdReference (* Parsed as a global reference *)
+ | ETProdBigint (* Parsed as an (unbounded) integer *)
+ | ETProdConstr of Constrexpr.notation_entry * (production_level * production_position) (* Parsed as constr or pattern, or a subentry of those *)
+ | ETProdPattern of int (* Parsed as pattern as a binder (as subpart of a constr) *)
+ | ETProdConstrList of (production_level * production_position) * Tok.t list (* Parsed as non-empty list of constr *)
+ | ETProdBinderList of binder_entry_kind (* Parsed as non-empty list of local binders *)
+
+(** {5 AST for user-provided entries} *)
+
+type 'a user_symbol =
+| Ulist1 of 'a user_symbol
+| Ulist1sep of 'a user_symbol * string
+| Ulist0 of 'a user_symbol
+| Ulist0sep of 'a user_symbol * string
+| Uopt of 'a user_symbol
+| Uentry of 'a
+| Uentryl of 'a * int
+
+type ('a,'b,'c) ty_user_symbol =
+| TUlist1 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol
+| TUlist1sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol
+| TUlist0 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol
+| TUlist0sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol
+| TUopt : ('a,'b,'c) ty_user_symbol -> ('a option, 'b option, 'c option) ty_user_symbol
+| TUentry : ('a, 'b, 'c) Genarg.ArgT.tag -> ('a,'b,'c) ty_user_symbol
+| TUentryl : ('a, 'b, 'c) Genarg.ArgT.tag * int -> ('a,'b,'c) ty_user_symbol
+
+(** {5 Type-safe grammar extension} *)
+
+type ('self, 'a) symbol =
+| Atoken : Tok.t -> ('self, string) symbol
+| Alist1 : ('self, 'a) symbol -> ('self, 'a list) symbol
+| Alist1sep : ('self, 'a) symbol * ('self, _) symbol -> ('self, 'a list) symbol
+| Alist0 : ('self, 'a) symbol -> ('self, 'a list) symbol
+| Alist0sep : ('self, 'a) symbol * ('self, _) symbol -> ('self, 'a list) symbol
+| Aopt : ('self, 'a) symbol -> ('self, 'a option) symbol
+| Aself : ('self, 'self) symbol
+| Anext : ('self, 'self) symbol
+| Aentry : 'a entry -> ('self, 'a) symbol
+| Aentryl : 'a entry * string -> ('self, 'a) symbol
+| Arules : 'a rules list -> ('self, 'a) symbol
+
+and ('self, _, 'r) rule =
+| Stop : ('self, 'r, 'r) rule
+| Next : ('self, 'a, 'r) rule * ('self, 'b) symbol -> ('self, 'b -> 'a, 'r) rule
+
+and ('a, 'r) norec_rule = { norec_rule : 's. ('s, 'a, 'r) rule }
+
+and 'a rules =
+| Rules : ('act, Loc.t -> 'a) norec_rule * 'act -> 'a rules
+
+type 'a production_rule =
+| Rule : ('a, 'act, Loc.t -> 'a) rule * 'act -> 'a production_rule
+
+type 'a single_extend_statement =
+ string option *
+ (** Level *)
+ gram_assoc option *
+ (** Associativity *)
+ 'a production_rule list
+ (** Symbol list with the interpretation function *)
+
+type 'a extend_statement =
+ gram_position option *
+ 'a single_extend_statement list
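
The symbol/rule GADTs added above make the type of a rule's semantic action follow from the symbols the rule contains. The stripped-down, self-contained OCaml sketch below uses local toy types, not the definitions above, and drops the 'self and location parameters; it only shows how each added symbol prepends one argument to the action type:

(* Each Next adds one symbol and therefore one argument to the action type. *)
type _ symbol =
  | Token : string -> string symbol   (* a keyword; yields its spelling *)
  | Int : int symbol                  (* an integer literal *)

type (_, _) rule =
  | Stop : ('r, 'r) rule
  | Next : ('a, 'r) rule * 'b symbol -> ('b -> 'a, 'r) rule

(* The rule "let" <int> expects an action of type int -> string -> int:
   arguments arrive in the reverse of the order the symbols were declared. *)
let r : (int -> string -> int, int) rule = Next (Next (Stop, Token "let"), Int)
let act : int -> string -> int = fun n _kw -> n + 1
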
diff --git a/parsing/g_constr.ml4 b/parsing/g_constr.mlg
index 59b74545..7cb5af78 100644
--- a/parsing/g_constr.ml4
+++ b/parsing/g_constr.mlg
@@ -8,13 +8,17 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Names
+open Constr
open Libnames
+open Glob_term
open Constrexpr
open Constrexpr_ops
open Util
open Tok
-open Misctypes
+open Namegen
open Decl_kinds
open Pcoq
@@ -124,396 +128,399 @@ let name_colon =
let aliasvar = function { CAst.v = CPatAlias (_, na) } -> Some na | _ -> None
-GEXTEND Gram
+}
+
+GRAMMAR EXTEND Gram
GLOBAL: binder_constr lconstr constr operconstr universe_level sort sort_family
global constr_pattern lconstr_pattern Constr.ident
closed_binder open_binders binder binders binders_fixannot
record_declaration typeclass_constraint pattern appl_arg;
Constr.ident:
- [ [ id = Prim.ident -> id ] ]
+ [ [ id = Prim.ident -> { id } ] ]
;
Prim.name:
- [ [ "_" -> CAst.make ~loc:!@loc Anonymous ] ]
+ [ [ "_" -> { CAst.make ~loc Anonymous } ] ]
;
global:
- [ [ r = Prim.reference -> r ] ]
+ [ [ r = Prim.reference -> { r } ] ]
;
constr_pattern:
- [ [ c = constr -> c ] ]
+ [ [ c = constr -> { c } ] ]
;
lconstr_pattern:
- [ [ c = lconstr -> c ] ]
+ [ [ c = lconstr -> { c } ] ]
;
sort:
- [ [ "Set" -> GSet
- | "Prop" -> GProp
- | "Type" -> GType []
- | "Type"; "@{"; u = universe; "}" -> GType u
+ [ [ "Set" -> { GSet }
+ | "Prop" -> { GProp }
+ | "Type" -> { GType [] }
+ | "Type"; "@{"; u = universe; "}" -> { GType u }
] ]
;
sort_family:
- [ [ "Set" -> Sorts.InSet
- | "Prop" -> Sorts.InProp
- | "Type" -> Sorts.InType
+ [ [ "Set" -> { Sorts.InSet }
+ | "Prop" -> { Sorts.InProp }
+ | "Type" -> { Sorts.InType }
] ]
;
universe_expr:
- [ [ id = global; "+"; n = natural -> Some (id,n)
- | id = global -> Some (id,0)
- | "_" -> None
+ [ [ id = global; "+"; n = natural -> { Some (id,n) }
+ | id = global -> { Some (id,0) }
+ | "_" -> { None }
] ]
;
universe:
- [ [ IDENT "max"; "("; ids = LIST1 universe_expr SEP ","; ")" -> ids
- | u = universe_expr -> [u]
+ [ [ IDENT "max"; "("; ids = LIST1 universe_expr SEP ","; ")" -> { ids }
+ | u = universe_expr -> { [u] }
] ]
;
lconstr:
- [ [ c = operconstr LEVEL "200" -> c ] ]
+ [ [ c = operconstr LEVEL "200" -> { c } ] ]
;
constr:
- [ [ c = operconstr LEVEL "8" -> c
- | "@"; f=global; i = instance -> CAst.make ~loc:!@loc @@ CAppExpl((None,f,i),[]) ] ]
+ [ [ c = operconstr LEVEL "8" -> { c }
+ | "@"; f=global; i = instance -> { CAst.make ~loc @@ CAppExpl((None,f,i),[]) } ] ]
;
operconstr:
[ "200" RIGHTA
- [ c = binder_constr -> c ]
+ [ c = binder_constr -> { c } ]
| "100" RIGHTA
[ c1 = operconstr; "<:"; c2 = binder_constr ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastVM c2)
+ { CAst.make ~loc @@ CCast(c1, CastVM c2) }
| c1 = operconstr; "<:"; c2 = SELF ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastVM c2)
+ { CAst.make ~loc @@ CCast(c1, CastVM c2) }
| c1 = operconstr; "<<:"; c2 = binder_constr ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastNative c2)
+ { CAst.make ~loc @@ CCast(c1, CastNative c2) }
| c1 = operconstr; "<<:"; c2 = SELF ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastNative c2)
+ { CAst.make ~loc @@ CCast(c1, CastNative c2) }
| c1 = operconstr; ":";c2 = binder_constr ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastConv c2)
+ { CAst.make ~loc @@ CCast(c1, CastConv c2) }
| c1 = operconstr; ":"; c2 = SELF ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastConv c2)
+ { CAst.make ~loc @@ CCast(c1, CastConv c2) }
| c1 = operconstr; ":>" ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastCoerce) ]
+ { CAst.make ~loc @@ CCast(c1, CastCoerce) } ]
| "99" RIGHTA [ ]
| "90" RIGHTA [ ]
| "10" LEFTA
- [ f=operconstr; args=LIST1 appl_arg -> CAst.make ~loc:(!@loc) @@ CApp((None,f),args)
- | "@"; f=global; i = instance; args=LIST0 NEXT -> CAst.make ~loc:!@loc @@ CAppExpl((None,f,i),args)
+ [ f=operconstr; args=LIST1 appl_arg -> { CAst.make ~loc @@ CApp((None,f),args) }
+ | "@"; f=global; i = instance; args=LIST0 NEXT -> { CAst.make ~loc @@ CAppExpl((None,f,i),args) }
| "@"; lid = pattern_identref; args=LIST1 identref ->
- let { CAst.loc = locid; v = id } = lid in
- let args = List.map (fun x -> CAst.make @@ CRef (CAst.make ?loc:x.CAst.loc @@ Ident x.CAst.v, None), None) args in
- CAst.make ~loc:(!@loc) @@ CApp((None, CAst.make ?loc:locid @@ CPatVar id),args) ]
+ { let { CAst.loc = locid; v = id } = lid in
+ let args = List.map (fun x -> CAst.make @@ CRef (qualid_of_ident ?loc:x.CAst.loc x.CAst.v, None), None) args in
+ CAst.make ~loc @@ CApp((None, CAst.make ?loc:locid @@ CPatVar id),args) } ]
| "9"
[ ".."; c = operconstr LEVEL "0"; ".." ->
- CAst.make ~loc:!@loc @@ CAppExpl ((None, CAst.make ~loc:!@loc @@ Ident ldots_var, None),[c]) ]
+ { CAst.make ~loc @@ CAppExpl ((None, (qualid_of_ident ~loc ldots_var), None),[c]) } ]
| "8" [ ]
| "1" LEFTA
[ c=operconstr; ".("; f=global; args=LIST0 appl_arg; ")" ->
- CAst.make ~loc:(!@loc) @@ CApp((Some (List.length args+1), CAst.make @@ CRef (f,None)),args@[c,None])
+ { CAst.make ~loc @@ CApp((Some (List.length args+1), CAst.make @@ CRef (f,None)),args@[c,None]) }
| c=operconstr; ".("; "@"; f=global;
args=LIST0 (operconstr LEVEL "9"); ")" ->
- CAst.make ~loc:(!@loc) @@ CAppExpl((Some (List.length args+1),f,None),args@[c])
- | c=operconstr; "%"; key=IDENT -> CAst.make ~loc:(!@loc) @@ CDelimiters (key,c) ]
+ { CAst.make ~loc @@ CAppExpl((Some (List.length args+1),f,None),args@[c]) }
+ | c=operconstr; "%"; key=IDENT -> { CAst.make ~loc @@ CDelimiters (key,c) } ]
| "0"
- [ c=atomic_constr -> c
- | c=match_constr -> c
+ [ c=atomic_constr -> { c }
+ | c=match_constr -> { c }
| "("; c = operconstr LEVEL "200"; ")" ->
- (match c.CAst.v with
+ { (match c.CAst.v with
| CPrim (Numeral (n,true)) ->
- CAst.make ~loc:(!@loc) @@ CNotation("( _ )",([c],[],[],[]))
- | _ -> c)
- | "{|"; c = record_declaration; "|}" -> c
+ CAst.make ~loc @@ CNotation((InConstrEntrySomeLevel,"( _ )"),([c],[],[],[]))
+ | _ -> c) }
+ | "{|"; c = record_declaration; "|}" -> { c }
| "{"; c = binder_constr ; "}" ->
- CAst.make ~loc:(!@loc) @@ CNotation(("{ _ }"),([c],[],[],[]))
+ { CAst.make ~loc @@ CNotation((InConstrEntrySomeLevel,"{ _ }"),([c],[],[],[])) }
| "`{"; c = operconstr LEVEL "200"; "}" ->
- CAst.make ~loc:(!@loc) @@ CGeneralization (Implicit, None, c)
+ { CAst.make ~loc @@ CGeneralization (Implicit, None, c) }
| "`("; c = operconstr LEVEL "200"; ")" ->
- CAst.make ~loc:(!@loc) @@ CGeneralization (Explicit, None, c)
+ { CAst.make ~loc @@ CGeneralization (Explicit, None, c) }
] ]
;
record_declaration:
- [ [ fs = record_fields -> CAst.make ~loc:(!@loc) @@ CRecord fs ] ]
+ [ [ fs = record_fields -> { CAst.make ~loc @@ CRecord fs } ] ]
;
record_fields:
- [ [ f = record_field_declaration; ";"; fs = record_fields -> f :: fs
- | f = record_field_declaration -> [f]
- | -> []
+ [ [ f = record_field_declaration; ";"; fs = record_fields -> { f :: fs }
+ | f = record_field_declaration -> { [f] }
+ | -> { [] }
] ]
;
record_field_declaration:
[ [ id = global; bl = binders; ":="; c = lconstr ->
- (id, if bl = [] then c else mkCLambdaN ~loc:!@loc bl c) ] ]
+ { (id, if bl = [] then c else mkCLambdaN ~loc bl c) } ] ]
;
binder_constr:
[ [ "forall"; bl = open_binders; ","; c = operconstr LEVEL "200" ->
- mkCProdN ~loc:!@loc bl c
+ { mkCProdN ~loc bl c }
| "fun"; bl = open_binders; "=>"; c = operconstr LEVEL "200" ->
- mkCLambdaN ~loc:!@loc bl c
+ { mkCLambdaN ~loc bl c }
| "let"; id=name; bl = binders; ty = type_cstr; ":=";
c1 = operconstr LEVEL "200"; "in"; c2 = operconstr LEVEL "200" ->
- let ty,c1 = match ty, c1 with
+ { let ty,c1 = match ty, c1 with
| (_,None), { CAst.v = CCast(c, CastConv t) } -> (Loc.tag ?loc:(constr_loc t) @@ Some t), c (* Tolerance, see G_vernac.def_body *)
| _, _ -> ty, c1 in
- CAst.make ~loc:!@loc @@ CLetIn(id,mkCLambdaN ?loc:(constr_loc c1) bl c1,
- Option.map (mkCProdN ?loc:(fst ty) bl) (snd ty), c2)
+ CAst.make ~loc @@ CLetIn(id,mkCLambdaN ?loc:(constr_loc c1) bl c1,
+ Option.map (mkCProdN ?loc:(fst ty) bl) (snd ty), c2) }
| "let"; fx = single_fix; "in"; c = operconstr LEVEL "200" ->
- let fixp = mk_single_fix fx in
+ { let fixp = mk_single_fix fx in
let { CAst.loc = li; v = id } = match fixp.CAst.v with
CFix(id,_) -> id
| CCoFix(id,_) -> id
| _ -> assert false in
- CAst.make ~loc:!@loc @@ CLetIn( CAst.make ?loc:li @@ Name id,fixp,None,c)
- | "let"; lb = ["("; l=LIST0 name SEP ","; ")" -> l | "()" -> []];
+ CAst.make ~loc @@ CLetIn( CAst.make ?loc:li @@ Name id,fixp,None,c) }
+ | "let"; lb = ["("; l=LIST0 name SEP ","; ")" -> { l } | "()" -> { [] } ];
po = return_type;
":="; c1 = operconstr LEVEL "200"; "in";
c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@ CLetTuple (lb,po,c1,c2)
+ { CAst.make ~loc @@ CLetTuple (lb,po,c1,c2) }
| "let"; "'"; p=pattern; ":="; c1 = operconstr LEVEL "200";
"in"; c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@
- CCases (LetPatternStyle, None, [c1, None, None], [CAst.make ~loc:!@loc ([[p]], c2)])
+ { CAst.make ~loc @@
+ CCases (LetPatternStyle, None, [c1, None, None], [CAst.make ~loc ([[p]], c2)]) }
| "let"; "'"; p=pattern; ":="; c1 = operconstr LEVEL "200";
rt = case_type; "in"; c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@
- CCases (LetPatternStyle, Some rt, [c1, aliasvar p, None], [CAst.make ~loc:!@loc ([[p]], c2)])
+ { CAst.make ~loc @@
+ CCases (LetPatternStyle, Some rt, [c1, aliasvar p, None], [CAst.make ~loc ([[p]], c2)]) }
| "let"; "'"; p=pattern; "in"; t = pattern LEVEL "200";
":="; c1 = operconstr LEVEL "200"; rt = case_type;
"in"; c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@
- CCases (LetPatternStyle, Some rt, [c1, aliasvar p, Some t], [CAst.make ~loc:!@loc ([[p]], c2)])
+ { CAst.make ~loc @@
+ CCases (LetPatternStyle, Some rt, [c1, aliasvar p, Some t], [CAst.make ~loc ([[p]], c2)]) }
| "if"; c=operconstr LEVEL "200"; po = return_type;
"then"; b1=operconstr LEVEL "200";
"else"; b2=operconstr LEVEL "200" ->
- CAst.make ~loc:(!@loc) @@ CIf (c, po, b1, b2)
- | c=fix_constr -> c ] ]
+ { CAst.make ~loc @@ CIf (c, po, b1, b2) }
+ | c=fix_constr -> { c } ] ]
;
appl_arg:
[ [ id = lpar_id_coloneq; c=lconstr; ")" ->
- (c,Some (CAst.make ~loc:!@loc @@ ExplByName id))
- | c=operconstr LEVEL "9" -> (c,None) ] ]
+ { (c,Some (CAst.make ~loc @@ ExplByName id)) }
+ | c=operconstr LEVEL "9" -> { (c,None) } ] ]
;
atomic_constr:
- [ [ g=global; i=instance -> CAst.make ~loc:!@loc @@ CRef (g,i)
- | s=sort -> CAst.make ~loc:!@loc @@ CSort s
- | n=INT -> CAst.make ~loc:!@loc @@ CPrim (Numeral (n,true))
- | s=string -> CAst.make ~loc:!@loc @@ CPrim (String s)
- | "_" -> CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None)
- | "?"; "["; id=ident; "]" -> CAst.make ~loc:!@loc @@ CHole (None, IntroIdentifier id, None)
- | "?"; "["; id=pattern_ident; "]" -> CAst.make ~loc:!@loc @@ CHole (None, IntroFresh id, None)
- | id=pattern_ident; inst = evar_instance -> CAst.make ~loc:!@loc @@ CEvar(id,inst) ] ]
+ [ [ g=global; i=instance -> { CAst.make ~loc @@ CRef (g,i) }
+ | s=sort -> { CAst.make ~loc @@ CSort s }
+ | n=INT -> { CAst.make ~loc @@ CPrim (Numeral (n,true)) }
+ | s=string -> { CAst.make ~loc @@ CPrim (String s) }
+ | "_" -> { CAst.make ~loc @@ CHole (None, IntroAnonymous, None) }
+ | "?"; "["; id=ident; "]" -> { CAst.make ~loc @@ CHole (None, IntroIdentifier id, None) }
+ | "?"; "["; id=pattern_ident; "]" -> { CAst.make ~loc @@ CHole (None, IntroFresh id, None) }
+ | id=pattern_ident; inst = evar_instance -> { CAst.make ~loc @@ CEvar(id,inst) } ] ]
;
inst:
- [ [ id = ident; ":="; c = lconstr -> (id,c) ] ]
+ [ [ id = ident; ":="; c = lconstr -> { (id,c) } ] ]
;
evar_instance:
- [ [ "@{"; l = LIST1 inst SEP ";"; "}" -> l
- | -> [] ] ]
+ [ [ "@{"; l = LIST1 inst SEP ";"; "}" -> { l }
+ | -> { [] } ] ]
;
instance:
- [ [ "@{"; l = LIST0 universe_level; "}" -> Some l
- | -> None ] ]
+ [ [ "@{"; l = LIST0 universe_level; "}" -> { Some l }
+ | -> { None } ] ]
;
universe_level:
- [ [ "Set" -> GSet
- | "Prop" -> GProp
- | "Type" -> GType UUnknown
- | "_" -> GType UAnonymous
- | id = global -> GType (UNamed id)
+ [ [ "Set" -> { GSet }
+ | "Prop" -> { GProp }
+ | "Type" -> { GType UUnknown }
+ | "_" -> { GType UAnonymous }
+ | id = global -> { GType (UNamed id) }
] ]
;
fix_constr:
- [ [ fx1=single_fix -> mk_single_fix fx1
- | (_,kw,dcl1)=single_fix; "with"; dcls=LIST1 fix_decl SEP "with";
+ [ [ fx1=single_fix -> { mk_single_fix fx1 }
+ | f=single_fix; "with"; dcls=LIST1 fix_decl SEP "with";
"for"; id=identref ->
- mk_fix(!@loc,kw,id,dcl1::dcls)
+ { let (_,kw,dcl1) = f in
+ mk_fix(loc,kw,id,dcl1::dcls) }
] ]
;
single_fix:
- [ [ kw=fix_kw; dcl=fix_decl -> (!@loc,kw,dcl) ] ]
+ [ [ kw=fix_kw; dcl=fix_decl -> { (loc,kw,dcl) } ] ]
;
fix_kw:
- [ [ "fix" -> true
- | "cofix" -> false ] ]
+ [ [ "fix" -> { true }
+ | "cofix" -> { false } ] ]
;
fix_decl:
[ [ id=identref; bl=binders_fixannot; ty=type_cstr; ":=";
c=operconstr LEVEL "200" ->
- (id,fst bl,snd bl,c,ty) ] ]
+ { (id,fst bl,snd bl,c,ty) } ] ]
;
match_constr:
[ [ "match"; ci=LIST1 case_item SEP ","; ty=OPT case_type; "with";
- br=branches; "end" -> CAst.make ~loc:!@loc @@ CCases(RegularStyle,ty,ci,br) ] ]
+ br=branches; "end" -> { CAst.make ~loc @@ CCases(RegularStyle,ty,ci,br) } ] ]
;
case_item:
[ [ c=operconstr LEVEL "100";
- ona = OPT ["as"; id=name -> id];
- ty = OPT ["in"; t=pattern -> t] ->
- (c,ona,ty) ] ]
+ ona = OPT ["as"; id=name -> { id } ];
+ ty = OPT ["in"; t=pattern -> { t } ] ->
+ { (c,ona,ty) } ] ]
;
case_type:
- [ [ "return"; ty = operconstr LEVEL "100" -> ty ] ]
+ [ [ "return"; ty = operconstr LEVEL "100" -> { ty } ] ]
;
return_type:
- [ [ a = OPT [ na = OPT["as"; na=name -> na];
- ty = case_type -> (na,ty) ] ->
- match a with
+ [ [ a = OPT [ na = OPT["as"; na=name -> { na } ];
+ ty = case_type -> { (na,ty) } ] ->
+ { match a with
| None -> None, None
- | Some (na,t) -> (na, Some t)
+ | Some (na,t) -> (na, Some t) }
] ]
;
branches:
- [ [ OPT"|"; br=LIST0 eqn SEP "|" -> br ] ]
+ [ [ OPT"|"; br=LIST0 eqn SEP "|" -> { br } ] ]
;
mult_pattern:
- [ [ pl = LIST1 pattern LEVEL "99" SEP "," -> pl ] ]
+ [ [ pl = LIST1 pattern LEVEL "99" SEP "," -> { pl } ] ]
;
eqn:
[ [ pll = LIST1 mult_pattern SEP "|";
- "=>"; rhs = lconstr -> (CAst.make ~loc:!@loc (pll,rhs)) ] ]
+ "=>"; rhs = lconstr -> { (CAst.make ~loc (pll,rhs)) } ] ]
;
record_pattern:
- [ [ id = global; ":="; pat = pattern -> (id, pat) ] ]
+ [ [ id = global; ":="; pat = pattern -> { (id, pat) } ] ]
;
record_patterns:
- [ [ p = record_pattern; ";"; ps = record_patterns -> p :: ps
- | p = record_pattern; ";" -> [p]
- | p = record_pattern-> [p]
- | -> []
+ [ [ p = record_pattern; ";"; ps = record_patterns -> { p :: ps }
+ | p = record_pattern; ";" -> { [p] }
+ | p = record_pattern-> { [p] }
+ | -> { [] }
] ]
;
pattern:
[ "200" RIGHTA [ ]
| "100" RIGHTA
- [ p = pattern; "|"; pl = LIST1 pattern SEP "|" -> CAst.make ~loc:!@loc @@ CPatOr (p::pl) ]
+ [ p = pattern; "|"; pl = LIST1 pattern SEP "|" -> { CAst.make ~loc @@ CPatOr (p::pl) } ]
| "99" RIGHTA [ ]
| "90" RIGHTA [ ]
| "10" LEFTA
[ p = pattern; "as"; na = name ->
- CAst.make ~loc:!@loc @@ CPatAlias (p, na)
- | p = pattern; lp = LIST1 NEXT -> mkAppPattern ~loc:!@loc p lp
+ { CAst.make ~loc @@ CPatAlias (p, na) }
+ | p = pattern; lp = LIST1 NEXT -> { mkAppPattern ~loc p lp }
| "@"; r = Prim.reference; lp = LIST0 NEXT ->
- CAst.make ~loc:!@loc @@ CPatCstr (r, Some lp, []) ]
+ { CAst.make ~loc @@ CPatCstr (r, Some lp, []) } ]
| "1" LEFTA
- [ c = pattern; "%"; key=IDENT -> CAst.make ~loc:!@loc @@ CPatDelimiters (key,c) ]
+ [ c = pattern; "%"; key=IDENT -> { CAst.make ~loc @@ CPatDelimiters (key,c) } ]
| "0"
- [ r = Prim.reference -> CAst.make ~loc:!@loc @@ CPatAtom (Some r)
- | "{|"; pat = record_patterns; "|}" -> CAst.make ~loc:!@loc @@ CPatRecord pat
- | "_" -> CAst.make ~loc:!@loc @@ CPatAtom None
+ [ r = Prim.reference -> { CAst.make ~loc @@ CPatAtom (Some r) }
+ | "{|"; pat = record_patterns; "|}" -> { CAst.make ~loc @@ CPatRecord pat }
+ | "_" -> { CAst.make ~loc @@ CPatAtom None }
| "("; p = pattern LEVEL "200"; ")" ->
- (match p.CAst.v with
+ { (match p.CAst.v with
| CPatPrim (Numeral (n,true)) ->
- CAst.make ~loc:!@loc @@ CPatNotation("( _ )",([p],[]),[])
- | _ -> p)
+ CAst.make ~loc @@ CPatNotation((InConstrEntrySomeLevel,"( _ )"),([p],[]),[])
+ | _ -> p) }
| "("; p = pattern LEVEL "200"; ":"; ty = lconstr; ")" ->
- let p =
+ { let p =
match p with
| { CAst.v = CPatPrim (Numeral (n,true)) } ->
- CAst.make ~loc:!@loc @@ CPatNotation("( _ )",([p],[]),[])
+ CAst.make ~loc @@ CPatNotation((InConstrEntrySomeLevel,"( _ )"),([p],[]),[])
| _ -> p
in
- CAst.make ~loc:!@loc @@ CPatCast (p, ty)
- | n = INT -> CAst.make ~loc:!@loc @@ CPatPrim (Numeral (n,true))
- | s = string -> CAst.make ~loc:!@loc @@ CPatPrim (String s) ] ]
+ CAst.make ~loc @@ CPatCast (p, ty) }
+ | n = INT -> { CAst.make ~loc @@ CPatPrim (Numeral (n,true)) }
+ | s = string -> { CAst.make ~loc @@ CPatPrim (String s) } ] ]
;
impl_ident_tail:
- [ [ "}" -> binder_of_name Implicit
+ [ [ "}" -> { binder_of_name Implicit }
| nal=LIST1 name; ":"; c=lconstr; "}" ->
- (fun na -> CLocalAssum (na::nal,Default Implicit,c))
+ { (fun na -> CLocalAssum (na::nal,Default Implicit,c)) }
| nal=LIST1 name; "}" ->
- (fun na -> CLocalAssum (na::nal,Default Implicit,
- CAst.make ?loc:(Loc.merge_opt na.CAst.loc (Some !@loc)) @@
- CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None)))
+ { (fun na -> CLocalAssum (na::nal,Default Implicit,
+ CAst.make ?loc:(Loc.merge_opt na.CAst.loc (Some loc)) @@
+ CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None))) }
| ":"; c=lconstr; "}" ->
- (fun na -> CLocalAssum ([na],Default Implicit,c))
+ { (fun na -> CLocalAssum ([na],Default Implicit,c)) }
] ]
;
fixannot:
- [ [ "{"; IDENT "struct"; id=identref; "}" -> (Some id, CStructRec)
- | "{"; IDENT "wf"; rel=constr; id=OPT identref; "}" -> (id, CWfRec rel)
+ [ [ "{"; IDENT "struct"; id=identref; "}" -> { (Some id, CStructRec) }
+ | "{"; IDENT "wf"; rel=constr; id=OPT identref; "}" -> { (id, CWfRec rel) }
| "{"; IDENT "measure"; m=constr; id=OPT identref;
- rel=OPT constr; "}" -> (id, CMeasureRec (m,rel))
+ rel=OPT constr; "}" -> { (id, CMeasureRec (m,rel)) }
] ]
;
impl_name_head:
- [ [ id = impl_ident_head -> (CAst.make ~loc:!@loc @@ Name id) ] ]
+ [ [ id = impl_ident_head -> { CAst.make ~loc @@ Name id } ] ]
;
binders_fixannot:
[ [ na = impl_name_head; assum = impl_ident_tail; bl = binders_fixannot ->
- (assum na :: fst bl), snd bl
- | f = fixannot -> [], f
- | b = binder; bl = binders_fixannot -> b @ fst bl, snd bl
- | -> [], (None, CStructRec)
+ { (assum na :: fst bl), snd bl }
+ | f = fixannot -> { [], f }
+ | b = binder; bl = binders_fixannot -> { b @ fst bl, snd bl }
+ | -> { [], (None, CStructRec) }
] ]
;
open_binders:
(* Same as binders but parentheses around a closed binder are optional if
the latter is unique *)
- [ [ (* open binder *)
+ [ [
id = name; idl = LIST0 name; ":"; c = lconstr ->
- [CLocalAssum (id::idl,Default Explicit,c)]
- (* binders factorized with open binder *)
+ { [CLocalAssum (id::idl,Default Explicit,c)]
+ (* binders factorized with open binder *) }
| id = name; idl = LIST0 name; bl = binders ->
- binders_of_names (id::idl) @ bl
+ { binders_of_names (id::idl) @ bl }
| id1 = name; ".."; id2 = name ->
- [CLocalAssum ([id1;(CAst.make ~loc:!@loc (Name ldots_var));id2],
- Default Explicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
+ { [CLocalAssum ([id1;(CAst.make ~loc (Name ldots_var));id2],
+ Default Explicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))] }
| bl = closed_binder; bl' = binders ->
- bl@bl'
+ { bl@bl' }
] ]
;
binders:
- [ [ l = LIST0 binder -> List.flatten l ] ]
+ [ [ l = LIST0 binder -> { List.flatten l } ] ]
;
binder:
- [ [ id = name -> [CLocalAssum ([id],Default Explicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
- | bl = closed_binder -> bl ] ]
+ [ [ id = name -> { [CLocalAssum ([id],Default Explicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))] }
+ | bl = closed_binder -> { bl } ] ]
;
closed_binder:
[ [ "("; id=name; idl=LIST1 name; ":"; c=lconstr; ")" ->
- [CLocalAssum (id::idl,Default Explicit,c)]
+ { [CLocalAssum (id::idl,Default Explicit,c)] }
| "("; id=name; ":"; c=lconstr; ")" ->
- [CLocalAssum ([id],Default Explicit,c)]
+ { [CLocalAssum ([id],Default Explicit,c)] }
| "("; id=name; ":="; c=lconstr; ")" ->
- (match c.CAst.v with
+ { (match c.CAst.v with
| CCast(c, CastConv t) -> [CLocalDef (id,c,Some t)]
- | _ -> [CLocalDef (id,c,None)])
+ | _ -> [CLocalDef (id,c,None)]) }
| "("; id=name; ":"; t=lconstr; ":="; c=lconstr; ")" ->
- [CLocalDef (id,c,Some t)]
+ { [CLocalDef (id,c,Some t)] }
| "{"; id=name; "}" ->
- [CLocalAssum ([id],Default Implicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
+ { [CLocalAssum ([id],Default Implicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))] }
| "{"; id=name; idl=LIST1 name; ":"; c=lconstr; "}" ->
- [CLocalAssum (id::idl,Default Implicit,c)]
+ { [CLocalAssum (id::idl,Default Implicit,c)] }
| "{"; id=name; ":"; c=lconstr; "}" ->
- [CLocalAssum ([id],Default Implicit,c)]
+ { [CLocalAssum ([id],Default Implicit,c)] }
| "{"; id=name; idl=LIST1 name; "}" ->
- List.map (fun id -> CLocalAssum ([id],Default Implicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))) (id::idl)
+ { List.map (fun id -> CLocalAssum ([id],Default Implicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))) (id::idl) }
| "`("; tc = LIST1 typeclass_constraint SEP "," ; ")" ->
- List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Explicit, b), t)) tc
+ { List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Explicit, b), t)) tc }
| "`{"; tc = LIST1 typeclass_constraint SEP "," ; "}" ->
- List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Implicit, b), t)) tc
+ { List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Implicit, b), t)) tc }
| "'"; p = pattern LEVEL "0" ->
- let (p, ty) =
+ { let (p, ty) =
match p.CAst.v with
| CPatCast (p, ty) -> (p, Some ty)
| _ -> (p, None)
in
- [CLocalPattern (CAst.make ~loc:!@loc (p, ty))]
+ [CLocalPattern (CAst.make ~loc (p, ty))] }
] ]
;
typeclass_constraint:
- [ [ "!" ; c = operconstr LEVEL "200" -> (CAst.make ~loc:!@loc Anonymous), true, c
- | "{"; id = name; "}"; ":" ; expl = [ "!" -> true | -> false ] ; c = operconstr LEVEL "200" ->
- id, expl, c
- | iid=name_colon ; expl = [ "!" -> true | -> false ] ; c = operconstr LEVEL "200" ->
- (CAst.make ~loc:!@loc iid), expl, c
+ [ [ "!" ; c = operconstr LEVEL "200" -> { (CAst.make ~loc Anonymous), true, c }
+ | "{"; id = name; "}"; ":" ; expl = [ "!" -> { true } | -> { false } ] ; c = operconstr LEVEL "200" ->
+ { id, expl, c }
+ | iid=name_colon ; expl = [ "!" -> { true } | -> { false } ] ; c = operconstr LEVEL "200" ->
+ { (CAst.make ~loc iid), expl, c }
| c = operconstr LEVEL "200" ->
- (CAst.make ~loc:!@loc Anonymous), false, c
+ { (CAst.make ~loc Anonymous), false, c }
] ]
;
type_cstr:
- [ [ c=OPT [":"; c=lconstr -> c] -> Loc.tag ~loc:!@loc c ] ]
+ [ [ c=OPT [":"; c=lconstr -> { c } ] -> { Loc.tag ~loc c } ] ]
;
- END;;
+ END
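
The g_constr diff above is largely mechanical: it records the switch from the camlp5 GEXTEND preprocessor (.ml4) to the coqpp GRAMMAR EXTEND format (.mlg). As a minimal before/after sketch drawn from the constr rule above, a production keeps its shape, but semantic actions are wrapped in braces and the location is the plain variable loc rather than !@loc:

(* .ml4 (camlp5 GEXTEND): *)
constr:
  [ [ c = operconstr LEVEL "8" -> c
    | "@"; f=global; i = instance -> CAst.make ~loc:!@loc @@ CAppExpl((None,f,i),[]) ] ]
  ;

(* .mlg (coqpp GRAMMAR EXTEND): *)
constr:
  [ [ c = operconstr LEVEL "8" -> { c }
    | "@"; f=global; i = instance -> { CAst.make ~loc @@ CAppExpl((None,f,i),[]) } ] ]
  ;
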
diff --git a/parsing/g_prim.ml4 b/parsing/g_prim.ml4
deleted file mode 100644
index b25ea766..00000000
--- a/parsing/g_prim.ml4
+++ /dev/null
@@ -1,122 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open Names
-open Libnames
-
-open Pcoq
-open Pcoq.Prim
-
-let prim_kw = ["{"; "}"; "["; "]"; "("; ")"; "'"]
-let _ = List.iter CLexer.add_keyword prim_kw
-
-
-let local_make_qualid l id = make_qualid (DirPath.make l) id
-
-let my_int_of_string loc s =
- try
- let n = int_of_string s in
- (* To avoid Array.make errors (that e.g. Undo uses), we set a *)
- (* more restricted limit than int_of_string does *)
- if n > 1024 * 2048 then raise Exit;
- n
- with Failure _ | Exit ->
- CErrors.user_err ~loc (Pp.str "Cannot support a so large number.")
-
-GEXTEND Gram
- GLOBAL:
- bigint natural integer identref name ident var preident
- fullyqualid qualid reference dirpath ne_lstring
- ne_string string lstring pattern_ident pattern_identref by_notation smart_global;
- preident:
- [ [ s = IDENT -> s ] ]
- ;
- ident:
- [ [ s = IDENT -> Id.of_string s ] ]
- ;
- pattern_ident:
- [ [ LEFTQMARK; id = ident -> id ] ]
- ;
- pattern_identref:
- [ [ id = pattern_ident -> CAst.make ~loc:!@loc id ] ]
- ;
- var: (* as identref, but interpret as a term identifier in ltac *)
- [ [ id = ident -> CAst.make ~loc:!@loc id ] ]
- ;
- identref:
- [ [ id = ident -> CAst.make ~loc:!@loc id ] ]
- ;
- field:
- [ [ s = FIELD -> Id.of_string s ] ]
- ;
- fields:
- [ [ id = field; (l,id') = fields -> (l@[id],id')
- | id = field -> ([],id)
- ] ]
- ;
- fullyqualid:
- [ [ id = ident; (l,id')=fields -> CAst.make ~loc:!@loc @@ id::List.rev (id'::l)
- | id = ident -> CAst.make ~loc:!@loc [id]
- ] ]
- ;
- basequalid:
- [ [ id = ident; (l,id')=fields -> local_make_qualid (l@[id]) id'
- | id = ident -> qualid_of_ident id
- ] ]
- ;
- name:
- [ [ IDENT "_" -> CAst.make ~loc:!@loc Anonymous
- | id = ident -> CAst.make ~loc:!@loc @@ Name id ] ]
- ;
- reference:
- [ [ id = ident; (l,id') = fields ->
- CAst.make ~loc:!@loc @@ Qualid (local_make_qualid (l@[id]) id')
- | id = ident -> CAst.make ~loc:!@loc @@ Ident id
- ] ]
- ;
- by_notation:
- [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> key ] -> (s, sc) ] ]
- ;
- smart_global:
- [ [ c = reference -> CAst.make ~loc:!@loc @@ Misctypes.AN c
- | ntn = by_notation -> CAst.make ~loc:!@loc @@ Misctypes.ByNotation ntn ] ]
- ;
- qualid:
- [ [ qid = basequalid -> CAst.make ~loc:!@loc qid ] ]
- ;
- ne_string:
- [ [ s = STRING ->
- if s="" then CErrors.user_err ~loc:!@loc (Pp.str"Empty string."); s
- ] ]
- ;
- ne_lstring:
- [ [ s = ne_string -> CAst.make ~loc:!@loc s ] ]
- ;
- dirpath:
- [ [ id = ident; l = LIST0 field ->
- DirPath.make (List.rev (id::l)) ] ]
- ;
- string:
- [ [ s = STRING -> s ] ]
- ;
- lstring:
- [ [ s = string -> (CAst.make ~loc:!@loc s) ] ]
- ;
- integer:
- [ [ i = INT -> my_int_of_string (!@loc) i
- | "-"; i = INT -> - my_int_of_string (!@loc) i ] ]
- ;
- natural:
- [ [ i = INT -> my_int_of_string (!@loc) i ] ]
- ;
- bigint: (* Negative numbers are dealt with elsewhere *)
- [ [ i = INT -> i ] ]
- ;
-END
diff --git a/parsing/g_prim.mlg b/parsing/g_prim.mlg
new file mode 100644
index 00000000..dfb78890
--- /dev/null
+++ b/parsing/g_prim.mlg
@@ -0,0 +1,123 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+{
+
+open Names
+open Libnames
+
+open Pcoq
+open Pcoq.Prim
+
+let prim_kw = ["{"; "}"; "["; "]"; "("; ")"; "'"]
+let _ = List.iter CLexer.add_keyword prim_kw
+
+
+let local_make_qualid loc l id = make_qualid ~loc (DirPath.make l) id
+
+let my_int_of_string loc s =
+ try
+ int_of_string s
+ with Failure _ ->
+ CErrors.user_err ~loc (Pp.str "This number is too large.")
+
+}
+
+GRAMMAR EXTEND Gram
+ GLOBAL:
+ bigint natural integer identref name ident var preident
+ fullyqualid qualid reference dirpath ne_lstring
+ ne_string string lstring pattern_ident pattern_identref by_notation smart_global;
+ preident:
+ [ [ s = IDENT -> { s } ] ]
+ ;
+ ident:
+ [ [ s = IDENT -> { Id.of_string s } ] ]
+ ;
+ pattern_ident:
+ [ [ LEFTQMARK; id = ident -> { id } ] ]
+ ;
+ pattern_identref:
+ [ [ id = pattern_ident -> { CAst.make ~loc id } ] ]
+ ;
+  var: (* as identref, but interpreted as a term identifier in ltac *)
+ [ [ id = ident -> { CAst.make ~loc id } ] ]
+ ;
+ identref:
+ [ [ id = ident -> { CAst.make ~loc id } ] ]
+ ;
+ field:
+ [ [ s = FIELD -> { Id.of_string s } ] ]
+ ;
+ fields:
+ [ [ id = field; f = fields -> { let (l,id') = f in (l@[id],id') }
+ | id = field -> { ([],id) }
+ ] ]
+ ;
+ fullyqualid:
+ [ [ id = ident; f=fields -> { let (l,id') = f in CAst.make ~loc @@ id::List.rev (id'::l) }
+ | id = ident -> { CAst.make ~loc [id] }
+ ] ]
+ ;
+ basequalid:
+ [ [ id = ident; f=fields -> { let (l,id') = f in local_make_qualid loc (l@[id]) id' }
+ | id = ident -> { qualid_of_ident ~loc id }
+ ] ]
+ ;
+ name:
+ [ [ IDENT "_" -> { CAst.make ~loc Anonymous }
+ | id = ident -> { CAst.make ~loc @@ Name id } ] ]
+ ;
+ reference:
+ [ [ id = ident; f = fields -> {
+ let (l,id') = f in
+ local_make_qualid loc (l@[id]) id' }
+ | id = ident -> { local_make_qualid loc [] id }
+ ] ]
+ ;
+ by_notation:
+ [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> { key } ] -> { (s, sc) } ] ]
+ ;
+ smart_global:
+ [ [ c = reference -> { CAst.make ~loc @@ Constrexpr.AN c }
+ | ntn = by_notation -> { CAst.make ~loc @@ Constrexpr.ByNotation ntn } ] ]
+ ;
+ qualid:
+ [ [ qid = basequalid -> { qid } ] ]
+ ;
+ ne_string:
+ [ [ s = STRING ->
+ { if s="" then CErrors.user_err ~loc (Pp.str"Empty string."); s }
+ ] ]
+ ;
+ ne_lstring:
+ [ [ s = ne_string -> { CAst.make ~loc s } ] ]
+ ;
+ dirpath:
+ [ [ id = ident; l = LIST0 field ->
+ { DirPath.make (List.rev (id::l)) } ] ]
+ ;
+ string:
+ [ [ s = STRING -> { s } ] ]
+ ;
+ lstring:
+ [ [ s = string -> { CAst.make ~loc s } ] ]
+ ;
+ integer:
+ [ [ i = INT -> { my_int_of_string loc i }
+ | "-"; i = INT -> { - my_int_of_string loc i } ] ]
+ ;
+ natural:
+ [ [ i = INT -> { my_int_of_string loc i } ] ]
+ ;
+ bigint: (* Negative numbers are dealt with elsewhere *)
+ [ [ i = INT -> { i } ] ]
+ ;
+END
diff --git a/parsing/g_proofs.ml4 b/parsing/g_proofs.ml4
deleted file mode 100644
index e393c2bb..00000000
--- a/parsing/g_proofs.ml4
+++ /dev/null
@@ -1,133 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open Constrexpr
-open Vernacexpr
-open Misctypes
-
-open Pcoq
-open Pcoq.Prim
-open Pcoq.Constr
-open Pcoq.Vernac_
-
-let thm_token = G_vernac.thm_token
-
-let hint = Gram.entry_create "hint"
-
-let warn_deprecated_focus =
- CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
- (fun () ->
- Pp.strbrk
- "The Focus command is deprecated; use bullets or focusing brackets instead"
- )
-
-let warn_deprecated_focus_n n =
- CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
- (fun () ->
- Pp.(str "The Focus command is deprecated;" ++ spc ()
- ++ str "use '" ++ int n ++ str ": {' instead")
- )
-
-let warn_deprecated_unfocus =
- CWarnings.create ~name:"deprecated-unfocus" ~category:"deprecated"
- (fun () -> Pp.strbrk "The Unfocus command is deprecated")
-
-(* Proof commands *)
-GEXTEND Gram
- GLOBAL: hint command;
-
- opt_hintbases:
- [ [ -> []
- | ":"; l = LIST1 [id = IDENT -> id ] -> l ] ]
- ;
- command:
- [ [ IDENT "Goal"; c = lconstr ->
- VernacDefinition (Decl_kinds.(NoDischarge, Definition), ((CAst.make ~loc:!@loc Names.Anonymous), None), ProveBody ([], c))
- | IDENT "Proof" -> VernacProof (None,None)
- | IDENT "Proof" ; IDENT "Mode" ; mn = string -> VernacProofMode mn
- | IDENT "Proof"; c = lconstr -> VernacExactProof c
- | IDENT "Abort" -> VernacAbort None
- | IDENT "Abort"; IDENT "All" -> VernacAbortAll
- | IDENT "Abort"; id = identref -> VernacAbort (Some id)
- | IDENT "Existential"; n = natural; c = constr_body ->
- VernacSolveExistential (n,c)
- | IDENT "Admitted" -> VernacEndProof Admitted
- | IDENT "Qed" -> VernacEndProof (Proved (Opaque,None))
- | IDENT "Save"; id = identref ->
- VernacEndProof (Proved (Opaque, Some id))
- | IDENT "Defined" -> VernacEndProof (Proved (Transparent,None))
- | IDENT "Defined"; id=identref ->
- VernacEndProof (Proved (Transparent,Some id))
- | IDENT "Restart" -> VernacRestart
- | IDENT "Undo" -> VernacUndo 1
- | IDENT "Undo"; n = natural -> VernacUndo n
- | IDENT "Undo"; IDENT "To"; n = natural -> VernacUndoTo n
- | IDENT "Focus" ->
- warn_deprecated_focus ~loc:!@loc ();
- VernacFocus None
- | IDENT "Focus"; n = natural ->
- warn_deprecated_focus_n n ~loc:!@loc ();
- VernacFocus (Some n)
- | IDENT "Unfocus" ->
- warn_deprecated_unfocus ~loc:!@loc ();
- VernacUnfocus
- | IDENT "Unfocused" -> VernacUnfocused
- | IDENT "Show" -> VernacShow (ShowGoal OpenSubgoals)
- | IDENT "Show"; n = natural -> VernacShow (ShowGoal (NthGoal n))
- | IDENT "Show"; id = ident -> VernacShow (ShowGoal (GoalId id))
- | IDENT "Show"; IDENT "Script" -> VernacShow ShowScript
- | IDENT "Show"; IDENT "Existentials" -> VernacShow ShowExistentials
- | IDENT "Show"; IDENT "Universes" -> VernacShow ShowUniverses
- | IDENT "Show"; IDENT "Conjectures" -> VernacShow ShowProofNames
- | IDENT "Show"; IDENT "Proof" -> VernacShow ShowProof
- | IDENT "Show"; IDENT "Intro" -> VernacShow (ShowIntros false)
- | IDENT "Show"; IDENT "Intros" -> VernacShow (ShowIntros true)
- | IDENT "Show"; IDENT "Match"; id = reference -> VernacShow (ShowMatch id)
- | IDENT "Guarded" -> VernacCheckGuard
- (* Hints for Auto and EAuto *)
- | IDENT "Create"; IDENT "HintDb" ;
- id = IDENT ; b = [ "discriminated" -> true | -> false ] ->
- VernacCreateHintDb (id, b)
- | IDENT "Remove"; IDENT "Hints"; ids = LIST1 global; dbnames = opt_hintbases ->
- VernacRemoveHints (dbnames, ids)
- | IDENT "Hint"; h = hint;
- dbnames = opt_hintbases ->
- VernacHints (dbnames, h)
- (* Declare "Resolve" explicitly so as to be able to later extend with
- "Resolve ->" and "Resolve <-" *)
- | IDENT "Hint"; IDENT "Resolve"; lc = LIST1 reference_or_constr;
- info = hint_info; dbnames = opt_hintbases ->
- VernacHints (dbnames,
- HintsResolve (List.map (fun x -> (info, true, x)) lc))
- ] ];
- reference_or_constr:
- [ [ r = global -> HintsReference r
- | c = constr -> HintsConstr c ] ]
- ;
- hint:
- [ [ IDENT "Resolve"; lc = LIST1 reference_or_constr; info = hint_info ->
- HintsResolve (List.map (fun x -> (info, true, x)) lc)
- | IDENT "Immediate"; lc = LIST1 reference_or_constr -> HintsImmediate lc
- | IDENT "Transparent"; lc = LIST1 global -> HintsTransparency (lc, true)
- | IDENT "Opaque"; lc = LIST1 global -> HintsTransparency (lc, false)
- | IDENT "Mode"; l = global; m = mode -> HintsMode (l, m)
- | IDENT "Unfold"; lqid = LIST1 global -> HintsUnfold lqid
- | IDENT "Constructors"; lc = LIST1 global -> HintsConstructors lc ] ]
- ;
- constr_body:
- [ [ ":="; c = lconstr -> c
- | ":"; t = lconstr; ":="; c = lconstr -> CAst.make ~loc:!@loc @@ CCast(c,CastConv t) ] ]
- ;
- mode:
- [ [ l = LIST1 [ "+" -> ModeInput
- | "!" -> ModeNoHeadEvar
- | "-" -> ModeOutput ] -> l ] ]
- ;
-END
diff --git a/parsing/g_vernac.ml4 b/parsing/g_vernac.ml4
deleted file mode 100644
index 61b1de82..00000000
--- a/parsing/g_vernac.ml4
+++ /dev/null
@@ -1,1195 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open Pp
-open CErrors
-open Util
-open Names
-open Vernacexpr
-open Constrexpr
-open Constrexpr_ops
-open Extend
-open Decl_kinds
-open Declarations
-open Misctypes
-open Tok (* necessary for camlp5 *)
-
-open Pcoq
-open Pcoq.Prim
-open Pcoq.Constr
-open Pcoq.Vernac_
-open Pcoq.Module
-
-let vernac_kw = [ ";"; ","; ">->"; ":<"; "<:"; "where"; "at" ]
-let _ = List.iter CLexer.add_keyword vernac_kw
-
-(* Rem: do not join the different GEXTEND into one, it breaks native *)
-(* compilation on PowerPC and Sun architectures *)
-
-let query_command = Gram.entry_create "vernac:query_command"
-
-let subprf = Gram.entry_create "vernac:subprf"
-
-let class_rawexpr = Gram.entry_create "vernac:class_rawexpr"
-let thm_token = Gram.entry_create "vernac:thm_token"
-let def_body = Gram.entry_create "vernac:def_body"
-let decl_notation = Gram.entry_create "vernac:decl_notation"
-let record_field = Gram.entry_create "vernac:record_field"
-let of_type_with_opt_coercion = Gram.entry_create "vernac:of_type_with_opt_coercion"
-let instance_name = Gram.entry_create "vernac:instance_name"
-let section_subset_expr = Gram.entry_create "vernac:section_subset_expr"
-
-let make_bullet s =
- let n = String.length s in
- match s.[0] with
- | '-' -> Dash n
- | '+' -> Plus n
- | '*' -> Star n
- | _ -> assert false
-
-let parse_compat_version ?(allow_old = true) = let open Flags in function
- | "8.8" -> Current
- | "8.7" -> V8_7
- | "8.6" -> V8_6
- | ("8.5" | "8.4" | "8.3" | "8.2" | "8.1" | "8.0") as s ->
- CErrors.user_err ~hdr:"get_compat_version"
- Pp.(str "Compatibility with version " ++ str s ++ str " not supported.")
- | s ->
- CErrors.user_err ~hdr:"get_compat_version"
- Pp.(str "Unknown compatibility version \"" ++ str s ++ str "\".")
-
-GEXTEND Gram
- GLOBAL: vernac_control gallina_ext noedit_mode subprf;
- vernac_control: FIRST
- [ [ IDENT "Time"; c = located_vernac -> VernacTime (false,c)
- | IDENT "Redirect"; s = ne_string; c = located_vernac -> VernacRedirect (s, c)
- | IDENT "Timeout"; n = natural; v = vernac_control -> VernacTimeout(n,v)
- | IDENT "Fail"; v = vernac_control -> VernacFail v
- | (f, v) = vernac -> VernacExpr(f, v) ]
- ]
- ;
- vernac:
- [ [ IDENT "Local"; (f, v) = vernac_poly -> (VernacLocal true :: f, v)
- | IDENT "Global"; (f, v) = vernac_poly -> (VernacLocal false :: f, v)
-
- | v = vernac_poly -> v ]
- ]
- ;
- vernac_poly:
- [ [ IDENT "Polymorphic"; (f, v) = vernac_aux -> (VernacPolymorphic true :: f, v)
- | IDENT "Monomorphic"; (f, v) = vernac_aux -> (VernacPolymorphic false :: f, v)
- | v = vernac_aux -> v ]
- ]
- ;
- vernac_aux:
- (* Better to parse "." here: in case of failure (e.g. in coerce_to_var), *)
- (* "." is still in the stream and discard_to_dot works correctly *)
- [ [ IDENT "Program"; g = gallina; "." -> ([VernacProgram], g)
- | IDENT "Program"; g = gallina_ext; "." -> ([VernacProgram], g)
- | g = gallina; "." -> ([], g)
- | g = gallina_ext; "." -> ([], g)
- | c = command; "." -> ([], c)
- | c = syntax; "." -> ([], c)
- | c = subprf -> ([], c)
- ] ]
- ;
- vernac_aux: LAST
- [ [ prfcom = command_entry -> ([], prfcom) ] ]
- ;
- noedit_mode:
- [ [ c = query_command -> c None] ]
- ;
-
- subprf:
- [ [ s = BULLET -> VernacBullet (make_bullet s)
- | "{" -> VernacSubproof None
- | "}" -> VernacEndSubproof
- ] ]
- ;
-
- located_vernac:
- [ [ v = vernac_control -> CAst.make ~loc:!@loc v ] ]
- ;
-END
-
-let warn_plural_command =
- CWarnings.create ~name:"plural-command" ~category:"pedantic" ~default:CWarnings.Disabled
- (fun kwd -> strbrk (Printf.sprintf "Command \"%s\" expects more than one assumption." kwd))
-
-let test_plural_form loc kwd = function
- | [(_,([_],_))] ->
- warn_plural_command ~loc:!@loc kwd
- | _ -> ()
-
-let test_plural_form_types loc kwd = function
- | [([_],_)] ->
- warn_plural_command ~loc:!@loc kwd
- | _ -> ()
-
-let lname_of_lident : lident -> lname =
- CAst.map (fun s -> Name s)
-
-let name_of_ident_decl : ident_decl -> name_decl =
- on_fst lname_of_lident
-
-(* Gallina declarations *)
-GEXTEND Gram
- GLOBAL: gallina gallina_ext thm_token def_body of_type_with_opt_coercion
- record_field decl_notation rec_definition ident_decl univ_decl;
-
- gallina:
- (* Definition, Theorem, Variable, Axiom, ... *)
- [ [ thm = thm_token; id = ident_decl; bl = binders; ":"; c = lconstr;
- l = LIST0
- [ "with"; id = ident_decl; bl = binders; ":"; c = lconstr ->
- (id,(bl,c)) ] ->
- VernacStartTheoremProof (thm, (id,(bl,c))::l)
- | stre = assumption_token; nl = inline; bl = assum_list ->
- VernacAssumption (stre, nl, bl)
- | (kwd,stre) = assumptions_token; nl = inline; bl = assum_list ->
- test_plural_form loc kwd bl;
- VernacAssumption (stre, nl, bl)
- | d = def_token; id = ident_decl; b = def_body ->
- VernacDefinition (d, name_of_ident_decl id, b)
- | IDENT "Let"; id = identref; b = def_body ->
- VernacDefinition ((DoDischarge, Let), (lname_of_lident id, None), b)
- (* Gallina inductive declarations *)
- | cum = cumulativity_token; priv = private_token; f = finite_token;
- indl = LIST1 inductive_definition SEP "with" ->
- let (k,f) = f in
- let indl=List.map (fun ((a,b,c,d),e) -> ((a,b,c,k,d),e)) indl in
- let cum =
- match cum with
- Some true -> LocalCumulativity
- | Some false -> LocalNonCumulativity
- | None ->
- if Flags.is_polymorphic_inductive_cumulativity () then
- GlobalCumulativity
- else
- GlobalNonCumulativity
- in
- VernacInductive (cum, priv,f,indl)
- | "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
- VernacFixpoint (NoDischarge, recs)
- | IDENT "Let"; "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
- VernacFixpoint (DoDischarge, recs)
- | "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
- VernacCoFixpoint (NoDischarge, corecs)
- | IDENT "Let"; "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
- VernacCoFixpoint (DoDischarge, corecs)
- | IDENT "Scheme"; l = LIST1 scheme SEP "with" -> VernacScheme l
- | IDENT "Combined"; IDENT "Scheme"; id = identref; IDENT "from";
- l = LIST1 identref SEP "," -> VernacCombinedScheme (id, l)
- | IDENT "Register"; IDENT "Inline"; id = identref ->
- VernacRegister(id, RegisterInline)
- | IDENT "Universe"; l = LIST1 identref -> VernacUniverse l
- | IDENT "Universes"; l = LIST1 identref -> VernacUniverse l
- | IDENT "Constraint"; l = LIST1 univ_constraint SEP "," -> VernacConstraint l
- ] ]
- ;
-
- thm_token:
- [ [ "Theorem" -> Theorem
- | IDENT "Lemma" -> Lemma
- | IDENT "Fact" -> Fact
- | IDENT "Remark" -> Remark
- | IDENT "Corollary" -> Corollary
- | IDENT "Proposition" -> Proposition
- | IDENT "Property" -> Property ] ]
- ;
- def_token:
- [ [ "Definition" -> (NoDischarge,Definition)
- | IDENT "Example" -> (NoDischarge,Example)
- | IDENT "SubClass" -> (NoDischarge,SubClass) ] ]
- ;
- assumption_token:
- [ [ "Hypothesis" -> (DoDischarge, Logical)
- | "Variable" -> (DoDischarge, Definitional)
- | "Axiom" -> (NoDischarge, Logical)
- | "Parameter" -> (NoDischarge, Definitional)
- | IDENT "Conjecture" -> (NoDischarge, Conjectural) ] ]
- ;
- assumptions_token:
- [ [ IDENT "Hypotheses" -> ("Hypotheses", (DoDischarge, Logical))
- | IDENT "Variables" -> ("Variables", (DoDischarge, Definitional))
- | IDENT "Axioms" -> ("Axioms", (NoDischarge, Logical))
- | IDENT "Parameters" -> ("Parameters", (NoDischarge, Definitional))
- | IDENT "Conjectures" -> ("Conjectures", (NoDischarge, Conjectural)) ] ]
- ;
- inline:
- [ [ IDENT "Inline"; "("; i = INT; ")" -> InlineAt (int_of_string i)
- | IDENT "Inline" -> DefaultInline
- | -> NoInline] ]
- ;
- univ_constraint:
- [ [ l = universe_level; ord = [ "<" -> Univ.Lt | "=" -> Univ.Eq | "<=" -> Univ.Le ];
- r = universe_level -> (l, ord, r) ] ]
- ;
- univ_decl :
- [ [ "@{" ; l = LIST0 identref; ext = [ "+" -> true | -> false ];
- cs = [ "|"; l' = LIST0 univ_constraint SEP ",";
- ext = [ "+" -> true | -> false ]; "}" -> (l',ext)
- | ext = [ "}" -> true | "|}" -> false ] -> ([], ext) ]
- ->
- { univdecl_instance = l;
- univdecl_extensible_instance = ext;
- univdecl_constraints = fst cs;
- univdecl_extensible_constraints = snd cs }
- ] ]
- ;
- ident_decl:
- [ [ i = identref; l = OPT univ_decl -> (i, l)
- ] ]
- ;
- finite_token:
- [ [ IDENT "Inductive" -> (Inductive_kw,Finite)
- | IDENT "CoInductive" -> (CoInductive,CoFinite)
- | IDENT "Variant" -> (Variant,BiFinite)
- | IDENT "Record" -> (Record,BiFinite)
- | IDENT "Structure" -> (Structure,BiFinite)
- | IDENT "Class" -> (Class true,BiFinite) ] ]
- ;
- cumulativity_token:
- [ [ IDENT "Cumulative" -> Some true | IDENT "NonCumulative" -> Some false | -> None ] ]
- ;
- private_token:
- [ [ IDENT "Private" -> true | -> false ] ]
- ;
- (* Simple definitions *)
- def_body:
- [ [ bl = binders; ":="; red = reduce; c = lconstr ->
- if List.exists (function CLocalPattern _ -> true | _ -> false) bl
- then
-        (* FIXME: "red" will be applied to types in bl and the Cast will remain *)
- let c = mkCLambdaN ~loc:!@loc bl c in
- DefineBody ([], red, c, None)
- else
- (match c with
- | { CAst.v = CCast(c, CastConv t) } -> DefineBody (bl, red, c, Some t)
- | _ -> DefineBody (bl, red, c, None))
- | bl = binders; ":"; t = lconstr; ":="; red = reduce; c = lconstr ->
- let ((bl, c), tyo) =
- if List.exists (function CLocalPattern _ -> true | _ -> false) bl
- then
-          (* FIXME: "red" will be applied to types in bl and the Cast will remain *)
- let c = CAst.make ~loc:!@loc @@ CCast (c, CastConv t) in
- (([],mkCLambdaN ~loc:!@loc bl c), None)
- else ((bl, c), Some t)
- in
- DefineBody (bl, red, c, tyo)
- | bl = binders; ":"; t = lconstr ->
- ProveBody (bl, t) ] ]
- ;
- reduce:
- [ [ IDENT "Eval"; r = red_expr; "in" -> Some r
- | -> None ] ]
- ;
- one_decl_notation:
- [ [ ntn = ne_lstring; ":="; c = constr;
- scopt = OPT [ ":"; sc = IDENT -> sc] -> (ntn,c,scopt) ] ]
- ;
- decl_notation:
- [ [ "where"; l = LIST1 one_decl_notation SEP IDENT "and" -> l
- | -> [] ] ]
- ;
- (* Inductives and records *)
- opt_constructors_or_fields:
- [ [ ":="; lc = constructor_list_or_record_decl -> lc
- | -> RecordDecl (None, []) ] ]
- ;
- inductive_definition:
- [ [ oc = opt_coercion; id = ident_decl; indpar = binders;
- c = OPT [ ":"; c = lconstr -> c ];
- lc=opt_constructors_or_fields; ntn = decl_notation ->
- (((oc,id),indpar,c,lc),ntn) ] ]
- ;
- constructor_list_or_record_decl:
- [ [ "|"; l = LIST1 constructor SEP "|" -> Constructors l
- | id = identref ; c = constructor_type; "|"; l = LIST0 constructor SEP "|" ->
- Constructors ((c id)::l)
- | id = identref ; c = constructor_type -> Constructors [ c id ]
- | cstr = identref; "{"; fs = record_fields; "}" ->
- RecordDecl (Some cstr,fs)
- | "{";fs = record_fields; "}" -> RecordDecl (None,fs)
- | -> Constructors [] ] ]
- ;
-(*
- csort:
- [ [ s = sort -> CSort (loc,s) ] ]
- ;
-*)
- opt_coercion:
- [ [ ">" -> true
- | -> false ] ]
- ;
- (* (co)-fixpoints *)
- rec_definition:
- [ [ id = ident_decl;
- bl = binders_fixannot;
- ty = type_cstr;
- def = OPT [":="; def = lconstr -> def]; ntn = decl_notation ->
- let bl, annot = bl in ((id,annot,bl,ty,def),ntn) ] ]
- ;
- corec_definition:
- [ [ id = ident_decl; bl = binders; ty = type_cstr;
- def = OPT [":="; def = lconstr -> def]; ntn = decl_notation ->
- ((id,bl,ty,def),ntn) ] ]
- ;
- type_cstr:
- [ [ ":"; c=lconstr -> c
- | -> CAst.make ~loc:!@loc @@ CHole (None, Misctypes.IntroAnonymous, None) ] ]
- ;
- (* Inductive schemes *)
- scheme:
- [ [ kind = scheme_kind -> (None,kind)
- | id = identref; ":="; kind = scheme_kind -> (Some id,kind) ] ]
- ;
- scheme_kind:
- [ [ IDENT "Induction"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> InductionScheme(true,ind,s)
- | IDENT "Minimality"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> InductionScheme(false,ind,s)
- | IDENT "Elimination"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> CaseScheme(true,ind,s)
- | IDENT "Case"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> CaseScheme(false,ind,s)
- | IDENT "Equality"; "for" ; ind = smart_global -> EqualityScheme(ind) ] ]
- ;
- (* Various Binders *)
-(*
- (* ... without coercions *)
- binder_nodef:
- [ [ b = binder_let ->
- (match b with
- CLocalAssum(l,ty) -> (l,ty)
- | CLocalDef _ ->
- Util.user_err_loc
- (loc,"fix_param",Pp.str"defined binder not allowed here.")) ] ]
- ;
-*)
- (* ... with coercions *)
- record_field:
- [ [ bd = record_binder; pri = OPT [ "|"; n = natural -> n ];
- ntn = decl_notation -> (bd,pri),ntn ] ]
- ;
- record_fields:
- [ [ f = record_field; ";"; fs = record_fields -> f :: fs
- | f = record_field; ";" -> [f]
- | f = record_field -> [f]
- | -> []
- ] ]
- ;
- record_binder_body:
- [ [ l = binders; oc = of_type_with_opt_coercion;
- t = lconstr -> fun id -> (oc,AssumExpr (id,mkCProdN ~loc:!@loc l t))
- | l = binders; oc = of_type_with_opt_coercion;
- t = lconstr; ":="; b = lconstr -> fun id ->
- (oc,DefExpr (id,mkCLambdaN ~loc:!@loc l b,Some (mkCProdN ~loc:!@loc l t)))
- | l = binders; ":="; b = lconstr -> fun id ->
- match b.CAst.v with
- | CCast(b', (CastConv t|CastVM t|CastNative t)) ->
- (None,DefExpr(id,mkCLambdaN ~loc:!@loc l b',Some (mkCProdN ~loc:!@loc l t)))
- | _ ->
- (None,DefExpr(id,mkCLambdaN ~loc:!@loc l b,None)) ] ]
- ;
- record_binder:
- [ [ id = name -> (None,AssumExpr(id, CAst.make ~loc:!@loc @@ CHole (None, Misctypes.IntroAnonymous, None)))
- | id = name; f = record_binder_body -> f id ] ]
- ;
- assum_list:
- [ [ bl = LIST1 assum_coe -> bl | b = simple_assum_coe -> [b] ] ]
- ;
- assum_coe:
- [ [ "("; a = simple_assum_coe; ")" -> a ] ]
- ;
- simple_assum_coe:
- [ [ idl = LIST1 ident_decl; oc = of_type_with_opt_coercion; c = lconstr ->
- (not (Option.is_empty oc),(idl,c)) ] ]
- ;
-
- constructor_type:
- [[ l = binders;
- t= [ coe = of_type_with_opt_coercion; c = lconstr ->
- fun l id -> (not (Option.is_empty coe),(id,mkCProdN ~loc:!@loc l c))
- | ->
- fun l id -> (false,(id,mkCProdN ~loc:!@loc l (CAst.make ~loc:!@loc @@ CHole (None, Misctypes.IntroAnonymous, None)))) ]
- -> t l
- ]]
-;
-
- constructor:
- [ [ id = identref; c=constructor_type -> c id ] ]
- ;
- of_type_with_opt_coercion:
- [ [ ":>>" -> Some false
- | ":>"; ">" -> Some false
- | ":>" -> Some true
- | ":"; ">"; ">" -> Some false
- | ":"; ">" -> Some true
- | ":" -> None ] ]
- ;
-END
-
-let only_starredidentrefs =
- Gram.Entry.of_parser "test_only_starredidentrefs"
- (fun strm ->
- let rec aux n =
- match Util.stream_nth n strm with
- | KEYWORD "." -> ()
- | KEYWORD ")" -> ()
- | (IDENT _ | KEYWORD "Type" | KEYWORD "*") -> aux (n+1)
- | _ -> raise Stream.Failure in
- aux 0)
-let starredidentreflist_to_expr l =
- match l with
- | [] -> SsEmpty
- | x :: xs -> List.fold_right (fun i acc -> SsUnion(i,acc)) xs x
-
-let warn_deprecated_include_type =
- CWarnings.create ~name:"deprecated-include-type" ~category:"deprecated"
- (fun () -> strbrk "Include Type is deprecated; use Include instead")
-
-(* Modules and Sections *)
-GEXTEND Gram
- GLOBAL: gallina_ext module_expr module_type section_subset_expr;
-
- gallina_ext:
- [ [ (* Interactive module declaration *)
- IDENT "Module"; export = export_token; id = identref;
- bl = LIST0 module_binder; sign = of_module_type;
- body = is_module_expr ->
- VernacDefineModule (export, id, bl, sign, body)
- | IDENT "Module"; "Type"; id = identref;
- bl = LIST0 module_binder; sign = check_module_types;
- body = is_module_type ->
- VernacDeclareModuleType (id, bl, sign, body)
- | IDENT "Declare"; IDENT "Module"; export = export_token; id = identref;
- bl = LIST0 module_binder; ":"; mty = module_type_inl ->
- VernacDeclareModule (export, id, bl, mty)
- (* Section beginning *)
- | IDENT "Section"; id = identref -> VernacBeginSection id
- | IDENT "Chapter"; id = identref -> VernacBeginSection id
-
-      (* This ends a Section, a Module or a Module Type *)
- | IDENT "End"; id = identref -> VernacEndSegment id
-
- (* Naming a set of section hyps *)
- | IDENT "Collection"; id = identref; ":="; expr = section_subset_expr ->
- VernacNameSectionHypSet (id, expr)
-
- (* Requiring an already compiled module *)
- | IDENT "Require"; export = export_token; qidl = LIST1 global ->
- VernacRequire (None, export, qidl)
- | IDENT "From" ; ns = global ; IDENT "Require"; export = export_token
- ; qidl = LIST1 global ->
- VernacRequire (Some ns, export, qidl)
- | IDENT "Import"; qidl = LIST1 global -> VernacImport (false,qidl)
- | IDENT "Export"; qidl = LIST1 global -> VernacImport (true,qidl)
- | IDENT "Include"; e = module_type_inl; l = LIST0 ext_module_expr ->
- VernacInclude(e::l)
- | IDENT "Include"; "Type"; e = module_type_inl; l = LIST0 ext_module_type ->
- warn_deprecated_include_type ~loc:!@loc ();
- VernacInclude(e::l) ] ]
- ;
- export_token:
- [ [ IDENT "Import" -> Some false
- | IDENT "Export" -> Some true
- | -> None ] ]
- ;
- ext_module_type:
- [ [ "<+"; mty = module_type_inl -> mty ] ]
- ;
- ext_module_expr:
- [ [ "<+"; mexpr = module_expr_inl -> mexpr ] ]
- ;
- check_module_type:
- [ [ "<:"; mty = module_type_inl -> mty ] ]
- ;
- check_module_types:
- [ [ mtys = LIST0 check_module_type -> mtys ] ]
- ;
- of_module_type:
- [ [ ":"; mty = module_type_inl -> Enforce mty
- | mtys = check_module_types -> Check mtys ] ]
- ;
- is_module_type:
- [ [ ":="; mty = module_type_inl ; l = LIST0 ext_module_type -> (mty::l)
- | -> [] ] ]
- ;
- is_module_expr:
- [ [ ":="; mexpr = module_expr_inl; l = LIST0 ext_module_expr -> (mexpr::l)
- | -> [] ] ]
- ;
- functor_app_annot:
- [ [ "["; IDENT "inline"; "at"; IDENT "level"; i = INT; "]" ->
- InlineAt (int_of_string i)
- | "["; IDENT "no"; IDENT "inline"; "]" -> NoInline
- | -> DefaultInline
- ] ]
- ;
- module_expr_inl:
- [ [ "!"; me = module_expr -> (me,NoInline)
- | me = module_expr; a = functor_app_annot -> (me,a) ] ]
- ;
- module_type_inl:
- [ [ "!"; me = module_type -> (me,NoInline)
- | me = module_type; a = functor_app_annot -> (me,a) ] ]
- ;
- (* Module binder *)
- module_binder:
- [ [ "("; export = export_token; idl = LIST1 identref; ":";
- mty = module_type_inl; ")" -> (export,idl,mty) ] ]
- ;
- (* Module expressions *)
- module_expr:
- [ [ me = module_expr_atom -> me
- | me1 = module_expr; me2 = module_expr_atom -> CAst.make ~loc:!@loc @@ CMapply (me1,me2)
- ] ]
- ;
- module_expr_atom:
- [ [ qid = qualid -> CAst.make ~loc:!@loc @@ CMident (qid.CAst.v) | "("; me = module_expr; ")" -> me ] ]
- ;
- with_declaration:
- [ [ "Definition"; fqid = fullyqualid; udecl = OPT univ_decl; ":="; c = Constr.lconstr ->
- CWith_Definition (fqid,udecl,c)
- | IDENT "Module"; fqid = fullyqualid; ":="; qid = qualid ->
- CWith_Module (fqid,qid)
- ] ]
- ;
- module_type:
- [ [ qid = qualid -> CAst.make ~loc:!@loc @@ CMident (qid.CAst.v)
- | "("; mt = module_type; ")" -> mt
- | mty = module_type; me = module_expr_atom ->
- CAst.make ~loc:!@loc @@ CMapply (mty,me)
- | mty = module_type; "with"; decl = with_declaration ->
- CAst.make ~loc:!@loc @@ CMwith (mty,decl)
- ] ]
- ;
- (* Proof using *)
- section_subset_expr:
- [ [ only_starredidentrefs; l = LIST0 starredidentref ->
- starredidentreflist_to_expr l
- | e = ssexpr -> e ]]
- ;
- starredidentref:
- [ [ i = identref -> SsSingl i
- | i = identref; "*" -> SsFwdClose(SsSingl i)
- | "Type" -> SsType
- | "Type"; "*" -> SsFwdClose SsType ]]
- ;
- ssexpr:
- [ "35"
- [ "-"; e = ssexpr -> SsCompl e ]
- | "50"
- [ e1 = ssexpr; "-"; e2 = ssexpr->SsSubstr(e1,e2)
- | e1 = ssexpr; "+"; e2 = ssexpr->SsUnion(e1,e2)]
- | "0"
- [ i = starredidentref -> i
- | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"->
- starredidentreflist_to_expr l
- | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"; "*" ->
- SsFwdClose(starredidentreflist_to_expr l)
- | "("; e = ssexpr; ")"-> e
- | "("; e = ssexpr; ")"; "*" -> SsFwdClose e ] ]
- ;
-END
-
-let warn_deprecated_arguments_scope =
- CWarnings.create ~name:"deprecated-arguments-scope" ~category:"deprecated"
- (fun () -> strbrk "Arguments Scope is deprecated; use Arguments instead")
-
-let warn_deprecated_implicit_arguments =
- CWarnings.create ~name:"deprecated-implicit-arguments" ~category:"deprecated"
- (fun () -> strbrk "Implicit Arguments is deprecated; use Arguments instead")
-
-(* Extensions: implicits, coercions, etc. *)
-GEXTEND Gram
- GLOBAL: gallina_ext instance_name hint_info;
-
- gallina_ext:
- [ [ (* Transparent and Opaque *)
- IDENT "Transparent"; l = LIST1 smart_global ->
- VernacSetOpacity (Conv_oracle.transparent, l)
- | IDENT "Opaque"; l = LIST1 smart_global ->
- VernacSetOpacity (Conv_oracle.Opaque, l)
- | IDENT "Strategy"; l =
- LIST1 [ v=strategy_level; "["; q=LIST1 smart_global; "]" -> (v,q)] ->
- VernacSetStrategy l
- (* Canonical structure *)
- | IDENT "Canonical"; IDENT "Structure"; qid = global ->
- VernacCanonical CAst.(make ~loc:!@loc @@ AN qid)
- | IDENT "Canonical"; IDENT "Structure"; ntn = by_notation ->
- VernacCanonical CAst.(make ~loc:!@loc @@ ByNotation ntn)
- | IDENT "Canonical"; IDENT "Structure"; qid = global; d = def_body ->
- let s = coerce_reference_to_id qid in
- VernacDefinition ((NoDischarge,CanonicalStructure),((CAst.make (Name s)),None),d)
-
- (* Coercions *)
- | IDENT "Coercion"; qid = global; d = def_body ->
- let s = coerce_reference_to_id qid in
- VernacDefinition ((NoDischarge,Coercion),((CAst.make (Name s)),None),d)
- | IDENT "Identity"; IDENT "Coercion"; f = identref; ":";
- s = class_rawexpr; ">->"; t = class_rawexpr ->
- VernacIdentityCoercion (f, s, t)
- | IDENT "Coercion"; qid = global; ":"; s = class_rawexpr; ">->";
- t = class_rawexpr ->
- VernacCoercion (CAst.make ~loc:!@loc @@ AN qid, s, t)
- | IDENT "Coercion"; ntn = by_notation; ":"; s = class_rawexpr; ">->";
- t = class_rawexpr ->
- VernacCoercion (CAst.make ~loc:!@loc @@ ByNotation ntn, s, t)
-
- | IDENT "Context"; c = LIST1 binder ->
- VernacContext (List.flatten c)
-
- | IDENT "Instance"; namesup = instance_name; ":";
- expl = [ "!" -> Decl_kinds.Implicit | -> Decl_kinds.Explicit ] ; t = operconstr LEVEL "200";
- info = hint_info ;
- props = [ ":="; "{"; r = record_declaration; "}" -> Some (true,r) |
- ":="; c = lconstr -> Some (false,c) | -> None ] ->
- VernacInstance (false,snd namesup,(fst namesup,expl,t),props,info)
-
- | IDENT "Existing"; IDENT "Instance"; id = global;
- info = hint_info ->
- VernacDeclareInstances [id, info]
-
- | IDENT "Existing"; IDENT "Instances"; ids = LIST1 global;
- pri = OPT [ "|"; i = natural -> i ] ->
- let info = { hint_priority = pri; hint_pattern = None } in
- let insts = List.map (fun i -> (i, info)) ids in
- VernacDeclareInstances insts
-
- | IDENT "Existing"; IDENT "Class"; is = global -> VernacDeclareClass is
-
- (* Arguments *)
- | IDENT "Arguments"; qid = smart_global;
- args = LIST0 argument_spec_block;
- more_implicits = OPT
- [ ","; impl = LIST1
- [ impl = LIST0 more_implicits_block -> List.flatten impl]
- SEP "," -> impl
- ];
- mods = OPT [ ":"; l = LIST1 arguments_modifier SEP "," -> l ] ->
- let mods = match mods with None -> [] | Some l -> List.flatten l in
- let slash_position = ref None in
- let rec parse_args i = function
- | [] -> []
- | `Id x :: args -> x :: parse_args (i+1) args
- | `Slash :: args ->
- if Option.is_empty !slash_position then
- (slash_position := Some i; parse_args i args)
- else
- user_err Pp.(str "The \"/\" modifier can occur only once")
- in
- let args = parse_args 0 (List.flatten args) in
- let more_implicits = Option.default [] more_implicits in
- VernacArguments (qid, args, more_implicits, !slash_position, mods)
-
-
- (* moved there so that camlp5 factors it with the previous rule *)
- | IDENT "Arguments"; IDENT "Scope"; qid = smart_global;
- "["; scl = LIST0 [ "_" -> None | sc = IDENT -> Some sc ]; "]" ->
- warn_deprecated_arguments_scope ~loc:!@loc ();
- VernacArgumentsScope (qid,scl)
-
- (* Implicit *)
- | IDENT "Implicit"; IDENT "Arguments"; qid = smart_global;
- pos = LIST0 [ "["; l = LIST0 implicit_name; "]" ->
- List.map (fun (id,b,f) -> (ExplByName id,b,f)) l ] ->
- warn_deprecated_implicit_arguments ~loc:!@loc ();
- VernacDeclareImplicits (qid,pos)
-
- | IDENT "Implicit"; "Type"; bl = reserv_list ->
- VernacReserve bl
-
- | IDENT "Implicit"; IDENT "Types"; bl = reserv_list ->
- test_plural_form_types loc "Implicit Types" bl;
- VernacReserve bl
-
- | IDENT "Generalizable";
- gen = [IDENT "All"; IDENT "Variables" -> Some []
- | IDENT "No"; IDENT "Variables" -> None
- | ["Variable" | IDENT "Variables"];
- idl = LIST1 identref -> Some idl ] ->
- VernacGeneralizable gen ] ]
- ;
- arguments_modifier:
- [ [ IDENT "simpl"; IDENT "nomatch" -> [`ReductionDontExposeCase]
- | IDENT "simpl"; IDENT "never" -> [`ReductionNeverUnfold]
- | IDENT "default"; IDENT "implicits" -> [`DefaultImplicits]
- | IDENT "clear"; IDENT "implicits" -> [`ClearImplicits]
- | IDENT "clear"; IDENT "scopes" -> [`ClearScopes]
- | IDENT "rename" -> [`Rename]
- | IDENT "assert" -> [`Assert]
- | IDENT "extra"; IDENT "scopes" -> [`ExtraScopes]
- | IDENT "clear"; IDENT "scopes"; IDENT "and"; IDENT "implicits" ->
- [`ClearImplicits; `ClearScopes]
- | IDENT "clear"; IDENT "implicits"; IDENT "and"; IDENT "scopes" ->
- [`ClearImplicits; `ClearScopes]
- ] ]
- ;
- implicit_name:
- [ [ "!"; id = ident -> (id, false, true)
- | id = ident -> (id,false,false)
- | "["; "!"; id = ident; "]" -> (id,true,true)
- | "["; id = ident; "]" -> (id,true, false) ] ]
- ;
- scope:
- [ [ "%"; key = IDENT -> key ] ]
- ;
- argument_spec: [
- [ b = OPT "!"; id = name ; s = OPT scope ->
- id.CAst.v, not (Option.is_empty b), Option.map (fun x -> CAst.make ~loc:!@loc x) s
- ]
- ];
- (* List of arguments implicit status, scope, modifiers *)
- argument_spec_block: [
- [ item = argument_spec ->
- let name, recarg_like, notation_scope = item in
- [`Id { name=name; recarg_like=recarg_like;
- notation_scope=notation_scope;
- implicit_status = NotImplicit}]
- | "/" -> [`Slash]
- | "("; items = LIST1 argument_spec; ")"; sc = OPT scope ->
- let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
- | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
- List.map (fun (name,recarg_like,notation_scope) ->
- `Id { name=name; recarg_like=recarg_like;
- notation_scope=f notation_scope;
- implicit_status = NotImplicit}) items
- | "["; items = LIST1 argument_spec; "]"; sc = OPT scope ->
- let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
- | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
- List.map (fun (name,recarg_like,notation_scope) ->
- `Id { name=name; recarg_like=recarg_like;
- notation_scope=f notation_scope;
- implicit_status = Implicit}) items
- | "{"; items = LIST1 argument_spec; "}"; sc = OPT scope ->
- let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
- | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
- List.map (fun (name,recarg_like,notation_scope) ->
- `Id { name=name; recarg_like=recarg_like;
- notation_scope=f notation_scope;
- implicit_status = MaximallyImplicit}) items
- ]
- ];
- (* Same as [argument_spec_block], but with only implicit status and names *)
- more_implicits_block: [
- [ name = name -> [(name.CAst.v, Vernacexpr.NotImplicit)]
- | "["; items = LIST1 name; "]" ->
- List.map (fun name -> (name.CAst.v, Vernacexpr.Implicit)) items
- | "{"; items = LIST1 name; "}" ->
- List.map (fun name -> (name.CAst.v, Vernacexpr.MaximallyImplicit)) items
- ]
- ];
- strategy_level:
- [ [ IDENT "expand" -> Conv_oracle.Expand
- | IDENT "opaque" -> Conv_oracle.Opaque
- | n=INT -> Conv_oracle.Level (int_of_string n)
- | "-"; n=INT -> Conv_oracle.Level (- int_of_string n)
- | IDENT "transparent" -> Conv_oracle.transparent ] ]
- ;
- instance_name:
- [ [ name = ident_decl; sup = OPT binders ->
- (CAst.map (fun id -> Name id) (fst name), snd name),
- (Option.default [] sup)
- | -> ((CAst.make ~loc:!@loc Anonymous), None), [] ] ]
- ;
- hint_info:
- [ [ "|"; i = OPT natural; pat = OPT constr_pattern ->
- { hint_priority = i; hint_pattern = pat }
- | -> { hint_priority = None; hint_pattern = None } ] ]
- ;
- reserv_list:
- [ [ bl = LIST1 reserv_tuple -> bl | b = simple_reserv -> [b] ] ]
- ;
- reserv_tuple:
- [ [ "("; a = simple_reserv; ")" -> a ] ]
- ;
- simple_reserv:
- [ [ idl = LIST1 identref; ":"; c = lconstr -> (idl,c) ] ]
- ;
-
-END
-
-GEXTEND Gram
- GLOBAL: command query_command class_rawexpr gallina_ext;
-
- gallina_ext:
- [ [ IDENT "Export"; "Set"; table = option_table; v = option_value ->
- VernacSetOption (true, table, v)
- | IDENT "Export"; IDENT "Unset"; table = option_table ->
- VernacUnsetOption (true, table)
- ] ];
-
- command:
- [ [ IDENT "Comments"; l = LIST0 comment -> VernacComments l
-
- (* Hack! Should be in grammar_ext, but camlp5 factorizes badly *)
- | IDENT "Declare"; IDENT "Instance"; namesup = instance_name; ":";
- expl = [ "!" -> Decl_kinds.Implicit | -> Decl_kinds.Explicit ] ; t = operconstr LEVEL "200";
- info = hint_info ->
- VernacInstance (true, snd namesup, (fst namesup, expl, t), None, info)
-
- (* System directory *)
- | IDENT "Pwd" -> VernacChdir None
- | IDENT "Cd" -> VernacChdir None
- | IDENT "Cd"; dir = ne_string -> VernacChdir (Some dir)
-
- (* Toplevel control *)
- | IDENT "Drop" -> VernacToplevelControl Drop
- | IDENT "Quit" -> VernacToplevelControl Quit
-
- | IDENT "Load"; verbosely = [ IDENT "Verbose" -> true | -> false ];
- s = [ s = ne_string -> s | s = IDENT -> s ] ->
- VernacLoad (verbosely, s)
- | IDENT "Declare"; IDENT "ML"; IDENT "Module"; l = LIST1 ne_string ->
- VernacDeclareMLModule l
-
- | IDENT "Locate"; l = locatable -> VernacLocate l
-
- (* Managing load paths *)
- | IDENT "Add"; IDENT "LoadPath"; dir = ne_string; alias = as_dirpath ->
- VernacAddLoadPath (false, dir, alias)
- | IDENT "Add"; IDENT "Rec"; IDENT "LoadPath"; dir = ne_string;
- alias = as_dirpath -> VernacAddLoadPath (true, dir, alias)
- | IDENT "Remove"; IDENT "LoadPath"; dir = ne_string ->
- VernacRemoveLoadPath dir
-
- (* For compatibility *)
- | IDENT "AddPath"; dir = ne_string; "as"; alias = as_dirpath ->
- VernacAddLoadPath (false, dir, alias)
- | IDENT "AddRecPath"; dir = ne_string; "as"; alias = as_dirpath ->
- VernacAddLoadPath (true, dir, alias)
- | IDENT "DelPath"; dir = ne_string ->
- VernacRemoveLoadPath dir
-
-      (* Type-Checking (not in the reference manual) *)
- | "Type"; c = lconstr -> VernacGlobalCheck c
-
- (* Printing (careful factorization of entries) *)
- | IDENT "Print"; p = printable -> VernacPrint p
- | IDENT "Print"; qid = smart_global; l = OPT univ_name_list -> VernacPrint (PrintName (qid,l))
- | IDENT "Print"; IDENT "Module"; "Type"; qid = global ->
- VernacPrint (PrintModuleType qid)
- | IDENT "Print"; IDENT "Module"; qid = global ->
- VernacPrint (PrintModule qid)
- | IDENT "Print"; IDENT "Namespace" ; ns = dirpath ->
- VernacPrint (PrintNamespace ns)
- | IDENT "Inspect"; n = natural -> VernacPrint (PrintInspect n)
-
- | IDENT "Add"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
- VernacAddMLPath (false, dir)
- | IDENT "Add"; IDENT "Rec"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
- VernacAddMLPath (true, dir)
-
- (* For acting on parameter tables *)
- | "Set"; table = option_table; v = option_value ->
- VernacSetOption (false, table, v)
- | IDENT "Unset"; table = option_table ->
- VernacUnsetOption (false, table)
-
- | IDENT "Print"; IDENT "Table"; table = option_table ->
- VernacPrintOption table
-
- | IDENT "Add"; table = IDENT; field = IDENT; v = LIST1 option_ref_value
- -> VernacAddOption ([table;field], v)
- (* A global value below will be hidden by a field above! *)
- (* In fact, we give priority to secondary tables *)
- (* No syntax for tertiary tables due to conflict *)
- (* (but they are unused anyway) *)
- | IDENT "Add"; table = IDENT; v = LIST1 option_ref_value ->
- VernacAddOption ([table], v)
-
- | IDENT "Test"; table = option_table; "for"; v = LIST1 option_ref_value
- -> VernacMemOption (table, v)
- | IDENT "Test"; table = option_table ->
- VernacPrintOption table
-
- | IDENT "Remove"; table = IDENT; field = IDENT; v= LIST1 option_ref_value
- -> VernacRemoveOption ([table;field], v)
- | IDENT "Remove"; table = IDENT; v = LIST1 option_ref_value ->
- VernacRemoveOption ([table], v) ]]
- ;
-  query_command: (* TODO: bring Eval and Check closer together *)
- [ [ IDENT "Eval"; r = red_expr; "in"; c = lconstr; "." ->
- fun g -> VernacCheckMayEval (Some r, g, c)
- | IDENT "Compute"; c = lconstr; "." ->
- fun g -> VernacCheckMayEval (Some (Genredexpr.CbvVm None), g, c)
- | IDENT "Check"; c = lconstr; "." ->
- fun g -> VernacCheckMayEval (None, g, c)
- (* Searching the environment *)
- | IDENT "About"; qid = smart_global; l = OPT univ_name_list; "." ->
- fun g -> VernacPrint (PrintAbout (qid,l,g))
- | IDENT "SearchHead"; c = constr_pattern; l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchHead c,g, l)
- | IDENT "SearchPattern"; c = constr_pattern; l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchPattern c,g, l)
- | IDENT "SearchRewrite"; c = constr_pattern; l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchRewrite c,g, l)
- | IDENT "Search"; s = searchabout_query; l = searchabout_queries; "." ->
- let (sl,m) = l in fun g -> VernacSearch (SearchAbout (s::sl),g, m)
- (* compatibility: SearchAbout *)
- | IDENT "SearchAbout"; s = searchabout_query; l = searchabout_queries; "." ->
- fun g -> let (sl,m) = l in VernacSearch (SearchAbout (s::sl),g, m)
- (* compatibility: SearchAbout with "[ ... ]" *)
- | IDENT "SearchAbout"; "["; sl = LIST1 searchabout_query; "]";
- l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchAbout sl,g, l)
- ] ]
- ;
- printable:
- [ [ IDENT "Term"; qid = smart_global; l = OPT univ_name_list -> PrintName (qid,l)
- | IDENT "All" -> PrintFullContext
- | IDENT "Section"; s = global -> PrintSectionContext s
- | IDENT "Grammar"; ent = IDENT ->
- (* This should be in "syntax" section but is here for factorization*)
- PrintGrammar ent
- | IDENT "LoadPath"; dir = OPT dirpath -> PrintLoadPath dir
- | IDENT "Modules" ->
- user_err Pp.(str "Print Modules is obsolete; use Print Libraries instead")
- | IDENT "Libraries" -> PrintModules
-
- | IDENT "ML"; IDENT "Path" -> PrintMLLoadPath
- | IDENT "ML"; IDENT "Modules" -> PrintMLModules
- | IDENT "Debug"; IDENT "GC" -> PrintDebugGC
- | IDENT "Graph" -> PrintGraph
- | IDENT "Classes" -> PrintClasses
- | IDENT "TypeClasses" -> PrintTypeClasses
- | IDENT "Instances"; qid = smart_global -> PrintInstances qid
- | IDENT "Coercions" -> PrintCoercions
- | IDENT "Coercion"; IDENT "Paths"; s = class_rawexpr; t = class_rawexpr
- -> PrintCoercionPaths (s,t)
- | IDENT "Canonical"; IDENT "Projections" -> PrintCanonicalConversions
- | IDENT "Tables" -> PrintTables
-      | IDENT "Options" -> PrintTables (* A synonym for Tables *)
- | IDENT "Hint" -> PrintHintGoal
- | IDENT "Hint"; qid = smart_global -> PrintHint qid
- | IDENT "Hint"; "*" -> PrintHintDb
- | IDENT "HintDb"; s = IDENT -> PrintHintDbName s
- | IDENT "Scopes" -> PrintScopes
- | IDENT "Scope"; s = IDENT -> PrintScope s
- | IDENT "Visibility"; s = OPT [x = IDENT -> x ] -> PrintVisibility s
- | IDENT "Implicit"; qid = smart_global -> PrintImplicit qid
- | IDENT "Universes"; fopt = OPT ne_string -> PrintUniverses (false, fopt)
- | IDENT "Sorted"; IDENT "Universes"; fopt = OPT ne_string -> PrintUniverses (true, fopt)
- | IDENT "Assumptions"; qid = smart_global -> PrintAssumptions (false, false, qid)
- | IDENT "Opaque"; IDENT "Dependencies"; qid = smart_global -> PrintAssumptions (true, false, qid)
- | IDENT "Transparent"; IDENT "Dependencies"; qid = smart_global -> PrintAssumptions (false, true, qid)
- | IDENT "All"; IDENT "Dependencies"; qid = smart_global -> PrintAssumptions (true, true, qid)
- | IDENT "Strategy"; qid = smart_global -> PrintStrategy (Some qid)
- | IDENT "Strategies" -> PrintStrategy None ] ]
- ;
- class_rawexpr:
- [ [ IDENT "Funclass" -> FunClass
- | IDENT "Sortclass" -> SortClass
- | qid = smart_global -> RefClass qid ] ]
- ;
- locatable:
- [ [ qid = smart_global -> LocateAny qid
- | IDENT "Term"; qid = smart_global -> LocateTerm qid
- | IDENT "File"; f = ne_string -> LocateFile f
- | IDENT "Library"; qid = global -> LocateLibrary qid
- | IDENT "Module"; qid = global -> LocateModule qid ] ]
- ;
- option_value:
- [ [ -> BoolValue true
- | n = integer -> IntValue (Some n)
- | s = STRING -> StringValue s ] ]
- ;
- option_ref_value:
- [ [ id = global -> QualidRefValue id
- | s = STRING -> StringRefValue s ] ]
- ;
- option_table:
- [ [ fl = LIST1 [ x = IDENT -> x ] -> fl ]]
- ;
- as_dirpath:
- [ [ d = OPT [ "as"; d = dirpath -> d ] -> d ] ]
- ;
- ne_in_or_out_modules:
- [ [ IDENT "inside"; l = LIST1 global -> SearchInside l
- | IDENT "outside"; l = LIST1 global -> SearchOutside l ] ]
- ;
- in_or_out_modules:
- [ [ m = ne_in_or_out_modules -> m
- | -> SearchOutside [] ] ]
- ;
- comment:
- [ [ c = constr -> CommentConstr c
- | s = STRING -> CommentString s
- | n = natural -> CommentInt n ] ]
- ;
- positive_search_mark:
- [ [ "-" -> false | -> true ] ]
- ;
- scope:
- [ [ "%"; key = IDENT -> key ] ]
- ;
- searchabout_query:
- [ [ b = positive_search_mark; s = ne_string; sc = OPT scope ->
- (b, SearchString (s,sc))
- | b = positive_search_mark; p = constr_pattern ->
- (b, SearchSubPattern p)
- ] ]
- ;
- searchabout_queries:
- [ [ m = ne_in_or_out_modules -> ([],m)
- | s = searchabout_query; l = searchabout_queries ->
- let (sl,m) = l in (s::sl,m)
- | -> ([],SearchOutside [])
- ] ]
- ;
- univ_name_list:
- [ [ "@{" ; l = LIST0 name; "}" -> l ] ]
- ;
-END;
-
-GEXTEND Gram
- command:
- [ [
-(* State management *)
- IDENT "Write"; IDENT "State"; s = IDENT -> VernacWriteState s
- | IDENT "Write"; IDENT "State"; s = ne_string -> VernacWriteState s
- | IDENT "Restore"; IDENT "State"; s = IDENT -> VernacRestoreState s
- | IDENT "Restore"; IDENT "State"; s = ne_string -> VernacRestoreState s
-
-(* Resetting *)
- | IDENT "Reset"; IDENT "Initial" -> VernacResetInitial
- | IDENT "Reset"; id = identref -> VernacResetName id
- | IDENT "Back" -> VernacBack 1
- | IDENT "Back"; n = natural -> VernacBack n
- | IDENT "BackTo"; n = natural -> VernacBackTo n
- | IDENT "Backtrack"; n = natural ; m = natural ; p = natural ->
- VernacBacktrack (n,m,p)
-
-(* Tactic Debugger *)
- | IDENT "Debug"; IDENT "On" ->
- VernacSetOption (false, ["Ltac";"Debug"], BoolValue true)
-
- | IDENT "Debug"; IDENT "Off" ->
- VernacSetOption (false, ["Ltac";"Debug"], BoolValue false)
-
-(* registration of a custom reduction *)
-
- | IDENT "Declare"; IDENT "Reduction"; s = IDENT; ":=";
- r = red_expr ->
- VernacDeclareReduction (s,r)
-
- ] ];
- END
-;;
-
-(* Grammar extensions *)
-
-GEXTEND Gram
- GLOBAL: syntax;
-
- syntax:
- [ [ IDENT "Open"; IDENT "Scope"; sc = IDENT ->
- VernacOpenCloseScope (true,sc)
-
- | IDENT "Close"; IDENT "Scope"; sc = IDENT ->
- VernacOpenCloseScope (false,sc)
-
- | IDENT "Delimit"; IDENT "Scope"; sc = IDENT; "with"; key = IDENT ->
- VernacDelimiters (sc, Some key)
- | IDENT "Undelimit"; IDENT "Scope"; sc = IDENT ->
- VernacDelimiters (sc, None)
-
- | IDENT "Bind"; IDENT "Scope"; sc = IDENT; "with";
- refl = LIST1 class_rawexpr -> VernacBindScope (sc,refl)
-
- | IDENT "Infix"; op = ne_lstring; ":="; p = constr;
- modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ];
- sc = OPT [ ":"; sc = IDENT -> sc ] ->
- VernacInfix ((op,modl),p,sc)
- | IDENT "Notation"; id = identref;
- idl = LIST0 ident; ":="; c = constr; b = only_parsing ->
- VernacSyntacticDefinition
- (id,(idl,c),b)
- | IDENT "Notation"; s = lstring; ":=";
- c = constr;
- modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ];
- sc = OPT [ ":"; sc = IDENT -> sc ] ->
- VernacNotation (c,(s,modl),sc)
- | IDENT "Format"; IDENT "Notation"; n = STRING; s = STRING; fmt = STRING ->
- VernacNotationAddFormat (n,s,fmt)
-
- | IDENT "Reserved"; IDENT "Infix"; s = ne_lstring;
- l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ] ->
- let s = CAst.map (fun s -> "x '"^s^"' y") s in
- VernacSyntaxExtension (true,(s,l))
-
- | IDENT "Reserved"; IDENT "Notation";
- s = ne_lstring;
- l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ]
- -> VernacSyntaxExtension (false, (s,l))
-
- (* "Print" "Grammar" should be here but is in "command" entry in order
- to factorize with other "Print"-based vernac entries *)
- ] ]
- ;
- only_parsing:
- [ [ "("; IDENT "only"; IDENT "parsing"; ")" ->
- Some Flags.Current
- | "("; IDENT "compat"; s = STRING; ")" ->
- Some (parse_compat_version s)
- | -> None ] ]
- ;
- level:
- [ [ IDENT "level"; n = natural -> NumLevel n
- | IDENT "next"; IDENT "level" -> NextLevel ] ]
- ;
- syntax_modifier:
- [ [ "at"; IDENT "level"; n = natural -> SetLevel n
- | IDENT "left"; IDENT "associativity" -> SetAssoc LeftA
- | IDENT "right"; IDENT "associativity" -> SetAssoc RightA
- | IDENT "no"; IDENT "associativity" -> SetAssoc NonA
- | IDENT "only"; IDENT "printing" -> SetOnlyPrinting
- | IDENT "only"; IDENT "parsing" -> SetOnlyParsing
- | IDENT "compat"; s = STRING ->
- SetCompatVersion (parse_compat_version s)
- | IDENT "format"; s1 = [s = STRING -> CAst.make ~loc:!@loc s];
- s2 = OPT [s = STRING -> CAst.make ~loc:!@loc s] ->
- begin match s1, s2 with
- | { CAst.v = k }, Some s -> SetFormat(k,s)
- | s, None -> SetFormat ("text",s) end
- | x = IDENT; ","; l = LIST1 [id = IDENT -> id ] SEP ","; "at";
- lev = level -> SetItemLevel (x::l,lev)
- | x = IDENT; "at"; lev = level -> SetItemLevel ([x],lev)
- | x = IDENT; "at"; lev = level; b = constr_as_binder_kind -> SetItemLevelAsBinder ([x],b,Some lev)
- | x = IDENT; b = constr_as_binder_kind -> SetItemLevelAsBinder ([x],b,None)
- | x = IDENT; typ = syntax_extension_type -> SetEntryType (x,typ)
- ] ]
- ;
- syntax_extension_type:
- [ [ IDENT "ident" -> ETName | IDENT "global" -> ETReference
- | IDENT "bigint" -> ETBigint
- | IDENT "binder" -> ETBinder true
- | IDENT "constr"; n = OPT at_level; b = constr_as_binder_kind -> ETConstrAsBinder (b,n)
- | IDENT "pattern" -> ETPattern (false,None)
- | IDENT "pattern"; "at"; IDENT "level"; n = natural -> ETPattern (false,Some n)
- | IDENT "strict"; IDENT "pattern" -> ETPattern (true,None)
- | IDENT "strict"; IDENT "pattern"; "at"; IDENT "level"; n = natural -> ETPattern (true,Some n)
- | IDENT "closed"; IDENT "binder" -> ETBinder false
- ] ]
- ;
- at_level:
- [ [ "at"; n = level -> n ] ]
- ;
- constr_as_binder_kind:
- [ [ "as"; IDENT "ident" -> AsIdent
- | "as"; IDENT "pattern" -> AsIdentOrPattern
- | "as"; IDENT "strict"; IDENT "pattern" -> AsStrictPattern ] ]
- ;
-END
diff --git a/parsing/notation_gram.ml b/parsing/notation_gram.ml
new file mode 100644
index 00000000..d8c08803
--- /dev/null
+++ b/parsing/notation_gram.ml
@@ -0,0 +1,43 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+open Names
+open Extend
+
+(** Dealing with precedences *)
+
+type precedence = int
+type parenRelation = L | E | Any | Prec of precedence
+type tolerability = precedence * parenRelation
+
+type level = Constrexpr.notation_entry * precedence * tolerability list * constr_entry_key list
+ (* first argument is InCustomEntry s for custom entries *)
+
+type grammar_constr_prod_item =
+ | GramConstrTerminal of Tok.t
+ | GramConstrNonTerminal of Extend.constr_prod_entry_key * Id.t option
+ | GramConstrListMark of int * bool * int
+ (* tells action rule to make a list of the n previous parsed items;
+ concat with last parsed list when true; additionally release
+ the p last items as if they were parsed autonomously *)
+
+(** Grammar rules for a notation *)
+
+type one_notation_grammar = {
+ notgram_level : level;
+ notgram_assoc : Extend.gram_assoc option;
+ notgram_notation : Constrexpr.notation;
+ notgram_prods : grammar_constr_prod_item list list;
+}
+
+type notation_grammar = {
+ notgram_onlyprinting : bool;
+ notgram_rules : one_notation_grammar list
+}
diff --git a/parsing/notgram_ops.ml b/parsing/notgram_ops.ml
new file mode 100644
index 00000000..5cc1292c
--- /dev/null
+++ b/parsing/notgram_ops.ml
@@ -0,0 +1,71 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+open Pp
+open CErrors
+open Util
+open Notation
+open Notation_gram
+
+(* Uninterpreted notation levels *)
+
+let notation_level_map = Summary.ref ~name:"notation_level_map" NotationMap.empty
+
+let declare_notation_level ?(onlyprint=false) ntn level =
+ try
+ let (level,onlyprint) = NotationMap.find ntn !notation_level_map in
+ if not onlyprint then anomaly (str "Notation " ++ pr_notation ntn ++ str " is already assigned a level.")
+ with Not_found ->
+ notation_level_map := NotationMap.add ntn (level,onlyprint) !notation_level_map
+
+let level_of_notation ?(onlyprint=false) ntn =
+ let (level,onlyprint') = NotationMap.find ntn !notation_level_map in
+ if onlyprint' && not onlyprint then raise Not_found;
+ level
+
+(**********************************************************************)
+(* Equality *)
+
+open Extend
+
+let parenRelation_eq t1 t2 = match t1, t2 with
+| L, L | E, E | Any, Any -> true
+| Prec l1, Prec l2 -> Int.equal l1 l2
+| _ -> false
+
+let production_position_eq pp1 pp2 = match (pp1,pp2) with
+| BorderProd (side1,assoc1), BorderProd (side2,assoc2) -> side1 = side2 && assoc1 = assoc2
+| InternalProd, InternalProd -> true
+| (BorderProd _ | InternalProd), _ -> false
+
+let production_level_eq l1 l2 = match (l1,l2) with
+| NextLevel, NextLevel -> true
+| NumLevel n1, NumLevel n2 -> Int.equal n1 n2
+| (NextLevel | NumLevel _), _ -> false
+
+let constr_entry_key_eq eq v1 v2 = match v1, v2 with
+| ETIdent, ETIdent -> true
+| ETGlobal, ETGlobal -> true
+| ETBigint, ETBigint -> true
+| ETBinder b1, ETBinder b2 -> b1 == b2
+| ETConstr (s1,bko1,lev1), ETConstr (s2,bko2,lev2) ->
+ notation_entry_eq s1 s2 && eq lev1 lev2 && Option.equal (=) bko1 bko2
+| ETPattern (b1,n1), ETPattern (b2,n2) -> b1 = b2 && Option.equal Int.equal n1 n2
+| (ETIdent | ETGlobal | ETBigint | ETBinder _ | ETConstr _ | ETPattern _), _ -> false
+
+let level_eq_gen strict (s1, l1, t1, u1) (s2, l2, t2, u2) =
+ let tolerability_eq (i1, r1) (i2, r2) = Int.equal i1 i2 && parenRelation_eq r1 r2 in
+ let prod_eq (l1,pp1) (l2,pp2) =
+ not strict ||
+ (production_level_eq l1 l2 && production_position_eq pp1 pp2) in
+ notation_entry_eq s1 s2 && Int.equal l1 l2 && List.equal tolerability_eq t1 t2
+ && List.equal (constr_entry_key_eq prod_eq) u1 u2
+
+let level_eq = level_eq_gen false
diff --git a/parsing/egramcoq.mli b/parsing/notgram_ops.mli
index e15add10..f427a607 100644
--- a/parsing/egramcoq.mli
+++ b/parsing/notgram_ops.mli
@@ -8,12 +8,13 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
-(** Mapping of grammar productions to camlp5 actions *)
+(* Merge with metasyntax? *)
+open Constrexpr
+open Notation_gram
-(** This is the part specific to Coq-level Notation and Tactic Notation.
- For the ML-level tactic and vernac extensions, see Egramml. *)
+val level_eq : level -> level -> bool
-(** {5 Adding notations} *)
+(** {6 Declare and test the level of a (possibly uninterpreted) notation } *)
-val extend_constr_grammar : Notation_term.one_notation_grammar -> unit
-(** Add a term notation rule to the parsing system. *)
+val declare_notation_level : ?onlyprint:bool -> notation -> level -> unit
+val level_of_notation : ?onlyprint:bool -> notation -> level (** raise [Not_found] if no level or not respecting onlyprint *)
diff --git a/parsing/parsing.mllib b/parsing/parsing.mllib
index 1f29636b..2154f2f8 100644
--- a/parsing/parsing.mllib
+++ b/parsing/parsing.mllib
@@ -1,9 +1,9 @@
Tok
CLexer
+Extend
+Notation_gram
+Ppextend
+Notgram_ops
Pcoq
-Egramml
-Egramcoq
G_constr
-G_vernac
G_prim
-G_proofs
diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml
index 258c4bb1..eb3e6338 100644
--- a/parsing/pcoq.ml
+++ b/parsing/pcoq.ml
@@ -17,9 +17,17 @@ let curry f x y = f (x, y)
let uncurry f (x,y) = f x y
(** Location Utils *)
+let ploc_file_of_coq_file = function
+| Loc.ToplevelInput -> ""
+| Loc.InFile f -> f
+
let coq_file_of_ploc_file s =
if s = "" then Loc.ToplevelInput else Loc.InFile s
+let of_coqloc loc =
+ let open Loc in
+ Ploc.make_loc (ploc_file_of_coq_file loc.fname) loc.line_nb loc.bol_pos (loc.bp, loc.ep) ""
+
let to_coqloc loc =
{ Loc.fname = coq_file_of_ploc_file (Ploc.file_name loc);
Loc.line_nb = Ploc.line_nb loc;
@@ -78,24 +86,15 @@ module type S =
type internal_entry = Tok.t Gramext.g_entry
type symbol = Tok.t Gramext.g_symbol
type action = Gramext.g_action
- type production_rule = symbol list * action
- type single_extend_statment =
- string option * Gramext.g_assoc option * production_rule list
- type extend_statment =
- Gramext.position option * single_extend_statment list
type coq_parsable
- val parsable : ?file:Loc.source -> char Stream.t -> coq_parsable
+ val coq_parsable : ?file:Loc.source -> char Stream.t -> coq_parsable
val action : 'a -> action
val entry_create : string -> 'a entry
val entry_parse : 'a entry -> coq_parsable -> 'a
- val entry_print : Format.formatter -> 'a entry -> unit
val comment_state : coq_parsable -> ((int * int) * string) list
- val srules' : production_rule list -> symbol
- val parse_tokens_after_filter : 'a entry -> Tok.t Stream.t -> 'a
-
end with type 'a Entry.e = 'a Grammar.GMake(CLexer).Entry.e = struct
include Grammar.GMake(CLexer)
@@ -104,15 +103,10 @@ end with type 'a Entry.e = 'a Grammar.GMake(CLexer).Entry.e = struct
type internal_entry = Tok.t Gramext.g_entry
type symbol = Tok.t Gramext.g_symbol
type action = Gramext.g_action
- type production_rule = symbol list * action
- type single_extend_statment =
- string option * Gramext.g_assoc option * production_rule list
- type extend_statment =
- Gramext.position option * single_extend_statment list
type coq_parsable = parsable * CLexer.lexer_state ref
- let parsable ?(file=Loc.ToplevelInput) c =
+ let coq_parsable ?(file=Loc.ToplevelInput) c =
let state = ref (CLexer.init_lexer_state file) in
CLexer.set_lexer_state !state;
let a = parsable c in
@@ -137,14 +131,25 @@ end with type 'a Entry.e = 'a Grammar.GMake(CLexer).Entry.e = struct
let comment_state (p,state) =
CLexer.get_comment_state !state
- let entry_print ft x = Entry.print ft x
-
- (* Not used *)
- let srules' = Gramext.srules
- let parse_tokens_after_filter = Entry.parse_token
+end
+module Parsable =
+struct
+ type t = G.coq_parsable
+ let make = G.coq_parsable
+ let comment_state = G.comment_state
end
+module Entry =
+struct
+
+ type 'a t = 'a Grammar.GMake(CLexer).Entry.e
+
+ let create = G.Entry.create
+ let parse = G.entry_parse
+ let print = G.Entry.print
+
+end
let warning_verbose = Gramext.warning_verbose
@@ -208,9 +213,9 @@ let camlp5_verbosity silent f x =
(** Grammar extensions *)
-(** NB: [extend_statment =
- gram_position option * single_extend_statment list]
- and [single_extend_statment =
+(** NB: [extend_statement =
+ gram_position option * single_extend_statement list]
+ and [single_extend_statement =
string option * gram_assoc option * production_rule list]
and [production_rule = symbol list * action]
@@ -237,9 +242,9 @@ let rec symbol_of_prod_entry_key : type s a. (s, a) symbol -> _ = function
| Aentry e ->
Symbols.snterm (G.Entry.obj e)
| Aentryl (e, n) ->
- Symbols.snterml (G.Entry.obj e, string_of_int n)
+ Symbols.snterml (G.Entry.obj e, n)
| Arules rs ->
- G.srules' (List.map symbol_of_rules rs)
+ Gramext.srules (List.map symbol_of_rules rs)
and symbol_of_rule : type s a r. (s, a, r) Extend.rule -> _ = function
| Stop -> fun accu -> accu
@@ -264,16 +269,23 @@ let of_coq_extend_statement (pos, st) =
type gram_reinit = gram_assoc * gram_position
type extend_rule =
-| ExtendRule : 'a G.entry * gram_reinit option * 'a extend_statment -> extend_rule
+| ExtendRule : 'a G.entry * gram_reinit option * 'a extend_statement -> extend_rule
+
+module EntryCommand = Dyn.Make ()
+module EntryData = struct type _ t = Ex : 'b G.entry String.Map.t -> ('a * 'b) t end
+module EntryDataMap = EntryCommand.Map(EntryData)
type ext_kind =
| ByGrammar of extend_rule
| ByEXTEND of (unit -> unit) * (unit -> unit)
+ | ByEntry : ('a * 'b) EntryCommand.tag * string * 'b G.entry -> ext_kind
(** The list of extensions *)
let camlp5_state = ref []
+let camlp5_entries = ref EntryDataMap.empty
+
(** Deletion *)
let grammar_delete e reinit (pos,rls) =
@@ -329,6 +341,7 @@ module Gram =
I'm not entirely sure it makes sense, but at least it would be more correct.
*)
G.delete_rule e pil
+ let gram_extend e ext = grammar_extend e None ext
end
(** Remove extensions
@@ -338,7 +351,7 @@ module Gram =
let rec remove_grammars n =
if n>0 then
- (match !camlp5_state with
+ match !camlp5_state with
| [] -> anomaly ~label:"Pcoq.remove_grammars" (Pp.str "too many rules to remove.")
| ByGrammar (ExtendRule (g, reinit, ext)) :: t ->
grammar_delete g reinit (of_coq_extend_statement ext);
@@ -349,21 +362,31 @@ let rec remove_grammars n =
camlp5_state := t;
remove_grammars n;
redo();
- camlp5_state := ByEXTEND (undo,redo) :: !camlp5_state)
+ camlp5_state := ByEXTEND (undo,redo) :: !camlp5_state
+ | ByEntry (tag, name, e) :: t ->
+ G.Unsafe.clear_entry e;
+ camlp5_state := t;
+ let EntryData.Ex entries =
+ try EntryDataMap.find tag !camlp5_entries
+ with Not_found -> EntryData.Ex String.Map.empty
+ in
+ let entries = String.Map.remove name entries in
+ camlp5_entries := EntryDataMap.add tag (EntryData.Ex entries) !camlp5_entries;
+ remove_grammars (n - 1)
let make_rule r = [None, None, r]
(** An entry that checks we reached the end of the input. *)
let eoi_entry en =
- let e = Gram.entry_create ((Gram.Entry.name en) ^ "_eoi") in
+ let e = Entry.create ((Gram.Entry.name en) ^ "_eoi") in
let symbs = [Symbols.snterm (Gram.Entry.obj en); Symbols.stoken Tok.EOI] in
let act = Gram.action (fun _ x loc -> x) in
uncurry (Gram.extend e) (None, make_rule [symbs, act]);
e
let map_entry f en =
- let e = Gram.entry_create ((Gram.Entry.name en) ^ "_map") in
+ let e = Entry.create ((Gram.Entry.name en) ^ "_map") in
let symbs = [Symbols.snterm (Gram.Entry.obj en)] in
let act = Gram.action (fun x loc -> f x) in
uncurry (Gram.extend e) (None, make_rule [symbs, act]);
@@ -373,7 +396,7 @@ let map_entry f en =
(use eoi_entry) *)
let parse_string f x =
- let strm = Stream.of_string x in Gram.entry_parse f (Gram.parsable strm)
+ let strm = Stream.of_string x in Gram.entry_parse f (Gram.coq_parsable strm)
type gram_universe = string
@@ -387,7 +410,6 @@ let create_universe u =
let uprim = create_universe "prim"
let uconstr = create_universe "constr"
let utactic = create_universe "tactic"
-let uvernac = create_universe "vernac"
let get_univ u =
if Hashtbl.mem utables u then u
@@ -395,14 +417,14 @@ let get_univ u =
let new_entry u s =
let ename = u ^ ":" ^ s in
- let e = Gram.entry_create ename in
+ let e = Entry.create ename in
e
let make_gen_entry u s = new_entry u s
module GrammarObj =
struct
- type ('r, _, _) obj = 'r Gram.entry
+ type ('r, _, _) obj = 'r Entry.t
let name = "grammar"
let default _ = None
end
@@ -412,7 +434,7 @@ module Grammar = Register(GrammarObj)
let register_grammar = Grammar.register0
let genarg_grammar = Grammar.obj
-let create_generic_entry (type a) u s (etyp : a raw_abstract_argument_type) : a Gram.entry =
+let create_generic_entry (type a) u s (etyp : a raw_abstract_argument_type) : a Entry.t =
let e = new_entry u s in
let Rawwit t = etyp in
let () = Grammar.register0 t e in
@@ -424,38 +446,38 @@ module Prim =
struct
let gec_gen n = make_gen_entry uprim n
- (* Entries that can be referred via the string -> Gram.entry table *)
+ (* Entries that can be referred via the string -> Entry.t table *)
(* Typically for tactic or vernac extensions *)
let preident = gec_gen "preident"
let ident = gec_gen "ident"
let natural = gec_gen "natural"
let integer = gec_gen "integer"
- let bigint = Gram.entry_create "Prim.bigint"
+ let bigint = Entry.create "Prim.bigint"
let string = gec_gen "string"
- let lstring = Gram.entry_create "Prim.lstring"
+ let lstring = Entry.create "Prim.lstring"
let reference = make_gen_entry uprim "reference"
- let by_notation = Gram.entry_create "by_notation"
- let smart_global = Gram.entry_create "smart_global"
+ let by_notation = Entry.create "by_notation"
+ let smart_global = Entry.create "smart_global"
(* parsed like ident but interpreted as a term *)
let var = gec_gen "var"
- let name = Gram.entry_create "Prim.name"
- let identref = Gram.entry_create "Prim.identref"
- let univ_decl = Gram.entry_create "Prim.univ_decl"
- let ident_decl = Gram.entry_create "Prim.ident_decl"
- let pattern_ident = Gram.entry_create "pattern_ident"
- let pattern_identref = Gram.entry_create "pattern_identref"
+ let name = Entry.create "Prim.name"
+ let identref = Entry.create "Prim.identref"
+ let univ_decl = Entry.create "Prim.univ_decl"
+ let ident_decl = Entry.create "Prim.ident_decl"
+ let pattern_ident = Entry.create "pattern_ident"
+ let pattern_identref = Entry.create "pattern_identref"
(* A synonym of ident - maybe ident will be located one day *)
- let base_ident = Gram.entry_create "Prim.base_ident"
+ let base_ident = Entry.create "Prim.base_ident"
- let qualid = Gram.entry_create "Prim.qualid"
- let fullyqualid = Gram.entry_create "Prim.fullyqualid"
- let dirpath = Gram.entry_create "Prim.dirpath"
+ let qualid = Entry.create "Prim.qualid"
+ let fullyqualid = Entry.create "Prim.fullyqualid"
+ let dirpath = Entry.create "Prim.dirpath"
- let ne_string = Gram.entry_create "Prim.ne_string"
- let ne_lstring = Gram.entry_create "Prim.ne_lstring"
+ let ne_string = Entry.create "Prim.ne_string"
+ let ne_lstring = Entry.create "Prim.ne_lstring"
end
@@ -463,7 +485,7 @@ module Constr =
struct
let gec_constr = make_gen_entry uconstr
- (* Entries that can be referred via the string -> Gram.entry table *)
+ (* Entries that can be referred via the string -> Entry.t table *)
let constr = gec_constr "constr"
let operconstr = gec_constr "operconstr"
let constr_eoi = eoi_entry constr
@@ -474,67 +496,29 @@ module Constr =
let universe_level = make_gen_entry uconstr "universe_level"
let sort = make_gen_entry uconstr "sort"
let sort_family = make_gen_entry uconstr "sort_family"
- let pattern = Gram.entry_create "constr:pattern"
+ let pattern = Entry.create "constr:pattern"
let constr_pattern = gec_constr "constr_pattern"
let lconstr_pattern = gec_constr "lconstr_pattern"
- let closed_binder = Gram.entry_create "constr:closed_binder"
- let binder = Gram.entry_create "constr:binder"
- let binders = Gram.entry_create "constr:binders"
- let open_binders = Gram.entry_create "constr:open_binders"
- let binders_fixannot = Gram.entry_create "constr:binders_fixannot"
- let typeclass_constraint = Gram.entry_create "constr:typeclass_constraint"
- let record_declaration = Gram.entry_create "constr:record_declaration"
- let appl_arg = Gram.entry_create "constr:appl_arg"
+ let closed_binder = Entry.create "constr:closed_binder"
+ let binder = Entry.create "constr:binder"
+ let binders = Entry.create "constr:binders"
+ let open_binders = Entry.create "constr:open_binders"
+ let binders_fixannot = Entry.create "constr:binders_fixannot"
+ let typeclass_constraint = Entry.create "constr:typeclass_constraint"
+ let record_declaration = Entry.create "constr:record_declaration"
+ let appl_arg = Entry.create "constr:appl_arg"
end
module Module =
struct
- let module_expr = Gram.entry_create "module_expr"
- let module_type = Gram.entry_create "module_type"
+ let module_expr = Entry.create "module_expr"
+ let module_type = Entry.create "module_type"
end
-module Vernac_ =
- struct
- let gec_vernac s = Gram.entry_create ("vernac:" ^ s)
-
- (* The different kinds of vernacular commands *)
- let gallina = gec_vernac "gallina"
- let gallina_ext = gec_vernac "gallina_ext"
- let command = gec_vernac "command"
- let syntax = gec_vernac "syntax_command"
- let vernac_control = gec_vernac "Vernac.vernac_control"
- let rec_definition = gec_vernac "Vernac.rec_definition"
- let red_expr = make_gen_entry utactic "red_expr"
- let hint_info = gec_vernac "hint_info"
- (* Main vernac entry *)
- let main_entry = Gram.entry_create "vernac"
- let noedit_mode = gec_vernac "noedit_command"
-
- let () =
- let act_vernac = Gram.action (fun v loc -> Some (to_coqloc loc, v)) in
- let act_eoi = Gram.action (fun _ loc -> None) in
- let rule = [
- ([ Symbols.stoken Tok.EOI ], act_eoi);
- ([ Symbols.snterm (Gram.Entry.obj vernac_control) ], act_vernac );
- ] in
- uncurry (Gram.extend main_entry) (None, make_rule rule)
-
- let command_entry_ref = ref noedit_mode
- let command_entry =
- Gram.Entry.of_parser "command_entry"
- (fun strm -> Gram.parse_tokens_after_filter !command_entry_ref strm)
-
- end
-
-let main_entry = Vernac_.main_entry
-
-let set_command_entry e = Vernac_.command_entry_ref := e
-let get_command_entry () = !Vernac_.command_entry_ref
-
let epsilon_value f e =
let r = Rule (Next (Stop, e), fun x _ -> f x) in
let ext = of_coq_extend_statement (None, [None, None, [r]]) in
- let entry = G.entry_create "epsilon" in
+ let entry = Gram.entry_create "epsilon" in
let () = uncurry (G.extend entry) ext in
try Some (parse_string entry "") with _ -> None
@@ -550,59 +534,119 @@ module GrammarInterpMap = GrammarCommand.Map(GrammarInterp)
let grammar_interp = ref GrammarInterpMap.empty
-let (grammar_stack : (int * GrammarCommand.t * GramState.t) list ref) = ref []
+type ('a, 'b) entry_extension = 'a -> GramState.t -> string list * GramState.t
+
+module EntryInterp = struct type _ t = Ex : ('a, 'b) entry_extension -> ('a * 'b) t end
+module EntryInterpMap = EntryCommand.Map(EntryInterp)
+
+let entry_interp = ref EntryInterpMap.empty
+
+type grammar_entry =
+| GramExt of int * GrammarCommand.t
+| EntryExt : int * ('a * 'b) EntryCommand.tag * 'a -> grammar_entry
+
+let (grammar_stack : (grammar_entry * GramState.t) list ref) = ref []
type 'a grammar_command = 'a GrammarCommand.tag
+type ('a, 'b) entry_command = ('a * 'b) EntryCommand.tag
let create_grammar_command name interp : _ grammar_command =
let obj = GrammarCommand.create name in
let () = grammar_interp := GrammarInterpMap.add obj interp !grammar_interp in
obj
+let create_entry_command name (interp : ('a, 'b) entry_extension) : ('a, 'b) entry_command =
+ let obj = EntryCommand.create name in
+ let () = entry_interp := EntryInterpMap.add obj (EntryInterp.Ex interp) !entry_interp in
+ obj
+
let extend_grammar_command tag g =
let modify = GrammarInterpMap.find tag !grammar_interp in
let grammar_state = match !grammar_stack with
| [] -> GramState.empty
- | (_, _, st) :: _ -> st
+ | (_, st) :: _ -> st
in
let (rules, st) = modify g grammar_state in
let iter (ExtendRule (e, reinit, ext)) = grammar_extend_sync e reinit ext in
let () = List.iter iter rules in
let nb = List.length rules in
- grammar_stack := (nb, GrammarCommand.Dyn (tag, g), st) :: !grammar_stack
+ grammar_stack := (GramExt (nb, GrammarCommand.Dyn (tag, g)), st) :: !grammar_stack
-let recover_grammar_command (type a) (tag : a grammar_command) : a list =
- let filter : _ -> a option = fun (_, GrammarCommand.Dyn (tag', v), _) ->
- match GrammarCommand.eq tag tag' with
- | None -> None
- | Some Refl -> Some v
+let extend_entry_command (type a) (type b) (tag : (a, b) entry_command) (g : a) : b Gram.entry list =
+ let EntryInterp.Ex modify = EntryInterpMap.find tag !entry_interp in
+ let grammar_state = match !grammar_stack with
+ | [] -> GramState.empty
+ | (_, st) :: _ -> st
+ in
+ let (names, st) = modify g grammar_state in
+ let entries = List.map (fun name -> Gram.entry_create name) names in
+ let iter name e =
+ camlp5_state := ByEntry (tag, name, e) :: !camlp5_state;
+ let EntryData.Ex old =
+ try EntryDataMap.find tag !camlp5_entries
+ with Not_found -> EntryData.Ex String.Map.empty
+ in
+ let entries = String.Map.add name e old in
+ camlp5_entries := EntryDataMap.add tag (EntryData.Ex entries) !camlp5_entries
in
- List.map_filter filter !grammar_stack
+ let () = List.iter2 iter names entries in
+ let nb = List.length entries in
+ let () = grammar_stack := (EntryExt (nb, tag, g), st) :: !grammar_stack in
+ entries
+
+let find_custom_entry tag name =
+ let EntryData.Ex map = EntryDataMap.find tag !camlp5_entries in
+ String.Map.find name map
+
+let extend_dyn_grammar (e, _) = match e with
+| GramExt (_, (GrammarCommand.Dyn (tag, g))) -> extend_grammar_command tag g
+| EntryExt (_, tag, g) -> ignore (extend_entry_command tag g)
+
+(** Registering extra grammar *)
-let extend_dyn_grammar (GrammarCommand.Dyn (tag, g)) = extend_grammar_command tag g
+type any_entry = AnyEntry : 'a Gram.entry -> any_entry
+
+let grammar_names : any_entry list String.Map.t ref = ref String.Map.empty
-(* Summary functions: the state of the lexer is included in that of the parser.
+let register_grammars_by_name name grams =
+ grammar_names := String.Map.add name grams !grammar_names
+
+let find_grammars_by_name name =
+ try String.Map.find name !grammar_names
+ with Not_found ->
+ let fold (EntryDataMap.Any (tag, EntryData.Ex map)) accu =
+ try AnyEntry (String.Map.find name map) :: accu
+ with Not_found -> accu
+ in
+ EntryDataMap.fold fold !camlp5_entries []
+
+(** Summary functions: the state of the lexer is included in that of the parser.
Because the grammar affects the set of keywords when adding or removing
grammar rules. *)
-type frozen_t = (int * GrammarCommand.t * GramState.t) list * CLexer.keyword_state
+type frozen_t =
+ (grammar_entry * GramState.t) list *
+ CLexer.keyword_state
-let freeze _ : frozen_t = (!grammar_stack, CLexer.get_keyword_state ())
+let freeze _ : frozen_t =
+ (!grammar_stack, CLexer.get_keyword_state ())
(* We compare the current state of the grammar and the state to unfreeze,
by computing the longest common suffixes *)
let factorize_grams l1 l2 =
if l1 == l2 then ([], [], l1) else List.share_tails l1 l2
-let number_of_entries gcl =
- List.fold_left (fun n (p,_,_) -> n + p) 0 gcl
+let rec number_of_entries accu = function
+| [] -> accu
+| ((GramExt (p, _) | EntryExt (p, _, _)), _) :: rem ->
+ number_of_entries (p + accu) rem
let unfreeze (grams, lex) =
let (undo, redo, common) = factorize_grams !grammar_stack grams in
- let n = number_of_entries undo in
+ let n = number_of_entries 0 undo in
remove_grammars n;
grammar_stack := common;
CLexer.set_keyword_state lex;
- List.iter extend_dyn_grammar (List.rev_map pi2 redo)
+ List.iter extend_dyn_grammar (List.rev redo)
(** No need to provide an init function : the grammar state is
statically available, and already empty initially, while
@@ -635,17 +679,4 @@ let () =
Grammar.register0 wit_ref (Prim.reference);
Grammar.register0 wit_sort_family (Constr.sort_family);
Grammar.register0 wit_constr (Constr.constr);
- Grammar.register0 wit_red_expr (Vernac_.red_expr);
()
-
-(** Registering extra grammar *)
-
-type any_entry = AnyEntry : 'a Gram.entry -> any_entry
-
-let grammar_names : any_entry list String.Map.t ref = ref String.Map.empty
-
-let register_grammars_by_name name grams =
- grammar_names := String.Map.add name grams !grammar_names
-
-let find_grammars_by_name name =
- String.Map.find name !grammar_names
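
The refactoring above replaces direct uses of [Gram.parsable]/[Gram.entry_parse] with the new [Parsable] and [Entry] submodules. A minimal sketch of the new calling convention, mirroring the patched [parse_string]; the function name [parse_constr_string] is illustrative:

  let parse_constr_string (s : string) : Constrexpr.constr_expr =
    let strm = Stream.of_string s in
    (* [Parsable.make] pairs the character stream with a fresh lexer state. *)
    let p = Pcoq.Parsable.make strm in
    (* [Entry.parse] is the exported name for the former [Gram.entry_parse]. *)
    Pcoq.Entry.parse Pcoq.Constr.constr p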
diff --git a/parsing/pcoq.mli b/parsing/pcoq.mli
index 9f186224..e12ccaa6 100644
--- a/parsing/pcoq.mli
+++ b/parsing/pcoq.mli
@@ -10,85 +10,48 @@
open Names
open Extend
-open Vernacexpr
open Genarg
open Constrexpr
open Libnames
-open Misctypes
-open Genredexpr
(** The parser of Coq *)
+(** DO NOT USE EXTENSION FUNCTIONS IN THIS MODULE.
+ We only have it here to work with Camlp5. Handwritten grammar extensions
+ should use the safe [Pcoq.grammar_extend] function below. *)
module Gram : sig
include Grammar.S with type te = Tok.t
-(* Where Grammar.S is
+ type 'a entry = 'a Entry.e
+ [@@ocaml.deprecated "Use [Pcoq.Entry.t]"]
-module type S =
- sig
- type te = 'x;
- type parsable = 'x;
- value parsable : Stream.t char -> parsable;
- value tokens : string -> list (string * int);
- value glexer : Plexing.lexer te;
- value set_algorithm : parse_algorithm -> unit;
- module Entry :
- sig
- type e 'a = 'y;
- value create : string -> e 'a;
- value parse : e 'a -> parsable -> 'a;
- value parse_token : e 'a -> Stream.t te -> 'a;
- value name : e 'a -> string;
- value of_parser : string -> (Stream.t te -> 'a) -> e 'a;
- value print : Format.formatter -> e 'a -> unit;
- external obj : e 'a -> Gramext.g_entry te = "%identity";
- end
- ;
- module Unsafe :
- sig
- value gram_reinit : Plexing.lexer te -> unit;
- value clear_entry : Entry.e 'a -> unit;
- end
- ;
- value extend :
- Entry.e 'a -> option Gramext.position ->
- list
- (option string * option Gramext.g_assoc *
- list (list (Gramext.g_symbol te) * Gramext.g_action)) ->
- unit;
- value delete_rule : Entry.e 'a -> list (Gramext.g_symbol te) -> unit;
- end
+ [@@@ocaml.warning "-3"]
-*)
-
- type 'a entry = 'a Entry.e
- type internal_entry = Tok.t Gramext.g_entry
- type symbol = Tok.t Gramext.g_symbol
- type action = Gramext.g_action
- type production_rule = symbol list * action
- type single_extend_statment =
- string option * Gramext.g_assoc option * production_rule list
- type extend_statment =
- Gramext.position option * single_extend_statment list
-
- type coq_parsable
-
- val parsable : ?file:Loc.source -> char Stream.t -> coq_parsable
- val action : 'a -> action
val entry_create : string -> 'a entry
- val entry_parse : 'a entry -> coq_parsable -> 'a
- val entry_print : Format.formatter -> 'a entry -> unit
+ [@@ocaml.deprecated "Use [Pcoq.Entry.create]"]
- (* Get comment parsing information from the Lexer *)
- val comment_state : coq_parsable -> ((int * int) * string) list
+ val gram_extend : 'a entry -> 'a Extend.extend_statement -> unit
- (* Apparently not used *)
- val srules' : production_rule list -> symbol
- val parse_tokens_after_filter : 'a entry -> Tok.t Stream.t -> 'a
+ [@@@ocaml.warning "+3"]
end with type 'a Entry.e = 'a Grammar.GMake(CLexer).Entry.e
+module Parsable :
+sig
+ type t
+ val make : ?file:Loc.source -> char Stream.t -> t
+ (* Get comment parsing information from the Lexer *)
+ val comment_state : t -> ((int * int) * string) list
+end
+
+module Entry : sig
+ type 'a t = 'a Grammar.GMake(CLexer).Entry.e
+ val create : string -> 'a t
+ val parse : 'a t -> Parsable.t -> 'a
+ val print : Format.formatter -> 'a t -> unit
+end
+
(** The parser of Coq is built from three kinds of rule declarations:
- dynamic rules declared at the evaluation of Coq files (using
@@ -170,106 +133,88 @@ val camlp5_verbosity : bool -> ('a -> unit) -> 'a -> unit
(** Parse a string *)
-val parse_string : 'a Gram.entry -> string -> 'a
-val eoi_entry : 'a Gram.entry -> 'a Gram.entry
-val map_entry : ('a -> 'b) -> 'a Gram.entry -> 'b Gram.entry
+val parse_string : 'a Entry.t -> string -> 'a
+val eoi_entry : 'a Entry.t -> 'a Entry.t
+val map_entry : ('a -> 'b) -> 'a Entry.t -> 'b Entry.t
type gram_universe
val get_univ : string -> gram_universe
+val create_universe : string -> gram_universe
+
+val new_entry : gram_universe -> string -> 'a Entry.t
val uprim : gram_universe
val uconstr : gram_universe
val utactic : gram_universe
-val uvernac : gram_universe
-val register_grammar : ('raw, 'glb, 'top) genarg_type -> 'raw Gram.entry -> unit
-val genarg_grammar : ('raw, 'glb, 'top) genarg_type -> 'raw Gram.entry
+
+val register_grammar : ('raw, 'glb, 'top) genarg_type -> 'raw Entry.t -> unit
+val genarg_grammar : ('raw, 'glb, 'top) genarg_type -> 'raw Entry.t
val create_generic_entry : gram_universe -> string ->
- ('a, rlevel) abstract_argument_type -> 'a Gram.entry
+ ('a, rlevel) abstract_argument_type -> 'a Entry.t
module Prim :
sig
open Names
open Libnames
- val preident : string Gram.entry
- val ident : Id.t Gram.entry
- val name : lname Gram.entry
- val identref : lident Gram.entry
- val univ_decl : universe_decl_expr Gram.entry
- val ident_decl : ident_decl Gram.entry
- val pattern_ident : Id.t Gram.entry
- val pattern_identref : lident Gram.entry
- val base_ident : Id.t Gram.entry
- val natural : int Gram.entry
- val bigint : Constrexpr.raw_natural_number Gram.entry
- val integer : int Gram.entry
- val string : string Gram.entry
- val lstring : lstring Gram.entry
- val qualid : qualid CAst.t Gram.entry
- val fullyqualid : Id.t list CAst.t Gram.entry
- val reference : reference Gram.entry
- val by_notation : (string * string option) Gram.entry
- val smart_global : reference or_by_notation Gram.entry
- val dirpath : DirPath.t Gram.entry
- val ne_string : string Gram.entry
- val ne_lstring : lstring Gram.entry
- val var : lident Gram.entry
+ val preident : string Entry.t
+ val ident : Id.t Entry.t
+ val name : lname Entry.t
+ val identref : lident Entry.t
+ val univ_decl : universe_decl_expr Entry.t
+ val ident_decl : ident_decl Entry.t
+ val pattern_ident : Id.t Entry.t
+ val pattern_identref : lident Entry.t
+ val base_ident : Id.t Entry.t
+ val natural : int Entry.t
+ val bigint : Constrexpr.raw_natural_number Entry.t
+ val integer : int Entry.t
+ val string : string Entry.t
+ val lstring : lstring Entry.t
+ val reference : qualid Entry.t
+ val qualid : qualid Entry.t
+ val fullyqualid : Id.t list CAst.t Entry.t
+ val by_notation : (string * string option) Entry.t
+ val smart_global : qualid or_by_notation Entry.t
+ val dirpath : DirPath.t Entry.t
+ val ne_string : string Entry.t
+ val ne_lstring : lstring Entry.t
+ val var : lident Entry.t
end
module Constr :
sig
- val constr : constr_expr Gram.entry
- val constr_eoi : constr_expr Gram.entry
- val lconstr : constr_expr Gram.entry
- val binder_constr : constr_expr Gram.entry
- val operconstr : constr_expr Gram.entry
- val ident : Id.t Gram.entry
- val global : reference Gram.entry
- val universe_level : glob_level Gram.entry
- val sort : glob_sort Gram.entry
- val sort_family : Sorts.family Gram.entry
- val pattern : cases_pattern_expr Gram.entry
- val constr_pattern : constr_expr Gram.entry
- val lconstr_pattern : constr_expr Gram.entry
- val closed_binder : local_binder_expr list Gram.entry
- val binder : local_binder_expr list Gram.entry (* closed_binder or variable *)
- val binders : local_binder_expr list Gram.entry (* list of binder *)
- val open_binders : local_binder_expr list Gram.entry
- val binders_fixannot : (local_binder_expr list * (lident option * recursion_order_expr)) Gram.entry
- val typeclass_constraint : (lname * bool * constr_expr) Gram.entry
- val record_declaration : constr_expr Gram.entry
- val appl_arg : (constr_expr * explicitation CAst.t option) Gram.entry
+ val constr : constr_expr Entry.t
+ val constr_eoi : constr_expr Entry.t
+ val lconstr : constr_expr Entry.t
+ val binder_constr : constr_expr Entry.t
+ val operconstr : constr_expr Entry.t
+ val ident : Id.t Entry.t
+ val global : qualid Entry.t
+ val universe_level : Glob_term.glob_level Entry.t
+ val sort : Glob_term.glob_sort Entry.t
+ val sort_family : Sorts.family Entry.t
+ val pattern : cases_pattern_expr Entry.t
+ val constr_pattern : constr_expr Entry.t
+ val lconstr_pattern : constr_expr Entry.t
+ val closed_binder : local_binder_expr list Entry.t
+ val binder : local_binder_expr list Entry.t (* closed_binder or variable *)
+ val binders : local_binder_expr list Entry.t (* list of binder *)
+ val open_binders : local_binder_expr list Entry.t
+ val binders_fixannot : (local_binder_expr list * (lident option * recursion_order_expr)) Entry.t
+ val typeclass_constraint : (lname * bool * constr_expr) Entry.t
+ val record_declaration : constr_expr Entry.t
+ val appl_arg : (constr_expr * explicitation CAst.t option) Entry.t
end
module Module :
sig
- val module_expr : module_ast Gram.entry
- val module_type : module_ast Gram.entry
- end
-
-module Vernac_ :
- sig
- val gallina : vernac_expr Gram.entry
- val gallina_ext : vernac_expr Gram.entry
- val command : vernac_expr Gram.entry
- val syntax : vernac_expr Gram.entry
- val vernac_control : vernac_control Gram.entry
- val rec_definition : (fixpoint_expr * decl_notation list) Gram.entry
- val noedit_mode : vernac_expr Gram.entry
- val command_entry : vernac_expr Gram.entry
- val red_expr : raw_red_expr Gram.entry
- val hint_info : Vernacexpr.hint_info_expr Gram.entry
+ val module_expr : module_ast Entry.t
+ val module_type : module_ast Entry.t
end
-(** The main entry: reads an optional vernac command *)
-val main_entry : (Loc.t * vernac_control) option Gram.entry
-
-(** Handling of the proof mode entry *)
-val get_command_entry : unit -> vernac_expr Gram.entry
-val set_command_entry : vernac_expr Gram.entry -> unit
-
val epsilon_value : ('a -> 'self) -> ('self, 'a) Extend.symbol -> 'self option
(** {5 Extending the parser without synchronization} *)
@@ -277,8 +222,8 @@ val epsilon_value : ('a -> 'self) -> ('self, 'a) Extend.symbol -> 'self option
type gram_reinit = gram_assoc * gram_position
(** Type of reinitialization data *)
-val grammar_extend : 'a Gram.entry -> gram_reinit option ->
- 'a Extend.extend_statment -> unit
+val grammar_extend : 'a Entry.t -> gram_reinit option ->
+ 'a Extend.extend_statement -> unit
(** Extend the grammar of Coq, without synchronizing it with the backtracking
mechanism. This means that grammar extensions defined this way will survive
an undo. *)
@@ -288,12 +233,14 @@ val grammar_extend : 'a Gram.entry -> gram_reinit option ->
module GramState : Store.S
(** Auxiliary state of the grammar. Any added data must be marshallable. *)
+(** {6 Extension with parsing rules} *)
+
type 'a grammar_command
(** Type of synchronized parsing extensions. The ['a] type should be
marshallable. *)
type extend_rule =
-| ExtendRule : 'a Gram.entry * gram_reinit option * 'a extend_statment -> extend_rule
+| ExtendRule : 'a Entry.t * gram_reinit option * 'a extend_statement -> extend_rule
type 'a grammar_extension = 'a -> GramState.t -> extend_rule list * GramState.t
(** Grammar extension entry point. Given some ['a] and a current grammar state,
@@ -308,12 +255,35 @@ val create_grammar_command : string -> 'a grammar_extension -> 'a grammar_comman
val extend_grammar_command : 'a grammar_command -> 'a -> unit
(** Extend the grammar of Coq with the given data. *)
-val recover_grammar_command : 'a grammar_command -> 'a list
-(** Recover the current stack of grammar extensions. *)
+(** {6 Extension with parsing entries} *)
+
+type ('a, 'b) entry_command
+(** Type of synchronized entry creation. The ['a] type should be
+ marshallable. *)
+
+type ('a, 'b) entry_extension = 'a -> GramState.t -> string list * GramState.t
+(** Entry extension entry point. Given some ['a] and a current grammar state,
+ such a function must produce the list of entry extensions that will be
+ created and kept synchronized w.r.t. the summary, together
+ with a new state. It should be pure. *)
+
+val create_entry_command : string -> ('a, 'b) entry_extension -> ('a, 'b) entry_command
+(** Create a new entry-creating command with the given name. The extension
+ function is called to generate the new entries for a given data. *)
+
+val extend_entry_command : ('a, 'b) entry_command -> 'a -> 'b Entry.t list
+(** Create new synchronized entries using the provided data. *)
+
+val find_custom_entry : ('a, 'b) entry_command -> string -> 'b Entry.t
+(** Find an entry generated by the synchronized system in the current state.
+ @raise Not_found if non-existent. *)
+
+(** {6 Protection w.r.t. backtrack} *)
val with_grammar_rule_protection : ('a -> 'b) -> 'a -> 'b
(** Location Utils *)
+val of_coqloc : Loc.t -> Ploc.t
val to_coqloc : Ploc.t -> Loc.t
val (!@) : Ploc.t -> Loc.t
@@ -322,7 +292,7 @@ val parser_summary_tag : frozen_t Summary.Dyn.tag
(** Registering grammars by name *)
-type any_entry = AnyEntry : 'a Gram.entry -> any_entry
+type any_entry = AnyEntry : 'a Entry.t -> any_entry
val register_grammars_by_name : string -> any_entry list -> unit
val find_grammars_by_name : string -> any_entry list
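
The [entry_command] API above is the synchronized counterpart of [grammar_command] for creating entries on the fly. A hedged sketch of its intended use; the command name "my-custom-entry", the helper names and the choice of [constr_expr] as the entry type are illustrative, only the three Pcoq calls come from this interface:

  (* The extension function must be pure: given the payload and the current
     grammar state, it returns the names of the entries to create. *)
  let my_entry_cmd : (string, Constrexpr.constr_expr) Pcoq.entry_command =
    Pcoq.create_entry_command "my-custom-entry" (fun name st -> ([name], st))

  let declare_my_entry name : Constrexpr.constr_expr Pcoq.Entry.t =
    match Pcoq.extend_entry_command my_entry_cmd name with
    | [e] -> e
    | _ -> assert false  (* one name was requested, so one entry comes back *)

  (* Later lookups go through the synchronized table, so they are undone
     together with the grammar state; raises [Not_found] for unknown names. *)
  let find_my_entry name = Pcoq.find_custom_entry my_entry_cmd name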
diff --git a/parsing/ppextend.ml b/parsing/ppextend.ml
new file mode 100644
index 00000000..e1f5e201
--- /dev/null
+++ b/parsing/ppextend.ml
@@ -0,0 +1,77 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+open Util
+open Pp
+open CErrors
+open Notation
+open Notation_gram
+
+(*s Pretty-print. *)
+
+type ppbox =
+ | PpHB of int
+ | PpHOVB of int
+ | PpHVB of int
+ | PpVB of int
+
+type ppcut =
+ | PpBrk of int * int
+ | PpFnl
+
+let ppcmd_of_box = function
+ | PpHB n -> h n
+ | PpHOVB n -> hov n
+ | PpHVB n -> hv n
+ | PpVB n -> v n
+
+let ppcmd_of_cut = function
+ | PpFnl -> fnl ()
+ | PpBrk(n1,n2) -> brk(n1,n2)
+
+type unparsing =
+ | UnpMetaVar of int * parenRelation
+ | UnpBinderMetaVar of int * parenRelation
+ | UnpListMetaVar of int * parenRelation * unparsing list
+ | UnpBinderListMetaVar of int * bool * unparsing list
+ | UnpTerminal of string
+ | UnpBox of ppbox * unparsing Loc.located list
+ | UnpCut of ppcut
+
+type unparsing_rule = unparsing list * precedence
+type extra_unparsing_rules = (string * string) list
+(* Concrete syntax for symbolic-extension table *)
+let notation_rules =
+ Summary.ref ~name:"notation-rules" (NotationMap.empty : (unparsing_rule * extra_unparsing_rules * notation_grammar) NotationMap.t)
+
+let declare_notation_rule ntn ~extra unpl gram =
+ notation_rules := NotationMap.add ntn (unpl,extra,gram) !notation_rules
+
+let find_notation_printing_rule ntn =
+ try pi1 (NotationMap.find ntn !notation_rules)
+ with Not_found -> anomaly (str "No printing rule found for " ++ pr_notation ntn ++ str ".")
+let find_notation_extra_printing_rules ntn =
+ try pi2 (NotationMap.find ntn !notation_rules)
+ with Not_found -> []
+let find_notation_parsing_rules ntn =
+ try pi3 (NotationMap.find ntn !notation_rules)
+ with Not_found -> anomaly (str "No parsing rule found for " ++ pr_notation ntn ++ str ".")
+
+let get_defined_notations () =
+ NotationSet.elements @@ NotationMap.domain !notation_rules
+
+let add_notation_extra_printing_rule ntn k v =
+ try
+ notation_rules :=
+ let p, pp, gr = NotationMap.find ntn !notation_rules in
+ NotationMap.add ntn (p, (k,v) :: pp, gr) !notation_rules
+ with Not_found ->
+ user_err ~hdr:"add_notation_extra_printing_rule"
+ (str "No such Notation.")
diff --git a/parsing/ppextend.mli b/parsing/ppextend.mli
new file mode 100644
index 00000000..7eb5967a
--- /dev/null
+++ b/parsing/ppextend.mli
@@ -0,0 +1,51 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+open Constrexpr
+open Notation_gram
+
+(** {6 Pretty-print. } *)
+
+type ppbox =
+ | PpHB of int
+ | PpHOVB of int
+ | PpHVB of int
+ | PpVB of int
+
+type ppcut =
+ | PpBrk of int * int
+ | PpFnl
+
+val ppcmd_of_box : ppbox -> Pp.t -> Pp.t
+
+val ppcmd_of_cut : ppcut -> Pp.t
+
+(** {6 Printing rules for notations} *)
+
+(** Declare and look for the printing rule for symbolic notations *)
+type unparsing =
+ | UnpMetaVar of int * parenRelation
+ | UnpBinderMetaVar of int * parenRelation
+ | UnpListMetaVar of int * parenRelation * unparsing list
+ | UnpBinderListMetaVar of int * bool * unparsing list
+ | UnpTerminal of string
+ | UnpBox of ppbox * unparsing Loc.located list
+ | UnpCut of ppcut
+
+type unparsing_rule = unparsing list * precedence
+type extra_unparsing_rules = (string * string) list
+val declare_notation_rule : notation -> extra:extra_unparsing_rules -> unparsing_rule -> notation_grammar -> unit
+val find_notation_printing_rule : notation -> unparsing_rule
+val find_notation_extra_printing_rules : notation -> extra_unparsing_rules
+val find_notation_parsing_rules : notation -> notation_grammar
+val add_notation_extra_printing_rule : notation -> string -> string -> unit
+
+(** Returns notations with defined parsing/printing rules *)
+val get_defined_notations : unit -> notation list
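
Ppextend now owns the printing-rule table for notations. A minimal sketch of how a rule might be registered and amended, assuming [ntn], [unp], [prec] and [gram] are produced elsewhere (typically by Metasyntax when the notation is interpreted) and that the key/value strings of the extra rule are illustrative:

  let register_printing_rule ntn unp prec gram =
    (* Store the unparsing rule, with no extra printing rules attached yet. *)
    Ppextend.declare_notation_rule ntn ~extra:[] (unp, prec) gram;
    (* Looking it up again; this is an anomaly if the notation is unknown. *)
    let _rule = Ppextend.find_notation_printing_rule ntn in
    (* Extra printing rules (alternative formats attached to the notation)
       can be added afterwards; a user error is raised for undeclared ones. *)
    Ppextend.add_notation_extra_printing_rule ntn "format" "..."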