author     filliatr <filliatr@85f007b7-540e-0410-9357-904b9bb8a0f7>  1999-12-01 17:33:29 +0000
committer  filliatr <filliatr@85f007b7-540e-0410-9357-904b9bb8a0f7>  1999-12-01 17:33:29 +0000
commit     5eae34363a4fc0b45037ad0eabcabe1ba1f1a651 (patch)
tree       6ef70bde29206ba13d958582a37d8d563f2b149b /parsing
parent     9a7f9eeb6e5388b56e575a60aeac87330744440c (diff)
module Egrammar
git-svn-id: svn+ssh://scm.gforge.inria.fr/svn/coq/trunk@176 85f007b7-540e-0410-9357-904b9bb8a0f7
Diffstat (limited to 'parsing')
-rw-r--r--  parsing/astterm.mli    3
-rw-r--r--  parsing/egrammar.ml  165
-rw-r--r--  parsing/egrammar.mli  22
-rw-r--r--  parsing/lexer.mli      4
-rw-r--r--  parsing/lexer.mll     27
5 files changed, 213 insertions, 8 deletions
diff --git a/parsing/astterm.mli b/parsing/astterm.mli
index 9ea2929ed..0b19d3b9c 100644
--- a/parsing/astterm.mli
+++ b/parsing/astterm.mli
@@ -54,8 +54,7 @@ val fconstr_of_com_sort : 'c evar_map -> env -> Coqast.t -> constr
(* Typing with Trad, and re-checking with Mach *)
val fconstruct :'c evar_map -> context -> Coqast.t -> unsafe_judgment
-val fconstruct_type :
- 'c evar_map -> context -> Coqast.t -> typed_type
+val fconstruct_type : 'c evar_map -> context -> Coqast.t -> typed_type
(* Typing, re-checking with universes constraints *)
val fconstruct_with_univ :
diff --git a/parsing/egrammar.ml b/parsing/egrammar.ml
new file mode 100644
index 000000000..c7931553b
--- /dev/null
+++ b/parsing/egrammar.ml
@@ -0,0 +1,165 @@
+
+(* $Id$ *)
+
+open Pp
+open Util
+open Pcoq
+open Coqast
+open Ast
+open Extend
+
+(* State of the grammar extensions *)
+
+let (grammar_state: grammar_command list ref) = ref []
+
+(* Interpretation of the right hand side of grammar rules *)
+
+(* When reporting errors, we add the name of the grammar rule that failed *)
+let specify_name name e =
+ match e with
+ | UserError(lab,strm) ->
+ UserError(lab, [< 'sTR"during interpretation of grammar rule ";
+ 'sTR name; 'sTR","; 'sPC; strm >])
+ | Anomaly(lab,strm) ->
+ Anomaly(lab, [< 'sTR"during interpretation of grammar rule ";
+ 'sTR name; 'sTR","; 'sPC; strm >])
+ | Failure s ->
+ Failure("during interpretation of grammar rule "^name^", "^s)
+ | e -> e
+
+let gram_action (name, etyp) env act dloc =
+ try
+ let v = Ast.eval_act dloc env act in
+ match etyp, v with
+ | (ETast, Vast ast) -> Obj.repr ast
+ | (ETastl, Vastlist astl) -> Obj.repr astl
+ | _ -> grammar_type_error (dloc, "Egrammar.gram_action")
+ with e ->
+ let (loc, exn) =
+ match e with
+ | Stdpp.Exc_located (loce, lexn) -> (loce, lexn)
+ | e -> (dloc, e)
+ in
+ Stdpp.raise_with_loc loc (specify_name name exn)
+
+(* Translation of environments: a production
+ * [ nt1($x1) ... nti($xi) ] -> act($x1..$xi)
+ * is written (with camlp4 conventions):
+ * (fun vi -> .... (fun v1 -> act(v1 .. vi) )..)
+ * where v1..vi are the values generated by non-terminals nt1..nti.
+ * Since the actions are executed by substituting an environment,
+ * make_act builds the following closure:
+ *
+ * ((fun env ->
+ * (fun vi ->
+ * (fun env -> ...
+ *
+ * (fun v1 ->
+ * (fun env -> gram_action .. env act)
+ * (($x1,v1)::env))
+ * ...)
+ * (($xi,vi)::env)))
+ * [])
+ *)
+
+let make_act name_typ a pil =
+ let act_without_arg env = Gramext.action (gram_action name_typ env a)
+ and make_prod_item act_tl = function
+ | (None, _) -> (* parse a non-binding item *)
+ (fun env -> Gramext.action (fun _ -> act_tl env))
+ | (Some p, ETast) -> (* non-terminal *)
+ (fun env -> Gramext.action (fun v -> act_tl ((p, Vast v) :: env)))
+ | (Some p, ETastl) -> (* non-terminal (list) *)
+ (fun env -> Gramext.action (fun v -> act_tl ((p, Vastlist v) :: env)))
+ in
+ (List.fold_left make_prod_item act_without_arg pil) []
+
+(* Grammar extension command. Rules are assumed correct.
+ * Type-checking of grammar rules is done during the translation of
+ * ast to the type grammar_command. We only check that the existing
+ * entries have the type assumed in the grammar command (these type
+ * annotations are added when type-checking the command, function
+ * Extend.of_ast) *)
+
+let check_entry_type (u,n) typ =
+ match force_entry_type u n typ with
+ | Ast e -> Gram.Entry.obj e
+ | ListAst e -> Gram.Entry.obj e
+
+let symbol_of_prod_item univ = function
+ | Term tok -> (Gramext.Stoken tok, (None, ETast))
+ | NonTerm (nt, nttyp, ovar) ->
+ let eobj = check_entry_type (qualified_nterm univ nt) nttyp in
+ (Gramext.Snterm eobj, (ovar, nttyp))
+
+let make_rule univ etyp rule =
+ let pil = List.map (symbol_of_prod_item univ) rule.gr_production in
+ let (symbs,ntl) = List.split pil in
+ let act = make_act (rule.gr_name,etyp) rule.gr_action ntl in
+ (symbs, act)
+
+(* Rules of a level are entered in reverse order, so that the first rules
+ are applied before the last ones *)
+let extend_entry univ (te, etyp, ass, rls) =
+ let rules = List.rev (List.map (make_rule univ etyp) rls) in
+ grammar_extend te None [(None, ass, rules)]
+
+(* Defines new entries. If the entry already exists, check its type *)
+let define_entry univ {ge_name=n; ge_type=typ; gl_assoc=ass; gl_rules=rls} =
+ let e = force_entry_type univ n typ in
+ (e,typ,ass,rls)
+
+(* Add a bunch of grammar rules. Does not check that they are well formed *)
+let extend_grammar gram =
+ let univ = get_univ gram.gc_univ in
+ let tl = List.map (define_entry univ) gram.gc_entries in
+ List.iter (extend_entry univ) tl;
+ grammar_state := gram :: !grammar_state
+
+(* backtrack the grammar extensions *)
+let remove_rule univ e rule =
+ let symbs =
+ List.map (fun pi -> fst (symbol_of_prod_item univ pi)) rule.gr_production
+ in
+ match e with
+ | Ast en -> Gram.delete_rule en symbs
+ | ListAst en -> Gram.delete_rule en symbs
+
+let remove_entry univ entry =
+ let e = get_entry univ entry.ge_name in
+ List.iter (remove_rule univ e) entry.gl_rules
+
+let remove_grammar gram =
+ let univ = get_univ gram.gc_univ in
+ List.iter (remove_entry univ) (List.rev gram.gc_entries)
+
+(* Summary functions: the state of the lexer is included in that of the
+   parser, because adding or removing grammar rules also changes the set of
+   keywords. *)
+type frozen_t = grammar_command list * Lexer.frozen_t
+
+let freeze () = (!grammar_state, Lexer.freeze ())
+
+(* We compare the current state of the grammar and the state to unfreeze,
+   by computing their longest common suffix *)
+let factorize_grams l1 l2 =
+ if l1 == l2 then ([], [], l1) else list_share_tails l1 l2
+
+let number_of_entries gcl =
+ List.fold_left (fun n gc -> n + (List.length gc.gc_entries)) 0 gcl
+
+let unfreeze (grams, lex) =
+ let (undo, redo, common) = factorize_grams !grammar_state grams in
+ (* List.iter remove_grammar undo;*)
+ remove_grammars (number_of_entries undo);
+ grammar_state := common;
+ Lexer.unfreeze lex;
+ List.iter extend_grammar (List.rev redo)
+
+let init_grammar () =
+ List.iter remove_grammar !grammar_state;
+ grammar_state := []
+
+let init () =
+ Lexer.init ();
+ init_grammar ()
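
The comment above make_act describes how a production's action is compiled into a stack of closures by folding over the production items, each closure consuming one parsed value and extending the environment before the action finally runs on that environment. The standalone OCaml sketch below (not part of the patch) reproduces that construction; the string-valued environment, run_action and the example rule are illustrative stand-ins for what the patch does through Ast.eval_act, Gramext.action and Obj.repr.

  (* Parsed values are plain strings here; the real code handles asts. *)
  type env = (string * string) list

  (* Stand-in for gram_action: run the rule's action on the final environment. *)
  let run_action name (env : env) =
    Printf.printf "rule %s: %s\n" name
      (String.concat "; " (List.map (fun (x, v) -> x ^ " = " ^ v) env))

  (* An item is either Some "x" (bind the parsed value to $x) or None (parse
     without binding, e.g. a keyword).  Values are supplied last-symbol-first,
     which is how camlp4 hands them to rule actions. *)
  let make_act name items =
    let innermost env = fun (_ : string list) -> run_action name env in
    let wrap act_tl item =
      fun env values ->
        match values, item with
        | v :: rest, Some x -> act_tl ((x, v) :: env) rest
        | _ :: rest, None   -> act_tl env rest
        | [], _             -> failwith "missing parsed value"
    in
    (List.fold_left wrap innermost items) []

  (* Hypothetical production [ "let" ident($x) ":=" term($t) ]. *)
  let () =
    let act = make_act "let_rule" [None; Some "x"; None; Some "t"] in
    act ["3 + 4"; ":="; "n"; "let"]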
diff --git a/parsing/egrammar.mli b/parsing/egrammar.mli
new file mode 100644
index 000000000..c581d9db0
--- /dev/null
+++ b/parsing/egrammar.mli
@@ -0,0 +1,22 @@
+
+(* $Id$ *)
+
+(*i*)
+open Coqast
+open Ast
+open Pcoq
+open Extend
+(*i*)
+
+type frozen_t
+
+val freeze : unit -> frozen_t
+val unfreeze : frozen_t -> unit
+val init : unit -> unit
+
+val extend_grammar : grammar_command -> unit
+
+val remove_rule : (string * gram_universe) -> typed_entry -> grammar_rule ->
+ unit
+val remove_entry : (string * gram_universe) -> grammar_entry -> unit
+val remove_grammar : grammar_command -> unit
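
The unfreeze function in egrammar.ml above reconciles the current grammar state with a frozen one by locating their longest shared suffix, undoing what exists only in the current state and replaying what exists only in the frozen one. The sketch below (not part of the patch) shows that factorization in isolation; share_tails is a stand-in for Util.list_share_tails, assumed here to split two lists into (undo, redo, common) around their longest physically shared tail.

  (* Split l1 and l2 into the parts that differ and their shared tail. *)
  let share_tails l1 l2 =
    let rec prefix acc = function
      | x :: r1, y :: r2 when x == y -> prefix (x :: acc) (r1, r2)
      | r1, r2 -> (List.rev r1, List.rev r2, acc)
    in
    prefix [] (List.rev l1, List.rev l2)

  (* The common part must be physically shared, as it is when grammar
     commands are consed onto a previously saved state. *)
  let () =
    let common = ["G1"; "G2"] in
    let current = "B2" :: "B1" :: common     (* most recent first *)
    and frozen  = "A1" :: common in
    let undo, redo, kept = share_tails current frozen in
    Printf.printf "undo=[%s] redo=[%s] common=[%s]\n"
      (String.concat "; " undo) (String.concat "; " redo)
      (String.concat "; " kept)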
diff --git a/parsing/lexer.mli b/parsing/lexer.mli
index 72ee09b1c..b8046716c 100644
--- a/parsing/lexer.mli
+++ b/parsing/lexer.mli
@@ -13,3 +13,7 @@ val tparse : string * string -> (string * string) Stream.t -> string
val token_text : string * string -> string
+type frozen_t
+val freeze : unit -> frozen_t
+val unfreeze : frozen_t -> unit
+val init : unit -> unit
diff --git a/parsing/lexer.mll b/parsing/lexer.mll
index cc0f8856c..0eaa87f93 100644
--- a/parsing/lexer.mll
+++ b/parsing/lexer.mll
@@ -11,18 +11,33 @@ type error =
exception Error of error * int * int
-let add_keyword,is_keyword =
- let table = Hashtbl.create 149 in
- List.iter (fun kw -> Hashtbl.add table kw ())
+type frozen_t = string list
+
+let kw_table = Hashtbl.create 149
+
+let init () =
+ Hashtbl.clear kw_table;
+ List.iter (fun kw -> Hashtbl.add kw_table kw ())
[ "Grammar"; "Syntax"; "Quit"; "Load"; "Compile";
"of"; "with"; "end"; "as"; "in"; "using";
"Cases"; "Fixpoint"; "CoFixpoint";
"Definition"; "Inductive"; "CoInductive";
"Theorem"; "Variable"; "Axiom"; "Parameter"; "Hypothesis";
"Orelse"; "Proof"; "Qed";
- "Prop"; "Set"; "Type" ];
- (fun s -> Hashtbl.add table s ()),
- (fun s -> try Hashtbl.find table s; true with Not_found -> false)
+ "Prop"; "Set"; "Type" ]
+
+let add_keyword s = Hashtbl.add kw_table s ()
+
+let is_keyword s = try Hashtbl.find kw_table s; true with Not_found -> false
+
+let freeze () =
+ let l = ref [] in
+ Hashtbl.iter (fun k _ -> l := k :: !l) kw_table;
+ !l
+
+let unfreeze ft =
+ init ();
+ List.iter add_keyword ft
let find_keyword s = if is_keyword s then s else raise Not_found
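
On the lexer side, the keyword table is now rebuilt by init and snapshotted as a plain string list, so that Egrammar.freeze and unfreeze can save and restore it together with the grammar rules. The standalone sketch below (not part of the patch) reproduces that snapshot pattern; the Keywords module and its two-element default set are illustrative only, and unfreeze here rebuilds the table from the snapshot alone, whereas the patch reinstalls the defaults via init first, which gives the same membership behaviour since the snapshot already contains them.

  module Keywords : sig
    type frozen_t
    val init : unit -> unit
    val add_keyword : string -> unit
    val is_keyword : string -> bool
    val freeze : unit -> frozen_t
    val unfreeze : frozen_t -> unit
  end = struct
    type frozen_t = string list

    let table = Hashtbl.create 149

    (* Reset the table to the built-in keywords. *)
    let init () =
      Hashtbl.clear table;
      List.iter (fun kw -> Hashtbl.add table kw ()) ["Theorem"; "Qed"]

    let add_keyword s = Hashtbl.add table s ()
    let is_keyword s = Hashtbl.mem table s

    (* A snapshot is just the list of currently registered keywords... *)
    let freeze () = Hashtbl.fold (fun k () l -> k :: l) table []

    (* ...and restoring means rebuilding the table from that list. *)
    let unfreeze kws = Hashtbl.clear table; List.iter add_keyword kws
  end

  (* Round trip: a keyword added after the snapshot disappears on unfreeze. *)
  let () =
    Keywords.init ();
    let snapshot = Keywords.freeze () in
    Keywords.add_keyword "Grammar";
    assert (Keywords.is_keyword "Grammar");
    Keywords.unfreeze snapshot;
    assert (not (Keywords.is_keyword "Grammar"));
    assert (Keywords.is_keyword "Qed")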