(************************************************************************)
(*         *   The Coq Proof Assistant / The Coq Development Team       *)
(*  v      *   INRIA, CNRS and contributors - Copyright 1999-2018       *)
(* <O___,, *       (see CREDITS file for the list of authors)           *)
(*   \VV/  **************************************************************)
(*    //   *    This file is distributed under the terms of the         *)
(*         *     GNU Lesser General Public License Version 2.1          *)
(*         *     (see LICENSE file for the text of the license)         *)
(************************************************************************)

open CErrors
open Util
open Extend
open Genarg

let curry f x y = f (x, y)
let uncurry f (x,y) = f x y

(** Location Utils  *)
let coq_file_of_ploc_file s =
  if s = "" then Loc.ToplevelInput else Loc.InFile s

let to_coqloc loc =
  { Loc.fname = coq_file_of_ploc_file (Ploc.file_name loc);
    Loc.line_nb = Ploc.line_nb loc;
    Loc.bol_pos = Ploc.bol_pos loc;
    Loc.bp = Ploc.first_pos loc;
    Loc.ep = Ploc.last_pos loc;
    Loc.line_nb_last = Ploc.line_nb_last loc;
    Loc.bol_pos_last = Ploc.bol_pos_last loc; }

let (!@) = to_coqloc
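
(* [!@] converts a Camlp5 location ([Ploc.t]) into a Coq location ([Loc.t]);
   e.g. [of_coq_action] below applies it to the location received from
   Camlp5 before passing it to the Coq-level action. *)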

(** The parser of Coq *)
module G : sig

  include Grammar.S with type te = Tok.t

(* where Grammar.S

module type S =
  sig
    type te = 'x;
    type parsable = 'x;
    value parsable : Stream.t char -> parsable;
    value tokens : string -> list (string * int);
    value glexer : Plexing.lexer te;
    value set_algorithm : parse_algorithm -> unit;
    module Entry :
      sig
        type e 'a = 'y;
        value create : string -> e 'a;
        value parse : e 'a -> parsable -> 'a;
        value parse_token : e 'a -> Stream.t te -> 'a;
        value name : e 'a -> string;
        value of_parser : string -> (Stream.t te -> 'a) -> e 'a;
        value print : Format.formatter -> e 'a -> unit;
        external obj : e 'a -> Gramext.g_entry te = "%identity";
      end
    ;
    module Unsafe :
      sig
        value gram_reinit : Plexing.lexer te -> unit;
        value clear_entry : Entry.e 'a -> unit;
      end
    ;
    value extend :
      Entry.e 'a -> option Gramext.position ->
        list
          (option string * option Gramext.g_assoc *
           list (list (Gramext.g_symbol te) * Gramext.g_action)) ->
        unit;
    value delete_rule : Entry.e 'a -> list (Gramext.g_symbol te) -> unit;
  end
  *)

  type 'a entry = 'a Entry.e
  type internal_entry = Tok.t Gramext.g_entry
  type symbol = Tok.t Gramext.g_symbol
  type action = Gramext.g_action
  type production_rule = symbol list * action
  type single_extend_statement =
      string option * Gramext.g_assoc option * production_rule list
  type extend_statement =
      Gramext.position option * single_extend_statement list
  type coq_parsable

  val parsable : ?file:Loc.source -> char Stream.t -> coq_parsable
  val action : 'a -> action
  val entry_create : string -> 'a entry
  val entry_parse : 'a entry -> coq_parsable -> 'a
  val entry_print : Format.formatter -> 'a entry -> unit

  val comment_state : coq_parsable -> ((int * int) * string) list

  val srules' : production_rule list -> symbol
  val parse_tokens_after_filter : 'a entry -> Tok.t Stream.t -> 'a

end with type 'a Entry.e = 'a Grammar.GMake(CLexer).Entry.e = struct

  include Grammar.GMake(CLexer)

  type 'a entry = 'a Entry.e
  type internal_entry = Tok.t Gramext.g_entry
  type symbol = Tok.t Gramext.g_symbol
  type action = Gramext.g_action
  type production_rule = symbol list * action
  type single_extend_statement =
      string option * Gramext.g_assoc option * production_rule list
  type extend_statement =
      Gramext.position option * single_extend_statement list

  type coq_parsable = parsable * CLexer.lexer_state ref

  let parsable ?(file=Loc.ToplevelInput) c =
    let state = ref (CLexer.init_lexer_state file) in
    CLexer.set_lexer_state !state;
    let a = parsable c in
    state := CLexer.get_lexer_state ();
    (a,state)

  let action = Gramext.action
  let entry_create = Entry.create

  let entry_parse e (p,state) =
    CLexer.set_lexer_state !state;
    try
      let c = Entry.parse e p in
      state := CLexer.get_lexer_state ();
      c
    with Ploc.Exc (loc,e) ->
      CLexer.drop_lexer_state ();
      let loc' = Loc.get_loc (Exninfo.info e) in
      let loc = match loc' with None -> to_coqloc loc | Some loc -> loc in
      Loc.raise ~loc e

  let comment_state (p,state) =
    CLexer.get_comment_state !state

  let entry_print ft x = Entry.print ft x

  (* Not used *)
  let srules' = Gramext.srules
  let parse_tokens_after_filter = Entry.parse_token

end

let warning_verbose = Gramext.warning_verbose

let of_coq_assoc = function
| Extend.RightA -> Gramext.RightA
| Extend.LeftA -> Gramext.LeftA
| Extend.NonA -> Gramext.NonA

let of_coq_position = function
| Extend.First -> Gramext.First
| Extend.Last -> Gramext.Last
| Extend.Before s -> Gramext.Before s
| Extend.After s -> Gramext.After s
| Extend.Level s -> Gramext.Level s

module Symbols : sig
  val stoken : Tok.t -> G.symbol
  val sself : G.symbol
  val snext : G.symbol
  val slist0 : G.symbol -> G.symbol
  val slist0sep : G.symbol * G.symbol -> G.symbol
  val slist1 : G.symbol -> G.symbol
  val slist1sep : G.symbol * G.symbol -> G.symbol
  val sopt : G.symbol -> G.symbol
  val snterml : G.internal_entry * string -> G.symbol
  val snterm : G.internal_entry -> G.symbol
end = struct

  let stoken tok =
    let pattern = match tok with
    | Tok.KEYWORD s -> "", s
    | Tok.IDENT s -> "IDENT", s
    | Tok.PATTERNIDENT s -> "PATTERNIDENT", s
    | Tok.FIELD s -> "FIELD", s
    | Tok.INT s -> "INT", s
    | Tok.STRING s -> "STRING", s
    | Tok.LEFTQMARK -> "LEFTQMARK", ""
    | Tok.BULLET s -> "BULLET", s
    | Tok.EOI -> "EOI", ""
    in
    Gramext.Stoken pattern

  let slist0sep (x, y) = Gramext.Slist0sep (x, y, false)
  let slist1sep (x, y) = Gramext.Slist1sep (x, y, false)

  let snterml (x, y) = Gramext.Snterml (x, y)
  let snterm x = Gramext.Snterm x
  let sself = Gramext.Sself
  let snext = Gramext.Snext
  let slist0 x = Gramext.Slist0 x
  let slist1 x = Gramext.Slist1 x
  let sopt x = Gramext.Sopt x

end

let camlp5_verbosity silent f x =
  let a = !warning_verbose in
  warning_verbose := silent;
  f x;
  warning_verbose := a

(** Grammar extensions *)

(** NB: [extend_statement =
         gram_position option * single_extend_statement list]
    and [single_extend_statement =
         string option * gram_assoc option * production_rule list]
    and [production_rule = symbol list * action]

    In [single_extend_statement], the first two components are the level
    name and the associativity; they are given iff a new level is created *)
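
(* A hedged illustration of the shape consumed by [G.extend] (the keyword
   and the action below are made up, for illustration only):

   (None,                                 (* keep the current position  *)
    [ None, None,                         (* no new level is created    *)
      [ [Symbols.stoken (Tok.KEYWORD "foo")],
        G.action (fun (_ : string) (_ : Ploc.t) -> ()) ] ])
*)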

(** Binding general entry keys to symbols *)

let rec of_coq_action : type a r. (r, a, Loc.t -> r) Extend.rule -> a -> G.action = function
| Stop -> fun f -> G.action (fun loc -> f (!@ loc))
| Next (r, _) -> fun f -> G.action (fun x -> of_coq_action r (f x))

let rec symbol_of_prod_entry_key : type s a. (s, a) symbol -> _ = function
  | Atoken t -> Symbols.stoken t
  | Alist1 s -> Symbols.slist1 (symbol_of_prod_entry_key s)
  | Alist1sep (s,sep) ->
      Symbols.slist1sep (symbol_of_prod_entry_key s, symbol_of_prod_entry_key sep)
  | Alist0 s -> Symbols.slist0 (symbol_of_prod_entry_key s)
  | Alist0sep (s,sep) ->
      Symbols.slist0sep (symbol_of_prod_entry_key s, symbol_of_prod_entry_key sep)
  | Aopt s -> Symbols.sopt (symbol_of_prod_entry_key s)
  | Aself -> Symbols.sself
  | Anext -> Symbols.snext
  | Aentry e ->
    Symbols.snterm (G.Entry.obj e)
  | Aentryl (e, n) ->
    Symbols.snterml (G.Entry.obj e, string_of_int n)
  | Arules rs ->
    G.srules' (List.map symbol_of_rules rs)

and symbol_of_rule : type s a r. (s, a, r) Extend.rule -> _ = function
| Stop -> fun accu -> accu
| Next (r, s) -> fun accu -> symbol_of_rule r (symbol_of_prod_entry_key s :: accu)

and symbol_of_rules : type a. a Extend.rules -> _ = function
| Rules (r, act) ->
  let symb = symbol_of_rule r.norec_rule [] in
  let act = of_coq_action r.norec_rule act in
  (symb, act)

let of_coq_production_rule : type a. a Extend.production_rule -> _ = function
| Rule (toks, act) -> (symbol_of_rule toks [], of_coq_action toks act)

let of_coq_single_extend_statement (lvl, assoc, rule) =
  (lvl, Option.map of_coq_assoc assoc, List.map of_coq_production_rule rule)

let of_coq_extend_statement (pos, st) =
  (Option.map of_coq_position pos, List.map of_coq_single_extend_statement st)

(** Type of reinitialization data *)
type gram_reinit = gram_assoc * gram_position

type extend_rule =
| ExtendRule : 'a G.entry * gram_reinit option * 'a extend_statement -> extend_rule

type ext_kind =
  | ByGrammar of extend_rule
  | ByEXTEND of (unit -> unit) * (unit -> unit)

(** The list of extensions *)

let camlp5_state = ref []

(** Deletion *)

let grammar_delete e reinit (pos,rls) =
  List.iter
    (fun (n,ass,lev) ->
      List.iter (fun (pil,_) -> G.delete_rule e pil) (List.rev lev))
    (List.rev rls);
  match reinit with
  | Some (a,ext) ->
    let a = of_coq_assoc a in
    let ext = of_coq_position ext in
    let lev = match pos with
    | Some (Gramext.Level n) -> n
    | _ -> assert false
    in
    (G.extend e) (Some ext) [Some lev,Some a,[]]
  | None -> ()

(** Extension *)

let grammar_extend e reinit ext =
  let ext = of_coq_extend_statement ext in
  let undo () = grammar_delete e reinit ext in
  let redo () = camlp5_verbosity false (uncurry (G.extend e)) ext in
  camlp5_state := ByEXTEND (undo, redo) :: !camlp5_state;
  redo ()

let grammar_extend_sync e reinit ext =
  camlp5_state := ByGrammar (ExtendRule (e, reinit, ext)) :: !camlp5_state;
  camlp5_verbosity false (uncurry (G.extend e)) (of_coq_extend_statement ext)

(** The apparent parser of Coq; it encapsulates G to keep track
    of the extensions. *)

module Gram =
  struct
    include G
    let extend e =
      curry
        (fun ext ->
           camlp5_state :=
             (ByEXTEND ((fun () -> grammar_delete e None ext),
                        (fun () -> uncurry (G.extend e) ext)))
             :: !camlp5_state;
           uncurry (G.extend e) ext)
    let delete_rule e pil =
      (* spiwack: if you load an ML module which contains GDELETE_RULE
         inside a section, God kills a kitty: it would corrupt
         remove_grammars.  There does not seem to be a good way to undo a
         delete_rule, since deleting takes fewer arguments than extending
         and the deleted production rule is not returned by delete_rule.
         If we could retrieve the necessary information, then ByEXTEND
         would provide just the framework we need to allow this in a
         section.  I'm not entirely sure it makes sense, but at least it
         would be more correct. *)
      G.delete_rule e pil
  end

(** Remove extensions

   [n] is the number of extended entries (not the number of Grammar commands!)
   to remove. *)

let rec remove_grammars n =
  if n>0 then
    (match !camlp5_state with
       | [] -> anomaly ~label:"Pcoq.remove_grammars" (Pp.str "too many rules to remove.")
       | ByGrammar (ExtendRule (g, reinit, ext)) :: t ->
           grammar_delete g reinit (of_coq_extend_statement ext);
           camlp5_state := t;
           remove_grammars (n-1)
       | ByEXTEND (undo,redo)::t ->
           undo();
           camlp5_state := t;
           remove_grammars n;
           redo();
           camlp5_state := ByEXTEND (undo,redo) :: !camlp5_state)

let make_rule r = [None, None, r]

(** An entry that checks we reached the end of the input. *)

let eoi_entry en =
  let e = Gram.entry_create ((Gram.Entry.name en) ^ "_eoi") in
  let symbs = [Symbols.snterm (Gram.Entry.obj en); Symbols.stoken Tok.EOI] in
  let act = Gram.action (fun _ x loc -> x) in
  uncurry (Gram.extend e) (None, make_rule [symbs, act]);
  e
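
(* For example, [Constr.constr_eoi] below is [eoi_entry constr]: it parses
   a [constr] and then requires the end of the input. *)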

let map_entry f en =
  let e = Gram.entry_create ((Gram.Entry.name en) ^ "_map") in
  let symbs = [Symbols.snterm (Gram.Entry.obj en)] in
  let act = Gram.action (fun x loc -> f x) in
  uncurry (Gram.extend e) (None, make_rule [symbs, act]);
  e

(* Parse a string; this does NOT check that the entire string was read
   (wrap the entry with eoi_entry for that) *)

let parse_string f x =
  let strm = Stream.of_string x in Gram.entry_parse f (Gram.parsable strm)
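
(* Hedged sketch (illustration only): [parse_string Prim.ident "foo"]
   parses the identifier [foo]; any trailing input is left unconsumed
   unless the entry is first wrapped with [eoi_entry]. *)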

type gram_universe = string

let utables : (string, unit) Hashtbl.t =
  Hashtbl.create 97

let create_universe u =
  let () = Hashtbl.add utables u () in
  u

let uprim   = create_universe "prim"
let uconstr = create_universe "constr"
let utactic = create_universe "tactic"

let get_univ u =
  if Hashtbl.mem utables u then u
  else raise Not_found

let new_entry u s =
  let ename = u ^ ":" ^ s in
  let e = Gram.entry_create ename in
  e

let make_gen_entry u s = new_entry u s

module GrammarObj =
struct
  type ('r, _, _) obj = 'r Gram.entry
  let name = "grammar"
  let default _ = None
end

module Grammar = Register(GrammarObj)

let register_grammar = Grammar.register0
let genarg_grammar = Grammar.obj

let create_generic_entry (type a) u s (etyp : a raw_abstract_argument_type) : a Gram.entry =
  let e = new_entry u s in
  let Rawwit t = etyp in
  let () = Grammar.register0 t e in
  e
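
(* Hedged sketch (the witness [wit_foo] and its raw type are hypothetical):

   let foo : foo_raw Gram.entry =
     create_generic_entry utactic "foo" (Genarg.rawwit wit_foo)
*)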

(* Initial grammar entries *)

module Prim =
  struct
    let gec_gen n = make_gen_entry uprim n

    (* Entries that can be referred to via the string -> Gram.entry table *)
    (* Typically for tactic or vernac extensions *)
    let preident = gec_gen "preident"
    let ident = gec_gen "ident"
    let natural = gec_gen "natural"
    let integer = gec_gen "integer"
    let bigint = Gram.entry_create "Prim.bigint"
    let string = gec_gen "string"
    let lstring = Gram.entry_create "Prim.lstring"
    let reference = make_gen_entry uprim "reference"
    let by_notation = Gram.entry_create "by_notation"
    let smart_global = Gram.entry_create "smart_global"

    (* parsed like ident but interpreted as a term *)
    let var = gec_gen "var"

    let name = Gram.entry_create "Prim.name"
    let identref = Gram.entry_create "Prim.identref"
    let univ_decl = Gram.entry_create "Prim.univ_decl"
    let ident_decl = Gram.entry_create "Prim.ident_decl"
    let pattern_ident = Gram.entry_create "pattern_ident"
    let pattern_identref = Gram.entry_create "pattern_identref"

    (* A synonym of ident - maybe ident will be located one day *)
    let base_ident = Gram.entry_create "Prim.base_ident"

    let qualid = Gram.entry_create "Prim.qualid"
    let fullyqualid = Gram.entry_create "Prim.fullyqualid"
    let dirpath = Gram.entry_create "Prim.dirpath"

    let ne_string = Gram.entry_create "Prim.ne_string"
    let ne_lstring = Gram.entry_create "Prim.ne_lstring"

  end

module Constr =
  struct
    let gec_constr = make_gen_entry uconstr

    (* Entries that can be referred to via the string -> Gram.entry table *)
    let constr = gec_constr "constr"
    let operconstr = gec_constr "operconstr"
    let constr_eoi = eoi_entry constr
    let lconstr = gec_constr "lconstr"
    let binder_constr = gec_constr "binder_constr"
    let ident = make_gen_entry uconstr "ident"
    let global = make_gen_entry uconstr "global"
    let universe_level = make_gen_entry uconstr "universe_level"
    let sort = make_gen_entry uconstr "sort"
    let sort_family = make_gen_entry uconstr "sort_family"
    let pattern = Gram.entry_create "constr:pattern"
    let constr_pattern = gec_constr "constr_pattern"
    let lconstr_pattern = gec_constr "lconstr_pattern"
    let closed_binder = Gram.entry_create "constr:closed_binder"
    let binder = Gram.entry_create "constr:binder"
    let binders = Gram.entry_create "constr:binders"
    let open_binders = Gram.entry_create "constr:open_binders"
    let binders_fixannot = Gram.entry_create "constr:binders_fixannot"
    let typeclass_constraint = Gram.entry_create "constr:typeclass_constraint"
    let record_declaration = Gram.entry_create "constr:record_declaration"
    let appl_arg = Gram.entry_create "constr:appl_arg"
  end

module Module =
  struct
    let module_expr = Gram.entry_create "module_expr"
    let module_type = Gram.entry_create "module_type"
  end
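
(* [epsilon_value f e] checks whether the symbol [e] accepts the empty
   input: it builds a throwaway entry whose single production is [e] and
   tries to parse the empty string, returning [Some (f v)] on success and
   [None] on failure. *)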

let epsilon_value f e =
  let r = Rule (Next (Stop, e), fun x _ -> f x) in
  let ext = of_coq_extend_statement (None, [None, None, [r]]) in
  let entry = G.entry_create "epsilon" in
  let () = uncurry (G.extend entry) ext in
  try Some (parse_string entry "") with _ -> None

(** Synchronized grammar extensions *)

module GramState = Store.Make ()

type 'a grammar_extension = 'a -> GramState.t -> extend_rule list * GramState.t

module GrammarCommand = Dyn.Make ()
module GrammarInterp = struct type 'a t = 'a grammar_extension end
module GrammarInterpMap = GrammarCommand.Map(GrammarInterp)

let grammar_interp = ref GrammarInterpMap.empty

let (grammar_stack : (int * GrammarCommand.t * GramState.t) list ref) = ref []

type 'a grammar_command = 'a GrammarCommand.tag

let create_grammar_command name interp : _ grammar_command =
  let obj = GrammarCommand.create name in
  let () = grammar_interp := GrammarInterpMap.add obj interp !grammar_interp in
  obj

let extend_grammar_command tag g =
  let modify = GrammarInterpMap.find tag !grammar_interp in
  let grammar_state = match !grammar_stack with
  | [] -> GramState.empty
  | (_, _, st) :: _ -> st
  in
  let (rules, st) = modify g grammar_state in
  let iter (ExtendRule (e, reinit, ext)) = grammar_extend_sync e reinit ext in
  let () = List.iter iter rules in
  let nb = List.length rules in
  grammar_stack := (nb, GrammarCommand.Dyn (tag, g), st) :: !grammar_stack
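
(* Hedged sketch of the intended usage (all names are hypothetical):

   let my_command : my_data grammar_command =
     create_grammar_command "MyGrammarCommand"
       (fun data st -> ([ExtendRule (my_entry, None, my_ext data)], st))

   [extend_grammar_command my_command data] then applies the produced rules
   and records them on [grammar_stack] so that the summary mechanism can
   undo them. *)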

let recover_grammar_command (type a) (tag : a grammar_command) : a list =
  let filter : _ -> a option = fun (_, GrammarCommand.Dyn (tag', v), _) ->
    match GrammarCommand.eq tag tag' with
    | None -> None
    | Some Refl -> Some v
  in
  List.map_filter filter !grammar_stack

let extend_dyn_grammar (GrammarCommand.Dyn (tag, g)) = extend_grammar_command tag g

(* Summary functions: the state of the lexer is included in that of the
   parser, because adding or removing grammar rules affects the set of
   keywords. *)
type frozen_t = (int * GrammarCommand.t * GramState.t) list * CLexer.keyword_state

let freeze _ : frozen_t = (!grammar_stack, CLexer.get_keyword_state ())

(* We compare the current state of the grammar and the state to unfreeze,
   by computing their longest common suffix *)
let factorize_grams l1 l2 =
  if l1 == l2 then ([], [], l1) else List.share_tails l1 l2

let number_of_entries gcl =
  List.fold_left (fun n (p,_,_) -> n + p) 0 gcl

let unfreeze (grams, lex) =
  let (undo, redo, common) = factorize_grams !grammar_stack grams in
  let n = number_of_entries undo in
  remove_grammars n;
  grammar_stack := common;
  CLexer.set_keyword_state lex;
  List.iter extend_dyn_grammar (List.rev_map pi2 redo)

(** No need to provide an init function: the grammar state is statically
    available and already empty initially, while the lexer state should not
    be reset, since it contains keywords declared in g_*.ml4 *)

let parser_summary_tag =
  Summary.declare_summary_tag "GRAMMAR_LEXER"
    { Summary.freeze_function = freeze;
      Summary.unfreeze_function = unfreeze;
      Summary.init_function = Summary.nop }

let with_grammar_rule_protection f x =
  let fs = freeze false in
  try let a = f x in unfreeze fs; a
  with reraise ->
    let reraise = CErrors.push reraise in
    let () = unfreeze fs in
    iraise reraise
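
(* [with_grammar_rule_protection f x] runs [f x] and then restores the
   previously frozen grammar and keyword state, so any rules or keywords
   added by [f] are discarded, whether [f] returns normally or raises. *)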

(** Registering grammar of generic arguments *)

let () =
  let open Stdarg in
  Grammar.register0 wit_int (Prim.integer);
  Grammar.register0 wit_string (Prim.string);
  Grammar.register0 wit_pre_ident (Prim.preident);
  Grammar.register0 wit_ident (Prim.ident);
  Grammar.register0 wit_var (Prim.var);
  Grammar.register0 wit_ref (Prim.reference);
  Grammar.register0 wit_sort_family (Constr.sort_family);
  Grammar.register0 wit_constr (Constr.constr);
  ()

(** Registering extra grammar *)

type any_entry = AnyEntry : 'a Gram.entry -> any_entry

let grammar_names : any_entry list String.Map.t ref = ref String.Map.empty

let register_grammars_by_name name grams =
  grammar_names := String.Map.add name grams !grammar_names

let find_grammars_by_name name =
  String.Map.find name !grammar_names
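
(* Hedged sketch (hypothetical names): a plugin can expose its entries with

   let () = register_grammars_by_name "my_plugin"
       [AnyEntry my_entry1; AnyEntry my_entry2]

   and clients can later recover them via
   [find_grammars_by_name "my_plugin"]. *)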