path: root/contrib/interface
author	bertot <bertot@85f007b7-540e-0410-9357-904b9bb8a0f7>	2001-04-04 13:03:53 +0000
committer	bertot <bertot@85f007b7-540e-0410-9357-904b9bb8a0f7>	2001-04-04 13:03:53 +0000
commitd9765c49bd60bedb3071188aa4406c241813553e (patch)
tree9e2217a2d86670dc6bc00f657bafc0e8ff71c929 /contrib/interface
parent917e4b5b1f334282350278d53f3d94ecefb51b3a (diff)
These files are used to construct an independent parser, that is, a small
coq that is only able to parse coq script files and produce a tree-like representation. For now this representation is only given in a postfix format, but other formats (such as XML) would also be possible. git-svn-id: svn+ssh://scm.gforge.inria.fr/svn/coq/trunk@1540 85f007b7-540e-0410-9357-904b9bb8a0f7
Diffstat (limited to 'contrib/interface')
-rwxr-xr-x  contrib/interface/line_parser.ml    235
-rw-r--r--  contrib/interface/line_parser.mli      5
-rw-r--r--  contrib/interface/parse.ml           425
-rw-r--r--  contrib/interface/vernacrc            16
4 files changed, 681 insertions, 0 deletions
diff --git a/contrib/interface/line_parser.ml b/contrib/interface/line_parser.ml
new file mode 100755
index 000000000..91eda0eed
--- /dev/null
+++ b/contrib/interface/line_parser.ml
@@ -0,0 +1,235 @@
+(* Line-oriented syntactic analyser for a Coq parser *)
+(* This parser expects a very small number of commands, each given on a complete
+line. Some of these commands are then followed by a text fragment terminated
+by a precise keyword, which is also expected to appear alone on a line. *)
+
+(* The main parsing loop procedure is "parser_loop", given at the end of this
+file. It reads lines one by one and checks whether they can be parsed using
+a very simple parser. This very simple parser uses a lexer, which is also given
+in this file.
+
+The lexical analyser:
+ There are only 6 sorts of tokens *)
+type simple_tokens = Tspace | Tid of string | Tint of int | Tstring of string |
+ Tlbracket | Trbracket;;
+
+(* When recognizing identifiers or strings, the lexical analyser accumulates
+ the characters in a buffer, using the function add_in_buff. To retrieve
+ the characters, one can use get_buff (this code was inspired by the
+ code in src/meta/lexer.ml of Coq revision 6.1) *)
+let add_in_buff,get_buff =
+ let buff = ref (String.create 80) in
+ (fun i x ->
+ let len = String.length !buff in
+ if i >= len then (buff := !buff ^ (String.create len);());
+ String.set !buff i x;
+ succ i),
+ (fun len -> String.sub !buff 0 len);;
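+
+(* A small usage sketch of the two functions above: accumulating two
+   characters and reading them back,
+     let n = add_in_buff 0 'a' in
+     let n = add_in_buff n 'b' in
+     get_buff n
+   yields "ab": add_in_buff returns the next free index in the shared buffer
+   and get_buff extracts its first n characters. *)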
+
+(* Identifiers are [a-zA-Z_][a-zA-Z0-9_]*. When arriving here the first
+ character has already been recognized. *)
+let rec ident len = parser
+ [<''_' | 'a'..'z' | 'A'..'Z' | '0'..'9' as c; s >] ->
+ ident (add_in_buff len c) s
+| [< >] -> let str = get_buff len in Tid(str);;
+
+(* While recognizing integers, the integer value is constructed directly.
+ The ASCII code of '0' is used for this. *)
+let code0 = Char.code '0';;
+
+let get_digit c = Char.code c - code0;;
+
+(* Integers are [0-9]*
+ The variable intval is the integer value of the text that has already
+ been recognized. As for identifiers, the first character has already been
+ recognized. *)
+
+let rec parse_int intval = parser
+ [< ''0'..'9' as c ; i=parse_int (10 * intval + get_digit c)>] -> i
+| [< >] -> Tint intval;;
+
+(* The string lexer is borrowed from the string parser of Coq V6.1
+ This may be a problem if conventions have changed in Coq.
+ However, this parser is only used to recognize file names, which should
+ not contain too many special characters *)
+
+let rec spec_char = parser
+ [< ''n' >] -> '\n'
+| [< ''t' >] -> '\t'
+| [< ''b' >] -> '\008'
+| [< ''r' >] -> '\013'
+| [< ''0'..'9' as c; v= (spec1 (get_digit c)) >] ->
+ Char.chr v
+| [< 'x >] -> x
+
+and spec1 v = parser
+ [< ''0'..'9' as c; s >] -> spec1 (10*v+(get_digit c)) s
+| [< >] -> v
+;;
+
+(* This is the actual string lexical analyser. Strings are
+ QUOT([^QUOT\\]|\\[0-9]*|\\[^0-9])*QUOT (the word QUOT is used
+ to represent the double quotation character, which cannot be used
+ freely, even inside comments). *)
+
+let rec string len = parser
+ [< ''"' >] -> len
+| [<''\\' ;
+ len = (parser [< ''\n' >] -> len
+ | [< c=spec_char >] -> add_in_buff len c);
+ s >] -> string len s
+| [< 'x; s >] -> string (add_in_buff len x) s;;
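+
+(* A small sketch of the escape handling above: lexing the source text
+     "a\065b\n"
+   accumulates 'a', then 'A' (decimal escape 65), then 'b', then a newline,
+   so next_token turns it into Tstring "aAb\n". *)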
+
+(* The lexical analyser repeatedly applies next_token: spaces and
+ tabulations are skipped; the recognized tokens are identifiers, integers,
+ strings, and opening and closing square brackets. Lexical errors are
+ ignored! *)
+let rec next_token = parser count
+ [< '' ' | '\t'; tok = next_token >] -> tok
+| [< ''_' | 'a'..'z' | 'A'..'Z' as c;i = (ident (add_in_buff 0 c))>] -> i
+| [< ''0'..'9' as c ; i = (parse_int (get_digit c))>] -> i
+| [< ''"' ; len = (string 0) >] -> Tstring (get_buff len)
+| [< ''[' >] -> Tlbracket
+| [< '']' >] -> Trbracket
+| [< '_ ; x = next_token >] -> x;;
+
+(* A very simple lexical analyser to recognize an integer value after
+ blank characters *)
+
+let rec next_int = parser count
+ [< '' ' | '\t'; v = next_int >] -> v
+| [< ''0'..'9' as c; i = (parse_int (get_digit c))>] ->
+ (match i with
+ Tint n -> n
+ | _ -> failwith "unexpected branch in next_int");;
+
+(* This is the actual lexical analyser, implemented as a function on a stream.
+ It will be used with the Stream.from primitive to turn the underlying
+ char Stream.t into a simple_tokens Stream.t *)
+let token_stream cs _ =
+ try let tok = next_token cs in
+ Some tok
+ with Stream.Failure -> None;;
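+
+(* Usage sketch (with an arbitrary request line): Stream.from
+   (token_stream (Stream.of_string "parse_file 1 \"Test.v\"")) produces the
+   tokens Tid "parse_file", Tint 1, Tstring "Test.v" and then ends; this is
+   exactly the shape matched by parser_loop below. *)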
+
+(* Two of the actions of the parser require reading the rest of
+ the input up to a specific string stop_string. This is done
+ with a function that turns the input_channel into both a
+ char Stream.t and a list of lines, reading all the lines up to
+ the stop_string from the input_channel first. *)
+
+
+let rec gather_strings stop_string input_channel =
+ let buff = input_line input_channel in
+ if buff = stop_string then
+ []
+ else
+ (buff::(gather_strings stop_string input_channel));;
+
+
+(* The result of this function is supposed to be used in a Stream.from
+ construction. *)
+
+let line_list_to_stream string_list =
+ let count = ref 0 in
+ let buff = ref "" in
+ let reserve = ref string_list in
+ let current_length = ref 0 in
+ (fun i -> if (i - !count) >= !current_length then
+ begin
+ count := !count + !current_length + 1;
+ match !reserve with
+ | [] -> None
+ | s1::rest ->
+ begin
+ buff := s1;
+ current_length := String.length !buff;
+ reserve := rest;
+ Some '\n'
+ end
+ end
+ else
+ Some(String.get !buff (i - !count)));;
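+
+(* A small sketch of the produced character stream: for the list
+   ["Goal a."; "Qed."], reading the function above through Stream.from
+   yields '\n', then the characters of "Goal a.", then '\n', then the
+   characters of "Qed.", and then the stream ends: every stored line is
+   preceded by a newline character. *)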
+
+
+(* In older revisions of this file you would find a function that
+ does line-oriented breakdown of the input channel without resorting to
+ a list of lines. However, the need for the list of lines appeared when
+ we wanted to have a channel and a list of strings describing the same
+ data, one for regular parsing and the other for error recovery. *)
+
+let channel_to_stream_and_string_list stop_string input_channel =
+ let string_list = gather_strings stop_string input_channel in
+ (line_list_to_stream string_list, string_list);;
+
+let flush_until_end_of_stream char_stream =
+ Stream.iter (function _ -> ()) char_stream;;
+
+(* Only a few kinds of lines are recognized by our little parser.
+ Unrecognized lines are ignored. *)
+type parser_request =
+ | PRINT_VERSION
+ | PARSE_STRING of string
+ (* parse_string <int> [<ident>] then text and && END--OF--DATA *)
+ | QUIET_PARSE_STRING
+ (* quiet_parse_string then text and && END--OF--DATA *)
+ | PARSE_FILE of string
+ (* parse_file <int> <string> *)
+ | ADD_PATH of string
+ (* add_path <int> <string> *)
+ | LOAD_SYNTAX of string
+ (* load_syntax_file <int> <ident> *)
+ | GARBAGE
+;;
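+
+(* A concrete sketch of the protocol (these are essentially the lines found
+   in contrib/interface/vernacrc): the input lines
+     load_syntax_file 17 LogicSyntax
+     quiet_parse_string
+     Goal a.
+     && END--OF--DATA
+     print_version
+   are read as LOAD_SYNTAX "LogicSyntax" for request 17, then
+   QUIET_PARSE_STRING applied to the text "Goal a.", then PRINT_VERSION. *)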
+
+(* The procedure parser_loop should never terminate as long as the input_channel
+ is not closed. This procedure receives the functions called for each request
+ as arguments. Thus the code is completely independent of the Coq sources. *)
+let parser_loop functions input_channel =
+ let print_version_action,
+ parse_string_action,
+ quiet_parse_string_action,
+ parse_file_action,
+ add_path_action,
+ load_syntax_action = functions in
+ let rec parser_loop_rec input_channel =
+ (let line = input_line input_channel in
+ let reqid, parser_request =
+ try
+ (match Stream.from (token_stream (Stream.of_string line)) with
+ parser
+ | [< 'Tid "print_version" >] ->
+ 0, PRINT_VERSION
+ | [< 'Tid "parse_string" ; 'Tint reqid ; 'Tlbracket ;
+ 'Tid phylum ; 'Trbracket >]
+ -> reqid,PARSE_STRING phylum
+ | [< 'Tid "quiet_parse_string" >] ->
+ 0,QUIET_PARSE_STRING
+ | [< 'Tid "parse_file" ; 'Tint reqid ; 'Tstring fname >] ->
+ reqid, PARSE_FILE fname
+ | [< 'Tid "add_path"; 'Tint reqid ; 'Tstring directory >]
+ -> reqid, ADD_PATH directory
+ | [< 'Tid "load_syntax_file"; 'Tint reqid; 'Tid module_name >] ->
+ reqid, LOAD_SYNTAX module_name
+ | [< 'Tid "quit_parser" >] -> raise End_of_file
+ | [< >] -> 0, GARBAGE)
+ with
+ Stream.Failure | Stream.Error _ -> 0,GARBAGE in
+ match parser_request with
+ PRINT_VERSION -> print_version_action ()
+ | PARSE_STRING phylum ->
+ let regular_stream, string_list =
+ channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in
+ parse_string_action reqid phylum (Stream.from regular_stream)
+ string_list;()
+ | QUIET_PARSE_STRING ->
+ let regular_stream, string_list =
+ channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in
+ quiet_parse_string_action
+ (Stream.from regular_stream);()
+ | PARSE_FILE file_name ->
+ parse_file_action reqid file_name
+ | ADD_PATH path -> add_path_action reqid path
+ | LOAD_SYNTAX syn -> load_syntax_action reqid syn
+ | GARBAGE -> ());
+ parser_loop_rec input_channel in
+ parser_loop_rec input_channel;;
diff --git a/contrib/interface/line_parser.mli b/contrib/interface/line_parser.mli
new file mode 100644
index 000000000..1fcd42e6f
--- /dev/null
+++ b/contrib/interface/line_parser.mli
@@ -0,0 +1,5 @@
+val parser_loop :
+ (unit -> unit) * (int -> string -> char Stream.t -> string list -> 'a) *
+ (char Stream.t -> 'b) * (int -> string -> unit) * (int -> string -> unit) *
+ (int -> string -> unit) -> in_channel -> 'c
+val flush_until_end_of_stream : 'a Stream.t -> unit
diff --git a/contrib/interface/parse.ml b/contrib/interface/parse.ml
new file mode 100644
index 000000000..f1f04a2df
--- /dev/null
+++ b/contrib/interface/parse.ml
@@ -0,0 +1,425 @@
+open Util;;
+
+open System;;
+
+open Pp;;
+
+open Coqast;;
+
+open Library;;
+
+open Ascent;;
+
+open Vtp;;
+
+open Xlate;;
+
+open Line_parser;;
+
+open Pcoq;;
+
+type parsed_tree =
+ | P_cl of ct_COMMAND_LIST
+ | P_c of ct_COMMAND
+ | P_t of ct_TACTIC_COM
+ | P_f of ct_FORMULA
+ | P_id of ct_ID
+ | P_s of ct_STRING
+ | P_i of ct_INT;;
+
+let print_parse_results n msg =
+ print_string "message\nparsed\n";
+ print_int n;
+ print_string "\n";
+ (match msg with
+ | P_cl x -> fCOMMAND_LIST x
+ | P_c x -> fCOMMAND x
+ | P_t x -> fTACTIC_COM x
+ | P_f x -> fFORMULA x
+ | P_id x -> fID x
+ | P_s x -> fSTRING x
+ | P_i x -> fINT x);
+ print_string "e\nblabla\n";
+ flush stdout;;
+
+let ctf_SyntaxErrorMessage reqid pps =
+ [< 'fNL; 'sTR "message"; 'fNL; 'sTR "syntax_error"; 'fNL; 'iNT reqid; 'fNL;
+ pps; 'fNL; 'sTR "E-n-d---M-e-s-s-a-g-e"; 'fNL >];;
+let ctf_SyntaxWarningMessage reqid pps =
+ [< 'fNL; 'sTR "message"; 'fNL; 'sTR "syntax_warning"; 'fNL; 'iNT reqid; 'fNL;
+ pps; 'fNL; 'sTR "E-n-d---M-e-s-s-a-g-e"; 'fNL >];;
+
+let ctf_FileErrorMessage reqid pps =
+ [< 'fNL; 'sTR "message"; 'fNL; 'sTR "file_error"; 'fNL; 'iNT reqid; 'fNL;
+ pps; 'fNL; 'sTR "E-n-d---M-e-s-s-a-g-e"; 'fNL >];;
+
+(* In the code of Coq V6.2, the require_module call is encapsulated in
+ a function "without_mes_ambig". Here I have assumed that this
+ function has no effect on parsing *)
+let try_require_module import specif name fname =
+ try Library.require_module (if specif = "UNSPECIFIED" then None
+ else Some (specif = "SPECIFICATION")) name fname (import = "IMPORT")
+ with
+ | e -> mSGNL [< 'sTR "Reinterning of "; 'sTR name; 'sTR " failed" >];;
+
+let execute_when_necessary ast =
+ (match ast with
+ | Node (_, "GRAMMAR", ((Nvar (_, s)) :: ((Node (_, "ASTLIST", al)) :: []))) ->
+ Metasyntax.add_grammar_obj s al
+ | Node (_, "TOKEN", ((Str (_, s)) :: [])) -> Metasyntax.add_token_obj s
+ | Node (_, "Require",
+ ((Str (_, import)) ::
+ ((Str (_, specif)) :: ((Nvar (_, mname)) :: [])))) ->
+ try_require_module import specif mname None
+ | Node (_, "RequireFrom",
+ ((Str (_, import)) ::
+ ((Str (_, specif)) ::
+ ((Nvar (_, mname)) :: ((Str (_, file_name)) :: []))))) ->
+ try_require_module import specif mname (Some file_name)
+ | _ -> ()); ast;;
+
+let parse_to_dot =
+ let rec dot = parser
+ [< '("", ".") >] -> ()
+ | [< '("EOI", "") >] -> raise End_of_file
+ | [< '_; s >] -> dot s
+ in Gram.Entry.of_parser "Coqtoplevel.dot" dot;;
+
+let rec discard_to_dot stream =
+ try Gram.Entry.parse parse_to_dot (Gram.parsable stream) with
+ | Stdpp.Exc_located(_, Token.Error _) -> discard_to_dot stream;;
+
+let rec decompose_string s n =
+ try let index = String.index_from s n '\n' in
+ (Ast.str (String.sub s n (index - n)))::
+ (decompose_string s (index + 1))
+ with Not_found -> [Ast.str(String.sub s n ((String.length s) - n))];;
+
+let make_string_list file_chan fst_pos snd_pos =
+ let len = (snd_pos - fst_pos) in
+ let s = String.create len in
+ begin
+ seek_in file_chan fst_pos;
+ really_input file_chan s 0 len;
+ decompose_string s 0;
+ end;;
+
+let rec get_sub_aux string_list snd_pos =
+ match string_list with
+ [] -> []
+ | s::l ->
+ let len = String.length s in
+ if len >= snd_pos then
+ if snd_pos < 0 then
+ []
+ else
+ [String.sub s 0 snd_pos]
+ else
+ s::(get_sub_aux l (snd_pos - len - 1));;
+
+let rec get_substring_list string_list fst_pos snd_pos =
+ match string_list with
+ [] -> []
+ | s::l ->
+ let len = String.length s in
+ if fst_pos > len then
+ get_substring_list l (fst_pos - len - 1) (snd_pos - len - 1)
+ else
+ (* take into account the fact that carriage returns are not in the *)
+ (* strings. *)
+ let fst_pos2 = if fst_pos = 0 then 1 else fst_pos in
+ if snd_pos > len then
+ String.sub s (fst_pos2 - 1) (len + 1 - fst_pos2)::
+ (get_sub_aux l (snd_pos - len - 2))
+ else
+ let gap = (snd_pos - fst_pos2) in
+ if gap < 0 then
+ []
+ else
+ [String.sub s (fst_pos2 - 1) gap];;
+
+(* When parsing a list of commands, we try to recover error messages for
+ each individual command. *)
+let parse_command_list reqid stream string_list =
+ let rec parse_whole_stream () =
+ let this_pos = Stream.count stream in
+ let first_ast =
+ try Gram.Entry.parse Pcoq.main_entry (Gram.parsable stream)
+ with
+ | (Stdpp.Exc_located(l, Stream.Error txt)) as e ->
+ begin
+ mSGNL (ctf_SyntaxWarningMessage reqid (Errors.explain_exn e));
+ try
+ discard_to_dot stream;
+ mSGNL [< 'sTR "debug"; 'fNL; 'iNT this_pos; 'fNL; 'iNT
+ (Stream.count stream) >];
+ Some( Node(l, "PARSING_ERROR",
+ List.map Ast.str
+ (get_substring_list string_list this_pos
+ (Stream.count stream))))
+ with End_of_file -> None
+ end
+ | e->
+ begin
+ discard_to_dot stream;
+ Some(Node((0,0), "PARSING_ERROR2",
+ List.map Ast.str
+ (get_substring_list string_list this_pos
+ (Stream.count stream))))
+ end in
+ match first_ast with
+ | Some ast ->
+ let ast0 = (execute_when_necessary ast) in
+ xlate_vernac ast::parse_whole_stream()
+ | None -> [] in
+ match parse_whole_stream () with
+ | first_one::tail -> (P_cl (CT_command_list(first_one, tail)))
+ | [] -> raise (UserError ("parse_string", [< 'sTR "empty text." >]));;
+
+(* When parsing a string using a phylum, the string is first transformed
+ into a Coq Ast using the regular Coq parser, then into the corresponding
+ ascent term using the xlate functions, and finally into a stream using the
+ right vtp function. There is a special case for commands, since some of
+ these must be executed! *)
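+
+(* For instance (a sketch with an arbitrary request number and formula text):
+   when the client sends
+     parse_string 5 [FORMULA]
+     O
+     && END--OF--DATA
+   parser_loop calls parse_string_action 5 "FORMULA" on the text "O"; the
+   formula is parsed with Pcoq.Constr.constr_eoi, translated by
+   xlate_formula, and the result is printed with print_parse_results 5. *)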
+let parse_string_action reqid phylum char_stream string_list =
+ try let msg =
+ match phylum with
+ | "COMMAND_LIST" ->
+ parse_command_list reqid char_stream string_list
+ | "COMMAND" ->
+ P_c
+ (xlate_vernac
+ (execute_when_necessary
+ (Gram.Entry.parse Pcoq.Vernac.vernac_eoi (Gram.parsable char_stream))))
+ | "TACTIC_COM" ->
+ P_t
+ (xlate_tactic (Gram.Entry.parse Pcoq.Tactic.tactic_eoi
+ (Gram.parsable char_stream)))
+ | "FORMULA" ->
+ P_f
+ (xlate_formula
+ (Gram.Entry.parse Pcoq.Constr.constr_eoi (Gram.parsable char_stream)))
+ | "ID" -> P_id (xlate_id
+ (Gram.Entry.parse Pcoq.Prim.ident
+ (Gram.parsable char_stream)))
+ | "STRING" ->
+ P_s
+ (xlate_string (Gram.Entry.parse Pcoq.Prim.string
+ (Gram.parsable char_stream)))
+ | "INT" ->
+ P_i (xlate_int (Gram.Entry.parse Pcoq.Prim.number
+ (Gram.parsable char_stream)))
+ | _ -> error "parse_string_action : bad phylum" in
+ print_parse_results reqid msg
+ with
+ | Stdpp.Exc_located(l,Match_failure(_,_,_)) ->
+ flush_until_end_of_stream char_stream;
+ mSGNL (ctf_SyntaxErrorMessage reqid
+ (Errors.explain_exn
+ (Stdpp.Exc_located(l,Stream.Error "match failure"))))
+ | e ->
+ flush_until_end_of_stream char_stream;
+ mSGNL (ctf_SyntaxErrorMessage reqid (Errors.explain_exn e));;
+
+
+let quiet_parse_string_action char_stream =
+ try let _ =
+ Gram.Entry.parse Pcoq.Vernac.vernac_eoi (Gram.parsable char_stream) in
+ ()
+ with
+ | _ -> flush_until_end_of_stream char_stream; ();;
+
+
+let parse_file_action reqid file_name =
+ try let file_chan = open_in file_name in
+ (* file_chan_err, stream_err are the channel and stream used to
+ get the text when a syntax error occurs *)
+ let file_chan_err = open_in file_name in
+ let stream = Stream.of_channel file_chan in
+ let stream_err = Stream.of_channel file_chan_err in
+ match let rec parse_whole_file () =
+ let this_pos = Stream.count stream in
+ match
+ try
+ let this_ast =
+ Gram.Entry.parse Pcoq.main_entry (Gram.parsable stream) in
+ this_ast
+ with
+ | Stdpp.Exc_located(l,Stream.Error txt ) ->
+ mSGNL (ctf_SyntaxWarningMessage reqid
+ [< 'sTR "Error with file"; 'sPC; 'sTR file_name; 'fNL;
+ Errors.explain_exn
+ (Stdpp.Exc_located(l,Stream.Error txt)) >]);
+ let rec discard_to_dot () =
+ try Gram.Entry.parse parse_to_dot (Gram.parsable stream)
+ with Stdpp.Exc_located(_,Token.Error _) -> discard_to_dot()
+ in (try
+ discard_to_dot ();
+ Some( Node(l, "PARSING_ERROR",
+ (make_string_list file_chan_err this_pos
+ (Stream.count stream))))
+ with End_of_file -> None)
+ | e ->
+ begin
+ Gram.Entry.parse parse_to_dot (Gram.parsable stream);
+ Some( Node((0,0), "PARSING_ERROR2",
+ (make_string_list file_chan this_pos
+ (Stream.count stream))))
+ end
+
+ with
+ | Some ast -> let ast0=(execute_when_necessary ast) in
+ xlate_vernac ast::parse_whole_file ()
+ | None -> [] in
+ parse_whole_file () with
+ | first_one :: tail ->
+ print_parse_results reqid
+ (P_cl (CT_command_list (first_one, tail)))
+ | [] -> raise (UserError ("parse_file_action", [< 'sTR "empty file." >]))
+ with
+ | Stdpp.Exc_located(l,Match_failure(_,_,_)) ->
+ mSGNL
+ (ctf_SyntaxErrorMessage reqid
+ [< 'sTR "Error with file"; 'sPC; 'sTR file_name; 'fNL;
+ Errors.explain_exn (Stdpp.Exc_located(l,Stream.Error "match failure")) >])
+ | e ->
+ mSGNL
+ (ctf_SyntaxErrorMessage reqid
+ [< 'sTR "Error with file"; 'sPC; 'sTR file_name; 'fNL;
+ Errors.explain_exn e >]);;
+
+(* This function is taken from Mltop.add_path *)
+
+let add_path dir coq_dirpath =
+ if coq_dirpath = [] then anomaly "add_path: empty path in library";
+ if exists_dir dir then
+ begin
+ Library.add_load_path_entry
+ { directory = dir; coq_dirpath = coq_dirpath };
+ Nametab.push_library_root (List.hd coq_dirpath)
+ end
+ else
+ wARNING [< 'sTR ("Cannot open " ^ dir) >]
+
+let add_rec_path dir coq_dirpath =
+ if coq_dirpath = [] then anomaly "add_path: empty path in library";
+ let dirs = all_subdirs dir (Some coq_dirpath) in
+ if dirs <> [] then
+ begin
+ List.iter Library.add_load_path_entry dirs;
+ Nametab.push_library_root (List.hd coq_dirpath)
+ end
+ else
+ wARNING [< 'sTR ("Cannot open " ^ dir) >];;
+
+let add_path_action reqid string_arg =
+ let directory_name = glob string_arg in
+ let alias = Filename.basename directory_name in
+ begin
+ add_path directory_name [alias]
+ end;;
+
+let print_version_action () =
+ mSGNL [< >];
+ mSGNL [< 'sTR "$Id$" >];;
+
+let load_syntax_action reqid module_name =
+ mSG [< 'sTR "loading "; 'sTR module_name; 'sTR "... " >];
+ try (load_module module_name None;
+ mSG [< 'sTR "opening... ">];
+ import_module module_name;
+ mSGNL [< 'sTR "done"; 'fNL >];
+ ())
+ with
+ | UserError (label, pp_stream) ->
+ (*This one may be necessary to make sure that the message won't be indented *)
+ mSGNL [< >];
+ mSGNL
+ [< 'fNL; 'sTR "error while loading syntax module "; 'sTR module_name;
+ 'sTR ": "; 'sTR label; 'fNL; pp_stream >]
+ | e ->
+ mSGNL [< >];
+ mSGNL
+ [< 'fNL; 'sTR "message"; 'fNL; 'sTR "load_error"; 'fNL; 'iNT reqid; 'fNL >];
+ ();;
+
+let coqparser_loop inchan =
+ (parser_loop : (unit -> unit) *
+ (int -> string -> char Stream.t -> string list -> unit) *
+ (char Stream.t -> unit) * (int -> string -> unit) *
+ (int -> string -> unit) * (int -> string -> unit) ->
+ in_channel -> unit)
+ (print_version_action, parse_string_action, quiet_parse_string_action, parse_file_action,
+ add_path_action, load_syntax_action) inchan;;
+
+if !Sys.interactive then ()
+ else
+Libobject.relax true;
+(let coqdir =
+ try Sys.getenv "COQDIR"
+ with Not_found ->
+ let coqdir = Coq_config.coqlib in
+ if Sys.file_exists coqdir then
+ coqdir
+ else
+ (mSGNL [< 'sTR "could not find the value of COQDIR" >]; exit 1) in
+ begin
+ add_rec_path (Filename.concat coqdir "theories") [Nametab.coq_root];
+ add_path (Filename.concat coqdir "tactics") [Nametab.coq_root];
+ add_rec_path (Filename.concat coqdir "contrib") [Nametab.coq_root];
+ List.iter (fun {directory=a} -> mSGNL [< 'sTR a >]) (get_load_path())
+ end;
+(try
+ (match create_entry (get_univ "nat") "number" ETast with
+ (Ast number) ->
+ Gram.extend number None
+ [None, None,
+ [[Gramext.Stoken ("INT", "")],
+ Gramext.action
+ (fun s loc ->
+ Node((0,0),"XTRA",[Str((0,0),"omega_integer_for_ctcoq");
+ Num((0,0),int_of_string s)]))]]
+ | _ -> mSGNL [< 'sTR "unpredicted behavior of Grammar.extend" >])
+
+ with
+ e -> mSGNL [< 'sTR "could not add a parser for numbers" >]);
+(let vernacrc =
+ try
+ Sys.getenv "VERNACRC"
+ with
+ Not_found ->
+ List.fold_left
+ (fun s1 s2 -> (Filename.concat s1 s2))
+ coqdir [ "contrib"; "interface"; "vernacrc"] in
+ try
+ (Gramext.warning_verbose := false;
+ Esyntax.warning_verbose := false;
+ coqparser_loop (open_in vernacrc))
+ with
+ | End_of_file -> ()
+ | e ->
+ (mSGNL (Errors.explain_exn e);
+ mSGNL [< 'sTR "could not load the VERNACRC file" >]);
+ try
+ mSGNL [< 'sTR vernacrc >]
+ with
+ e -> ());
+(try let user_vernacrc =
+ try Some(Sys.getenv "USERVERNACRC")
+ with
+ | Not_found as e ->
+ mSGNL [< 'sTR "no .vernacrc file" >]; None in
+ (match user_vernacrc with
+ Some f -> coqparser_loop (open_in f)
+ | None -> ())
+ with
+ | End_of_file -> ()
+ | e ->
+ mSGNL (Errors.explain_exn e);
+ mSGNL [< 'sTR "error in your .vernacrc file" >]);
+mSGNL [< 'sTR "Starting Centaur Specialized Parser Loop" >];
+try
+ coqparser_loop stdin
+with
+ | End_of_file -> ()
+ | e -> mSGNL(Errors.explain_exn e))
diff --git a/contrib/interface/vernacrc b/contrib/interface/vernacrc
new file mode 100644
index 000000000..092bc0709
--- /dev/null
+++ b/contrib/interface/vernacrc
@@ -0,0 +1,16 @@
+# $Id$
+
+# This file is loaded initially by ./vernacparser.
+
+load_syntax_file 17 LogicSyntax
+load_syntax_file 36 SpecifSyntax
+load_syntax_file 18 Logic_TypeSyntax
+load_syntax_file 19 DatatypesSyntax
+load_syntax_file 21 Equality
+load_syntax_file 22 Inv
+load_syntax_file 26 Tauto
+load_syntax_file 34 Omega
+quiet_parse_string
+Goal a.
+&& END--OF--DATA
+print_version