(************************************************************************)
(*         *   The Coq Proof Assistant / The Coq Development Team       *)
(*  v      *         Copyright INRIA, CNRS and contributors             *)
(* <O___,, * (see version control and CREDITS file for authors & dates) *)
(*   \VV/  **************************************************************)
(*    //   *    This file is distributed under the terms of the         *)
(*         *     GNU Lesser General Public License Version 2.1          *)
(*         *     (see LICENSE file for the text of the license)         *)
(************************************************************************)
{
  open Filename
  open Lexing

  type qualid = string list

  type load = Logical of string | Physical of string

  type coq_token =
    | Require of qualid option * qualid list
    | Declare of string list
    | Load of load
    | External of qualid * string

  exception Fin_fichier
  exception Syntax_error of int * int

  let unquote_string s =
    String.sub s 1 (String.length s - 2)

  let unquote_vfile_string s =
    let f = unquote_string s in
    if check_suffix f ".v" then chop_suffix f ".v" else f

  let backtrack lexbuf =
    lexbuf.lex_curr_pos <- lexbuf.lex_start_pos;
    lexbuf.lex_curr_p <- lexbuf.lex_start_p

  let syntax_error lexbuf =
    raise (Syntax_error (Lexing.lexeme_start lexbuf, Lexing.lexeme_end lexbuf))

  let check_valid lexbuf s =
    match Unicode.ident_refutation s with
    | None -> s
    | Some _ -> syntax_error lexbuf

  let get_ident lexbuf =
    let s = Lexing.lexeme lexbuf in
    check_valid lexbuf s

  let get_field_name lexbuf =
    let s = Lexing.lexeme lexbuf in
    check_valid lexbuf (String.sub s 1 (String.length s - 1))
}
let space = [' ' '\t' '\n' '\r']
let lowercase = ['a'-'z']
let uppercase = ['A'-'Z']
let identchar = ['A'-'Z' 'a'-'z' '_' '\'' '0'-'9']
let caml_up_ident = uppercase identchar*
let caml_low_ident = lowercase identchar*
(* This is an over-approximation; correctness is checked afterwards by check_valid. *)
let coq_ident = ['A'-'Z' 'a'-'z' '_' '\128'-'\255'] ['A'-'Z' 'a'-'z' '_' '\'' '0'-'9' '\128'-'\255']*
let coq_field = '.' coq_ident
let coq_qual_id_rex = coq_ident coq_field+
let dot = '.' ( space+ | eof)
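(* Entry rule: scan for the sentences that induce dependencies
   (Require, Declare ML Module, Load, From ... Require / Extra Dependency),
   skipping comments, Time/Timeout prefixes, and any other sentence up to
   its terminating dot. *)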
rule coq_action = parse
| "Require" space+
{ require_modifiers None lexbuf }
| "Local" space+ "Declare" space+ "ML" space+ "Module" space+
{ modules [] lexbuf }
| "Declare" space+ "ML" space+ "Module" space+
{ modules [] lexbuf }
| "Load" space+
{ load_file lexbuf }
| "Time" space+
{ coq_action lexbuf }
| "Timeout" space+ ['0'-'9']+ space+
{ coq_action lexbuf }
| "From" space+
{ from_rule false lexbuf }
| "Comments" space+ "From" space+
{ from_rule true lexbuf }
| space+
{ coq_action lexbuf }
| "(*"
{ comment lexbuf; coq_action lexbuf }
| eof
{ raise Fin_fichier}
| _
{ skip_to_dot lexbuf; coq_action lexbuf }
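(* After "From" (or "Comments From"): read the qualified logical prefix,
   then hand over to consume_require_or_extradeps. *)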
and from_rule only_extra_dep = parse
| "(*"
{ comment lexbuf; from_rule only_extra_dep lexbuf }
| space+
{ from_rule only_extra_dep lexbuf }
| coq_ident
{ let from = coq_qual_id_tail [get_ident lexbuf] lexbuf in
consume_require_or_extradeps only_extra_dep (Some from) lexbuf }
| eof
{ syntax_error lexbuf }
| _
{ syntax_error lexbuf }
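(* After "From <prefix> Extra Dependency": read the quoted file name,
   skip to the end of the sentence, and return an External token. *)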
and extra_dep_rule from = parse
| "(*"
{ comment lexbuf; extra_dep_rule from lexbuf }
| space+
{ extra_dep_rule from lexbuf }
| eof
{ syntax_error lexbuf }
| '"' ([^ '"']+ as f) '"' (*'"'*)
{ skip_to_dot lexbuf;
External (from,f) }
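(* After "Require": consume an optional "Import" or "Export" modifier
   before reading the list of module names. *)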
and require_modifiers from = parse
| "(*"
{ comment lexbuf; require_modifiers from lexbuf }
| "Import" space*
{ require_file from lexbuf }
| "Export" space*
{ require_file from lexbuf }
| space+
{ require_modifiers from lexbuf }
| eof
{ syntax_error lexbuf }
| _
{ backtrack lexbuf ; require_file from lexbuf }
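(* After "From <prefix>": accept either "Require ..." or "Extra Dependency ...";
   "Require" is rejected when the prefix came from "Comments From"
   (only_extra_dep = true). *)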
and consume_require_or_extradeps only_extra_dep from = parse
| "(*"
{ comment lexbuf; consume_require_or_extradeps only_extra_dep from lexbuf }
| space+
{ consume_require_or_extradeps only_extra_dep from lexbuf }
| "Require" space+
{ if only_extra_dep then syntax_error lexbuf; require_modifiers from lexbuf }
| "Extra" space+ "Dependency" space+
{ match from with
| None -> syntax_error lexbuf (* Extra Dependency requires From *)
| Some from -> extra_dep_rule from lexbuf }
| _
{ syntax_error lexbuf }
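(* Skip a comment, handling nested comments; character literals are
   matched as whole lexemes so their contents are not confused with
   comment delimiters. *)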
and comment = parse
| "(*"
{ comment lexbuf; comment lexbuf }
| "*)"
{ () }
| "'" [^ '\\' '\''] "'"
{ comment lexbuf }
| "'" '\\' ['\\' '\'' 'n' 't' 'b' 'r'] "'"
{ comment lexbuf }
| "'" '\\' ['0'-'9'] ['0'-'9'] ['0'-'9'] "'"
{ comment lexbuf }
| eof
{ raise Fin_fichier }
| _
{ comment lexbuf }
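(* Skip to the matching closing parenthesis, handling nested parentheses
   and comments. *)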
and skip_parenthesized = parse
| "(*"
{ comment lexbuf; skip_parenthesized lexbuf }
| "("
{ skip_parenthesized lexbuf; skip_parenthesized lexbuf }
| ")"
{ () }
| eof
{ raise Fin_fichier }
| _
{ skip_parenthesized lexbuf }
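(* After "Load": the argument is either a quoted file name (a physical
   path, with a trailing ".v" stripped) or a logical module name. *)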
and load_file = parse
| '"' [^ '"']* '"' (*'"'*)
{ let s = lexeme lexbuf in
parse_dot lexbuf;
Load (Physical (unquote_vfile_string s)) }
| coq_ident
{ let s = get_ident lexbuf in skip_to_dot lexbuf; Load (Logical s) }
| eof
{ syntax_error lexbuf }
| _
{ syntax_error lexbuf }
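(* After "Require" and its optional modifier: read one or more qualified
   module names (skipping comments and parenthesized groups) up to the
   terminating dot, and return a Require token. *)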
and require_file from = parse
| "(*"
{ comment lexbuf; require_file from lexbuf }
| ("-" space*)? "("
{ skip_parenthesized lexbuf; require_file from lexbuf }
| space+
{ require_file from lexbuf }
| coq_ident
{ let name = coq_qual_id_tail [get_ident lexbuf] lexbuf in
let qid = coq_qual_id_list [name] lexbuf in
parse_dot lexbuf;
Require (from, qid) }
| eof
{ syntax_error lexbuf }
| _
{ syntax_error lexbuf }
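(* Skip to the dot that ends the current sentence, ignoring comments. *)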
and skip_to_dot = parse
| "(*"
{ comment lexbuf; skip_to_dot lexbuf }
| dot { () }
| eof { syntax_error lexbuf }
| _ { skip_to_dot lexbuf }
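(* Expect the dot that ends the current sentence; anything else is a
   syntax error. *)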
and parse_dot = parse
| dot { () }
| eof { syntax_error lexbuf }
| _ { syntax_error lexbuf }
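(* Read the remaining ".field" components of a qualified identifier;
   [module_name] holds the components read so far, most recent first,
   and the result is returned in source order. *)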
and coq_qual_id_tail module_name = parse
| "(*"
{ comment lexbuf; coq_qual_id_tail module_name lexbuf }
| space+
{ coq_qual_id_tail module_name lexbuf }
| coq_field
{ coq_qual_id_tail (get_field_name lexbuf :: module_name) lexbuf }
| eof
{ syntax_error lexbuf }
| _
{ backtrack lexbuf;
List.rev module_name }
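(* Read a whitespace-separated list of qualified identifiers, as found
   after Require; [module_names] accumulates them most recent first, and
   the list is returned in source order. *)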
and coq_qual_id_list module_names = parse
| "(*"
{ comment lexbuf; coq_qual_id_list module_names lexbuf }
| "("
{ skip_parenthesized lexbuf; coq_qual_id_list module_names lexbuf }
| space+
{ coq_qual_id_list module_names lexbuf }
| coq_ident
{ let name = coq_qual_id_tail [get_ident lexbuf] lexbuf in
coq_qual_id_list (name :: module_names) lexbuf
}
| eof
{ syntax_error lexbuf }
| _
{ backtrack lexbuf;
List.rev module_names }
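(* After "Declare ML Module": collect the quoted plugin names; the
   Declare token is emitted once something other than a quoted name is
   reached. *)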
and modules mllist = parse
| space+
{ modules mllist lexbuf }
| "(*"
{ comment lexbuf; modules mllist lexbuf }
| '"' [^'"']* '"'
{ let lex = (Lexing.lexeme lexbuf) in
let str = String.sub lex 1 (String.length lex - 2) in
modules (str :: mllist) lexbuf}
| eof
{ syntax_error lexbuf }
| _
{ Declare (List.rev mllist) }
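(* A minimal sketch of how a client (such as coqdep) might drive this
   lexer; the function name [deps_of_file] and the error reporting below
   are hypothetical, not part of this file:

     let deps_of_file file =
       let chan = open_in file in
       let lexbuf = Lexing.from_channel chan in
       let rec loop acc =
         match coq_action lexbuf with
         | tok -> loop (tok :: acc)
         | exception Fin_fichier -> List.rev acc
         | exception Syntax_error (bp, ep) ->
             Printf.eprintf "%s: syntax error (characters %d-%d)\n" file bp ep;
             List.rev acc
       in
       let tokens = loop [] in
       close_in chan;
       tokens
*)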