(**
   Run the QSP parser and apply the analyzer over it.

   The analyzer is supplied as a first-class module [S]. Note the curried
   shape: the [Parser.Make] functor is applied as soon as the module argument
   is received, before the [Lexbuf.t] is given, so partially applying [parse]
   to an analyzer and reusing the resulting closure over many buffers shares a
   single parser instantiation.

   Returns [Ok (result, reports)] on success, or [Error report] describing the
   first fatal lexing/parsing error.

   See [syntax/S]
*)
let parse :
    type a.
    (module Qsp_syntax.S.Analyzer with type Location.t = a) ->
    Lexbuf.t ->
    (a * Qsp_syntax.Report.t list, Qsp_syntax.Report.t) Result.t =
 fun (module S : Qsp_syntax.S.Analyzer with type Location.t = a) ->
  let module Parser = Parser.Make (S) in
  let module IncrementalParser =
    Interpreter.Interpreter (Parser.MenhirInterpreter) in
  fun l ->
    let lexer = Lexbuf.tokenize Lexer.main l in
    let init = Parser.Incremental.main (fst (Lexbuf.positions l)) in
    (* Lexer exceptions are converted into the interpreter's own error record
       so that the single [Result.map_error] below handles every failure
       path uniformly. *)
    let evaluation =
      try IncrementalParser.of_lexbuf lexer l init with
      | Lexer.LexError message ->
          let start_pos, end_pos = Lexbuf.positions l in
          let err =
            IncrementalParser.
              { code = Interpreter.Custom message; start_pos; end_pos }
          in
          Error err
      | Lexer.UnclosedQuote ->
          let start_pos, end_pos = Lexbuf.positions l in
          let err =
            IncrementalParser.
              { code = Interpreter.Custom "Unclosed text"; start_pos; end_pos }
          in
          Error err
    in
    evaluation
    |> Result.map_error (fun e ->
           (* Turn the low-level error code into a human-readable message. *)
           let message =
             match e.IncrementalParser.code with
             | Interpreter.InvalidSyntax -> "Invalid Syntax"
             | Interpreter.UnrecoverableError -> "UnrecoverableError"
             | Interpreter.Custom msg -> msg
             | Interpreter.MenhirCode c ->
                 (* Look up the message generated from the parser.messages
                    file; fall back to the raw Menhir error code when no
                    message has been written for this state. *)
                 let message_content =
                   try Parser_messages.message c
                   with Not_found ->
                     String.concat "" [ "(Error code "; string_of_int c; ")" ]
                 in
                 String.trim message_content
           in
           let report =
             Qsp_syntax.Report.error (e.start_pos, e.end_pos) message
           in
           (* Discard the remaining file to read. The parser is now in a blank
              state, it does not make sense to keep feeding it with the new
              tokens. Failures while discarding are deliberately ignored: the
              buffer may already be exhausted. *)
           let () = try Lexer.discard l with _ -> () in
           report)