(**
Run the QSP parser and apply the analyzer over it.

[parse (module S) lexbuf context] tokenizes [lexbuf] with [Lexer.main],
feeds the tokens to the incremental Menhir parser generated for the
analyzer [S], then evaluates the parse result against [context].

Returns [Ok (location, reports)] when the whole source is read
successfully, or [Error report] describing the lexing or parsing failure.
See [syntax/S].
*)
let parse :
    type a context.
    (module Qsp_syntax.S.Analyzer
       with type Location.t = a
        and type context = context) ->
    Lexbuf.t ->
    context ->
    (a * Qsp_syntax.Report.t list, Qsp_syntax.Report.t) Result.t =
 fun (module S : Qsp_syntax.S.Analyzer
       with type Location.t = a
        and type context = context) ->
  let module Parser = Parser.Make (S) in
  let module IncrementalParser =
    Interpreter.Interpreter (Parser.MenhirInterpreter)
  in
  fun l context ->
    let lexer = Lexbuf.tokenize Lexer.main l in
    let init = Parser.Incremental.main (fst (Lexbuf.positions l)) in
    (* Firstly, check if we are able to read the whole syntax from the
       source. Lexer failures are converted into the same error shape the
       incremental parser produces, so both paths share the reporting code
       below. *)
    let evaluation =
      try IncrementalParser.of_lexbuf lexer l init with
      | Lexer.LexError message ->
          let start_pos, end_pos = Lexbuf.positions l in
          let err =
            IncrementalParser.
              { code = Interpreter.Custom message; start_pos; end_pos }
          in
          Error err
      | Lexer.UnclosedQuote ->
          let start_pos, end_pos = Lexbuf.positions l in
          let err =
            IncrementalParser.
              { code = Interpreter.Custom "Unclosed text"; start_pos; end_pos }
          in
          Error err
    in
    (* Then apply the checks over the result of the parsing *)
    evaluation
    |> Result.map (fun r ->
           let r' = r context in
           (r', S.Location.v r'))
    |> Result.map_error (fun e ->
           let message =
             match e.IncrementalParser.code with
             | Interpreter.InvalidSyntax -> "Invalid Syntax"
             | Interpreter.UnrecoverableError -> "UnrecoverableError"
             | Interpreter.Custom msg -> msg
             | Interpreter.MenhirCode c ->
                 (* Look up the human-readable message for this Menhir error
                    code; fall back to the raw code when none is defined. *)
                 let message_content =
                   try Parser_messages.message c
                   with Not_found ->
                     String.concat "" [ "(Error code "; string_of_int c; ")" ]
                 in
                 (* The original wrapped this in a one-element
                    [String.concat "" [...]], a no-op: trim directly. *)
                 String.trim message_content
           in
           let report =
             Qsp_syntax.Report.error (e.start_pos, e.end_pos) message
           in
           (* Discard the remaining file to read. The parser is now in a blank
              state, it does not make sense to keep feeding it with the new
              tokens. Best-effort: any failure while discarding is ignored. *)
           let () = try Lexer.discard l with _ -> () in
           report)