(* lib/qparser/analyzer.ml *)
(** Run the QSP parser over the given lexing buffer, applying the given
    analyzer during the parse.

    See [Qsp_syntax.S.Analyzer] for the analyzer signature. *)
let parse :
    (module Qsp_syntax.S.Analyzer with type Location.repr = 'a) ->
    Lexbuf.t ->
    ('a, Qsp_syntax.Report.t) Result.t =
 fun (type a) (module S : Qsp_syntax.S.Analyzer with type Location.repr = a) ->
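  (* Specialise the generated parser to the analyzer [S], and wrap Menhir's
     incremental engine in the interpreter that will drive it. *)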
  let module Parser = Parser.Make (S) in
  let module IncrementalParser =
    Interpreter.Interpreter (Parser.MenhirInterpreter) in
  fun l ->
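    (* Token supplier feeding the incremental parser from the buffer. *)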
    let lexer = Lexbuf.tokenize Lexer.token l in

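    (* Initial checkpoint of the incremental parser, anchored at the
       starting position of the buffer. *)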
    let init = Parser.Incremental.main (fst (Lexbuf.positions l)) in

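    (* Drive the parser over the token stream; on failure, turn the error
       into a located report. *)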
    IncrementalParser.of_lexbuf lexer l init
    |> Result.map_error (fun e ->
           let message =
             match e.IncrementalParser.code with
             | Interpreter.InvalidSyntax -> "Invalid syntax"
             | Interpreter.UnrecoverableError -> "Unrecoverable error"
             | Interpreter.MenhirCode c ->
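                 (* Look up the curated message for this Menhir error state,
                    falling back to the raw error code when none is
                    defined. *)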
                 let message_content =
                   try Parser_messages.message c
                   with Not_found ->
                     String.concat "" [ "(Error code "; string_of_int c; ")" ]
                 in
                 String.trim message_content
           in
           let report =
             Qsp_syntax.Report.error (e.start_pos, e.end_pos) message
           in

           (* Discard the rest of the file: the parser is now in a blank
              state, and it makes no sense to keep feeding it new tokens. *)
           Lexer.discard l;
           report)
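
(* A minimal usage sketch, assuming an analyzer module [Check] implementing
   [Qsp_syntax.S.Analyzer] with [type Location.repr = unit]. The module and
   handler bodies below are hypothetical; only [parse] comes from this file.

   {[
     let run
         (module Check : Qsp_syntax.S.Analyzer with type Location.repr = unit)
         (lexbuf : Lexbuf.t) =
       match parse (module Check) lexbuf with
       | Ok () -> print_endline "parsed without error"
       | Error _report -> prerr_endline "syntax error reported"
   ]}
*)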