(** Run the QSP parser and apply the analyzer over it. See [syntax/S].

    [parse (module S) lexbuf] builds the Menhir incremental parser for the
    analyzer [S] and drives it over [lexbuf]. On failure the incremental
    parser's error is converted into a {!Qsp_syntax.Report.t} carrying the
    error location and a human-readable message. *)
let parse :
    (module Qsp_syntax.S.Analyzer with type Location.repr = 'a) ->
    Lexbuf.t ->
    ('a, Qsp_syntax.Report.t) Result.t =
 fun (type a) (module S : Qsp_syntax.S.Analyzer with type Location.repr = a) ->
  let module Parser = Parser.Make (S) in
  let module IncrementalParser =
    Interpreter.Interpreter (Parser.MenhirInterpreter)
  in
  fun l ->
    let lexer = Lexbuf.tokenize Lexer.token l in
    (* Seed the incremental parser with the current start position so
       reported locations line up with the lexbuf. *)
    let init = Parser.Incremental.main (fst (Lexbuf.positions l)) in
    IncrementalParser.of_lexbuf lexer l init
    |> Result.map_error (fun e ->
           let message =
             match e.IncrementalParser.code with
             | Interpreter.InvalidSyntax -> "Invalid Syntax"
             | Interpreter.UnrecoverableError -> "UnrecoverableError"
             | Interpreter.MenhirCode c ->
                 (* Look up the hand-written message for this Menhir error
                    state; fall back to the raw state number when no message
                    has been written for it. *)
                 let message_content =
                   try Parser_messages.message c
                   with Not_found -> Printf.sprintf "(Error code %d)" c
                 in
                 String.trim message_content
           in
           let report =
             Qsp_syntax.Report.error (e.start_pos, e.end_pos) message
           in
           (* Discard the remaining file to read. The parser is now in a
              blank state, it does not make sense to keep feeding it with
              the new tokens. *)
           Lexer.discard l;
           report)