Diffstat (limited to 'lib/qparser')
-rw-r--r--   lib/qparser/analyzer.ml    36
-rw-r--r--   lib/qparser/analyzer.mli    8
-rw-r--r--   lib/qparser/lexbuf.ml       6
-rw-r--r--   lib/qparser/lexbuf.mli     37
-rw-r--r--   lib/qparser/lexer.ml       22
-rw-r--r--   lib/qparser/lexer.mli       2
-rw-r--r--   lib/qparser/parser.mly     19
-rw-r--r--   lib/qparser/tokens.mly      1
8 files changed, 86 insertions, 45 deletions
diff --git a/lib/qparser/analyzer.ml b/lib/qparser/analyzer.ml
index ca2b54f..b4eeba0 100644
--- a/lib/qparser/analyzer.ml
+++ b/lib/qparser/analyzer.ml
@@ -1,15 +1,23 @@
type 'a result = { content : 'a; report : Qsp_syntax.Report.t list }
+type lexer = Location | Dynamic
-(**
- Run the QSP parser and apply the analyzer over it.
+let get_lexer :
+ Lexbuf.t ->
+ lexer ->
+ unit ->
+ Tokens.token * Lexing.position * Lexing.position =
+ fun l -> function
+ | Location -> Lexbuf.tokenize Lexer.main l
+ | Dynamic -> Lexbuf.tokenize Lexer.dynamics l
+
+(** Run the QSP parser and apply the analyzer over it.
- See [syntax/S]
- *)
-let rec parse :
- type a context.
+ See [syntax/S] *)
+let rec parse : type a context.
(module Qsp_syntax.S.Analyzer
with type Location.t = a
and type context = context) ->
+ lexer ->
Lexbuf.t ->
context ->
(a result, Qsp_syntax.Report.t) Result.t =
@@ -19,10 +27,18 @@ let rec parse :
let module Parser = Parser.Make (S) in
let module IncrementalParser =
Interpreter.Interpreter (Parser.MenhirInterpreter) in
- fun l context ->
- let lexer = Lexbuf.tokenize Lexer.main l in
+ fun lexer_type l context ->
+ let get_parser :
+ lexer ->
+ Lexing.position ->
+ (context -> a) Parser.MenhirInterpreter.checkpoint = function
+ | Location -> Parser.Incremental.main
+ | Dynamic -> Parser.Incremental.dynamics
+ in
+
+ let lexer = get_lexer l lexer_type in
- let init = Parser.Incremental.main (fst (Lexbuf.positions l)) in
+ let init = (get_parser lexer_type) (fst (Lexbuf.positions l)) in
(* Firslty, check if we are able to read the whole syntax from the source *)
let evaluation =
@@ -59,7 +75,7 @@ let rec parse :
application attempt to start from a clean state in the next
location, but may fail to detect the correct position. If so, we
just start again until we hook the next location *)
- parse (module S) l context
+ parse (module S) lexer_type l context
| Error e, _ ->
let message =
match e.IncrementalParser.code with
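The new `get_lexer` and `get_parser` helpers dispatch on the same `lexer` variant, so the lexer entry point and the Menhir start symbol are always chosen together. A minimal, self-contained sketch of that dispatch pattern (the `toy_*` names are stand-ins for illustration, not part of qparser):

    type lexer = Location | Dynamic

    (* Two stand-in entry points, mirroring Lexer.main / Lexer.dynamics. *)
    let toy_main () = "token-from-main"
    let toy_dynamics () = "token-from-dynamics"

    (* Select the entry point from the variant, as get_lexer does above. *)
    let toy_get_lexer : lexer -> (unit -> string) = function
      | Location -> toy_main
      | Dynamic -> toy_dynamics

    let () = print_endline ((toy_get_lexer Dynamic) ())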
diff --git a/lib/qparser/analyzer.mli b/lib/qparser/analyzer.mli
index 949db16..817be6c 100644
--- a/lib/qparser/analyzer.mli
+++ b/lib/qparser/analyzer.mli
@@ -1,13 +1,15 @@
type 'a result = { content : 'a; report : Qsp_syntax.Report.t list }
+type lexer = Location | Dynamic
val parse :
(module Qsp_syntax.S.Analyzer
with type Location.t = 'a
and type context = 'context) ->
+ lexer ->
Lexbuf.t ->
'context ->
('a result, Qsp_syntax.Report.t) Result.t
-(** Read the source and build a analyzis over it.
+(** Read the source and build a analyzis over it.
-This method make the link between the source file and how to read it
-(encoding…) and the AST we want to build. *)
+ This method make the link between the source file and how to read it
+ (encoding…) and the AST we want to build. *)
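With this signature change, every caller of `Analyzer.parse` now chooses which grammar entry point to use. A hedged caller-side sketch, assuming the library is exposed as `Qparser` and that `MyAnalyzer` is some module satisfying `Qsp_syntax.S.Analyzer` (both names are assumptions, not part of this diff):

    (* Sketch only: MyAnalyzer, lexbuf and context are assumed to exist. *)
    let analyze_dynamic_expression lexbuf context =
      match
        Qparser.Analyzer.parse (module MyAnalyzer) Qparser.Analyzer.Dynamic
          lexbuf context
      with
      | Ok { Qparser.Analyzer.content; report } ->
          (* content is the analyzed value, report the collected messages *)
          (Some content, report)
      | Error fatal_report ->
          (* a single report when the parser could not recover at all *)
          (None, [ fatal_report ])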
diff --git a/lib/qparser/lexbuf.ml b/lib/qparser/lexbuf.ml
index afc3bac..dbed622 100644
--- a/lib/qparser/lexbuf.ml
+++ b/lib/qparser/lexbuf.ml
@@ -62,8 +62,10 @@ let positions : t -> Lexing.position * Lexing.position =
let content : t -> string = fun t -> Sedlexing.Utf8.lexeme t.buffer
-let from_lexbuf : ?reset_line:bool -> Sedlexing.lexbuf -> t =
- fun ?(reset_line = true) t ->
+let from_lexbuf :
+ ?position:Lexing.position -> ?reset_line:bool -> Sedlexing.lexbuf -> t =
+ fun ?position ?(reset_line = true) t ->
+ Option.iter (Sedlexing.set_position t) position;
{
buffer = t;
start_p = None;
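The optional `?position` argument simply seeds Sedlex's position tracking before the buffer record is built, so tokens lexed from an extracted fragment keep the coordinates of the original file. A small sketch of that use, with an assumed file name and location (illustrative values only, and the `Qparser.Lexbuf` path is an assumption):

    (* Sketch: lex a dynamic expression extracted from line 42 of "game.qsp",
       so error positions point back at the original file. *)
    let make_embedded_buffer () =
      let start_position =
        { Lexing.pos_fname = "game.qsp"; pos_lnum = 42; pos_bol = 0; pos_cnum = 0 }
      in
      let sedlex = Sedlexing.Utf8.from_string "x = 1" in
      Qparser.Lexbuf.from_lexbuf ~position:start_position sedlex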
diff --git a/lib/qparser/lexbuf.mli b/lib/qparser/lexbuf.mli
index 4283db1..d656642 100644
--- a/lib/qparser/lexbuf.mli
+++ b/lib/qparser/lexbuf.mli
@@ -3,8 +3,11 @@
type t
(** The state of the buffer *)
-val from_lexbuf : ?reset_line:bool -> Sedlexing.lexbuf -> t
-(** Create a new buffer *)
+val from_lexbuf :
+ ?position:Lexing.position -> ?reset_line:bool -> Sedlexing.lexbuf -> t
+(** Create a new buffer.
+
+ If a position is given, start from this position in the file. *)
val start : t -> unit
(** Intialize a new run. *)
@@ -13,11 +16,10 @@ val buffer : t -> Sedlexing.lexbuf
(** Extract the sedlex buffer. Required in each rule. *)
val positions : t -> Lexing.position * Lexing.position
-(** Extract the starting and ending position for the matched token.
+(** Extract the starting and ending position for the matched token.
- This function is used outside of the parser, in order to get the position
- of the latest token in the case of an error.
- *)
+ This function is used outside of the parser, in order to get the position of
+ the latest token in the case of an error. *)
val content : t -> string
(** Extract the token matched by the rule *)
@@ -33,15 +35,14 @@ val tokenize : (t -> 'a) -> t -> unit -> 'a * Lexing.position * Lexing.position
val rollback : t -> unit
(** Rollback the latest token matched *)
-(** {1 State in expressions}
+(** {1 State in expressions}
- The comment system is terrible. The same symbol can be used for :
- - starting a comment
- - inequality operation
+ The comment system is terrible. The same symbol can be used for :
+ - starting a comment
+ - inequality operation
- In order to manage this, I try to identify the context in a very basic way,
- using a stack for determining the token to send.
-*)
+ In order to manage this, I try to identify the context in a very basic way,
+ using a stack for determining the token to send. *)
type lexer = t -> Tokens.token
and buffer_builder = ?nested:bool -> Buffer.t -> t -> Tokens.token
@@ -64,14 +65,14 @@ type state =
| String of stringWraper (** String enclosed by [''] *)
| MString of int (** String enclosed by [{}]*)
| EndString of stringWraper
- (** State raised just before closing the string.
- The buffer is rollbacked and the position is the closing symbol. *)
+ (** State raised just before closing the string. The buffer is rollbacked
+ and the position is the closing symbol. *)
| Expression (** Expression where [!] is an operator *)
val pp_state : Format.formatter -> state -> unit
val state : t -> state option
-(** Get the current state for the lexer.
+(** Get the current state for the lexer.
@return [None] when in the default state *)
@@ -84,8 +85,8 @@ val leave_state : t -> unit
val overlay : t -> lexer -> lexer
val start_recovery : t -> unit
-(** Set the lexer in recovery mode, the lexer raise this mode after an error,
- in order to ignore the further errors until a new location *)
+(** Set the lexer in recovery mode, the lexer raise this mode after an error, in
+ order to ignore the further errors until a new location *)
val is_recovery : t -> bool
(** Check if the lexer is in recovery mode *)
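The `tokenize` signature above yields exactly the supplier shape Menhir's incremental API consumes: `unit -> token * start * end`. A sketch of what such a wrapper amounts to, built only from a raw lexer and `positions` (an illustration of the shape, not the actual implementation):

    (* Illustrative only: each call runs the lexer once and also reports
       the span of the token it just matched. *)
    let toy_tokenize (lexer : Qparser.Lexbuf.t -> 'a) (buf : Qparser.Lexbuf.t) () =
      let token = lexer buf in
      let start_p, end_p = Qparser.Lexbuf.positions buf in
      (token, start_p, end_p)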
diff --git a/lib/qparser/lexer.ml b/lib/qparser/lexer.ml
index 814c97f..470cdc7 100644
--- a/lib/qparser/lexer.ml
+++ b/lib/qparser/lexer.ml
@@ -1,6 +1,4 @@
-(**
- Lexer using sedlex
- *)
+(** Lexer using sedlex *)
open Tokens
open StdLabels
@@ -12,7 +10,8 @@ exception EOF
(* Extract the location name from the pattern *)
let location_name = Str.regexp {|# *\(.*\)|}
-(** Remove all the expression state when we are leaving the expression itself. *)
+(** Remove all the expression state when we are leaving the expression itself.
+*)
let rec leave_expression buffer =
match Lexbuf.state buffer with
| Some Lexbuf.Expression ->
@@ -21,7 +20,7 @@ let rec leave_expression buffer =
| _ -> ()
(** Try to read the identifier and check if this is a function, a keyword, or
- just a variable.
+ just a variable.
See the tests [Syntax.Operator2] and [Syntax.Call Nl] for two cases. *)
let build_ident buffer =
@@ -124,8 +123,7 @@ let rec read_long_string : ?nested:bool -> int -> Buffer.t -> Lexbuf.t -> token
rollbacked, leaving the state in [Lexbuf.EndString _].
The next call to [main] will call the associated function, effectively
- leaving the string mode in the parser.
- *)
+ leaving the string mode in the parser. *)
let rec read_quoted_string : Lexbuf.stringWraper -> Lexbuf.buffer_builder =
fun f ?(nested = false) buf buffer ->
let lexbuf = Lexbuf.buffer buffer in
@@ -153,11 +151,9 @@ let rec read_quoted_string : Lexbuf.stringWraper -> Lexbuf.buffer_builder =
(f.wrap ~nested (read_quoted_string f)) buf buffer
| _ -> raise Not_found
-(** Track the kind of nested string inside a multiline string inside a
- comment.
+(** Track the kind of nested string inside a multiline string inside a comment.
- Some constructions are not allowed in this specific case (see later)
-*)
+ Some constructions are not allowed in this specific case (see later) *)
type commentedString = None | Quote | DQuote
let rec skip_comment buffer =
@@ -333,6 +329,10 @@ let main buffer =
in
parser buffer
+(** Function used inside the dynamics expressions. Here, we give the EOF token
+ to the parser. *)
+let dynamics buffer = try main buffer with EOF -> Tokens.EOF
+
let rec discard buffer =
let () = Lexbuf.start_recovery buffer in
let lexbuf = Lexbuf.buffer buffer in
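In location mode, reaching the end of input raises the `EOF` exception, but a dynamic expression legitimately ends there, so `dynamics` maps the exception to the new `EOF` terminal that the `dynamics` grammar rule can match. The shape of that exception-to-token conversion, with toy names:

    (* Toy names: turn an end-of-input exception into an explicit terminal. *)
    exception Toy_eof

    type toy_token = Word of string | Eof

    (* Stand-in for Lexer.main: raises at end of input, otherwise returns a token. *)
    let toy_main buf = if buf = "" then raise Toy_eof else Word buf

    (* Stand-in for Lexer.dynamics: maps the exception to an explicit terminal. *)
    let toy_dynamics buf = try toy_main buf with Toy_eof -> Eof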
diff --git a/lib/qparser/lexer.mli b/lib/qparser/lexer.mli
index 854bb1e..70902e6 100644
--- a/lib/qparser/lexer.mli
+++ b/lib/qparser/lexer.mli
@@ -18,3 +18,5 @@ val discard : Lexbuf.t -> unit
val main : Lexbuf.t -> Tokens.token
(** Main entry point. This function is called after each token returned *)
+
+val dynamics : Lexbuf.t -> Tokens.token
diff --git a/lib/qparser/parser.mly b/lib/qparser/parser.mly
index d075e3e..469cf79 100644
--- a/lib/qparser/parser.mly
+++ b/lib/qparser/parser.mly
@@ -19,11 +19,13 @@
%parameter<Analyzer: Qsp_syntax.S.Analyzer>
%start <(Analyzer.context -> Analyzer.Location.t)>main
+%start<(Analyzer.context -> Analyzer.Location.t)>dynamics
+
%on_error_reduce expression instruction unary_operator assignation_operator
%%
-main:
+main:
| before_location*
start_location
EOL+
@@ -34,6 +36,21 @@ main:
fun context -> Analyzer.Location.location context $loc instructions
}
+dynamics:
+ | EOL*
+ instructions = line_statement+
+ EOF
+ {
+ let instructions = List.map instructions ~f:(Analyzer.Instruction.v) in
+ fun context -> Analyzer.Location.location context $loc instructions
+ }
+ | EOL*
+ b = inlined_block(EOF)
+ {
+ let instruction = (Analyzer.Instruction.v b) in
+ fun context -> Analyzer.Location.location context $loc [instruction]
+ }
+
before_location:
| EOL {}
| COMMENT EOL { }
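The `dynamics` start symbol gets its own incremental entry point, `Parser.Incremental.dynamics`, which is what `get_parser` in analyzer.ml selects. A hedged sketch of driving it directly with a supplier from `Lexbuf.tokenize`, assuming the table-based incremental API and some analyzer module `S` (in the real code this loop lives behind `Interpreter.Interpreter`):

    (* Sketch only: S is some Qsp_syntax.S.Analyzer implementation. *)
    module P = Parser.Make (S)

    let run_dynamics (lexbuf : Lexbuf.t) =
      let supplier = Lexbuf.tokenize Lexer.dynamics lexbuf in
      let start = P.Incremental.dynamics (fst (Lexbuf.positions lexbuf)) in
      P.MenhirInterpreter.loop_handle
        (fun value -> Ok value)                    (* the parse succeeded *)
        (fun _checkpoint -> Error "syntax error")  (* stopped on an error *)
        supplier start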
diff --git a/lib/qparser/tokens.mly b/lib/qparser/tokens.mly
index 0ba5486..42856ef 100644
--- a/lib/qparser/tokens.mly
+++ b/lib/qparser/tokens.mly
@@ -20,6 +20,7 @@
%token AND OR
%token EOL
+%token EOF
%token <string>IDENT
%token <string>LITERAL