New functions to retrieve position #155

Merged: 3 commits, Oct 16, 2023
Changes from 1 commit
src/lib/sedlexing.ml (66 changes: 36 additions & 30 deletions)
@@ -261,40 +261,46 @@ let lexeme lexbuf =
 
 let lexeme_char lexbuf pos = lexbuf.buf.(lexbuf.start_pos + pos)
 
+let lexing_position_start lexbuf =
+  {
+    Lexing.pos_fname = lexbuf.filename;
+    pos_lnum = lexbuf.start_line;
+    pos_cnum = lexbuf.start_pos + lexbuf.offset;
+    pos_bol = lexbuf.start_bol;
+  }
+
+let lexing_position_curr lexbuf =
+  {
+    Lexing.pos_fname = lexbuf.filename;
+    pos_lnum = lexbuf.curr_line;
+    pos_cnum = lexbuf.pos + lexbuf.offset;
+    pos_bol = lexbuf.curr_bol;
+  }
+
 let lexing_positions lexbuf =
-  let start_p =
-    {
-      Lexing.pos_fname = lexbuf.filename;
-      pos_lnum = lexbuf.start_line;
-      pos_cnum = lexbuf.start_pos + lexbuf.offset;
-      pos_bol = lexbuf.start_bol;
-    }
-  and curr_p =
-    {
-      Lexing.pos_fname = lexbuf.filename;
-      pos_lnum = lexbuf.curr_line;
-      pos_cnum = lexbuf.pos + lexbuf.offset;
-      pos_bol = lexbuf.curr_bol;
-    }
-  in
+  let start_p = lexing_position_start lexbuf
+  and curr_p = lexing_position_curr lexbuf in
   (start_p, curr_p)
 
+let lexing_bytes_position_start lexbuf =
+  {
+    Lexing.pos_fname = lexbuf.filename;
+    pos_lnum = lexbuf.start_line;
+    pos_cnum = lexbuf.start_bytes_pos + lexbuf.bytes_offset;
+    pos_bol = lexbuf.start_bytes_bol;
+  }
+
+let lexing_bytes_position_curr lexbuf =
+  {
+    Lexing.pos_fname = lexbuf.filename;
+    pos_lnum = lexbuf.curr_line;
+    pos_cnum = lexbuf.bytes_pos + lexbuf.bytes_offset;
+    pos_bol = lexbuf.curr_bytes_bol;
+  }
+
 let lexing_bytes_positions lexbuf =
-  let start_p =
-    {
-      Lexing.pos_fname = lexbuf.filename;
-      pos_lnum = lexbuf.start_line;
-      pos_cnum = lexbuf.start_bytes_pos + lexbuf.bytes_offset;
-      pos_bol = lexbuf.start_bytes_bol;
-    }
-  and curr_p =
-    {
-      Lexing.pos_fname = lexbuf.filename;
-      pos_lnum = lexbuf.curr_line;
-      pos_cnum = lexbuf.bytes_pos + lexbuf.bytes_offset;
-      pos_bol = lexbuf.curr_bytes_bol;
-    }
-  in
+  let start_p = lexing_bytes_position_start lexbuf
+  and curr_p = lexing_bytes_position_curr lexbuf in
   (start_p, curr_p)
 
 let with_tokenizer lexer' lexbuf =
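After this refactoring, `lexing_positions lexbuf` is exactly the pair `(lexing_position_start lexbuf, lexing_position_curr lexbuf)`, so callers that need only one endpoint of the token no longer have to build both records. A minimal sketch of how the new accessors might be used for error reporting; `report_location` is an illustrative helper, not part of this PR:

    (* Print the current token's location in the usual compiler style.
       Uses only the accessors added in this PR plus the standard
       Lexing.position fields. *)
    let report_location (lexbuf : Sedlexing.lexbuf) =
      let start_p = Sedlexing.lexing_position_start lexbuf
      and curr_p = Sedlexing.lexing_position_curr lexbuf in
      Printf.printf "File %S, line %d, characters %d-%d\n"
        start_p.Lexing.pos_fname start_p.Lexing.pos_lnum
        (start_p.Lexing.pos_cnum - start_p.Lexing.pos_bol)
        (curr_p.Lexing.pos_cnum - curr_p.Lexing.pos_bol)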
src/lib/sedlexing.mli (16 changes: 16 additions & 0 deletions)
@@ -133,12 +133,28 @@ val lexeme_bytes_length : lexbuf -> int
     by parsers like those generated by [Menhir]. *)
 val lexing_positions : lexbuf -> Lexing.position * Lexing.position
 
+(** [Sedlexing.lexing_position_start lexbuf] returns the start
+    position, in code points, of the current token. *)
+val lexing_position_start : lexbuf -> Lexing.position
+
+(** [Sedlexing.lexing_position_curr lexbuf] returns the end
+    position, in code points, of the current token. *)
+val lexing_position_curr : lexbuf -> Lexing.position
+
 (** [Sedlexing.lexing_bytes_positions lexbuf] returns the start and end
     positions, in bytes, of the current token, using a record of type
     [Lexing.position]. This is intended for consumption
     by parsers like those generated by [Menhir]. *)
 val lexing_bytes_positions : lexbuf -> Lexing.position * Lexing.position
 
+(** [Sedlexing.lexing_bytes_position_start lexbuf] returns the start
+    position, in bytes, of the current token. *)
+val lexing_bytes_position_start : lexbuf -> Lexing.position
+
+(** [Sedlexing.lexing_bytes_position_curr lexbuf] returns the end
+    position, in bytes, of the current token. *)
+val lexing_bytes_position_curr : lexbuf -> Lexing.position
+
 (** [Sedlexing.new_line lexbuf] increments the line count and
     sets the beginning of line to the current position, as though
     a newline character had been encountered in the input. *)
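The separate start/curr accessors also make it easy to build the kind of token supplier that Menhir's incremental API expects, without going through `with_tokenizer`. A sketch, assuming `token` is any sedlex-generated lexer function:

    (* Turn a sedlex lexer into a [unit -> token * position * position]
       supplier; this mirrors what [Sedlexing.with_tokenizer] returns. *)
    let supplier token lexbuf () =
      let tok = token lexbuf in
      let start_p = Sedlexing.lexing_position_start lexbuf
      and curr_p = Sedlexing.lexing_position_curr lexbuf in
      (tok, start_p, curr_p)

For byte-oriented consumers, the same pattern works with `lexing_bytes_position_start` and `lexing_bytes_position_curr`, whose `pos_cnum` counts bytes rather than code points.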