forked from quickwit-oss/tantivy
-
Notifications
You must be signed in to change notification settings - Fork 0
/
raw_tokenizer.rs
69 lines (60 loc) · 1.79 KB
/
raw_tokenizer.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
use super::{Token, TokenStream, Tokenizer};
/// For each value of the field, emit a single unprocessed token.
///
/// The entire input text becomes one token, with no splitting,
/// lowercasing, or other processing applied.
#[derive(Clone, Default)]
pub struct RawTokenizer {
    // Reusable token buffer; refilled on every `token_stream` call so the
    // tokenizer does not allocate a fresh `Token` per document.
    token: Token,
}
/// Token stream produced by [`RawTokenizer`].
///
/// Emits exactly one token (the whole input text) and is then exhausted.
pub struct RawTokenStream<'a> {
    // Borrows the tokenizer's pre-filled token rather than owning one.
    token: &'a mut Token,
    // True until the single token has been consumed by `advance`.
    has_token: bool,
}
impl Tokenizer for RawTokenizer {
    type TokenStream<'a> = RawTokenStream<'a>;

    /// Fills the tokenizer's reusable token with the full `text` and returns
    /// a stream that will emit it exactly once.
    fn token_stream<'a>(&'a mut self, text: &str) -> RawTokenStream<'a> {
        let token = &mut self.token;
        token.reset();
        // The single token covers the whole input: position 0, byte
        // offsets [0, text.len()).
        token.position = 0;
        token.position_length = 1;
        token.offset_from = 0;
        token.offset_to = text.len();
        // Reuse the existing String allocation where possible.
        token.text.clear();
        token.text.push_str(text);
        RawTokenStream {
            token,
            has_token: true,
        }
    }
}
impl<'a> TokenStream for RawTokenStream<'a> {
    /// Returns `true` on the first call (the single token is available),
    /// and `false` on every call thereafter.
    fn advance(&mut self) -> bool {
        // Take the flag, leaving `false` behind: yields the old value once.
        std::mem::take(&mut self.has_token)
    }

    fn token(&self) -> &Token {
        &*self.token
    }

    fn token_mut(&mut self) -> &mut Token {
        &mut *self.token
    }
}
#[cfg(test)]
mod tests {
    use crate::tokenizer::tests::assert_token;
    use crate::tokenizer::{RawTokenizer, TextAnalyzer, Token};

    /// Runs `text` through a `RawTokenizer` and collects every emitted token.
    fn token_stream_helper(text: &str) -> Vec<Token> {
        let mut analyzer = TextAnalyzer::from(RawTokenizer::default());
        let mut stream = analyzer.token_stream(text);
        let mut collected: Vec<Token> = Vec::new();
        stream.process(&mut |tok: &Token| collected.push(tok.clone()));
        collected
    }

    #[test]
    fn test_raw_tokenizer() {
        // The raw tokenizer must emit the input verbatim as one token
        // spanning the whole byte range.
        let tokens = token_stream_helper("Hello, happy tax payer!");
        assert_eq!(tokens.len(), 1);
        assert_token(&tokens[0], 0, "Hello, happy tax payer!", 0, 23);
    }
}