wordcount.rs (forked from deib-polimi/renoir)
use std::time::Instant;

use regex::Regex;
use renoir::prelude::*;

// Use mimalloc as the global allocator.
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

// /// Simpler alternative tokenization
// fn tokenize(s: &str) -> Vec<String> {
//     s.split_whitespace().map(str::to_lowercase).collect()
// }

#[cfg(not(feature = "tokio"))]
fn main() {
    tracing_subscriber::fmt::init();

    let (config, args) = RuntimeConfig::from_args();
    if args.len() != 2 {
        panic!("Pass the dataset path as an argument");
    }
    let path = &args[1];

    config.spawn_remote_workers();
    let env = StreamContext::new(config);

    let source = FileSource::new(path);
    let tokenizer = Tokenizer::new();

    // Word-count pipeline: read lines, split them into lowercase words,
    // group by word, and count occurrences per group.
    let result = env
        .stream(source)
        .batch_mode(BatchMode::fixed(1024))
        .flat_map(move |line| tokenizer.tokenize(line))
        .group_by(|word| word.clone())
        .fold(0, |count, _word| *count += 1)
        .collect_vec();

    let start = Instant::now();
    env.execute_blocking();
    let elapsed = start.elapsed();

    if let Some(res) = result.get() {
        eprintln!("Output: {:?}", res.len());
        println!("{elapsed:?}");
    }
}

#[cfg(feature = "tokio")]
#[tokio::main()]
async fn main() {
    tracing_subscriber::fmt::init();

    let (config, args) = RuntimeConfig::from_args();
    if args.len() != 1 {
        panic!("Pass the dataset path as an argument");
    }
    let path = &args[0];

    config.spawn_remote_workers();
    let env = StreamContext::new(config);

    let source = FileSource::new(path);
    let tokenizer = Tokenizer::new();
    let result = env
        .stream(source)
        .batch_mode(BatchMode::fixed(1024))
        .flat_map(move |line| tokenizer.tokenize(line))
        .group_by(|word| word.clone())
        .fold(0, |count, _word| *count += 1)
        .collect_vec();

    let start = Instant::now();
    env.execute().await;
    let elapsed = start.elapsed();

    if let Some(res) = result.get() {
        eprintln!("Output: {:?}", res.len());
        println!("{:?}", elapsed);
    }
}

/// Regex-based tokenizer: extracts runs of ASCII letters and lowercases them.
#[derive(Clone)]
struct Tokenizer {
    re: Regex,
}

impl Tokenizer {
    fn new() -> Self {
        Self {
            re: Regex::new(r"[A-Za-z]+").unwrap(),
        }
    }

    fn tokenize(&self, value: String) -> Vec<String> {
        self.re
            .find_iter(&value)
            .map(|t| t.as_str().to_lowercase())
            .collect()
    }
}
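
// Illustrative sketch, not part of the original example: a small unit test
// exercising the Tokenizer above, showing that the `[A-Za-z]+` regex keeps
// only alphabetic runs and lowercases them (digits and punctuation are dropped).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenizer_extracts_lowercase_words() {
        let tokenizer = Tokenizer::new();
        assert_eq!(
            tokenizer.tokenize("Hello, World! 42".to_string()),
            vec!["hello".to_string(), "world".to_string()]
        );
    }
}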