Dataframe v2: reference docs (#7820)
Add a reference page for the dataframe APIs. It's still very barebones at this point because #7819 makes it very difficult to write snippets for this. But it is literally infinitely better than what's there right now: nothing.

- DNM: requires #7817
- Closes #7828
Showing 3 changed files with 100 additions and 1 deletion.
@@ -0,0 +1,18 @@

```python
"""Query and display the first 10 rows of a recording."""

import sys

import rerun as rr

path_to_rrd = sys.argv[1]

recording = rr.dataframe.load_recording(path_to_rrd)
view = recording.view(index="log_time", contents="/**")
batches = view.select()

for _ in range(10):
    row = batches.read_next_batch()
    if row is None:
        break
    # Each row is a `RecordBatch`, which can be easily passed around across different data ecosystems.
    print(row)
```
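Because each batch is an Arrow `RecordBatch`, the results slot directly into the wider Arrow ecosystem. Below is a minimal sketch of handing the same query off to pandas; it assumes `select()` returns a `pyarrow.RecordBatchReader` (the `read_all()` and `to_pandas()` calls are standard pyarrow APIs):

```python
"""Hand the query results to pandas via Arrow (illustrative sketch)."""

import sys

import rerun as rr

recording = rr.dataframe.load_recording(sys.argv[1])
view = recording.view(index="log_time", contents="/**")

# Assumption: `select()` returns a `pyarrow.RecordBatchReader`; if so,
# `read_all()` collects every batch into a single `pyarrow.Table`.
table = view.select().read_all()

# `Table.to_pandas()` is standard pyarrow; from here on it's a plain DataFrame.
print(table.to_pandas().head(10))
```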
@@ -0,0 +1,42 @@

```rust
//! Query and display the first 10 rows of a recording.
#![allow(clippy::unwrap_used)]

use rerun::{
    dataframe::{QueryCache, QueryEngine, QueryExpression, SparseFillStrategy, Timeline},
    ChunkStore, ChunkStoreConfig, VersionPolicy,
};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = std::env::args().collect::<Vec<_>>();

    let path_to_rrd = &args[1];
    let timeline = Timeline::log_time();

    let stores = ChunkStore::from_rrd_filepath(
        &ChunkStoreConfig::DEFAULT,
        path_to_rrd,
        VersionPolicy::Warn,
    )?;

    // An .rrd file can contain several recordings; this example only looks at the first one.
    let (_, store) = stores.first_key_value().unwrap();

    let query_cache = QueryCache::new(store);
    let query_engine = QueryEngine {
        store,
        cache: &query_cache,
    };

    // Query everything on the `log_time` timeline, filling empty cells with the
    // latest known value for their column.
    let query = QueryExpression {
        filtered_index: Some(timeline),
        sparse_fill_strategy: SparseFillStrategy::LatestAtGlobal,
        ..Default::default()
    };

    let query_handle = query_engine.query(query.clone());
    for row in query_handle.batch_iter().take(10) {
        // Each row is a `RecordBatch`, which can be easily passed around across different data ecosystems.
        println!("{row}");
    }

    Ok(())
}
```
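Both snippets query everything under the root (`contents="/**"`). To pull out only specific columns, the `contents` argument can be narrowed instead. A hedged Python sketch, assuming (as Rerun's view model elsewhere suggests) that `contents` also accepts a mapping from an entity path expression to component names; the `/points/**` path and `Position3D` component here are placeholders, not part of the snippets above:

```python
"""Query only specific columns (illustrative sketch)."""

import sys

import rerun as rr

recording = rr.dataframe.load_recording(sys.argv[1])

# Assumption: `contents` also accepts a dict mapping an entity path
# expression to a list of component names, selecting only those columns.
view = recording.view(index="log_time", contents={"/points/**": ["Position3D"]})

# A `pyarrow.RecordBatchReader` is iterable, yielding one `RecordBatch` at a time.
for batch in view.select():
    print(batch)
```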