Expand remote APIs to allow for server-side query execution (#8537)
### What
This mirrors the existing Recording API surface and query construction,
but dispatches the query through the server.

Without access to the schema, we are not yet able to support
glob-expressions for content
(rerun-io/dataplatform#105)

Recommend viewing without whitespace:
https://github.com/rerun-io/rerun/pull/8537/files?w=1
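
For context, a minimal sketch of how the new surface might be used from Python. The `rr.remote.connect` entry point, the address, the recording id, and the `/points` entity path are assumptions for illustration; only `StorageNodeClient.open_recording` and the `Recording` dataframe API below come from this change.

```python
import rerun as rr

# Assumed entry point and address: connect to a remote storage node.
client = rr.remote.connect("http://localhost:51234")

# New in this change: open a recording by id without downloading it.
# Queries are dispatched to the server and the results are streamed back.
recording = client.open_recording("some-recording-id")

# Query construction mirrors the local Recording API. Glob expressions for
# `contents` are not yet supported server-side, so use an explicit entity path.
view = recording.view(index="log_time", contents="/points")

# `select()` with no arguments returns all columns as a pyarrow RecordBatchReader.
print(view.select().read_all())
```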
jleibs authored Dec 19, 2024
1 parent 662e404 commit d65c9ec
Showing 7 changed files with 547 additions and 107 deletions.
1 change: 1 addition & 0 deletions Cargo.lock
@@ -7287,6 +7287,7 @@ dependencies = [
"re_chunk",
"re_chunk_store",
"re_dataframe",
"re_grpc_client",
"re_log",
"re_log_encoding",
"re_log_types",
6 changes: 3 additions & 3 deletions crates/store/re_grpc_client/src/lib.rs
@@ -38,7 +38,7 @@ use re_protos::remote_store::v0::{

/// Wrapper with a nicer error message
#[derive(Debug)]
struct TonicStatusError(tonic::Status);
pub struct TonicStatusError(pub tonic::Status);

impl std::fmt::Display for TonicStatusError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -65,7 +65,7 @@ impl Error for TonicStatusError {
}

#[derive(thiserror::Error, Debug)]
enum StreamError {
pub enum StreamError {
/// Native connection error
#[cfg(not(target_arch = "wasm32"))]
#[error(transparent)]
@@ -268,7 +268,7 @@ async fn stream_recording_async(
Ok(())
}

fn store_info_from_catalog_chunk(
pub fn store_info_from_catalog_chunk(
tc: &TransportChunk,
recording_id: &str,
) -> Result<StoreInfo, StreamError> {
6 changes: 6 additions & 0 deletions crates/store/re_log_types/src/path/entity_path_filter.rs
@@ -587,6 +587,12 @@ impl EntityPathFilter {
// inclusion rule and didn't hit an exclusion rule.
true
}

#[inline]
/// Iterate over all rules in the filter.
pub fn rules(&self) -> impl Iterator<Item = (&EntityPathRule, &RuleEffect)> {
self.rules.iter()
}
}

impl EntityPathRule {
2 changes: 2 additions & 0 deletions rerun_py/Cargo.toml
@@ -39,6 +39,7 @@ remote = [
"dep:object_store",
"dep:re_protos",
"dep:re_ws_comms",
"dep:re_grpc_client",
"dep:tokio",
"dep:tokio-stream",
"dep:tonic",
@@ -61,6 +62,7 @@ re_build_info.workspace = true
re_chunk = { workspace = true, features = ["arrow"] }
re_chunk_store.workspace = true
re_dataframe.workspace = true
re_grpc_client = { workspace = true, optional = true }
re_log = { workspace = true, features = ["setup"] }
re_log_encoding = { workspace = true }
re_log_types.workspace = true
22 changes: 21 additions & 1 deletion rerun_py/rerun_bindings/rerun_bindings.pyi
@@ -615,7 +615,27 @@ class StorageNodeClient:
"""
Open a [`Recording`][rerun.dataframe.Recording] by id to use with the dataframe APIs.
This currently downloads the full recording to the local machine.
This will run queries against the remote storage node and stream the results. Faster for small
numbers of queries with small results.
Parameters
----------
id : str
The id of the recording to open.
Returns
-------
Recording
The opened recording.
"""
...

def download_recording(self, id: str) -> Recording:
"""
Download a [`Recording`][rerun.dataframe.Recording] by id to use with the dataframe APIs.
This will download the full recording to memory and run queries against a local chunk store.
Parameters
----------
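
To illustrate the trade-off described by the two docstrings above, a hedged sketch (the client construction, address, recording id, and entity path are placeholders; only the two method names and their `Recording` return type come from this diff):

```python
import rerun as rr

# Assumed entry point and address, as in the sketch in the description above.
client = rr.remote.connect("http://localhost:51234")

# Server-side execution: nothing is downloaded up front; each query runs on
# the storage node and only the results are streamed back. Suited to a small
# number of queries with small results.
streamed = client.open_recording("some-recording-id")

# Local execution: the full recording is downloaded into an in-memory chunk
# store first, and queries then run locally. Pays a one-time download cost,
# but repeated or large queries avoid round-trips to the server.
downloaded = client.download_recording("some-recording-id")

# Both return a `Recording`, so the dataframe query API is the same afterwards.
view = downloaded.view(index="log_time", contents="/points")
print(view.select().read_all())
```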