[DataBroker Rewrite] Sim MVP #25

Merged: 36 commits, Jun 3, 2024

Commits (all by victoryforphil):
1f769c6  Active Tasks (Apr 25, 2024)
c9686f4  Added active task reading from runtimes (Apr 25, 2024)
de7fcd2  Moved data to UAV (Apr 25, 2024)
edd0a8c  Added Database Mutex and base framework for UAV runner (Apr 25, 2024)
aef14cc  Some work importing sim code (Apr 27, 2024)
6db8544  More Sim classes (Apr 28, 2024)
c4bdd6a  Runner Work (Apr 29, 2024)
1f99ccf  Added Channel Act Hardware (Apr 29, 2024)
63512f3  Awlays Run CI (Apr 29, 2024)
f9f878d  CI (Apr 29, 2024)
0cc5599  Coverage Tweaks (Apr 29, 2024)
b550b77  Commnet out docks (Apr 29, 2024)
7745443  Working UAV w/ Detached Runner (Apr 30, 2024)
1ddbb00  tracy (Apr 30, 2024)
7731c89  Work (Apr 30, 2024)
b8dd1b0  Working RUnners (Apr 30, 2024)
dd50fbb  Working multi-uav (Apr 30, 2024)
04961f2  Start of complex broker type traits (May 2, 2024)
df5de29  Patched Tests (Jun 2, 2024)
97b7f69  Ported Tasks to use JSON (Jun 2, 2024)
8c40668  Basic Hover Test (Jun 2, 2024)
91c5fec  Added Testing Fixtures w/ Hover Test (Jun 2, 2024)
db7c27b  Cargo FMT (Jun 2, 2024)
d082d5e  Cargo Fix (Jun 2, 2024)
5d9349f  Some fixes (Jun 2, 2024)
dbac1f7  Basic rerun binding (Jun 2, 2024)
4e28cb8  Rerun view (Jun 2, 2024)
590af91  Fix Test (Jun 2, 2024)
59304ac  Removed Logger (Jun 2, 2024)
5ab1eb5  Formatting (Jun 2, 2024)
87f9e74  Remove Tracy for rn (Jun 2, 2024)
29fc109  Removed Tracy (Jun 2, 2024)
a6707f5  CI profile (Jun 2, 2024)
d2193c0  Tomllll (Jun 2, 2024)
36f672d  junit.xml" (Jun 2, 2024)
10dba33  Disable env_logging (Jun 2, 2024)
7 changes: 7 additions & 0 deletions .config/nextest.toml
@@ -0,0 +1,7 @@
[profile.ci]
failure-output = "immediate-final"

[profile.ci.junit] # this can be some other profile, too
path = "junit.xml"
store-success-output = true
store-failure-output = true
43 changes: 21 additions & 22 deletions .github/workflows/rust.yaml
@@ -2,10 +2,9 @@ name: Rust

on:
push:
branches: ["main"]
branches:
- main
pull_request:
branches: ["main"]

env:
CARGO_TERM_COLOR: always

@@ -29,33 +28,33 @@ jobs:
- uses: taiki-e/install-action@nextest
- uses: actions/checkout@v4
- name: Install Rust
run: rustup update nightly
run: |
rustup update nightly
- name: Install cargo-llvm-cov
run: rustup component add llvm-tools-preview --toolchain nightly
- uses: taiki-e/install-action@cargo-llvm-cov
- uses: taiki-e/install-action@nextest
- name: Collect coverage data (including doctests)
run: |
cargo llvm-cov --no-report nextest --ignore-filename-regex calloop-0.10.6/*
cargo llvm-cov --no-report --doc --ignore-filename-regex calloop-0.10.6/*
cargo llvm-cov report --doctests --lcov --output-path lcov.info --ignore-filename-regex calloop-0.10.6/*
cargo llvm-cov report --lcov --output-path lcov.info --ignore-filename-regex calloop-0.10.6/*
- name: Upload coverage
uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: true
docs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: taiki-e/install-action@nextest
- uses: actions/checkout@v4
- name: Install Rust
run: rustup update stable
- name: Build docs
run: cargo doc --verbose --all-features --workspace
- name: Deploy docs
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./target/doc
fail_ci_if_error: false
# docs:
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v3
# - uses: taiki-e/install-action@nextest
# - uses: actions/checkout@v4
# - name: Install Rust
# run: rustup update stable
# - name: Build docs
# run: cargo doc --verbose --all-features --workspace
# - name: Deploy docs
# uses: peaceiris/actions-gh-pages@v3
# with:
# github_token: ${{ secrets.GITHUB_TOKEN }}
# publish_dir: ./target/doc
3 changes: 3 additions & 0 deletions .vscode/settings.json
@@ -1,5 +1,8 @@
{
"rust-analyzer.linkedProjects": [
"./lil-broker/Cargo.toml"
],
"cSpell.words": [
"tasker"
]
}
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[workspace]

members = [
"lil-broker", "lil-helper", "lil-quad", "lil-sym",
"lil-broker", "lil-helper", "lil-quad", "lil-sym", "lil-viz",
]
resolver = "2"
2 changes: 2 additions & 0 deletions lil-broker/Cargo.toml
@@ -14,3 +14,5 @@ serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.117"
tracing = {version = "0.1.40", features = ["log"]}
json-unflattening = "0.1.2"
lil-helper = { path = "../lil-helper" }
anyhow = "1.0.82"
2 changes: 1 addition & 1 deletion lil-broker/src/buckets/mod.rs
@@ -3,7 +3,7 @@ use std::collections::{BTreeMap, BTreeSet};
use tracing::debug;

mod querying;

#[derive(Debug)]
pub struct Bucket {
pub name: String,
pub bucket_tags: BTreeSet<Tag>,
2 changes: 1 addition & 1 deletion lil-broker/src/buckets/querying.rs
@@ -81,7 +81,7 @@ impl Bucket {
#[cfg(test)]
mod tests {
use super::*;
use crate::{types::Tag, Primatives, Timestamp};
use crate::{Primatives, Timestamp};

/// Querying tests
#[test]
18 changes: 15 additions & 3 deletions lil-broker/src/database/mod.rs
@@ -1,20 +1,27 @@
mod query;
pub use query::*;
use std::collections::BTreeMap;
use tracing::{debug, error, info};

use crate::{Bucket, Tag};
use tracing::{error, info};

use crate::{Bucket, Primatives, Tag, Timestamp};
#[derive(Debug)]
pub struct Database {
pub current_t: Option<Timestamp>,
pub buckets: BTreeMap<String, Bucket>,
}

impl Database {
pub fn new() -> Database {
Database {
buckets: BTreeMap::new(),
current_t: None,
}
}

pub fn set_time(&mut self, time: Timestamp) {
self.current_t = Some(time);
}

pub fn get_keys(&self) -> Vec<String> {
self.buckets.keys().cloned().collect()
}
@@ -27,6 +34,11 @@ impl Database {
}
}

pub fn quick_write(&mut self, topic: &str, data: Primatives) -> Result<QueryResponse, String> {
let query = WriteQuery::new(topic.to_string(), data, self.current_t.unwrap());
self.query_write(query)
}

pub fn add_tag_to_bucket(&mut self, bucket_name: &str, tag: Tag) {
//Create a new bucket if it doesn't exist
if !self.buckets.contains_key(bucket_name) {
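Aside (not part of the diff): the new `current_t` field and the `quick_write` helper let callers write without passing a timestamp, but `quick_write` unwraps `current_t`, so the time must be set first. A minimal usage sketch, assuming `Database` and `Timestamp` are public at the crate root and that the `f64` into `Primatives` conversion used by the tests applies here:

```rust
use lil_broker::{Database, Timestamp};

fn main() {
    let mut db = Database::new();
    // quick_write unwraps current_t, so set the database time before writing.
    db.set_time(Timestamp::from_seconds(1.0));
    // Write a scalar to a topic at the database's current time.
    let response = db.quick_write("test/a/1", 1.0.into());
    assert!(response.is_ok());
}
```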
38 changes: 31 additions & 7 deletions lil-broker/src/database/query/get_latest.rs
@@ -1,7 +1,7 @@
use crate::{Database, QueryCommand, QueryResponse};

use super::tag_filter::TagFilter;
use tracing::{debug, info};
use tracing::debug;
#[derive(Debug, Clone)]
pub struct GetLatestQuery {
pub topics: Vec<String>,
@@ -119,14 +119,42 @@

Ok(response)
}

pub fn query_get_latest_stripped(
&mut self,
query: GetLatestQuery,
) -> Result<QueryResponse, String> {
let mut response = QueryResponse::default();
debug!("Querying for latest data: {:?}", query);
let all_bucket_keys = self.get_keys().into_iter();
let matching_keys = all_bucket_keys.filter(|key| {
for topic in &query.topics {
if key.starts_with(topic) {
return true;
}
}
false
});
// Read the matching keys
let addtional_keys = self.read_bucket(matching_keys.collect(), &query, &mut response);

if let Some(addtional_keys) = addtional_keys {
debug!("Additional keys to read: {:?}", addtional_keys);
self.read_bucket(addtional_keys, &query, &mut response);
}

Ok(response)
}
}

#[cfg(test)]
mod tests {
use crate::{Primatives, Tag, Timestamp, WriteQuery};
use pretty_assertions::{assert_eq, assert_ne};
use crate::{Primatives, Timestamp, WriteQuery};
use pretty_assertions::assert_eq;
use serde_json::json;

use tracing::info;

use super::*;

#[test]
@@ -154,7 +182,6 @@

#[test]
fn test_get_latest_json_struct() {
env_logger::init();
let mut db = Database::new();

#[derive(Debug, Clone, PartialEq, serde::Deserialize, serde::Serialize)]
@@ -239,7 +266,6 @@

#[test]
fn test_get_latest_query_ack() {
env_logger::init();
let mut db = Database::new();
let query1 = WriteQuery::new("test/a/1".into(), 1.0.into(), Timestamp::from_seconds(1.0));
let _write_res = db.query_batch(vec![query1.into()]).unwrap();
@@ -271,7 +297,6 @@

#[test]
fn test_get_latest_query_bucket_tags() {
env_logger::init();
let mut db = Database::new();
let query1 = WriteQuery::new("test/a/1".into(), 1.0.into(), Timestamp::from_seconds(1.0));
db.add_tag_to_bucket("test/a/1", "user/test_tag".into());
@@ -293,7 +318,6 @@

#[test]
fn test_get_latest_query_additonial() {
env_logger::init();
let mut db = Database::new();

let queries = vec![
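Aside (not part of the diff): `query_get_latest_stripped` narrows the bucket keys to those that start with any requested topic before handing them to `read_bucket`. A self-contained sketch of just that prefix filter, using plain strings instead of the broker types:

```rust
// Illustration only: the real method filters Database bucket keys, not a plain Vec<String>.
fn matching_keys(all_keys: Vec<String>, topics: &[String]) -> Vec<String> {
    all_keys
        .into_iter()
        .filter(|key| topics.iter().any(|topic| key.starts_with(topic)))
        .collect()
}

fn main() {
    let keys = vec!["test/a/1".to_string(), "test/b/1".to_string()];
    let topics = vec!["test/a".to_string()];
    assert_eq!(matching_keys(keys, &topics), vec!["test/a/1".to_string()]);
}
```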
5 changes: 1 addition & 4 deletions lil-broker/src/database/query/lookup_range.rs
@@ -99,7 +99,7 @@ mod tests {
use serde_json::json;
use tracing::info;

use crate::{Primatives, Tag, Timestamp, WriteQuery};
use crate::{Timestamp, WriteQuery};

use super::*;

@@ -129,7 +128,6 @@ mod tests {
}
#[test]
fn test_lookup_range_basic() {
env_logger::init();
let mut db = generate_data();

let read_query = LookupRangeQuery {
@@ -149,7 +148,6 @@

#[test]
fn test_get_latest_json_struct() {
env_logger::init();
let mut db = Database::new();

#[derive(Debug, Clone, PartialEq, serde::Deserialize, serde::Serialize)]
@@ -213,7 +211,6 @@ mod tests {

#[test]
fn test_lookup_range_wild_card() {
env_logger::init();
let mut db = generate_data();

let read_query = LookupRangeQuery {
33 changes: 31 additions & 2 deletions lil-broker/src/database/query/mod.rs
@@ -1,6 +1,6 @@
use std::collections::BTreeMap;

use crate::DataPoint;
use crate::{DataPoint, Primatives, Timestamp};

mod get_latest;
mod lookup;
@@ -11,10 +11,13 @@ mod write;
pub use get_latest::*;
use json_unflattening::unflattening::unflatten;
pub use lookup::*;

pub use lookup_range::*;

use serde_json::{json, Value};
pub use tag_filter::*;

use tracing::debug;
pub use write::*;
#[derive(Debug, Clone)]
pub enum QueryCommand {
@@ -45,6 +48,31 @@ impl QueryResponse {
}
}

pub fn from_data(data: BTreeMap<String, Vec<DataPoint>>) -> QueryResponse {
let len = data.len();
QueryResponse {
data,
metadata: QueryResponseMetadata {
n_results: len,
was_successful: true,
},
}
}

pub fn from_json(json: Value) -> QueryResponse {
let mut data = BTreeMap::new();
for (key, value) in json.as_object().unwrap() {
let mut data_points = Vec::new();
for (timestamp, data) in value.as_object().unwrap() {
let timestamp = Timestamp::new(timestamp.parse().unwrap());
let data = Primatives::from_value(data.clone()).unwrap();
data_points.push(DataPoint::new(timestamp, data));
}
data.insert(key.to_string(), data_points);
}
QueryResponse::from_data(data)
}

pub fn to_json(&self, prefix: &str) -> Value {
let mut values: serde_json::Map<String, Value> = serde_json::Map::new();
for (key, data) in self.data.iter() {
@@ -57,8 +85,9 @@ impl QueryResponse {
values.insert(key.to_string(), json!(data_points.last()));
}
// Strip Prefix
debug!("{:?}", values);

let unflattened_json = unflatten(&values).unwrap();
let unflattened_json = unflatten(&values).unwrap_or_default();
unflattened_json
}

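Aside (not part of the diff): `QueryResponse::from_json` expects a map of topic to an inner map of timestamp string to value, and rebuilds the data points from it. A hedged sketch of that input shape; whether `Timestamp::new` interprets the parsed number as seconds or milliseconds, and whether `Primatives::from_value` accepts bare JSON numbers, are assumptions here:

```rust
use lil_broker::QueryResponse;
use serde_json::json;

fn main() {
    // Shape implied by the parsing loop: topic -> { "<timestamp>": value }.
    let payload = json!({
        "test/a/1": { "1000": 1.0 }
    });
    let _response = QueryResponse::from_json(payload);
}
```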
8 changes: 4 additions & 4 deletions lil-broker/src/database/query/write.rs
@@ -1,10 +1,10 @@
use crate::{Bucket, DataPoint, Database, Primatives, QueryCommand, QueryResponse, Tag, Timestamp};

use flatten_json_object::{ArrayFormatting, Flattener};
use json_unflattening::flattening::flatten;
use serde::de::value;

use serde_json::Value;
use tracing::{error, info};
use tracing::info;

#[derive(Debug, Clone)]
pub struct WriteQuery {
pub topic: String,
@@ -120,7 +120,7 @@ mod tests {

#[test]
fn test_write_query_from_json_complex() {
env_logger::init();
//env_logger::init();

let json = json!(
{
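Aside (not part of the diff): for comparison with `quick_write`, a write can still carry its own timestamp through `WriteQuery`, following the pattern the tests in this file use. The sketch assumes these items are public at the crate root, which the in-crate tests do not prove directly:

```rust
use lil_broker::{Database, Timestamp, WriteQuery};

fn main() {
    let mut db = Database::new();
    // The timestamp travels with the query, so no prior set_time call is needed.
    let query = WriteQuery::new("test/a/1".into(), 1.0.into(), Timestamp::from_seconds(1.0));
    let response = db.query_batch(vec![query.into()]);
    assert!(response.is_ok());
}
```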