Extracted OwnedFile into a library
Extracted `AsyncReadObj` into the `async_utils` crate and `S3Context` into the `aws_utils` crate.
`AccessError` became part of `odf`.

ODF crates new structure:
 - src/domain/odf/metadata (old `opendatafabric`)
 - src/domain/odf/storage (repository interfaces)
 - src/infra/odf/storage (repository implementations)

Moved `NamedObjectRepository` out of `core` for now, with all implementations and tests.

`aws-utils` renamed to `s3-utils`.

Moved `ObjectRepository`
Moved `ReferenceRepository` and `BlockRef`
Moved `MetadataBlockRepository`
`ObjectStoreRegistry` separated
Moved `MetadataChain` and its visitors + `MetadataFactory` testing tools
Moved `DatasetSummary`
More `odf` catalog reshuffling

Started `odf/dataset` crate, moved `DatasetSummary` there
Moved `MetadataChain` to `odf-dataset`
Moved `BlockRef` into `odf-dataset`
Moved `Dataset` to `odf-dataset`
Moved `DatasetFactory` & `OdfServerAccessTokenResolver` to `odf-dataset`
`DatasetStorageUnit` is the new name for `DatasetRepository`
Renamed storage unit methods

`opendatafabric` => `odf_metadata`
Started defining the `odf` meta-crate (see the sketch after this message). Converted uses of `odf_dataset`, `odf_metadata` in `adapter/auth-oso`, `adapter/graphql`.
`odf` meta-crate applied in `adapter/http`
`odf` meta-crate used in `adapter/odata`, `adapter/oauth`
Use `odf` meta-crate in `app/cli`
Applied `odf` meta-crate in all domains except `core`
Using `odf` meta-crate in `domain/core`
Web-UI build correction
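
For context on the "meta-crate" mentioned above: judging by the call sites in the diffs below (`odf::DatasetHandle`, `odf::AccessError`, `odf::dataset::DatasetStorageUnit`), the `odf` crate acts as a facade that re-exports the split-out crates under a single namespace. The following is a minimal sketch of such a facade under that assumption; the actual contents of src/domain/odf/odf/src/lib.rs are not shown in this commit page, so treat the module layout as illustrative:

// Hypothetical sketch of the `odf` facade crate root (illustrative only).
//
// Downstream crates depend on `odf` alone and write `odf::DatasetHandle`,
// `odf::AccessError`, `odf::dataset::DatasetStorageUnit`, etc., so later
// splits of the underlying crates do not ripple through every consumer.

// Assumed: common metadata types are flattened into the crate root,
// consistent with `odf::DatasetHandle` / `odf::AccessError` in the diffs below.
pub use odf_metadata::*;

// Assumed: the dataset and storage layers remain behind named modules,
// consistent with `odf::dataset::DatasetStorageUnit` in the test diff below.
pub mod dataset {
    pub use odf_dataset::*;
}

pub mod storage {
    pub use odf_storage::*;
}

The payoff visible throughout this commit is mechanical: call sites drop per-crate imports such as `use opendatafabric::DatasetHandle;` and reference `odf::...` paths instead.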
zaychenko-sergei committed Dec 27, 2024
1 parent eb5ede4 commit 6537db5
Showing 684 changed files with 6,239 additions and 5,226 deletions.
323 changes: 254 additions & 69 deletions Cargo.lock

Large diffs are not rendered by default.

26 changes: 23 additions & 3 deletions Cargo.toml
@@ -10,6 +10,7 @@ members = [
"src/utils/enum-variants",
"src/utils/event-sourcing",
"src/utils/event-sourcing-macros",
+ "src/utils/file-utils",
"src/utils/http-common",
"src/utils/init-on-startup",
"src/utils/internal-error",
@@ -19,6 +20,8 @@ members = [
"src/utils/observability",
"src/utils/random-names",
"src/utils/repo-tools",
+ "src/utils/s3-utils",
+ "src/utils/test-utils",
"src/utils/time-source",
"src/utils/tracing-perfetto",
# Domain
@@ -27,7 +30,10 @@ members = [
"src/domain/core",
"src/domain/datasets/domain",
"src/domain/flow-system/domain",
- "src/domain/opendatafabric",
+ "src/domain/odf/odf",
+ "src/domain/odf/metadata",
+ "src/domain/odf/dataset",
+ "src/domain/odf/storage",
"src/domain/task-system/domain",
# Domain service layer
"src/domain/accounts/services",
@@ -38,6 +44,9 @@ members = [
# Infra
"src/infra/core",
"src/infra/ingest-datafusion",
+ ## Open Data Fabric
+ "src/infra/odf/dataset",
+ "src/infra/odf/storage",
## Flow System
"src/infra/flow-system/repo-tests",
"src/infra/flow-system/inmem",
@@ -102,6 +111,7 @@ database-common-macros = { version = "0.215.0", path = "src/utils/database-commo
enum-variants = { version = "0.215.0", path = "src/utils/enum-variants", default-features = false }
event-sourcing = { version = "0.215.0", path = "src/utils/event-sourcing", default-features = false }
event-sourcing-macros = { version = "0.215.0", path = "src/utils/event-sourcing-macros", default-features = false }
+ file-utils = { version = "0.215.0", path = "src/utils/file-utils", default-features = false }
http-common = { version = "0.215.0", path = "src/utils/http-common", default-features = false }
init-on-startup = { version = "0.215.0", path = "src/utils/init-on-startup", default-features = false }
internal-error = { version = "0.215.0", path = "src/utils/internal-error", default-features = false }
@@ -112,6 +122,8 @@ messaging-outbox = { version = "0.215.0", path = "src/utils/messaging-outbox", d
multiformats = { version = "0.215.0", path = "src/utils/multiformats", default-features = false }
observability = { version = "0.215.0", path = "src/utils/observability", default-features = false }
random-names = { version = "0.215.0", path = "src/utils/random-names", default-features = false }
+ s3-utils = { version = "0.215.0", path = "src/utils/s3-utils", default-features = false }
+ test-utils = { version = "0.215.0", path = "src/utils/test-utils", default-features = false }
time-source = { version = "0.215.0", path = "src/utils/time-source", default-features = false }
tracing-perfetto = { version = "0.215.0", path = "src/utils/tracing-perfetto", default-features = false }

@@ -122,7 +134,12 @@ kamu-core = { version = "0.215.0", path = "src/domain/core", default-features =
kamu-datasets = { version = "0.215.0", path = "src/domain/datasets/domain", default-features = false }
kamu-flow-system = { version = "0.215.0", path = "src/domain/flow-system/domain", default-features = false }
kamu-task-system = { version = "0.215.0", path = "src/domain/task-system/domain", default-features = false }
- opendatafabric = { version = "0.215.0", path = "src/domain/opendatafabric", default-features = false }
+
+ ## Open Data Fabric
+ odf = { version = "0.215.0", path = "src/domain/odf/odf", default-features = false }
+ odf-metadata = { version = "0.215.0", path = "src/domain/odf/metadata", default-features = false }
+ odf-dataset = { version = "0.215.0", path = "src/domain/odf/dataset", default-features = false }
+ odf-storage = { version = "0.215.0", path = "src/domain/odf/storage", default-features = false }

# Domain service layer
kamu-accounts-services = { version = "0.215.0", path = "src/domain/accounts/services", default-features = false }
@@ -134,6 +151,9 @@ kamu-task-system-services = { version = "0.215.0", path = "src/domain/task-syste
# Infra
kamu = { version = "0.215.0", path = "src/infra/core", default-features = false }
kamu-ingest-datafusion = { version = "0.215.0", path = "src/infra/ingest-datafusion", default-features = false }
+ ## Open Data Fabric infra implementation
+ odf-dataset-impl = { version = "0.215.0", path = "src/infra/odf/dataset", default-features = false }
+ odf-storage-impl = { version = "0.215.0", path = "src/infra/odf/storage", default-features = false }
## Flow System
kamu-flow-system-repo-tests = { version = "0.215.0", path = "src/infra/flow-system/repo-tests", default-features = false }
kamu-flow-system-inmem = { version = "0.215.0", path = "src/infra/flow-system/inmem", default-features = false }
@@ -244,7 +264,7 @@ debug = 2
# We don't emit info for dependencies as this significantly increases binary size.
# See: https://doc.rust-lang.org/cargo/reference/profiles.html#debug
[profile.release.package]
- opendatafabric = { debug = 1 }
+ odf-metadata = { debug = 1 }
kamu = { debug = 1 }
kamu-cli = { debug = 1 }

5 changes: 1 addition & 4 deletions deny.toml
@@ -99,8 +99,5 @@ ignore = [
"RUSTSEC-2024-0370",
# Unmaintained (instant)
# https://rustsec.org/advisories/RUSTSEC-2024-0384.html
- "RUSTSEC-2024-0384",
- # Security: requires update in hickory-resolver, which is not available yet
- # https://github.com/rust-lang/crates.io-index
- "RUSTSEC-2024-0421"
+ "RUSTSEC-2024-0384"
]
3 changes: 2 additions & 1 deletion src/adapter/auth-oso/Cargo.toml
@@ -23,7 +23,7 @@ doctest = false
[dependencies]
internal-error = { workspace = true }
messaging-outbox = { workspace = true }
- opendatafabric = { workspace = true }
+ odf = { workspace = true }
kamu-accounts = { workspace = true }
kamu-core = { workspace = true }

@@ -35,6 +35,7 @@ tracing = { version = "0.1", default-features = false }

[dev-dependencies]
kamu = { workspace = true, features = ["testing"] }
+ odf-storage-impl = { workspace = true, features = ["testing"]}
time-source = { workspace = true }

tempfile = "3"
21 changes: 11 additions & 10 deletions src/adapter/auth-oso/src/oso_dataset_authorizer.rs
@@ -15,8 +15,6 @@ use dill::*;
use internal_error::{ErrorIntoInternal, InternalError, ResultIntoInternal};
use kamu_accounts::CurrentAccountSubject;
use kamu_core::auth::*;
- use kamu_core::AccessError;
- use opendatafabric::DatasetHandle;
use oso::Oso;

use crate::dataset_resource::*;
@@ -55,7 +53,7 @@ impl OsoDatasetAuthorizer {
}
}

- fn dataset_resource(&self, dataset_handle: &DatasetHandle) -> DatasetResource {
+ fn dataset_resource(&self, dataset_handle: &odf::DatasetHandle) -> DatasetResource {
let dataset_alias = &dataset_handle.alias;
let creator = dataset_alias.account_name.as_ref().map_or_else(
|| {
@@ -79,7 +77,7 @@ impl DatasetActionAuthorizer for OsoDatasetAuthorizer {
#[tracing::instrument(level = "debug", skip_all, fields(%dataset_handle, ?action))]
async fn check_action_allowed(
&self,
- dataset_handle: &DatasetHandle,
+ dataset_handle: &odf::DatasetHandle,
action: DatasetAction,
) -> Result<(), DatasetActionUnauthorizedError> {
let actor = self.actor();
@@ -94,7 +92,7 @@ impl DatasetActionAuthorizer for OsoDatasetAuthorizer {
Ok(())
} else {
Err(DatasetActionUnauthorizedError::Access(
- AccessError::Forbidden(
+ odf::AccessError::Forbidden(
DatasetActionNotEnoughPermissionsError {
action,
dataset_ref: dataset_handle.as_local_ref(),
@@ -109,7 +107,10 @@ impl DatasetActionAuthorizer for OsoDatasetAuthorizer {
}

#[tracing::instrument(level = "debug", skip_all, fields(%dataset_handle))]
- async fn get_allowed_actions(&self, dataset_handle: &DatasetHandle) -> HashSet<DatasetAction> {
+ async fn get_allowed_actions(
+ &self,
+ dataset_handle: &odf::DatasetHandle,
+ ) -> HashSet<DatasetAction> {
let actor = self.actor();
let dataset_resource = self.dataset_resource(dataset_handle);

@@ -130,9 +131,9 @@ impl DatasetActionAuthorizer for OsoDatasetAuthorizer {
#[tracing::instrument(level = "debug", skip_all, fields(dataset_handles=?dataset_handles, action=%action))]
async fn filter_datasets_allowing(
&self,
- dataset_handles: Vec<DatasetHandle>,
+ dataset_handles: Vec<odf::DatasetHandle>,
action: DatasetAction,
- ) -> Result<Vec<DatasetHandle>, InternalError> {
+ ) -> Result<Vec<odf::DatasetHandle>, InternalError> {
let mut matched_dataset_handles = Vec::new();
for hdl in dataset_handles {
let is_allowed = self
@@ -154,7 +155,7 @@ impl DatasetActionAuthorizer for OsoDatasetAuthorizer {
#[tracing::instrument(level = "debug", skip_all, fields(dataset_handles=?dataset_handles, action=%action))]
async fn classify_datasets_by_allowance(
&self,
- dataset_handles: Vec<DatasetHandle>,
+ dataset_handles: Vec<odf::DatasetHandle>,
action: DatasetAction,
) -> Result<ClassifyByAllowanceResponse, InternalError> {
let mut matched_dataset_handles = Vec::with_capacity(dataset_handles.len());
@@ -175,7 +176,7 @@ impl DatasetActionAuthorizer for OsoDatasetAuthorizer {
let dataset_ref = hdl.as_local_ref();
unmatched_results.push((
hdl,
- DatasetActionUnauthorizedError::Access(AccessError::Forbidden(
+ DatasetActionUnauthorizedError::Access(odf::AccessError::Forbidden(
DatasetActionNotEnoughPermissionsError {
action,
dataset_ref,
31 changes: 16 additions & 15 deletions src/adapter/auth-oso/tests/tests/test_oso_dataset_authorizer.rs
@@ -12,14 +12,13 @@ use std::collections::HashSet;
use std::sync::Arc;

use dill::{Catalog, Component};
- use kamu::testing::MetadataFactory;
- use kamu::{CreateDatasetUseCaseImpl, DatasetRepositoryLocalFs, DatasetRepositoryWriter};
+ use kamu::{CreateDatasetUseCaseImpl, DatasetStorageUnitLocalFs, DatasetStorageUnitWriter};
use kamu_accounts::CurrentAccountSubject;
use kamu_adapter_auth_oso::{KamuAuthOso, OsoDatasetAuthorizer};
use kamu_core::auth::{DatasetAction, DatasetActionAuthorizer, DatasetActionUnauthorizedError};
- use kamu_core::{AccessError, CreateDatasetUseCase, DatasetRepository, TenancyConfig};
+ use kamu_core::{CreateDatasetUseCase, TenancyConfig};
use messaging_outbox::DummyOutboxImpl;
- use opendatafabric::{AccountID, AccountName, DatasetAlias, DatasetHandle, DatasetKind};
+ use odf_storage_impl::testing::MetadataFactory;
use tempfile::TempDir;
use time_source::SystemTimeSourceDefault;

@@ -29,7 +28,7 @@
async fn test_owner_can_read_and_write() {
let harness = DatasetAuthorizerHarness::new("john");
let dataset_handle = harness
- .create_dataset(&DatasetAlias::try_from("john/foo").unwrap())
+ .create_dataset(&odf::DatasetAlias::try_from("john/foo").unwrap())
.await;

let read_result = harness
@@ -62,7 +61,7 @@ async fn test_owner_can_read_and_write() {
async fn test_guest_can_read_but_not_write() {
let harness = DatasetAuthorizerHarness::new("kate");
let dataset_handle = harness
- .create_dataset(&DatasetAlias::try_from("john/foo").unwrap())
+ .create_dataset(&odf::DatasetAlias::try_from("john/foo").unwrap())
.await;

let read_result = harness
@@ -84,7 +83,7 @@
assert_matches!(
write_result,
Err(DatasetActionUnauthorizedError::Access(
- AccessError::Forbidden(_)
+ odf::AccessError::Forbidden(_)
))
);

@@ -110,16 +109,16 @@
.add::<SystemTimeSourceDefault>()
.add::<DummyOutboxImpl>()
.add_value(CurrentAccountSubject::logged(
- AccountID::new_seeded_ed25519(current_account_name.as_bytes()),
- AccountName::new_unchecked(current_account_name),
+ odf::AccountID::new_seeded_ed25519(current_account_name.as_bytes()),
+ odf::AccountName::new_unchecked(current_account_name),
false,
))
.add::<KamuAuthOso>()
.add::<OsoDatasetAuthorizer>()
.add_value(TenancyConfig::MultiTenant)
- .add_builder(DatasetRepositoryLocalFs::builder().with_root(datasets_dir))
- .bind::<dyn DatasetRepository, DatasetRepositoryLocalFs>()
- .bind::<dyn DatasetRepositoryWriter, DatasetRepositoryLocalFs>()
+ .add_builder(DatasetStorageUnitLocalFs::builder().with_root(datasets_dir))
+ .bind::<dyn odf::dataset::DatasetStorageUnit, DatasetStorageUnitLocalFs>()
+ .bind::<dyn DatasetStorageUnitWriter, DatasetStorageUnitLocalFs>()
.add::<CreateDatasetUseCaseImpl>()
.build();
@@ -132,14 +131,16 @@
}
}

- pub async fn create_dataset(&self, alias: &DatasetAlias) -> DatasetHandle {
+ pub async fn create_dataset(&self, alias: &odf::DatasetAlias) -> odf::DatasetHandle {
let create_dataset = self.catalog.get_one::<dyn CreateDatasetUseCase>().unwrap();

create_dataset
.execute(
alias,
- MetadataFactory::metadata_block(MetadataFactory::seed(DatasetKind::Root).build())
- .build_typed(),
+ MetadataFactory::metadata_block(
+ MetadataFactory::seed(odf::DatasetKind::Root).build(),
+ )
+ .build_typed(),
Default::default(),
)
.await
4 changes: 3 additions & 1 deletion src/adapter/graphql/Cargo.toml
@@ -24,7 +24,7 @@ doctest = false
[dependencies]
database-common = { workspace = true }
internal-error = { workspace = true }
- opendatafabric = { workspace = true, features = ["arrow"] }
+ odf = { workspace = true, features = ["arrow"] }

kamu = { workspace = true }
kamu-accounts = { workspace = true }
@@ -61,6 +61,7 @@ uuid = { version = "1", default-features = false }
[dev-dependencies]
# TODO: Limit to mock or in-memory implementations only
container-runtime = { workspace = true }
+ file-utils = { workspace = true }
init-on-startup = { workspace = true }
kamu = { workspace = true, features = ["testing"] }
kamu-accounts = { workspace = true, features = ["testing"] }
@@ -72,6 +73,7 @@ kamu-flow-system-inmem = { workspace = true }
kamu-task-system-inmem = { workspace = true }
kamu-task-system-services = { workspace = true }
messaging-outbox = { workspace = true }
+ odf-storage-impl = { workspace = true, features = ["testing"]}
time-source = { workspace = true }

indoc = "2"
1 change: 0 additions & 1 deletion src/adapter/graphql/src/mutations/dataset_env_vars_mut.rs
@@ -14,7 +14,6 @@ use kamu_datasets::{
ModifyDatasetEnvVarError,
SaveDatasetEnvVarError,
};
- use opendatafabric as odf;
use secrecy::SecretString;

use crate::prelude::*;
