Moved MetadataChain and its visitors + MetadataFactory testing tools
zaychenko-sergei committed Dec 25, 2024
1 parent 62caba4 commit b2006a7
Showing 154 changed files with 402 additions and 253 deletions.
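
Most of the hunks below follow a single mechanical pattern: MetadataChainExt and its search visitors are now imported from the odf_storage crate rather than kamu_core, the MetadataFactory test helper moves from kamu::testing to odf_storage_impl::testing, and test crates gain an odf-storage-impl dev-dependency with the "testing" feature. As a quick orientation, here is a before/after sketch of the import migration; it only restates what the hunks below already show and is not an exhaustive list of the moved items.

// Before this commit: imports removed throughout the diff.
//     use kamu::testing::MetadataFactory;
//     use kamu_core::{MetadataChainExt, SearchSetAttachmentsVisitor};
//
// After this commit: the same items come from the odf-storage crates.
use odf_storage::{MetadataChainExt, SearchSetAttachmentsVisitor};
use odf_storage_impl::testing::MetadataFactory;
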
15 changes: 15 additions & 0 deletions Cargo.lock

(Cargo.lock is a generated file; its diff is not rendered.)

1 change: 1 addition & 0 deletions src/adapter/auth-oso/Cargo.toml
@@ -35,6 +35,7 @@ tracing = { version = "0.1", default-features = false }

[dev-dependencies]
kamu = { workspace = true, features = ["testing"] }
odf-storage-impl = { workspace = true, features = ["testing"]}
time-source = { workspace = true }

tempfile = "3"
@@ -12,13 +12,13 @@ use std::collections::HashSet;
use std::sync::Arc;

use dill::{Catalog, Component};
use kamu::testing::MetadataFactory;
use kamu::{CreateDatasetUseCaseImpl, DatasetRepositoryLocalFs, DatasetRepositoryWriter};
use kamu_accounts::CurrentAccountSubject;
use kamu_adapter_auth_oso::{KamuAuthOso, OsoDatasetAuthorizer};
use kamu_core::auth::{DatasetAction, DatasetActionAuthorizer, DatasetActionUnauthorizedError};
use kamu_core::{CreateDatasetUseCase, DatasetRepository, TenancyConfig};
use messaging_outbox::DummyOutboxImpl;
use odf_storage_impl::testing::MetadataFactory;
use opendatafabric as odf;
use tempfile::TempDir;
use time_source::SystemTimeSourceDefault;
1 change: 1 addition & 0 deletions src/adapter/graphql/Cargo.toml
@@ -74,6 +74,7 @@ kamu-flow-system-inmem = { workspace = true }
kamu-task-system-inmem = { workspace = true }
kamu-task-system-services = { workspace = true }
messaging-outbox = { workspace = true }
odf-storage-impl = { workspace = true, features = ["testing"]}
time-source = { workspace = true }

indoc = "2"
8 changes: 2 additions & 6 deletions src/adapter/graphql/src/mutations/dataset_metadata_mut.rs
@@ -7,12 +7,8 @@
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.

use kamu_core::{
self as domain,
CommitDatasetEventUseCase,
MetadataChainExt,
SearchSetAttachmentsVisitor,
};
use kamu_core::{self as domain, CommitDatasetEventUseCase};
use odf_storage::{MetadataChainExt, SearchSetAttachmentsVisitor};
use opendatafabric as odf;

use super::{CommitResultAppendError, CommitResultSuccess, NoChanges};
@@ -7,7 +7,8 @@
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.

use kamu_core::{DatasetRegistry, GetSummaryOpts, MetadataChainExt};
use kamu_core::{DatasetRegistry, GetSummaryOpts};
use odf_storage::MetadataChainExt;
use {kamu_flow_system as fs, opendatafabric as odf};

use super::FlowNotFound;
7 changes: 4 additions & 3 deletions src/adapter/graphql/src/queries/datasets/dataset.rs
@@ -8,7 +8,8 @@
// by the Apache License, Version 2.0.

use chrono::prelude::*;
use kamu_core::{self as domain, MetadataChainExt, SearchSeedVisitor, ServerUrlConfig};
use kamu_core::{GetSummaryOpts, ServerUrlConfig};
use odf_storage::{MetadataChainExt, SearchSeedVisitor};
use opendatafabric as odf;

use crate::prelude::*;
@@ -35,7 +36,7 @@ impl Dataset {

#[graphql(skip)]
pub async fn from_ref(ctx: &Context<'_>, dataset_ref: &odf::DatasetRef) -> Result<Dataset> {
let dataset_registry = from_catalog_n!(ctx, dyn domain::DatasetRegistry);
let dataset_registry = from_catalog_n!(ctx, dyn kamu_core::DatasetRegistry);

// TODO: Should we resolve reference at this point or allow unresolved and fail
// later?
@@ -75,7 +76,7 @@
async fn kind(&self, ctx: &Context<'_>) -> Result<DatasetKind> {
let resolved_dataset = get_dataset(ctx, &self.dataset_handle)?;
let summary = resolved_dataset
.get_summary(domain::GetSummaryOpts::default())
.get_summary(GetSummaryOpts::default())
.await
.int_err()?;
Ok(summary.kind.into())
25 changes: 12 additions & 13 deletions src/adapter/graphql/src/queries/datasets/dataset_metadata.rs
@@ -8,8 +8,7 @@
// by the Apache License, Version 2.0.

use chrono::prelude::*;
use kamu_core::{
self as domain,
use odf_storage::{
MetadataChainExt,
SearchSetAttachmentsVisitor,
SearchSetInfoVisitor,
@@ -60,7 +59,7 @@ impl DatasetMetadata {
ctx: &Context<'_>,
format: Option<DataSchemaFormat>,
) -> Result<Option<DataSchema>> {
let query_svc = from_catalog_n!(ctx, dyn domain::QueryService);
let query_svc = from_catalog_n!(ctx, dyn kamu_core::QueryService);

// TODO: Default to Arrow eventually
let format = format.unwrap_or(DataSchemaFormat::Parquet);
@@ -83,8 +82,8 @@ impl DatasetMetadata {
async fn current_upstream_dependencies(&self, ctx: &Context<'_>) -> Result<Vec<Dataset>> {
let (dependency_graph_service, dataset_registry) = from_catalog_n!(
ctx,
dyn domain::DependencyGraphService,
dyn domain::DatasetRegistry
dyn kamu_core::DependencyGraphService,
dyn kamu_core::DatasetRegistry
);

use tokio_stream::StreamExt;
@@ -120,8 +119,8 @@ impl DatasetMetadata {
async fn current_downstream_dependencies(&self, ctx: &Context<'_>) -> Result<Vec<Dataset>> {
let (dependency_graph_service, dataset_registry) = from_catalog_n!(
ctx,
dyn domain::DependencyGraphService,
dyn domain::DatasetRegistry
dyn kamu_core::DependencyGraphService,
dyn kamu_core::DatasetRegistry
);

use tokio_stream::StreamExt;
@@ -156,8 +155,8 @@ impl DatasetMetadata {
async fn current_polling_source(&self, ctx: &Context<'_>) -> Result<Option<SetPollingSource>> {
let (dataset_registry, metadata_query_service) = from_catalog_n!(
ctx,
dyn domain::DatasetRegistry,
dyn domain::MetadataQueryService
dyn kamu_core::DatasetRegistry,
dyn kamu_core::MetadataQueryService
);

let target = dataset_registry.get_dataset_by_handle(&self.dataset_handle);
@@ -173,8 +172,8 @@ impl DatasetMetadata {
async fn current_push_sources(&self, ctx: &Context<'_>) -> Result<Vec<AddPushSource>> {
let (metadata_query_service, dataset_registry) = from_catalog_n!(
ctx,
dyn domain::MetadataQueryService,
dyn domain::DatasetRegistry
dyn kamu_core::MetadataQueryService,
dyn kamu_core::DatasetRegistry
);

let target = dataset_registry.get_dataset_by_handle(&self.dataset_handle);
@@ -193,7 +192,7 @@ impl DatasetMetadata {

/// Sync statuses of push remotes
async fn push_sync_statuses(&self, ctx: &Context<'_>) -> Result<DatasetPushStatuses> {
let service = from_catalog_n!(ctx, dyn domain::RemoteStatusService);
let service = from_catalog_n!(ctx, dyn kamu_core::RemoteStatusService);
let statuses = service.check_remotes_status(&self.dataset_handle).await?;

Ok(statuses.into())
@@ -204,7 +203,7 @@ impl DatasetMetadata {
let (metadata_query_service, dataset_registry) = from_catalog_n!(
ctx,
dyn kamu_core::MetadataQueryService,
dyn domain::DatasetRegistry
dyn kamu_core::DatasetRegistry
);

let target = dataset_registry.get_dataset_by_handle(&self.dataset_handle);
2 changes: 1 addition & 1 deletion src/adapter/graphql/src/queries/datasets/metadata_chain.rs
@@ -8,7 +8,7 @@
// by the Apache License, Version 2.0.

use futures::{StreamExt, TryStreamExt};
use kamu_core::MetadataChainExt;
use odf_storage::MetadataChainExt;
use opendatafabric as odf;

use crate::prelude::*;
6 changes: 3 additions & 3 deletions src/adapter/graphql/src/queries/search.rs
@@ -9,7 +9,7 @@

use futures::TryStreamExt;
use kamu_core::auth::DatasetAction;
use kamu_core::{self as domain, TryStreamExtExt};
use odf_storage::TryStreamExtExt;

use crate::prelude::*;
use crate::queries::{Account, Dataset};
@@ -34,8 +34,8 @@ impl Search {
) -> Result<SearchResultConnection> {
let (dataset_registry, dataset_action_authorizer) = from_catalog_n!(
ctx,
dyn domain::DatasetRegistry,
dyn domain::auth::DatasetActionAuthorizer
dyn kamu_core::DatasetRegistry,
dyn kamu_core::auth::DatasetActionAuthorizer
);

let page = page.unwrap_or(0);
2 changes: 1 addition & 1 deletion src/adapter/graphql/src/scalars/flow_configuration.rs
@@ -7,7 +7,6 @@
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.

use kamu_core::MetadataChainExt;
use kamu_flow_system::{
CompactionRule,
CompactionRuleFull,
@@ -22,6 +21,7 @@ use kamu_flow_system::{
ScheduleTimeDelta,
TransformRule,
};
use odf_storage::MetadataChainExt;
use opendatafabric::DatasetHandle;

use crate::mutations::{FlowInvalidRunConfigurations, FlowTypeIsNotSupported};
@@ -14,7 +14,7 @@ use chrono::Duration;
use database_common::{DatabaseTransactionRunner, NoOpDatabasePlugin};
use dill::Component;
use indoc::indoc;
use kamu::testing::{MetadataFactory, MockDatasetActionAuthorizer, MockDatasetChangesService};
use kamu::testing::{MockDatasetActionAuthorizer, MockDatasetChangesService};
use kamu::{
CreateDatasetFromSnapshotUseCaseImpl,
DatasetOwnershipServiceInMemory,
@@ -35,6 +35,7 @@ use kamu_flow_system_inmem::{InMemoryFlowConfigurationEventStore, InMemoryFlowEv
use kamu_task_system_inmem::InMemoryTaskEventStore;
use kamu_task_system_services::TaskSchedulerImpl;
use messaging_outbox::{register_message_dispatcher, Outbox, OutboxImmediateImpl};
use odf_storage_impl::testing::MetadataFactory;
use opendatafabric::{AccountName, DatasetAlias, DatasetID, DatasetKind, DatasetName};
use time_source::SystemTimeSourceDefault;

3 changes: 2 additions & 1 deletion src/adapter/graphql/tests/tests/test_gql_data.rs
@@ -16,7 +16,7 @@ use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::arrow::record_batch::RecordBatch;
use dill::Component;
use file_utils::OwnedFile;
use kamu::testing::{MetadataFactory, ParquetWriterHelper};
use kamu::testing::ParquetWriterHelper;
use kamu::*;
use kamu_accounts::*;
use kamu_accounts_inmem::{InMemoryAccessTokenRepository, InMemoryAccountRepository};
@@ -30,6 +30,7 @@ use kamu_core::*;
use kamu_datasets_inmem::InMemoryDatasetDependencyRepository;
use kamu_datasets_services::DependencyGraphServiceImpl;
use messaging_outbox::DummyOutboxImpl;
use odf_storage_impl::testing::MetadataFactory;
use opendatafabric::*;
use serde_json::json;
use time_source::SystemTimeSourceDefault;
@@ -11,7 +11,6 @@ use async_graphql::value;
use database_common::{DatabaseTransactionRunner, NoOpDatabasePlugin};
use dill::Component;
use indoc::indoc;
use kamu::testing::MetadataFactory;
use kamu::{
CreateDatasetFromSnapshotUseCaseImpl,
DatasetRegistryRepoBridge,
@@ -29,6 +28,7 @@ use kamu_datasets::DatasetEnvVarsConfig;
use kamu_datasets_inmem::{InMemoryDatasetDependencyRepository, InMemoryDatasetEnvVarRepository};
use kamu_datasets_services::{DatasetEnvVarServiceImpl, DependencyGraphServiceImpl};
use messaging_outbox::DummyOutboxImpl;
use odf_storage_impl::testing::MetadataFactory;
use opendatafabric::DatasetKind;
use time_source::SystemTimeSourceDefault;

@@ -11,7 +11,6 @@ use async_graphql::value;
use database_common::{DatabaseTransactionRunner, NoOpDatabasePlugin};
use dill::Component;
use indoc::indoc;
use kamu::testing::MetadataFactory;
use kamu::{
CreateDatasetFromSnapshotUseCaseImpl,
DatasetRegistryRepoBridge,
@@ -31,6 +30,7 @@ use kamu_datasets_services::DependencyGraphServiceImpl;
use kamu_flow_system_inmem::InMemoryFlowConfigurationEventStore;
use kamu_flow_system_services::FlowConfigurationServiceImpl;
use messaging_outbox::DummyOutboxImpl;
use odf_storage_impl::testing::MetadataFactory;
use opendatafabric::*;
use time_source::SystemTimeSourceDefault;

@@ -15,7 +15,7 @@ use database_common::{DatabaseTransactionRunner, NoOpDatabasePlugin};
use dill::Component;
use futures::TryStreamExt;
use indoc::indoc;
use kamu::testing::{MetadataFactory, MockDatasetChangesService};
use kamu::testing::MockDatasetChangesService;
use kamu::{
CreateDatasetFromSnapshotUseCaseImpl,
DatasetOwnershipServiceInMemory,
@@ -66,6 +66,7 @@ use kamu_task_system::{self as ts, TaskMetadata};
use kamu_task_system_inmem::InMemoryTaskEventStore;
use kamu_task_system_services::TaskSchedulerImpl;
use messaging_outbox::{register_message_dispatcher, Outbox, OutboxExt, OutboxImmediateImpl};
use odf_storage_impl::testing::MetadataFactory;
use opendatafabric::{AccountID, DatasetID, DatasetKind, Multihash};
use time_source::SystemTimeSourceDefault;

(Remaining changed files not shown.)
