GQL: Datasets: auth checks

s373r committed Dec 27, 2024
1 parent e5bfda0 commit 1fba5cc

Showing 14 changed files with 122 additions and 96 deletions.
4 changes: 2 additions & 2 deletions resources/schema.gql
@@ -2024,7 +2024,7 @@ type ViewAccessToken {
   """
   createdAt: DateTime!
   """
-  Date of token revokation
+  Date of token revocation
   """
   revokedAt: DateTime
   """
@@ -2043,7 +2043,7 @@ type ViewDatasetEnvVar {
   """
   key: String!
   """
-  Non sercret value of dataset environment variable
+  Non secret value of dataset environment variable
   """
   value: String
   """
12 changes: 7 additions & 5 deletions src/adapter/graphql/src/mutations/flows_mut/flows_mut_utils.rs
@@ -49,12 +49,14 @@ pub(crate) async fn check_if_flow_belongs_to_dataset(
                 return Ok(Some(FlowInDatasetError::NotFound(FlowNotFound { flow_id })))
             }
         },
-        Err(e) => match e {
-            fs::GetFlowError::NotFound(_) => {
-                return Ok(Some(FlowInDatasetError::NotFound(FlowNotFound { flow_id })))
+        Err(e) => {
+            return match e {
+                fs::GetFlowError::NotFound(_) => {
+                    Ok(Some(FlowInDatasetError::NotFound(FlowNotFound { flow_id })))
+                }
+                fs::GetFlowError::Internal(e) => Err(GqlError::Internal(e)),
             }
-            fs::GetFlowError::Internal(e) => return Err(GqlError::Internal(e)),
-        },
+        }
     }
 
     Ok(None)
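The rewrite above folds the per-arm returns into a single `return match` over the error, so each arm is a plain expression and the error conversion reads as one unit. A minimal standalone sketch of the same pattern, with placeholder types standing in for fs::GetFlowError and GqlError:

enum GetFlowError {
    NotFound,
    Internal(String),
}

enum GqlError {
    Internal(String),
}

fn check_flow(result: Result<u64, GetFlowError>) -> Result<Option<String>, GqlError> {
    match result {
        Ok(flow_id) => {
            if flow_id == 0 {
                // One sub-case exits early; other Ok values fall through.
                return Ok(Some("flow 0 is reserved".to_string()));
            }
        }
        Err(e) => {
            // Single `return match`: each arm yields a value instead of
            // carrying its own `return`.
            return match e {
                GetFlowError::NotFound => Ok(None),
                GetFlowError::Internal(msg) => Err(GqlError::Internal(msg)),
            };
        }
    }

    Ok(None)
}

fn main() {
    assert!(matches!(check_flow(Ok(0)), Ok(Some(_))));
    assert!(matches!(check_flow(Err(GetFlowError::NotFound)), Ok(None)));
    assert!(matches!(
        check_flow(Err(GetFlowError::Internal("db".into()))),
        Err(GqlError::Internal(_))
    ));
}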
@@ -42,7 +42,7 @@ impl ViewAccessToken {
         self.token.created_at
     }
 
-    /// Date of token revokation
+    /// Date of token revocation
     async fn revoked_at(&self) -> Option<DateTime<Utc>> {
         self.token.revoked_at
     }
5 changes: 4 additions & 1 deletion src/adapter/graphql/src/queries/accounts/account.rs
@@ -90,7 +90,10 @@ impl Account {
         alias: &odf::DatasetAlias,
     ) -> Result<Option<Self>, InternalError> {
         if alias.is_multi_tenant() {
-            Ok(Self::from_account_name(ctx, alias.account_name.as_ref().unwrap().clone()).await?)
+            // Safety: In multi-tenant, we have a name.
+            let account_name = alias.account_name.as_ref().unwrap().clone();
+
+            Ok(Self::from_account_name(ctx, account_name).await?)
         } else {
             let current_account_subject = from_catalog_n!(ctx, CurrentAccountSubject);
 
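The new shape moves the `unwrap()` onto its own line behind a comment naming the invariant it relies on: a multi-tenant alias always carries an account name. A standalone sketch of that invariant, using a simplified alias type rather than the real odf::DatasetAlias:

// Simplified stand-in for odf::DatasetAlias, for illustration only.
struct DatasetAlias {
    account_name: Option<String>,
    dataset_name: String,
}

impl DatasetAlias {
    // A multi-tenant alias is exactly one with an account part,
    // which is what makes the unwrap below safe.
    fn is_multi_tenant(&self) -> bool {
        self.account_name.is_some()
    }
}

fn owner_of(alias: &DatasetAlias) -> Option<String> {
    if alias.is_multi_tenant() {
        // Safety: is_multi_tenant() just confirmed the name is present.
        Some(alias.account_name.as_ref().unwrap().clone())
    } else {
        None
    }
}

fn main() {
    let alias = DatasetAlias {
        account_name: Some("alice".to_string()),
        dataset_name: "weather".to_string(),
    };
    assert_eq!(owner_of(&alias), Some("alice".to_string()));
    assert_eq!(alias.dataset_name, "weather");
}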
4 changes: 2 additions & 2 deletions src/adapter/graphql/src/queries/accounts/account_flow_runs.rs
@@ -54,8 +54,8 @@ impl AccountFlowRuns {
                     by_flow_status: filters.by_status.map(Into::into),
                     by_dataset_ids: filters
                         .by_dataset_ids
-                        .iter()
-                        .map(|dataset_id| dataset_id.clone().into())
+                        .into_iter()
+                        .map(Into::into)
                         .collect::<HashSet<_>>(),
                     by_initiator: match filters.by_initiator {
                         Some(initiator_filter) => match initiator_filter {
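This change swaps a borrowing `.iter()` plus a clone-per-element closure for a consuming `.into_iter()` with `Into::into`, moving each id into its conversion instead of copying it. A small sketch of the idiom with stand-in id types (not kamu's):

use std::collections::HashSet;

// Stand-ins for the GraphQL-side and domain-side id types.
struct GqlDatasetId(String);

#[derive(PartialEq, Eq, Hash)]
struct DatasetId(String);

impl From<GqlDatasetId> for DatasetId {
    fn from(id: GqlDatasetId) -> Self {
        DatasetId(id.0)
    }
}

fn main() {
    let filters = vec![GqlDatasetId("a".into()), GqlDatasetId("a".into())];

    // Before: filters.iter().map(|id| id.clone().into()) borrows the Vec
    // and clones every element just to convert it.
    // After: consume the Vec; each element is moved, no clone needed.
    let ids: HashSet<DatasetId> = filters.into_iter().map(Into::into).collect();

    // The HashSet also deduplicates repeated filter ids.
    assert_eq!(ids.len(), 1);
}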
4 changes: 4 additions & 0 deletions src/adapter/graphql/src/queries/admin/admin.rs
@@ -10,6 +10,8 @@
 use crate::prelude::*;
 use crate::AdminGuard;
 
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
 pub struct Admin;
 
 #[Object]
@@ -20,3 +22,5 @@ impl Admin {
         Ok("OK".to_string())
     }
 }
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
15 changes: 9 additions & 6 deletions src/adapter/graphql/src/queries/datasets/dataset.rs
@@ -13,7 +13,7 @@ use opendatafabric as odf;
 
 use crate::prelude::*;
 use crate::queries::*;
-use crate::utils::{ensure_dataset_env_vars_enabled, get_dataset};
+use crate::utils::{check_dataset_read_access, ensure_dataset_env_vars_enabled, get_dataset};
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 
@@ -38,15 +38,18 @@ impl Dataset {
         let dataset_registry = from_catalog_n!(ctx, dyn domain::DatasetRegistry);
 
         // TODO: Should we resolve reference at this point or allow unresolved and fail
-        // later?
-        let hdl = dataset_registry
+        //       later?
+        let handle = dataset_registry
             .resolve_dataset_handle_by_ref(dataset_ref)
             .await
             .int_err()?;
-        let account = Account::from_dataset_alias(ctx, &hdl.alias)
+
+        check_dataset_read_access(ctx, &handle).await?;
+
+        let account = Account::from_dataset_alias(ctx, &handle.alias)
             .await?
             .expect("Account must exist");
-        Ok(Dataset::new(account, hdl))
+        Ok(Dataset::new(account, handle))
     }
 
     /// Unique identifier of the dataset
@@ -112,7 +115,7 @@ impl Dataset {
     }
 
     /// Access to the environment variable of this dataset
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn env_vars(&self, ctx: &Context<'_>) -> Result<DatasetEnvVars> {
         ensure_dataset_env_vars_enabled(ctx)?;
 
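This hunk carries the commit's core change: check_dataset_read_access now runs once in Dataset::from_dataset_ref, the constructor every dataset query passes through, which is what lets the later hunks delete the per-method checks in DatasetEnvVars, DatasetFlowConfigs, and DatasetFlowRuns. A minimal sketch of that "authorize at construction" pattern, with simplified stand-in types rather than kamu's real API:

struct DatasetHandle {
    alias: String,
}

struct Dataset {
    handle: DatasetHandle,
}

#[derive(Debug)]
enum GqlError {
    AccessDenied,
}

// Hypothetical rule for illustration: only the owner named in the
// alias prefix may read the dataset.
fn check_dataset_read_access(user: &str, handle: &DatasetHandle) -> Result<(), GqlError> {
    match handle.alias.split_once('/') {
        Some((owner, _)) if owner == user => Ok(()),
        _ => Err(GqlError::AccessDenied),
    }
}

impl Dataset {
    // The one constructor reachable from queries: if it returns Ok,
    // every field resolver on Dataset can skip its own auth check.
    fn from_dataset_ref(user: &str, alias: &str) -> Result<Dataset, GqlError> {
        let handle = DatasetHandle { alias: alias.to_string() };
        check_dataset_read_access(user, &handle)?;
        Ok(Dataset { handle })
    }
}

fn main() {
    let dataset = Dataset::from_dataset_ref("alice", "alice/weather").unwrap();
    assert_eq!(dataset.handle.alias, "alice/weather");
    assert!(Dataset::from_dataset_ref("mallory", "alice/weather").is_err());
}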
22 changes: 13 additions & 9 deletions src/adapter/graphql/src/queries/datasets/dataset_endpoints.rs
@@ -15,6 +15,8 @@ use opendatafabric as odf;
 use crate::prelude::*;
 use crate::queries::*;
 
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
 pub struct DatasetEndpoints<'a> {
     owner: &'a Account,
     dataset_handle: odf::DatasetHandle,
@@ -48,7 +50,7 @@ impl<'a> DatasetEndpoints<'a> {
         self.dataset_handle.alias.dataset_name.as_str()
     }
 
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn web_link(&self) -> Result<LinkProtocolDesc> {
         let url = format!(
             "{}{}/{}",
@@ -60,7 +62,7 @@ impl<'a> DatasetEndpoints<'a> {
         Ok(LinkProtocolDesc { url })
     }
 
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn cli(&self) -> Result<CliProtocolDesc> {
         let url = format!(
             "odf+{}{}",
@@ -78,7 +80,7 @@ impl<'a> DatasetEndpoints<'a> {
         })
     }
 
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn rest(&self) -> Result<RestProtocolDesc> {
         let dataset_base_url = format!(
             "{}{}",
@@ -101,14 +103,14 @@ impl<'a> DatasetEndpoints<'a> {
         })
     }
 
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn flightsql(&self) -> Result<FlightSqlDesc> {
         Ok(FlightSqlDesc {
             url: self.config.protocols.base_url_flightsql.to_string(),
         })
     }
 
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn jdbc(&self) -> Result<JdbcDesc> {
         let mut url = self.config.protocols.base_url_flightsql.clone();
 
@@ -119,28 +121,28 @@ impl<'a> DatasetEndpoints<'a> {
         })
     }
 
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn postgresql(&self) -> Result<PostgreSqlDesl> {
         Ok(PostgreSqlDesl {
             url: "- coming soon -".to_string(),
         })
     }
 
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn kafka(&self) -> Result<KafkaProtocolDesc> {
         Ok(KafkaProtocolDesc {
             url: "- coming soon -".to_string(),
         })
     }
 
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn websocket(&self) -> Result<WebSocketProtocolDesc> {
         Ok(WebSocketProtocolDesc {
            url: "- coming soon -".to_string(),
         })
     }
 
-    #[allow(clippy::unused_async)]
+    #[expect(clippy::unused_async)]
     async fn odata(&self) -> Result<OdataProtocolDesc> {
         let mut url = format!("{}odata", self.config.protocols.base_url_rest);
         // to respect both kinds of workspaces: single-tenant & multi-tenant
@@ -168,3 +170,5 @@ impl<'a> DatasetEndpoints<'a> {
         })
     }
 }
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
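The repeated #[allow(clippy::unused_async)] → #[expect(clippy::unused_async)] swap throughout this file uses the #[expect] lint attribute (stable since Rust 1.81): unlike allow, it warns when the expected lint no longer fires, so a suppression cannot silently outlive the code that needed it. A tiny self-contained illustration (the URL is a made-up placeholder):

// `async` is demanded by a trait or resolver signature even though the body
// never awaits. `expect` silences clippy::unused_async here, and will itself
// warn if the function ever gains a real .await, telling us to drop it.
#[expect(clippy::unused_async)]
async fn flightsql_url() -> String {
    "grpc://localhost:50050".to_string()
}

fn main() {
    // Constructing the future is enough to show this compiles cleanly;
    // no async runtime is needed for the illustration.
    let _future = flightsql_url();
}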
@@ -36,7 +36,7 @@ impl ViewDatasetEnvVar {
         self.env_var.key.clone()
     }
 
-    /// Non sercret value of dataset environment variable
+    /// Non secret value of dataset environment variable
     #[allow(clippy::unused_async)]
     async fn value(&self) -> Option<String> {
         self.env_var.get_non_secret_value()
5 changes: 0 additions & 5 deletions src/adapter/graphql/src/queries/datasets/dataset_env_vars.rs
@@ -13,7 +13,6 @@ use opendatafabric as odf;
 
 use super::ViewDatasetEnvVar;
 use crate::prelude::*;
-use crate::utils;
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 
@@ -35,8 +34,6 @@ impl DatasetEnvVars {
         ctx: &Context<'_>,
         dataset_env_var_id: DatasetEnvVarID,
     ) -> Result<String> {
-        utils::check_dataset_read_access(ctx, &self.dataset_handle).await?;
-
         let dataset_env_var_service = from_catalog_n!(ctx, dyn DatasetEnvVarService);
         let dataset_env_var = dataset_env_var_service
             .get_dataset_env_var_by_id(&dataset_env_var_id)
@@ -57,8 +54,6 @@ impl DatasetEnvVars {
         page: Option<usize>,
         per_page: Option<usize>,
     ) -> Result<ViewDatasetEnvVarConnection> {
-        utils::check_dataset_read_access(ctx, &self.dataset_handle).await?;
-
         let dataset_env_var_service = from_catalog_n!(ctx, dyn DatasetEnvVarService);
 
         let page = page.unwrap_or(0);
@@ -11,7 +11,6 @@ use kamu_flow_system::{FlowConfigurationService, FlowKeyDataset};
 use opendatafabric as odf;
 
 use crate::prelude::*;
-use crate::utils::check_dataset_read_access;
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 
@@ -32,8 +31,6 @@ impl DatasetFlowConfigs {
         ctx: &Context<'_>,
         dataset_flow_type: DatasetFlowType,
     ) -> Result<Option<FlowConfiguration>> {
-        check_dataset_read_access(ctx, &self.dataset_handle).await?;
-
         let flow_config_service = from_catalog_n!(ctx, dyn FlowConfigurationService);
         let maybe_flow_config = flow_config_service
             .find_configuration(
@@ -48,8 +45,6 @@ impl DatasetFlowConfigs {
 
     /// Checks if all configs of this dataset are disabled
     async fn all_paused(&self, ctx: &Context<'_>) -> Result<bool> {
-        check_dataset_read_access(ctx, &self.dataset_handle).await?;
-
         let flow_config_service = from_catalog_n!(ctx, dyn FlowConfigurationService);
         for dataset_flow_type in kamu_flow_system::DatasetFlowType::all() {
             let maybe_flow_config = flow_config_service
7 changes: 0 additions & 7 deletions src/adapter/graphql/src/queries/datasets/dataset_flow_runs.rs
@@ -17,7 +17,6 @@ use {kamu_flow_system as fs, opendatafabric as odf};
 use crate::mutations::{check_if_flow_belongs_to_dataset, FlowInDatasetError, FlowNotFound};
 use crate::prelude::*;
 use crate::queries::{Account, Flow};
-use crate::utils;
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 
@@ -35,8 +34,6 @@ impl DatasetFlowRuns {
     }
 
     async fn get_flow(&self, ctx: &Context<'_>, flow_id: FlowID) -> Result<GetFlowResult> {
-        utils::check_dataset_read_access(ctx, &self.dataset_handle).await?;
-
         if let Some(error) =
             check_if_flow_belongs_to_dataset(ctx, flow_id, &self.dataset_handle).await?
         {
@@ -64,8 +61,6 @@ impl DatasetFlowRuns {
         per_page: Option<usize>,
         filters: Option<DatasetFlowFilters>,
     ) -> Result<FlowConnection> {
-        utils::check_dataset_read_access(ctx, &self.dataset_handle).await?;
-
         let flow_query_service = from_catalog_n!(ctx, dyn fs::FlowQueryService);
 
         let page = page.unwrap_or(0);
@@ -122,8 +117,6 @@ impl DatasetFlowRuns {
     }
 
     async fn list_flow_initiators(&self, ctx: &Context<'_>) -> Result<AccountConnection> {
-        utils::check_dataset_read_access(ctx, &self.dataset_handle).await?;
-
         let flow_query_service = from_catalog_n!(ctx, dyn fs::FlowQueryService);
 
         let flow_initiator_ids: Vec<_> = flow_query_service