Skip to content

Commit

Permalink
Bug/swallow error messages (#592)
Browse files Browse the repository at this point in the history
* Fix error messages being swallowed when not running under a TTY
* Remove paused from compacting config
  • Loading branch information
rmn-boiko authored Apr 12, 2024
1 parent 1a70f7e commit 1e1f0ef
Show file tree
Hide file tree
Showing 5 changed files with 9 additions and 11 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## Unreleased
### Fixed
- CLI now shows error messages even when not attached to a TTY
- Removed `paused` from `setConfigCompacting` mutation

## [0.174.1] - 2024-04-12
### Fixed
- Set correct ODF push/pull websocket protocol
Expand Down
2 changes: 1 addition & 1 deletion resources/schema.gql
Original file line number Diff line number Diff line change
Expand Up @@ -373,7 +373,7 @@ type DatasetFlowConfigs {
type DatasetFlowConfigsMut {
setConfigSchedule(datasetFlowType: DatasetFlowType!, paused: Boolean!, schedule: ScheduleInput!): SetFlowConfigResult!
setConfigBatching(datasetFlowType: DatasetFlowType!, paused: Boolean!, batching: BatchingConditionInput!): SetFlowConfigResult!
setConfigCompacting(datasetFlowType: DatasetFlowType!, paused: Boolean!, compactingArgs: CompactingConditionInput!): SetFlowConfigResult!
setConfigCompacting(datasetFlowType: DatasetFlowType!, compactingArgs: CompactingConditionInput!): SetFlowConfigResult!
pauseFlows(datasetFlowType: DatasetFlowType): Boolean!
resumeFlows(datasetFlowType: DatasetFlowType): Boolean!
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,6 @@ impl DatasetFlowConfigsMut {
&self,
ctx: &Context<'_>,
dataset_flow_type: DatasetFlowType,
paused: bool,
compacting_args: CompactingConditionInput,
) -> Result<SetFlowConfigResult> {
if !ensure_set_config_flow_supported(
Expand Down Expand Up @@ -206,7 +205,7 @@ impl DatasetFlowConfigsMut {
Utc::now(),
FlowKeyDataset::new(self.dataset_handle.id.clone(), dataset_flow_type.into())
.into(),
paused,
true,
FlowConfigurationRule::CompactingRule(compacting_rule),
)
.await
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -641,7 +641,6 @@ async fn test_crud_compacting_root_dataset() {
let mutation_code = FlowConfigHarness::set_config_compacting_mutation(
&create_result.dataset_handle.id,
"HARD_COMPACTING",
false,
1_000_000,
10000,
);
Expand All @@ -666,7 +665,7 @@ async fn test_crud_compacting_root_dataset() {
"message": "Success",
"config": {
"__typename": "FlowConfiguration",
"paused": false,
"paused": true,
"schedule": null,
"batching": null,
"compacting": {
Expand Down Expand Up @@ -776,7 +775,6 @@ async fn test_compacting_config_validation() {
let mutation_code = FlowConfigHarness::set_config_compacting_mutation(
&create_derived_result.dataset_handle.id,
"HARD_COMPACTING",
false,
test_case.0,
test_case.1,
);
Expand Down Expand Up @@ -1305,7 +1303,6 @@ async fn test_incorrect_dataset_kinds_for_flow_type() {
let mutation_code = FlowConfigHarness::set_config_compacting_mutation(
&create_derived_result.dataset_handle.id,
"HARD_COMPACTING",
false,
1000,
1000,
);
Expand Down Expand Up @@ -1756,7 +1753,6 @@ impl FlowConfigHarness {
fn set_config_compacting_mutation(
id: &DatasetID,
dataset_flow_type: &str,
paused: bool,
max_slice_size: u64,
max_slice_records: u64,
) -> String {
Expand All @@ -1769,7 +1765,6 @@ impl FlowConfigHarness {
configs {
setConfigCompacting (
datasetFlowType: "<dataset_flow_type>",
paused: <paused>,
compactingArgs: {
maxSliceSize: <maxSliceSize>,
maxSliceRecords: <maxSliceRecords>
Expand Down Expand Up @@ -1808,7 +1803,6 @@ impl FlowConfigHarness {
)
.replace("<id>", &id.to_string())
.replace("<dataset_flow_type>", dataset_flow_type)
.replace("<paused>", if paused { "true" } else { "false" })
.replace("<maxSliceRecords>", &max_slice_records.to_string())
.replace("<maxSliceSize>", &max_slice_size.to_string())
}
Expand Down
2 changes: 1 addition & 1 deletion src/app/cli/src/app.rs
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ pub async fn run(
"Command failed",
);

if output_config.is_tty && output_config.verbosity_level == 0 {
if output_config.verbosity_level == 0 {
eprintln!("{}", err.pretty(false));
}
}
Expand Down

0 comments on commit 1e1f0ef

Please sign in to comment.