Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Improve sorting of listed flows #1022

Merged
merged 8 commits into from
Jan 8, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ Recommendation: for ease of reading, use the following order:
-->

## [Unreleased]
### Changed
- GraphQL: accountListFlows returns a list sorted by status and last event time
### Fixed
- GQL api flows queries now fetch dataset polling source only once per dataset (and only if the Ingest flow type is present)
- Flow trigger status now becomes disabled when a flow fails
Expand Down
79 changes: 49 additions & 30 deletions src/infra/flow-system/inmem/src/flow/inmem_flow_event_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -482,25 +482,7 @@ impl FlowEventStore for InMemoryFlowEventStore {
filters: &DatasetFlowFilters,
pagination: PaginationOpts,
) -> FlowIDStream {
let flow_ids_page: Vec<_> = {
let state = self.inner.as_state();
let g = state.lock().unwrap();
g.all_flows_by_dataset
.get(dataset_id)
.map(|dataset_flow_ids| {
dataset_flow_ids
.iter()
.rev()
.filter(|flow_id| g.matches_dataset_flow(**flow_id, filters))
.skip(pagination.offset)
.take(pagination.limit)
.map(|flow_id| Ok(*flow_id))
.collect()
})
.unwrap_or_default()
};

Box::pin(futures::stream::iter(flow_ids_page))
self.get_all_flow_ids_by_datasets(HashSet::from([dataset_id.clone()]), filters, pagination)
}

#[tracing::instrument(level = "debug", skip_all, fields(%dataset_id))]
Expand Down Expand Up @@ -548,25 +530,62 @@ impl FlowEventStore for InMemoryFlowEventStore {
let flow_ids_page: Vec<_> = {
let state = self.inner.as_state();
let g = state.lock().unwrap();
let mut result: Vec<Result<FlowID, _>> = vec![];
let mut total_count = 0;
for flow_id in g.all_flows.iter().rev() {

// Collect FlowID -> Most recent event time, for sorting purposes
let recent_events: HashMap<FlowID, DateTime<Utc>> = g.events.iter().fold(
HashMap::new(),
|mut acc: HashMap<FlowID, DateTime<Utc>>, i: &FlowEvent| {
let event_time = i.event_time();
acc.entry(i.flow_id())
.and_modify(|val| {
if event_time.gt(val) {
*val = event_time;
};
})
.or_insert(event_time);
acc
},
);

// Split events by type
let mut waiting_flows: Vec<_> = vec![];
let mut running_flows: Vec<_> = vec![];
let mut finished_flows: Vec<_> = vec![];
for flow_id in &g.all_flows {
// Also apply the given filters at this stage in order to reduce the
// number of items to process in further steps
let flow_key = g.flow_key_by_flow_id.get(flow_id).unwrap();
if let FlowKey::Dataset(flow_key_dataset) = flow_key {
if dataset_ids.contains(&flow_key_dataset.dataset_id)
&& g.matches_dataset_flow(*flow_id, filters)
{
if result.len() >= pagination.limit {
break;
}
if total_count >= pagination.offset {
result.push(Ok(*flow_id));
if let Some(flow) = g.flow_search_index.get(flow_id) {
let item = (flow_id, recent_events.get(flow_id));
match flow.flow_status {
FlowStatus::Waiting => waiting_flows.push(item),
FlowStatus::Running => running_flows.push(item),
FlowStatus::Finished => finished_flows.push(item),
}
}
total_count += 1;
}
};
}
}
result
// Sort every group separately
waiting_flows.sort_by(|a, b| b.cmp(a));
running_flows.sort_by(|a, b| b.cmp(a));
finished_flows.sort_by(|a, b| b.cmp(a));

let mut ordered_flows = vec![];
ordered_flows.append(&mut waiting_flows);
ordered_flows.append(&mut running_flows);
ordered_flows.append(&mut finished_flows);

ordered_flows
.iter()
.skip(pagination.offset)
.take(pagination.limit)
.map(|(flow_id, _)| Ok(**flow_id))
.collect()
};

Box::pin(futures::stream::iter(flow_ids_page))
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -597,7 +597,7 @@ impl FlowEventStore for PostgresFlowEventStore {
AND (cast($2 as dataset_flow_type) IS NULL OR dataset_flow_type = $2)
AND (cast($3 as flow_status_type) IS NULL OR flow_status = $3)
AND (cast($4 as TEXT[]) IS NULL OR initiator = ANY($4))
ORDER BY flow_id DESC
ORDER BY flow_status, last_event_id DESC
LIMIT $5 OFFSET $6
"#,
dataset_id,
Expand Down Expand Up @@ -683,7 +683,7 @@ impl FlowEventStore for PostgresFlowEventStore {
AND (cast($2 as dataset_flow_type) IS NULL OR dataset_flow_type = $2)
AND (cast($3 as flow_status_type) IS NULL OR flow_status = $3)
AND (cast($4 as TEXT[]) IS NULL OR initiator = ANY($4))
ORDER BY flow_id DESC
ORDER BY flow_status, last_event_id DESC
LIMIT $5 OFFSET $6
"#,
dataset_ids as Vec<String>,
Expand Down
Loading
Loading