dual delete implementation
use constant for data source type

account for cascades and schedule deletions

hook it up
mifu67 committed Dec 20, 2024
1 parent b83688b commit cc0af05
Showing 2 changed files with 77 additions and 4 deletions.
16 changes: 12 additions & 4 deletions src/sentry/incidents/logic.py
@@ -102,6 +102,7 @@
 from sentry.utils import metrics
 from sentry.utils.audit import create_audit_entry_from_user
 from sentry.utils.snuba import is_measurement
+from sentry.workflow_engine.migration_helpers.alert_rule import dual_delete_migrated_alert_rule

 if TYPE_CHECKING:
     from sentry.incidents.utils.types import AlertRuleActivationConditionType
@@ -1022,9 +1023,13 @@ def delete_alert_rule(
             data=alert_rule.get_audit_log_data(),
             event=audit_log.get_event_id("ALERT_RULE_REMOVE"),
         )

-    subscriptions = _unpack_snuba_query(alert_rule).subscriptions.all()
-    bulk_delete_snuba_subscriptions(subscriptions)
+    subscriptions = _unpack_snuba_query(alert_rule).subscriptions.all()
+    if not features.has(
+        "organizations:workflow-engine-metric-alert-dual-write", alert_rule.organization
+    ):
+        # NOTE: we will delete the subscriptions within the dual delete helpers
+        # if the organization is flagged into dual write
+        bulk_delete_snuba_subscriptions(subscriptions)

     schedule_update_project_config(alert_rule, [sub.project for sub in subscriptions])

@@ -1049,7 +1054,10 @@ def delete_alert_rule(
         )
     else:
         RegionScheduledDeletion.schedule(instance=alert_rule, days=0, actor=user)
-
+    if features.has(
+        "organizations:workflow-engine-metric-alert-dual-write", alert_rule.organization
+    ):
+        dual_delete_migrated_alert_rule(alert_rule=alert_rule, user=user)
     alert_rule.update(status=AlertRuleStatus.SNAPSHOT.value)

     if alert_rule.id:
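Aside (not part of the commit): the hunk above changes which code path removes the Snuba query subscriptions. A minimal sketch of that behavior, assuming the standard features.has flag check; the helper name who_deletes_subscriptions is hypothetical and exists only for illustration.

from sentry import features


def who_deletes_subscriptions(alert_rule) -> str:
    """Label the code path expected to delete the rule's QuerySubscriptions."""
    if features.has(
        "organizations:workflow-engine-metric-alert-dual-write", alert_rule.organization
    ):
        # delete_alert_rule skips bulk_delete_snuba_subscriptions; the dual delete
        # helper (get_data_source in the new module below) removes the subscription.
        return "workflow_engine.migration_helpers.alert_rule"
    # Legacy path: delete_alert_rule deletes the subscriptions directly.
    return "incidents.logic.delete_alert_rule"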
65 changes: 65 additions & 0 deletions src/sentry/workflow_engine/migration_helpers/alert_rule.py
@@ -0,0 +1,65 @@
# NOTE: will have to rebase and add these changes to the file created by Colleen once her changes land
from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
from sentry.incidents.models.alert_rule import AlertRule
from sentry.incidents.utils.types import DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION
from sentry.snuba.models import QuerySubscription
from sentry.snuba.subscriptions import bulk_delete_snuba_subscriptions
from sentry.users.services.user import RpcUser
from sentry.workflow_engine.models import (
    AlertRuleDetector,
    DataConditionGroup,
    DataSource,
    Detector,
)


def get_data_source(alert_rule: AlertRule) -> DataSource | None:
    # TODO: if dual deleting, then we should delete the subscriptions here and not in logic.py
    snuba_query = alert_rule.snuba_query
    organization = alert_rule.organization
    if not snuba_query or not organization:
        # This shouldn't be possible, but just in case.
        return None
    try:
        query_subscription = QuerySubscription.objects.get(snuba_query=snuba_query.id)
    except QuerySubscription.DoesNotExist:
        return None
    try:
        data_source = DataSource.objects.get(
            organization=organization,
            query_id=query_subscription.id,
            type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
        )
    except DataSource.DoesNotExist:
        return None
    # Delete the subscription instance we just looked up, not the model class.
    bulk_delete_snuba_subscriptions([query_subscription])
    return data_source


def dual_delete_migrated_alert_rule(
    alert_rule: AlertRule,
    user: RpcUser | None = None,
) -> None:
    try:
        alert_rule_detector = AlertRuleDetector.objects.get(alert_rule=alert_rule)
    except AlertRuleDetector.DoesNotExist:
        # TODO: log failure
        return

    detector: Detector = alert_rule_detector.detector
    data_condition_group: DataConditionGroup | None = detector.workflow_condition_group

    data_source = get_data_source(alert_rule=alert_rule)
    if data_source is None:
        # TODO: log failure
        return

    # deleting the alert_rule also deletes alert_rule_workflow (in main delete logic)
    # also deletes alert_rule_detector, detector_workflow, detector_state
    RegionScheduledDeletion.schedule(instance=detector, days=0, actor=user)
    # also deletes workflow_data_condition_group
    if data_condition_group:
        RegionScheduledDeletion.schedule(instance=data_condition_group, days=0, actor=user)
    RegionScheduledDeletion.schedule(instance=data_source, days=0, actor=user)

    return
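Aside (not part of the commit): a possible shape for the two "TODO: log failure" branches in dual_delete_migrated_alert_rule. This is a sketch only; the logger name and the event/extra keys are assumptions, not conventions confirmed by this diff.

import logging

# Hypothetical logger name; the module could equally reuse an existing logger.
logger = logging.getLogger("sentry.workflow_engine.migration_helpers")


def log_dual_delete_failure(reason: str, alert_rule_id: int) -> None:
    # Record why dual delete bailed out (e.g. "no_detector" when the AlertRuleDetector
    # row is missing, or "no_data_source" when get_data_source returns None), so rules
    # flagged into dual write that fail to clean up remain discoverable.
    logger.warning(
        "dual_delete_migrated_alert_rule.failed",
        extra={"reason": reason, "alert_rule_id": alert_rule_id},
    )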
