diff --git a/.eslintrc.js b/.eslintrc.js index 8ef646861cd018..dfb3482ca6aa79 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -12,44 +12,15 @@ const baseRules = { /** * Variables */ - // https://eslint.org/docs/rules/no-shadow - 'no-shadow': ['error'], - // https://eslint.org/docs/rules/no-shadow-restricted-names 'no-shadow-restricted-names': ['error'], - // https://eslint.org/docs/rules/no-undef - 'no-undef': ['error'], - - // https://eslint.org/docs/rules/no-unused-vars - 'no-unused-vars': [ - 'error', - { - vars: 'all', - args: 'none', - - // Ignore vars that start with an underscore - // e.g. if you want to omit a property using object spread: - // - // const {name: _name, ...props} = this.props; - // - varsIgnorePattern: '^_', - argsIgnorePattern: '^_', - }, - ], - - // https://eslint.org/docs/rules/no-use-before-define - 'no-use-before-define': ['error', {functions: false}], - /** * Possible errors */ // https://eslint.org/docs/rules/no-cond-assign 'no-cond-assign': ['error', 'always'], - // https://eslint.org/docs/rules/no-console - 'no-console': ['warn'], - // https://eslint.org/docs/rules/no-alert 'no-alert': ['error'], @@ -272,14 +243,6 @@ const reactReactRules = { // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-deprecated.md 'react/no-deprecated': ['error'], - // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-is-mounted.md - 'react/no-is-mounted': ['warn'], - - // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-find-dom-node.md - // Recommended to use callback refs instead - // TODO: Upgrade sentry to use callback refs - 'react/no-find-dom-node': ['warn'], - // Prevent usage of the return value of React.render // deprecation: https://facebook.github.io/react/docs/react-dom.html#render // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-render-return-value.md @@ -311,10 +274,6 @@ const reactReactRules = { // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-typos.md 'react/no-typos': ['error'], - // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-string-refs.md - // This is now considered legacy, callback refs preferred - 'react/no-string-refs': ['warn'], - // Prevent invalid characters from appearing in markup // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unescaped-entities.md 'react/no-unescaped-entities': ['off'], @@ -416,16 +375,6 @@ const reactImportRules = { }, ], - // Enforce a convention in module import order - // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/order.md - 'import/order': [ - 'error', - { - groups: ['builtin', 'external', 'internal', ['parent', 'sibling', 'index']], - 'newlines-between': 'always', - }, - ], - // Require a newline after the last import/require in a group // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/newline-after-import.md 'import/newline-after-import': ['error'], @@ -499,7 +448,6 @@ const reactImportRules = { }; const reactJestRules = { - 'jest/no-large-snapshots': ['warn', {maxSize: 2000}], 'jest/no-disabled-tests': 'error', }; @@ -540,22 +488,6 @@ const reactRules = { 'asc', {caseSensitive: true, natural: false, requiredFirst: true}, ], - - // Disallow importing `import React from 'react'`. This is not needed since - // React 17. We prefer the named imports for potential tree-shaking gains - // in the future. 
- 'no-restricted-imports': [ - 'error', - { - paths: [ - { - name: 'react', - importNames: ['default'], - message: 'Prefer named React imports (React types DO NOT need imported!)', - }, - ], - }, - ], }; const appRules = { @@ -572,6 +504,7 @@ const appRules = { // no-undef is redundant with typescript as tsc will complain // A downside is that we won't get eslint errors about it, but your editors should // support tsc errors so.... + // https://eslint.org/docs/rules/no-undef 'no-undef': 'off', // Let formatter handle this @@ -579,12 +512,14 @@ const appRules = { /** * Need to use typescript version of these rules + * https://eslint.org/docs/rules/no-shadow */ 'no-shadow': 'off', '@typescript-eslint/no-shadow': 'error', // This only override the `args` rule (which is "none"). There are too many errors and it's difficult to manually // fix them all, so we'll have to incrementally update. + // https://eslint.org/docs/rules/no-unused-vars 'no-unused-vars': 'off', '@typescript-eslint/no-unused-vars': [ 'error', @@ -606,6 +541,7 @@ const appRules = { }, ], + // https://eslint.org/docs/rules/no-use-before-define 'no-use-before-define': 'off', // This seems to have been turned on while previously it had been off '@typescript-eslint/no-use-before-define': ['off'], @@ -787,6 +723,7 @@ const appRules = { }; const strictRules = { + // https://eslint.org/docs/rules/no-console 'no-console': ['error'], // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-is-mounted.md diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 7564d8e8cbccb1..933b585b428c33 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -228,8 +228,6 @@ yarn.lock @getsentry/owners-js-de /tests/snuba/api/endpoints/test_organization_events_vitals.py @getsentry/visibility /tests/snuba/api/endpoints/test_organization_tagkey_values.py @getsentry/visibility -/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py @getsentry/data - /src/sentry/spans/ @getsentry/visibility /tests/sentry/spans/ @getsentry/visibility diff --git a/.github/workflows/test_docker_compose_acceptance.yml b/.github/workflows/test_docker_compose_acceptance.yml index bc5a852ae2f2ff..aa81794eb085ba 100644 --- a/.github/workflows/test_docker_compose_acceptance.yml +++ b/.github/workflows/test_docker_compose_acceptance.yml @@ -4,7 +4,7 @@ name: test-docker-compose-acceptance on: schedule: - - cron: '0 * * * *' + - cron: '0 0 * * *' # Cancel in progress workflows on pull_requests. # https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value diff --git a/.github/workflows/test_docker_compose_backend.yml b/.github/workflows/test_docker_compose_backend.yml index 0108f97c489184..179b1efee17362 100644 --- a/.github/workflows/test_docker_compose_backend.yml +++ b/.github/workflows/test_docker_compose_backend.yml @@ -2,7 +2,7 @@ name: test-docker-compose-backend on: schedule: - - cron: '0 * * * *' + - cron: '0 0 * * *' # Cancel in progress workflows on pull_requests. 
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value diff --git a/CHANGES b/CHANGES index d3edb43d6cd166..84afe672222eeb 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,35 @@ +24.11.1 +------- + +### Various fixes & improvements + +- feat(toolbar): Make the login-success page have styles that blend with the login flow (#81230) by @ryan953 +- fix(issues): Revert to app external issue name (#81277) by @scttcper +- Revert "chore(similarity): Do not send > 30 system frames to seer (#81259)" (104352cb) by @getsentry-bot +- :wrench: chore(slos): Update Halt to Success for Bot Commands (#81271) by @iamrajjoshi +- fix(auth): Adding scoping_organization_id to replica (#81213) by @sentaur-athena +- chore(similarity): Do not send > 30 system frames to seer (#81259) by @jangjodi +- fix(issues): Animate dropdown chevrons, button sizes (#81262) by @scttcper +- feat(eap): Add missing profile.id column to EAP (#81263) by @Zylphrex +- ref(dashboards): Modify how permissions are handled for editing/deleting dashboards (#80684) by @harshithadurai +- feat(explore): Format numeric tags nicely in explore (#81255) by @Zylphrex +- fix(explore): Preserve sort when adding group by (#81258) by @Zylphrex +- ref(insights): remove insights-domain-view flag part 1 (#81241) by @DominikB2014 +- chore(alerts): Drop included and excluded projects (#81250) by @ceorourke +- ref: fix flaky digests test (#81256) by @asottile-sentry +- chore(sentryapps) Remove option for sentryapp RPC transition (#81245) by @markstory +- fix(grouping): Only collect metadata timing metric when actually getting metadata (#81252) by @lobsterkatie +- chore(performance): Remove old anomaly detection backend (#80696) by @gggritso +- Revert "chore(similarity): Add logging for over 30 system frames (#81130)" (7b7e7955) by @getsentry-bot +- ♻️ chore(slo): SLOs for CommitContextIntegration (#81225) by @iamrajjoshi +- chore(widget-builder): Remove organization props (#81248) by @narsaynorath +- chore(integrations): SourceCodeSearchEndpoint metrics (#80956) by @mifu67 +- chore(vsts): vsts installation step metrics (#80789) by @cathteng +- Remove excluded_projects & include_all_projects columns (#81204) by @ceorourke +- fix(issues): Wrap solutions in error boundary (#81244) by @scttcper + +_Plus 338 more_ + 24.11.0 ------- diff --git a/devservices/config.yml b/devservices/config.yml index 470dec710c08de..c7c73ccb1528aa 100644 --- a/devservices/config.yml +++ b/devservices/config.yml @@ -53,10 +53,12 @@ services: - 127.0.0.1:5432:5432 extra_hosts: - host.docker.internal:host-gateway + restart: unless-stopped networks: devservices: name: devservices + external: true volumes: postgres-data: diff --git a/fixtures/backup/model_dependencies/detailed.json b/fixtures/backup/model_dependencies/detailed.json index e8f1a408d32fd0..aae54f78099416 100644 --- a/fixtures/backup/model_dependencies/detailed.json +++ b/fixtures/backup/model_dependencies/detailed.json @@ -119,6 +119,11 @@ "model": "sentry.organization", "nullable": true }, + "scoping_organization_id": { + "kind": "HybridCloudForeignKey", + "model": "sentry.organization", + "nullable": true + }, "user_id": { "kind": "HybridCloudForeignKey", "model": "sentry.user", @@ -481,34 +486,6 @@ "table_name": "sentry_alertruleactivity", "uniques": [] }, - "sentry.alertruleexcludedprojects": { - "dangling": false, - "foreign_keys": { - "alert_rule": { - "kind": "FlexibleForeignKey", - "model": "sentry.alertrule", - "nullable": false - }, - "project": { - "kind": 
"FlexibleForeignKey", - "model": "sentry.project", - "nullable": false - } - }, - "model": "sentry.alertruleexcludedprojects", - "relocation_dependencies": [], - "relocation_scope": "Organization", - "silos": [ - "Region" - ], - "table_name": "sentry_alertruleexcludedprojects", - "uniques": [ - [ - "alert_rule", - "project" - ] - ] - }, "sentry.alertruleprojects": { "dangling": false, "foreign_keys": { @@ -588,34 +565,6 @@ "table_name": "sentry_alertruletriggeraction", "uniques": [] }, - "sentry.alertruletriggerexclusion": { - "dangling": false, - "foreign_keys": { - "alert_rule_trigger": { - "kind": "FlexibleForeignKey", - "model": "sentry.alertruletrigger", - "nullable": false - }, - "query_subscription": { - "kind": "FlexibleForeignKey", - "model": "sentry.querysubscription", - "nullable": false - } - }, - "model": "sentry.alertruletriggerexclusion", - "relocation_dependencies": [], - "relocation_scope": "Organization", - "silos": [ - "Region" - ], - "table_name": "sentry_alertruletriggerexclusion", - "uniques": [ - [ - "alert_rule_trigger", - "query_subscription" - ] - ] - }, "sentry.apiapplication": { "dangling": false, "foreign_keys": { @@ -6505,12 +6454,17 @@ ] }, "workflow_engine.detector": { - "dangling": false, + "dangling": true, "foreign_keys": { + "created_by_id": { + "kind": "HybridCloudForeignKey", + "model": "sentry.user", + "nullable": true + }, "organization": { "kind": "FlexibleForeignKey", "model": "sentry.organization", - "nullable": false + "nullable": true }, "owner_team": { "kind": "FlexibleForeignKey", @@ -6522,6 +6476,11 @@ "model": "sentry.user", "nullable": true }, + "project": { + "kind": "FlexibleForeignKey", + "model": "sentry.project", + "nullable": true + }, "workflow_condition_group": { "kind": "FlexibleForeignKey", "model": "workflow_engine.dataconditiongroup", @@ -6591,11 +6550,31 @@ "workflow_engine.workflow": { "dangling": false, "foreign_keys": { + "created_by_id": { + "kind": "HybridCloudForeignKey", + "model": "sentry.user", + "nullable": true + }, + "environment": { + "kind": "FlexibleForeignKey", + "model": "sentry.environment", + "nullable": true + }, "organization": { "kind": "FlexibleForeignKey", "model": "sentry.organization", "nullable": false }, + "owner_team": { + "kind": "FlexibleForeignKey", + "model": "sentry.team", + "nullable": true + }, + "owner_user_id": { + "kind": "HybridCloudForeignKey", + "model": "sentry.user", + "nullable": true + }, "when_condition_group": { "kind": "FlexibleForeignKey", "model": "workflow_engine.dataconditiongroup", diff --git a/fixtures/backup/model_dependencies/flat.json b/fixtures/backup/model_dependencies/flat.json index 42e4fe237389f3..be2e561c779936 100644 --- a/fixtures/backup/model_dependencies/flat.json +++ b/fixtures/backup/model_dependencies/flat.json @@ -68,10 +68,6 @@ "sentry.alertrule", "sentry.user" ], - "sentry.alertruleexcludedprojects": [ - "sentry.alertrule", - "sentry.project" - ], "sentry.alertruleprojects": [ "sentry.alertrule", "sentry.project" @@ -84,10 +80,6 @@ "sentry.integration", "sentry.sentryapp" ], - "sentry.alertruletriggerexclusion": [ - "sentry.alertruletrigger", - "sentry.querysubscription" - ], "sentry.apiapplication": [ "sentry.user" ], @@ -899,6 +891,7 @@ ], "workflow_engine.detector": [ "sentry.organization", + "sentry.project", "sentry.team", "sentry.user", "workflow_engine.dataconditiongroup" @@ -911,7 +904,10 @@ "workflow_engine.workflow" ], "workflow_engine.workflow": [ + "sentry.environment", "sentry.organization", + "sentry.team", + "sentry.user", 
"workflow_engine.dataconditiongroup" ], "workflow_engine.workflowdataconditiongroup": [ diff --git a/fixtures/backup/model_dependencies/sorted.json b/fixtures/backup/model_dependencies/sorted.json index 66ced698878d1a..fc4fbb2c04246f 100644 --- a/fixtures/backup/model_dependencies/sorted.json +++ b/fixtures/backup/model_dependencies/sorted.json @@ -53,12 +53,6 @@ "workflow_engine.dataconditiongroup", "workflow_engine.dataconditiongroupaction", "workflow_engine.datasource", - "workflow_engine.detector", - "workflow_engine.detectorstate", - "workflow_engine.workflow", - "workflow_engine.workflowdataconditiongroup", - "workflow_engine.detectorworkflow", - "workflow_engine.datasourcedetector", "workflow_engine.datacondition", "sentry.savedsearch", "sentry.rollbackuser", @@ -119,6 +113,9 @@ "flags.flagwebhooksigningsecretmodel", "flags.flagauditlogmodel", "feedback.feedback", + "workflow_engine.workflow", + "workflow_engine.detector", + "workflow_engine.datasourcedetector", "uptime.projectuptimesubscription", "sentry.userreport", "sentry.useroption", @@ -205,6 +202,9 @@ "sentry.apiauthorization", "sentry.alertrule", "hybridcloud.apitokenreplica", + "workflow_engine.workflowdataconditiongroup", + "workflow_engine.detectorworkflow", + "workflow_engine.detectorstate", "sentry.teamkeytransaction", "sentry.snubaqueryeventtype", "sentry.sentryappinstallation", @@ -225,7 +225,6 @@ "sentry.dashboardpermissionsteam", "sentry.alertruletrigger", "sentry.alertruleprojects", - "sentry.alertruleexcludedprojects", "sentry.alertruleactivity", "sentry.alertruleactivations", "sentry.alertruleactivationcondition", @@ -234,7 +233,6 @@ "sentry.sentryappinstallationforprovider", "sentry.incident", "sentry.dashboardwidgetqueryondemand", - "sentry.alertruletriggerexclusion", "sentry.alertruletriggeraction", "sentry.timeseriessnapshot", "sentry.servicehookproject", diff --git a/fixtures/backup/model_dependencies/truncate.json b/fixtures/backup/model_dependencies/truncate.json index 8f0aa950ece716..ad922a9a06ffba 100644 --- a/fixtures/backup/model_dependencies/truncate.json +++ b/fixtures/backup/model_dependencies/truncate.json @@ -53,12 +53,6 @@ "workflow_engine_dataconditiongroup", "workflow_engine_dataconditiongroupaction", "workflow_engine_datasource", - "workflow_engine_detector", - "workflow_engine_detectorstate", - "workflow_engine_workflow", - "workflow_engine_workflowdataconditiongroup", - "workflow_engine_detectorworkflow", - "workflow_engine_datasourcedetector", "workflow_engine_datacondition", "sentry_savedsearch", "sentry_rollbackuser", @@ -119,6 +113,9 @@ "flags_webhooksigningsecret", "flags_audit_log", "feedback_feedback", + "workflow_engine_workflow", + "workflow_engine_detector", + "workflow_engine_datasourcedetector", "uptime_projectuptimesubscription", "sentry_userreport", "sentry_useroption", @@ -205,6 +202,9 @@ "sentry_apiauthorization", "sentry_alertrule", "hybridcloud_apitokenreplica", + "workflow_engine_workflowdataconditiongroup", + "workflow_engine_detectorworkflow", + "workflow_engine_detectorstate", "sentry_performanceteamkeytransaction", "sentry_snubaqueryeventtype", "sentry_sentryappinstallation", @@ -225,7 +225,6 @@ "sentry_dashboardpermissionsteam", "sentry_alertruletrigger", "sentry_alertruleprojects", - "sentry_alertruleexcludedprojects", "sentry_alertruleactivity", "sentry_alertruleactivations", "sentry_alertruleactivationcondition", @@ -234,7 +233,6 @@ "sentry_sentryappinstallationforprovider", "sentry_incident", "sentry_dashboardwidgetqueryondemand", - 
"sentry_alertruletriggerexclusion", "sentry_alertruletriggeraction", "sentry_timeseriessnapshot", "sentry_servicehookproject", diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..27b7cdf8c1b768 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0001_initial.py @@ -0,0 +1,27 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField(null=True)), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0002_delete_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0002_delete_pending.py new file mode 100644 index 00000000000000..c4933d32cfcede --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0002_delete_pending.py @@ -0,0 +1,18 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_field_double_pending_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0003_double_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0003_double_pending.py new file mode 100644 index 00000000000000..ce220395342ff9 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0003_double_pending.py @@ -0,0 +1,18 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_field_double_pending_app", "0002_delete_pending"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ 
b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..4446837fa33671 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py @@ -0,0 +1,45 @@ +import django +from django.db import migrations, models + +import sentry +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "fk_table", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="bad_flow_delete_field_pending_with_fk_constraint_app.fktable", + db_index=False, + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..f036396f10a590 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,19 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + atomic = False + + dependencies = [ + ("bad_flow_delete_field_pending_with_fk_constraint_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="fk_table", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/models.py new file mode 100644 index 00000000000000..d936d5039213f7 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/models.py @@ -0,0 +1,12 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class FkTable(models.Model): + field = models.IntegerField(default=0, 
null=False) + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) + fk_table = FlexibleForeignKey(FkTable, db_index=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..fd1e8c68b7800c --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py @@ -0,0 +1,25 @@ +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField()), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..6502d91d97cd39 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("bad_flow_delete_field_pending_with_not_null_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..482ebcd0ab28f4 --- /dev/null +++ 
b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0001_initial.py @@ -0,0 +1,27 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField()), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..b4cad5043fc589 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,18 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_field_without_pending_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..69de29635772a4 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0001_initial.py @@ -0,0 +1,26 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0002_delete_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0002_delete_pending.py new 
file mode 100644 index 00000000000000..917b83001e8a8d --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0002_delete_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_model_double_pending_app", "0001_initial"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0003_double_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0003_double_pending.py new file mode 100644 index 00000000000000..e52a64272a2020 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0003_double_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_model_double_pending_app", "0002_delete_pending"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..69de29635772a4 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0001_initial.py @@ -0,0 +1,26 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0002_delete_without_pending.py 
b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..10d83c4d9c3e39 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_model_without_pending_app", "0001_initial"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..2747f841d8a570 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py @@ -0,0 +1,45 @@ +import django +from django.db import migrations, models + +import sentry +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "fk_table", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="bad_flow_delete_pending_with_fk_constraints_app.fktable", + db_index=False, + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..721c22b2798a0e --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,18 @@ +from sentry.new_migrations.migrations import CheckedMigration +from 
sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + atomic = False + + dependencies = [ + ("bad_flow_delete_pending_with_fk_constraints_app", "0001_initial"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/models.py new file mode 100644 index 00000000000000..d936d5039213f7 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/models.py @@ -0,0 +1,12 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class FkTable(models.Model): + field = models.IntegerField(default=0, null=False) + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) + fk_table = FlexibleForeignKey(FkTable, db_index=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..8d119afd028622 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/0001_initial.py @@ -0,0 +1,16 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + allow_run_sql = False + + operations = [migrations.RunSQL("select 1;")] diff --git a/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/models.py b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..85f25fffd727f5 --- /dev/null +++ 
b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py @@ -0,0 +1,47 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 +import django +from django.db import migrations, models + +import sentry +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "fk_table", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="good_flow_delete_field_pending_with_fk_constraint_app.fktable", + db_index=False, + null=True, + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_remove_constraints_and_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_remove_constraints_and_pending.py new file mode 100644 index 00000000000000..767c38e610f98b --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_remove_constraints_and_pending.py @@ -0,0 +1,34 @@ +import django +from django.db import migrations + +import sentry +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + atomic = False + + dependencies = [ + ("good_flow_delete_field_pending_with_fk_constraint_app", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="TestTable", + name="fk_table", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="good_flow_delete_field_pending_with_fk_constraint_app.fktable", + db_index=False, + db_constraint=False, + null=True, + ), + ), + SafeRemoveField( + model_name="testtable", + name="fk_table", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0003_delete.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..adae7ffa190165 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0003_delete.py @@ -0,0 +1,20 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ( + "good_flow_delete_field_pending_with_fk_constraint_app", + "0002_remove_constraints_and_pending", + ), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="fk_table", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/__init__.py 
b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/models.py new file mode 100644 index 00000000000000..e78bdd99a9322d --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/models.py @@ -0,0 +1,12 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class FkTable(models.Model): + field = models.IntegerField(default=0, null=False) + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) + fk_table = FlexibleForeignKey(FkTable, null=True, db_index=False) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..1a0f0a785f5159 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py @@ -0,0 +1,37 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField(null=True)), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0002_remove_not_null_and_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0002_remove_not_null_and_pending.py new file mode 100644 index 00000000000000..e413d67d2a23b6 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0002_remove_not_null_and_pending.py @@ -0,0 +1,24 @@ +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_pending_with_not_null_app", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="TestTable", + name="field", + field=models.IntegerField(null=True), + ), + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0003_delete.py 
b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..98ed7ffbe84c48 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0003_delete.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_pending_with_not_null_app", "0002_remove_not_null_and_pending"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..0c0b198c43b88f --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0001_initial.py @@ -0,0 +1,55 @@ +from django.db import migrations, models + +from sentry.db.models import FlexibleForeignKey +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + checked = False + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="OtherTable", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False)), + ], + ), + migrations.CreateModel( + name="M2MTable", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False)), + ( + "alert_rule", + FlexibleForeignKey( + on_delete=models.deletion.CASCADE, + to="good_flow_delete_field_pending_with_not_null_m2m_app.othertable", + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False)), + ( + "excluded_projects", + models.ManyToManyField( + through="good_flow_delete_field_pending_with_not_null_m2m_app.M2MTable", + to="good_flow_delete_field_pending_with_not_null_m2m_app.othertable", + ), + ), + ], + ), + migrations.AddField( + model_name="m2mtable", + name="test_table", + field=FlexibleForeignKey( + on_delete=models.deletion.CASCADE, + to="good_flow_delete_field_pending_with_not_null_m2m_app.testtable", 
+ ), + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..7cd3b1a05e1e07 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_pending_with_not_null_m2m_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="excluded_projects", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/models.py new file mode 100644 index 00000000000000..90c1bc2b9e0f5e --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/models.py @@ -0,0 +1,18 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class OtherTable(models.Model): + pass + + +class M2MTable(models.Model): + alert_rule = FlexibleForeignKey(OtherTable) + test_table = FlexibleForeignKey( + "good_flow_delete_field_pending_with_not_null_m2m_app.TestTable" + ) + + +class TestTable(models.Model): + excluded_projects = models.ManyToManyField(OtherTable, through=M2MTable) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..2540b245ec0644 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0001_initial.py @@ -0,0 +1,23 @@ +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + initial = True + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField(null=True)), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0002_set_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0002_set_pending.py new file mode 100644 index 00000000000000..48b502df50c460 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0002_set_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations 
import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_simple_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0003_delete.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..e57524bf66d90d --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0003_delete.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_simple_app", "0002_set_pending"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/models.py new file mode 100644 index 00000000000000..f472e7d8e90506 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=True) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0002_delete_model_state.py b/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0002_delete_model_state.py index aa7a0b3105125d..4d9779f1c977f0 100644 --- a/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0002_delete_model_state.py +++ b/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0002_delete_model_state.py @@ -1,8 +1,7 @@ # Generated by Django 3.1 on 2019-09-22 21:47 - -from django.db import migrations - from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction class Migration(CheckedMigration): @@ -12,11 +11,5 @@ class Migration(CheckedMigration): ] operations = [ - migrations.SeparateDatabaseAndState( - state_operations=[ - migrations.DeleteModel( - name="TestTable", - ), - ] - ) + SafeDeleteModel(name="TestTable", deletion_action=DeletionAction.MOVE_TO_PENDING), ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0003_delete_table.py b/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0003_delete_table.py index f0a85b49040a3e..8a6e05080a31bc 100644 --- a/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0003_delete_table.py +++ b/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0003_delete_table.py @@ -1,16 +1,15 @@ # Generated by Django 3.1 on 2019-09-22 21:47 - -from django.db import 
migrations - from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction class Migration(CheckedMigration): dependencies = [ - ("good_flow_delete_model_state_app", "0001_initial"), + ("good_flow_delete_model_state_app", "0002_delete_model_state"), ] operations = [ - migrations.RunSQL('DROP TABLE "good_flow_delete_model_state_app_testtable";'), + SafeDeleteModel(name="TestTable", deletion_action=DeletionAction.DELETE), ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..6f55cccc2e683d --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py @@ -0,0 +1,46 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 +import django +from django.db import migrations, models + +import sentry +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "fk_table", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="good_flow_delete_pending_with_fk_constraints_app.fktable", + db_index=False, + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0002_remove_constraints_and_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0002_remove_constraints_and_pending.py new file mode 100644 index 00000000000000..cdb0408e096d99 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0002_remove_constraints_and_pending.py @@ -0,0 +1,32 @@ +import django +from django.db import migrations + +import sentry +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + atomic = False + + dependencies = [ + ("good_flow_delete_pending_with_fk_constraints_app", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="TestTable", + name="fk_table", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="good_flow_delete_pending_with_fk_constraints_app.fktable", + db_index=False, + db_constraint=False, + ), + ), + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0003_delete.py 
b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..ac2813a8d7f014 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0003_delete.py @@ -0,0 +1,16 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_pending_with_fk_constraints_app", "0002_remove_constraints_and_pending"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/models.py new file mode 100644 index 00000000000000..d936d5039213f7 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/models.py @@ -0,0 +1,12 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class FkTable(models.Model): + field = models.IntegerField(default=0, null=False) + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) + fk_table = FlexibleForeignKey(FkTable, db_index=False) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..2b6d293ee049e6 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0001_initial.py @@ -0,0 +1,22 @@ +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + initial = True + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0002_set_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0002_set_pending.py new file mode 100644 index 00000000000000..c7475e451e0bcd --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0002_set_pending.py @@ -0,0 +1,16 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_simple_app", "0001_initial"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git 
a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0003_delete.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..796cf774758675 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0003_delete.py @@ -0,0 +1,16 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_simple_app", "0002_set_pending"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..8939a5ba3393b7 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/0001_initial.py @@ -0,0 +1,16 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + allow_run_sql = True + + operations = [migrations.RunSQL("select 1;")] diff --git a/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/models.py b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index 2d5deeb60aa756..1fb2b2b2a6d5e9 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -6,11 +6,19 @@ To resolve this, rebase against latest master and regenerate your migration. Thi will then be regenerated, and you should be able to merge without conflicts. 
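The lockfile hunk above shows the mechanism: migrations_lockfile.txt records the latest migration per Django app, so two branches that each add a migration to the same app collide on that line and produce a merge conflict instead of silently diverging. A rough, illustrative sketch of how such a check could work is below; the file locations, the `sentry` app path, and the comparison logic are assumptions on my part — only the `app: latest_migration` line format is taken from this diff, and the real Sentry tooling that maintains the lockfile may work differently.

```python
# Illustrative sketch only: parse a lockfile of "<app>: <latest_migration>"
# lines and compare one app's entry against the newest migration module on
# disk. Paths and the choice of the "sentry" app are assumptions.
from pathlib import Path


def parse_lockfile(path: Path) -> dict[str, str]:
    entries = {}
    for line in path.read_text().splitlines():
        line = line.strip()
        if not line or ":" not in line:
            continue  # skip the explanatory header and blank lines
        app, _, migration = line.partition(":")
        entries[app.strip()] = migration.strip()
    return entries


def latest_migration_on_disk(migrations_dir: Path) -> str | None:
    # Migration modules sort lexicographically thanks to their numeric prefix.
    names = sorted(p.stem for p in migrations_dir.glob("[0-9]*.py"))
    return names[-1] if names else None


if __name__ == "__main__":
    locked = parse_lockfile(Path("migrations_lockfile.txt"))
    current = latest_migration_on_disk(Path("src/sentry/migrations"))
    if current and locked.get("sentry") != current:
        raise SystemExit(
            f"migrations_lockfile.txt is stale: sentry is at {current}, "
            f"but the lockfile says {locked.get('sentry')}"
        )
```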
feedback: 0004_index_together -hybridcloud: 0016_add_control_cacheversion + +hybridcloud: 0017_add_scoping_organization_apitokenreplica + nodestore: 0002_nodestore_no_dictfield + remote_subscriptions: 0003_drop_remote_subscription + replays: 0004_index_together -sentry: 0792_add_unique_index_apiauthorization + +sentry: 0797_drop_excluded_project_triggers + social_auth: 0002_default_auto_field + uptime: 0018_add_trace_sampling_field_to_uptime -workflow_engine: 0013_related_name_conditions_on_dcg + +workflow_engine: 0014_model_additions_for_milestones diff --git a/pyproject.toml b/pyproject.toml index 2430cb3c36673c..bdeefc42f039fb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -180,7 +180,6 @@ module = [ "sentry.api.serializers.models.rule", "sentry.api.serializers.models.team", "sentry.api.serializers.rest_framework.mentions", - "sentry.api.serializers.rest_framework.notification_action", "sentry.auth.helper", "sentry.auth.provider", "sentry.auth.system", @@ -319,7 +318,6 @@ module = [ "sentry.testutils.helpers.notifications", "sentry.utils.auth", "sentry.utils.committers", - "sentry.utils.services", "sentry.web.forms.accounts", "sentry.web.frontend.auth_login", "sentry.web.frontend.auth_organization_login", @@ -496,6 +494,7 @@ module = [ "sentry.utils.redis", "sentry.utils.redis_metrics", "sentry.utils.sentry_apps.*", + "sentry.utils.services", "sentry.utils.sms", "sentry.utils.snowflake", "sentry.utils.urls", diff --git a/requirements-base.txt b/requirements-base.txt index 6ea0481cb0af49..3fe990ec7d9918 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -68,7 +68,7 @@ rfc3986-validator>=0.1.1 sentry-arroyo>=2.16.5 sentry-kafka-schemas>=0.1.120 sentry-ophio==1.0.0 -sentry-protos>=0.1.34 +sentry-protos>=0.1.37 sentry-redis-tools>=0.1.7 sentry-relay>=0.9.3 sentry-sdk[http2]>=2.18.0 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 08b036ada00811..17b19f48dc53e2 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -36,7 +36,7 @@ cryptography==43.0.1 cssselect==1.0.3 cssutils==2.9.0 datadog==0.49.1 -devservices==1.0.3 +devservices==1.0.4 distlib==0.3.8 distro==1.8.0 django==5.1.1 @@ -146,13 +146,13 @@ pyflakes==3.2.0 pyjwt==2.4.0 pymemcache==4.0.0 pysocks==1.7.1 -pytest==8.1.2 +pytest==8.3.3 pytest-cov==4.0.0 pytest-django==4.9.0 pytest-fail-slow==0.3.0 pytest-json-report==1.5.0 pytest-metadata==3.1.1 -pytest-rerunfailures==14.0 +pytest-rerunfailures==15.0 pytest-sentry==0.3.0 pytest-xdist==3.0.2 python-dateutil==2.9.0 @@ -182,12 +182,13 @@ s3transfer==0.10.0 selenium==4.16.0 sentry-arroyo==2.16.5 sentry-cli==2.16.0 +sentry-covdefaults-disable-branch-coverage==1.0.2 sentry-devenv==1.13.0 sentry-forked-django-stubs==5.1.1.post1 sentry-forked-djangorestframework-stubs==3.15.1.post2 sentry-kafka-schemas==0.1.120 sentry-ophio==1.0.0 -sentry-protos==0.1.34 +sentry-protos==0.1.37 sentry-redis-tools==0.1.7 sentry-relay==0.9.3 sentry-sdk==2.18.0 diff --git a/requirements-dev.txt b/requirements-dev.txt index b19d6fb72bfc58..5e01bcaaf657c3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,20 +1,21 @@ --index-url https://pypi.devinfra.sentry.io/simple sentry-devenv>=1.13.0 -devservices>=1.0.3 +devservices>=1.0.4 covdefaults>=2.3.0 +sentry-covdefaults-disable-branch-coverage>=1.0.2 docker>=7 time-machine>=2.16.0 honcho>=2 openapi-core>=0.18.2 openapi-pydantic>=0.4.0 -pytest>=8.1 +pytest>=8.3 pytest-cov>=4.0.0 pytest-django>=4.9.0 pytest-fail-slow>=0.3.0 pytest-json-report>=1.5.0 -pytest-rerunfailures>=14 
+pytest-rerunfailures>=15 pytest-sentry>=0.3.0 pytest-xdist>=3 responses>=0.23.1 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 0dff6e9bf70562..26164f4a387bd0 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -127,7 +127,7 @@ s3transfer==0.10.0 sentry-arroyo==2.16.5 sentry-kafka-schemas==0.1.120 sentry-ophio==1.0.0 -sentry-protos==0.1.34 +sentry-protos==0.1.37 sentry-redis-tools==0.1.7 sentry-relay==0.9.3 sentry-sdk==2.18.0 diff --git a/scripts/use-colima.sh b/scripts/use-colima.sh deleted file mode 100755 index 1ff1dca0dde8e2..00000000000000 --- a/scripts/use-colima.sh +++ /dev/null @@ -1,72 +0,0 @@ -#!/bin/bash - -POSTGRES_CONTAINER="sentry_postgres" -USE_NEW_DEVSERVICES=${USE_NEW_DEVSERVICES:-"0"} -if [ "$USE_NEW_DEVSERVICES" == "1" ]; then - POSTGRES_CONTAINER="sentry-postgres-1" -fi - -if ! [[ -x ~/.local/share/sentry-devenv/bin/colima ]]; then - echo "You need to install devenv! https://github.com/getsentry/devenv/#install" - exit 1 -fi - -if [[ "$(sysctl -n machdep.cpu.brand_string)" != Intel* ]]; then - case "$(sw_vers -productVersion)" in - *12.*|*13.*) - echo "Your ARM Mac is on a version incompatible with colima." - echo "Use Docker Desktop for now until you upgrade to at least MacOS 14." - exit 1 - ;; - esac -fi - -echo "Copying your postgres volume for use with colima. Will take a few minutes." -tmpdir=$(mktemp -d) -docker context use desktop-linux -docker run --rm -v $POSTGRES_CONTAINER:/from -v "${tmpdir}:/to" alpine ash -c "cd /from ; cp -a . /to" || { echo "You need to start Docker Desktop."; exit 1; } - -echo "Stopping Docker.app. If a 'process terminated unexpectedly' dialog appears, dismiss it." -osascript - <<'EOF' || exit -quit application "Docker" -EOF - -# We aren't uninstalling for now - this makes rolling back to docker desktop faster. -# Also, less breakage as people may be using things like docker-credential-desktop. -# echo "Uninstalling docker cask (which includes Docker Desktop)." -# brew uninstall --cask docker - -# We do want to get people on just the docker cli though, to enable uninstalling the cask. -echo "Installing docker (cli only)." -brew install docker -# Unlinks docker (cask). -brew unlink docker -brew link --overwrite docker - -# This removes credsStore, saving it under oldCredsStore so it can be restored later. -# The right value under colima for this is "colima", but I think vast majority of people -# are authing their docker through gcloud, not docker cli. -python3 <<'EOF' -import os -import json -with open(os.path.expanduser("~/.docker/config.json"), "rb") as f: - config = json.loads(f.read()) - credsStore = config.get("credsStore") - if credsStore is None: - exit(0) - config["oldCredsStore"] = credsStore - del config["credsStore"] -with open(os.path.expanduser("~/.docker/config.json"), "w") as f: - f.write(json.dumps(config)) -EOF - -echo "Starting colima." -devenv colima start - -echo "Recreating your postgres volume for use with colima. May take a few minutes." -docker volume create --name $POSTGRES_CONTAINER -docker run --rm -v "${tmpdir}:/from" -v $POSTGRES_CONTAINER:/to alpine ash -c "cd /from ; cp -a . /to" -rm -rf "$tmpdir" - -echo "-----------------------------------------------" -echo "All done. Start devservices at your discretion." 
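Both of the removed runtime-switching scripts embed the same ~/.docker/config.json manipulation as inline python3 heredocs: use-colima.sh (above) stashes `credsStore` under `oldCredsStore` before starting colima, and use-docker-desktop.sh (next hunk) restores it. The standalone sketch below simply restates that logic in one place; the function names are mine, but the file path and key names come straight from the deleted scripts.

```python
# Minimal sketch of the ~/.docker/config.json credsStore shuffle performed by
# the deleted scripts. Function names are illustrative; path and keys match
# the originals.
import json
import os

CONFIG_PATH = os.path.expanduser("~/.docker/config.json")


def _load() -> dict:
    with open(CONFIG_PATH, "rb") as f:
        return json.loads(f.read())


def _save(config: dict) -> None:
    with open(CONFIG_PATH, "w") as f:
        f.write(json.dumps(config))


def stash_creds_store() -> None:
    """Move credsStore aside, as use-colima.sh did before starting colima."""
    config = _load()
    creds_store = config.get("credsStore")
    if creds_store is None:
        return
    config["oldCredsStore"] = creds_store
    del config["credsStore"]
    _save(config)


def restore_creds_store() -> None:
    """Put the original credsStore back, as use-docker-desktop.sh did."""
    config = _load()
    old = config.get("oldCredsStore")
    if old is None:
        return
    config["credsStore"] = old
    del config["oldCredsStore"]
    _save(config)
```

Stashing the value instead of deleting it is what makes the switch reversible: as the script comments note, most people authenticate Docker through gcloud rather than the CLI, so the original credential helper can be restored without re-authenticating.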
diff --git a/scripts/use-docker-desktop.sh b/scripts/use-docker-desktop.sh deleted file mode 100755 index f926cf27e9df66..00000000000000 --- a/scripts/use-docker-desktop.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -set -e - -~/.local/share/sentry-devenv/bin/colima stop - -echo "Using docker cli from cask. You may be prompted for your password." -# brew --prefix doesn't seem to apply here - it's just /usr/local -sudo ln -svf /Applications/Docker.app/Contents/Resources/bin/docker "/usr/local/bin/docker" - -# this restores the old credsStore value -python3 <<'EOF' -import os -import json -with open(os.path.expanduser("~/.docker/config.json"), "rb") as f: - config = json.loads(f.read()) - oldCredsStore = config.get("oldCredsStore") - if oldCredsStore is None: - exit(0) - config["credsStore"] = oldCredsStore - del config["oldCredsStore"] -with open(os.path.expanduser("~/.docker/config.json"), "w") as f: - f.write(json.dumps(config)) -EOF - -echo "Starting Docker." -open -a /Applications/Docker.app --args --unattended - -echo "-----------------------------------------------" -echo "All done. Start devservices at your discretion." diff --git a/setup.cfg b/setup.cfg index 0f75e4bda257df..aae014f9aa7768 100644 --- a/setup.cfg +++ b/setup.cfg @@ -112,7 +112,9 @@ extension = [coverage:run] omit = src/sentry/migrations/* -plugins = covdefaults +plugins = + covdefaults + sentry_covdefaults_disable_branch_coverage [coverage:report] # Setting this to 0 makes it falsy, and it gets ignored, so we set it to diff --git a/src/sentry/api/authentication.py b/src/sentry/api/authentication.py index f67fec72026128..eb0fd888294f29 100644 --- a/src/sentry/api/authentication.py +++ b/src/sentry/api/authentication.py @@ -422,11 +422,11 @@ def authenticate_token(self, request: Request, token_str: str) -> tuple[Any, Any if application_is_inactive: raise AuthenticationFailed("UserApplication inactive or deleted") - if token.organization_id: + if token.scoping_organization_id: # We need to make sure the organization to which the token has access is the same as the one in the URL organization = None organization_context = organization_service.get_organization_by_id( - id=token.organization_id + id=token.organization_id, include_projects=False, include_teams=False ) if organization_context: organization = organization_context.organization @@ -439,30 +439,15 @@ def authenticate_token(self, request: Request, token_str: str) -> tuple[Any, Any organization.slug != target_org_id_or_slug and organization.id != target_org_id_or_slug ): - # TODO (@athena): We want to raise auth excecption here but to be sure - # I soft launch this by only logging the error for now - # raise AuthenticationFailed("Unauthorized organization access") - logger.info( - "Token has access to organization %s but wants to get access to organization %s: %s", - organization.slug, - target_org_id_or_slug, - request.path_info, - ) - else: - # TODO (@athena): We want to limit org level token's access to org level endpoints only - # so in the future this will be an auth exception but for now we soft launch by logging an error - logger.info( - "Token has only access to organization %s but is calling an endpoint for multiple organizations: %s", - organization.slug, - request.path_info, + raise AuthenticationFailed("Unauthorized organization access.") + # We want to limit org-scoped tokens' access to org-level endpoints only, + # except for some non-org-level endpoints that we give special treatment + elif resolved_url.url_name not in
["sentry-api-0-organizations"]: + raise AuthenticationFailed( + "This token access is limited to organization endpoints." ) else: - # TODO (@athena): If there is an organization token we should be able to fetch organization context - # Otherwise we should raise an exception - # For now adding logging to investigate if this is a valid case we need to address - logger.info( - "Token has access to an unknown organization: %s", token.organization_id - ) + raise AuthenticationFailed("Cannot resolve organization from token.") return self.transform_auth( user, diff --git a/src/sentry/api/endpoints/event_ai_suggested_fix.py b/src/sentry/api/endpoints/event_ai_suggested_fix.py deleted file mode 100644 index 213ec900ac702c..00000000000000 --- a/src/sentry/api/endpoints/event_ai_suggested_fix.py +++ /dev/null @@ -1,398 +0,0 @@ -from __future__ import annotations - -import logging -import random -from typing import Any - -import orjson -from django.conf import settings -from django.dispatch import Signal -from django.http import HttpResponse, StreamingHttpResponse -from openai import OpenAI, RateLimitError - -from sentry import eventstore -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases.project import ProjectEndpoint -from sentry.api.exceptions import ResourceDoesNotExist -from sentry.types.ratelimit import RateLimit, RateLimitCategory -from sentry.utils.cache import cache - -logger = logging.getLogger(__name__) - -from rest_framework.request import Request - -openai_policy_check = Signal() - -# How many stacktrace frames do we want per exception? -MAX_STACKTRACE_FRAMES = 15 - -# How many exceptions do we want? -MAX_EXCEPTIONS = 3 - -# Do we want tags? They don't seem particularly useful -ADD_TAGS = False - -FUN_PROMPT_CHOICES = [ - "[haiku about the error]", - "[hip hop rhyme about the error]", - "[4 line rhyme about the error]", - "[2 stanza rhyme about the error]", -] - -PROMPT = """\ -You are an assistant that analyses software errors, describing the problem with the following rules: - -* Be helpful, playful and a bit snarky and sarcastic -* Do not talk about the rules in explanations -* Use emojis frequently in the snarky greeting and closing prompt -* The frames of a stack trace is shown with most recent call first -* Stack frames are either from app code or third party libraries -* Never show code examples as diff -* When describing the problem: - * Explain the error and message - * Explain where in the code the issue happend - * Explain the nature of the issue -* When proposing a solution: - * Explain what code changes are necessary to resolve it - * Explain where the solution should be - * Mention best practices for preventing this -* Remember Sentry's marketing message: "Sentry can't fix this" - -Write the answers into the following template: - -``` -[snarky greeting] - -#### Problem Description - -[detailed description of the problem] - -#### Proposed Solution - -[proposed solution to fix this issue] - -[fixed code example] - -#### What Else - -[uplifting closing statements] - -___FUN_PROMPT___ -``` -""" - -# Theset tags are removed because they are quite unstable between different events -# of the same issue, and typically unrelated to something that the AI assistant -# can answer. 
-BLOCKED_TAGS = frozenset( - [ - "user", - "server_name", - "host", - "release", - "handled", - "client_os", - "client_os.name", - "browser", - "browser.name", - "environment", - "runtime", - "device", - "device.family", - "gpu", - "gpu.name", - "gpu.vendor", - "url", - "trace", - "otel", - ] -) - -openai_client: OpenAI | None = None - - -def get_openai_client() -> OpenAI: - global openai_client - - if openai_client: - return openai_client - - # this will raise if OPENAI_API_KEY is not set - openai_client = OpenAI(api_key=settings.OPENAI_API_KEY) - - return openai_client - - -def get_openai_policy(organization, user, pii_certified): - """Uses a signal to determine what the policy for OpenAI should be.""" - results = openai_policy_check.send( - sender=EventAiSuggestedFixEndpoint, - organization=organization, - user=user, - pii_certified=pii_certified, - ) - result = "allowed" - - # Last one wins - for _, new_result in results: - if new_result is not None: - result = new_result - - return result - - -def set_if_value(d, key, value): - if value is not None: - d[key] = value - - -def trim_frames(frames, frame_allowance=MAX_STACKTRACE_FRAMES): - frames_len = 0 - app_frames = [] - system_frames = [] - - for frame in frames: - frames_len += 1 - if frame.get("in_app"): - app_frames.append(frame) - else: - system_frames.append(frame) - - if frames_len <= frame_allowance: - return frames - - remaining = frames_len - frame_allowance - app_count = len(app_frames) - system_allowance = max(frame_allowance - app_count, 0) - if system_allowance: - half_max = int(system_allowance / 2) - # prioritize trimming system frames - for frame in system_frames[half_max:-half_max]: - frame["delete"] = True - remaining -= 1 - else: - for frame in system_frames: - frame["delete"] = True - remaining -= 1 - - if remaining: - app_allowance = app_count - remaining - half_max = int(app_allowance / 2) - - for frame in app_frames[half_max:-half_max]: - frame["delete"] = True - - return [x for x in frames if not x.get("delete")] - - -def describe_event_for_ai(event, model): - detailed = model.startswith("gpt-4") - data = {} - - msg = event.get("logentry") - if msg: - data["message"] = msg - - platform = event.get("platform") - if platform and platform != "other": - data["language"] = platform - - exceptions = data.setdefault("exceptions", []) - for idx, exc in enumerate( - reversed((event.get("exception", {})).get("values", ())[:MAX_EXCEPTIONS]) - ): - exception: dict[str, Any] = {} - if idx > 0: - exception["raised_during_handling_of_previous_exception"] = True - exception["num"] = idx + 1 - exc_type = exc.get("type") - if exc_type: - exception["type"] = exc_type - exception["message"] = exc.get("value") - mechanism = exc.get("mechanism") or {} - exc_meta = mechanism.get("meta") - if exc_meta: - exception["exception_info"] = exc_meta - if mechanism.get("handled") is False: - exception["unhandled"] = True - - frames = exc.get("stacktrace", {}).get("frames") - first_in_app = True - if frames: - stacktrace = [] - for frame in reversed(frames): - if frame is None: - continue - stack_frame: dict[str, Any] = {} - set_if_value(stack_frame, "func", frame.get("function")) - set_if_value(stack_frame, "module", frame.get("module")) - set_if_value(stack_frame, "file", frame.get("filename")) - set_if_value(stack_frame, "line", frame.get("lineno")) - if frame.get("in_app"): - stack_frame["in_app"] = True - crashed_here = False - if first_in_app: - crashed_here = True - stack_frame["crash"] = "here" - first_in_app = False - line = 
frame.get("context_line") or "" - if (crashed_here and idx == 0) or detailed: - pre_context = frame.get("pre_context") - if pre_context: - stack_frame["code_before"] = pre_context - stack_frame["code"] = line - post_context = frame.get("post_context") - if post_context: - stack_frame["code_after"] = post_context - # {snip} usually appears in minified lines. skip that - elif "{snip}" not in line: - set_if_value(stack_frame, "code", line.strip()) - stacktrace.append(stack_frame) - if stacktrace: - exception["stacktrace"] = trim_frames(stacktrace) - exceptions.append(exception) - - if ADD_TAGS: - tags = data.setdefault("tags", {}) - for tag_key, tag_value in sorted(event["tags"]): - if tag_key not in BLOCKED_TAGS: - tags[tag_key] = tag_value - - return data - - -def suggest_fix(event_data, model=settings.SENTRY_AI_SUGGESTED_FIX_MODEL, stream=False): - """Runs an OpenAI request to suggest a fix.""" - prompt = PROMPT.replace("___FUN_PROMPT___", random.choice(FUN_PROMPT_CHOICES)) - event_info = describe_event_for_ai(event_data, model=model) - - client = get_openai_client() - - response = client.chat.completions.create( - model=model, - temperature=0.7, - messages=[ - {"role": "system", "content": prompt}, - {"role": "user", "content": orjson.dumps(event_info).decode()}, - ], - stream=stream, - ) - if stream: - return reduce_stream(response) - return response.choices[0].message.content - - -def reduce_stream(response): - for chunk in response: - delta = chunk["choices"][0]["delta"] - if "content" in delta: - yield delta["content"] - - -@region_silo_endpoint -class EventAiSuggestedFixEndpoint(ProjectEndpoint): - owner = ApiOwner.ML_AI - publish_status = { - "GET": ApiPublishStatus.PRIVATE, - } - enforce_rate_limit = True - rate_limits = { - "GET": { - RateLimitCategory.IP: RateLimit(limit=5, window=1), - RateLimitCategory.USER: RateLimit(limit=5, window=1), - RateLimitCategory.ORGANIZATION: RateLimit(limit=5, window=1), - }, - } - - def get(self, request: Request, project, event_id) -> HttpResponse | StreamingHttpResponse: - """ - Makes AI make suggestions about an event - ```````````````````````````````````````` - - This endpoint returns a JSON response that provides helpful suggestions about how to - understand or resolve an event. 
- """ - # To use this feature you need openai to be configured - if not settings.OPENAI_API_KEY: - raise ResourceDoesNotExist - - event = eventstore.backend.get_event_by_id(project.id, event_id) - if event is None: - raise ResourceDoesNotExist - - policy_failure = None - # If the option has specifically been set to False, - if not bool(request.organization.get_option("sentry:ai_suggested_solution", default=False)): - policy_failure = "organization_consent_required" - else: - # Check the OpenAI access policy - policy = get_openai_policy( - request.organization, - request.user, - pii_certified=request.GET.get("pii_certified") == "yes", - ) - stream = request.GET.get("stream") == "yes" - - if policy == "subprocessor": - policy_failure = "subprocessor" - elif policy == "individual_consent": - if request.GET.get("consent") != "yes": - policy_failure = "individual_consent" - elif policy == "pii_certification_required": - policy_failure = "pii_certification_required" - elif policy == "allowed": - pass - else: - logger.warning("Unknown OpenAI policy state") - - if policy_failure is not None: - return HttpResponse( - orjson.dumps({"restriction": policy_failure}), - content_type="application/json", - status=403, - ) - - # Cache the suggestion for a certain amount by primary hash, so even when new events - # come into the group, we are sharing the same response. - cache_key = "ai:" + event.get_primary_hash() - suggestion = cache.get(cache_key) - if suggestion is None: - try: - suggestion = suggest_fix(event.data, stream=stream) - except RateLimitError as err: - return HttpResponse( - orjson.dumps({"error": err.response.json()["error"]}), - content_type="text/plain; charset=utf-8", - status=429, - ) - - if stream: - - def stream_response(): - buffer = [] - for item in suggestion: - buffer.append(item) - yield item.encode("utf-8") - cache.set(cache_key, "".join(buffer), 300) - - resp = StreamingHttpResponse(stream_response(), content_type="text/event-stream") - # make nginx happy - resp["x-accel-buffering"] = "no" - # make webpack devserver happy - resp["cache-control"] = "no-transform" - return resp - - cache.set(cache_key, suggestion, 300) - - if stream: - return HttpResponse( - suggestion, - content_type="text/plain; charset=utf-8", - ) - - return HttpResponse( - orjson.dumps({"suggestion": suggestion}), - content_type="application/json", - ) diff --git a/src/sentry/api/endpoints/group_attachments.py b/src/sentry/api/endpoints/group_attachments.py index 4ae767fe5527cf..d91eaec82f0cd8 100644 --- a/src/sentry/api/endpoints/group_attachments.py +++ b/src/sentry/api/endpoints/group_attachments.py @@ -1,3 +1,7 @@ +from datetime import datetime, timedelta + +from django.utils import timezone +from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response @@ -5,9 +9,56 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import EnvironmentMixin, region_silo_endpoint from sentry.api.bases.group import GroupEndpoint +from sentry.api.exceptions import ResourceDoesNotExist +from sentry.api.helpers.environments import get_environments +from sentry.api.helpers.events import get_query_builder_for_group from sentry.api.paginator import DateTimePaginator from sentry.api.serializers import EventAttachmentSerializer, serialize +from sentry.api.utils import get_date_range_from_params +from sentry.exceptions import InvalidParams from sentry.models.eventattachment import EventAttachment, 
event_attachment_screenshot_filter +from sentry.models.group import Group +from sentry.search.events.types import ParamsType + + +def get_event_ids_from_filters( + request: Request, + group: Group, + start: datetime | None, + end: datetime | None, +) -> list[str] | None: + default_end = timezone.now() + default_start = default_end - timedelta(days=90) + try: + environments = get_environments(request, group.project.organization) + except ResourceDoesNotExist: + environments = [] + query = request.GET.get("query", "") + + # Exit early if no query or environment is specified + if not query and not environments: + return None + + params: ParamsType = { + "project_id": [group.project_id], + "organization_id": group.project.organization_id, + "start": start if start else default_start, + "end": end if end else default_end, + } + + if environments: + params["environment"] = [env.name for env in environments] + + snuba_query = get_query_builder_for_group( + query=query, + snuba_params=params, + group=group, + limit=10000, + offset=0, + ) + referrer = f"api.group-attachments.{group.issue_category.name.lower()}" + results = snuba_query.run_query(referrer=referrer) + return [evt["id"] for evt in results["data"]] @region_silo_endpoint @@ -25,6 +76,11 @@ def get(self, request: Request, group) -> Response: :pparam string issue_id: the ID of the issue to retrieve. :pparam list types: a list of attachment types to filter for. + :qparam string start: Beginning date. You must also provide ``end``. + :qparam string end: End date. You must also provide ``start``. + :qparam string statsPeriod: An optional stat period (defaults to ``"90d"``). + :qparam string query: If set, will filter to only attachments from events matching that query. + :qparam string environment: If set, will filter to only attachments from events within a specific environment. 
:auth: required """ @@ -36,9 +92,27 @@ def get(self, request: Request, group) -> Response: attachments = EventAttachment.objects.filter(group_id=group.id) types = request.GET.getlist("types") or () - event_ids = request.GET.getlist("event_id") or () + event_ids = request.GET.getlist("event_id") or None screenshot = "screenshot" in request.GET + try: + start, end = get_date_range_from_params(request.GET, optional=True) + except InvalidParams as e: + raise ParseError(detail=str(e)) + + if start: + attachments = attachments.filter(date_added__gte=start) + if end: + attachments = attachments.filter(date_added__lte=end) + + if not event_ids: + event_ids = get_event_ids_from_filters( + request=request, + group=group, + start=start, + end=end, + ) + if screenshot: attachments = event_attachment_screenshot_filter(attachments) if types: diff --git a/src/sentry/api/endpoints/group_autofix_setup_check.py b/src/sentry/api/endpoints/group_autofix_setup_check.py index d4c42fc75a5f23..8cb0167f8fc238 100644 --- a/src/sentry/api/endpoints/group_autofix_setup_check.py +++ b/src/sentry/api/endpoints/group_autofix_setup_check.py @@ -119,8 +119,14 @@ def get(self, request: Request, group: Group) -> Response: organization=org, project=group.project ) - repos = get_repos_and_access(group.project) - write_access_ok = len(repos) > 0 and all(repo["ok"] for repo in repos) + write_integration_check = None + if request.query_params.get("check_write_access", False): + repos = get_repos_and_access(group.project) + write_access_ok = len(repos) > 0 and all(repo["ok"] for repo in repos) + write_integration_check = { + "ok": write_access_ok, + "repos": repos, + } return Response( { @@ -132,9 +138,6 @@ def get(self, request: Request, group: Group) -> Response: "ok": integration_check is None, "reason": integration_check, }, - "githubWriteIntegration": { - "ok": write_access_ok, - "repos": repos, - }, + "githubWriteIntegration": write_integration_check, } ) diff --git a/src/sentry/api/endpoints/group_integration_details.py b/src/sentry/api/endpoints/group_integration_details.py index 6698a685d65b7f..7663f366a458c0 100644 --- a/src/sentry/api/endpoints/group_integration_details.py +++ b/src/sentry/api/endpoints/group_integration_details.py @@ -13,6 +13,10 @@ from sentry.integrations.api.serializers.models.integration import IntegrationSerializer from sentry.integrations.base import IntegrationFeatures, IntegrationInstallation from sentry.integrations.models.external_issue import ExternalIssue +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, +) from sentry.integrations.services.integration import RpcIntegration, integration_service from sentry.models.activity import Activity from sentry.models.group import Group @@ -162,62 +166,72 @@ def put(self, request: Request, group, integration_id) -> Response: if not integration or not org_integration: return Response(status=404) - if not self._has_issue_feature_on_integration(integration): - return Response( - {"detail": "This feature is not supported for this integration."}, status=400 - ) + with ProjectManagementEvent( + action_type=ProjectManagementActionType.LINK_EXTERNAL_ISSUE, + integration=integration, + ).capture() as lifecycle: + if not self._has_issue_feature_on_integration(integration): + return Response( + {"detail": "This feature is not supported for this integration."}, status=400 + ) - installation = integration.get_installation(organization_id=organization_id) - try: - data = 
installation.get_issue(external_issue_id, data=request.data) - except IntegrationFormError as exc: - return Response(exc.field_errors, status=400) - except IntegrationError as e: - return Response({"non_field_errors": [str(e)]}, status=400) + installation = integration.get_installation(organization_id=organization_id) - defaults = { - "title": data.get("title"), - "description": data.get("description"), - "metadata": data.get("metadata"), - } + try: + data = installation.get_issue(external_issue_id, data=request.data) + except IntegrationFormError as exc: + lifecycle.record_halt(exc) + return Response(exc.field_errors, status=400) + except IntegrationError as e: + lifecycle.record_failure(e) + return Response({"non_field_errors": [str(e)]}, status=400) - external_issue_key = installation.make_external_key(data) - external_issue, created = ExternalIssue.objects.get_or_create( - organization_id=organization_id, - integration_id=integration.id, - key=external_issue_key, - defaults=defaults, - ) + defaults = { + "title": data.get("title"), + "description": data.get("description"), + "metadata": data.get("metadata"), + } - if created: - integration_issue_linked.send_robust( - integration=integration, - organization=group.project.organization, - user=request.user, - sender=self.__class__, + external_issue_key = installation.make_external_key(data) + external_issue, created = ExternalIssue.objects.get_or_create( + organization_id=organization_id, + integration_id=integration.id, + key=external_issue_key, + defaults=defaults, ) - else: - external_issue.update(**defaults) - installation.store_issue_last_defaults(group.project, request.user, request.data) - try: - installation.after_link_issue(external_issue, data=request.data) - except IntegrationFormError as exc: - return Response(exc.field_errors, status=400) - except IntegrationError as e: - return Response({"non_field_errors": [str(e)]}, status=400) - - try: - with transaction.atomic(router.db_for_write(GroupLink)): - GroupLink.objects.create( - group_id=group.id, - project_id=group.project_id, - linked_type=GroupLink.LinkedType.issue, - linked_id=external_issue.id, - relationship=GroupLink.Relationship.references, + if created: + integration_issue_linked.send_robust( + integration=integration, + organization=group.project.organization, + user=request.user, + sender=self.__class__, ) - except IntegrityError: - return Response({"non_field_errors": ["That issue is already linked"]}, status=400) + else: + external_issue.update(**defaults) + + installation.store_issue_last_defaults(group.project, request.user, request.data) + try: + installation.after_link_issue(external_issue, data=request.data) + except IntegrationFormError as exc: + lifecycle.record_halt(exc) + return Response(exc.field_errors, status=400) + except IntegrationError as e: + lifecycle.record_failure(e) + return Response({"non_field_errors": [str(e)]}, status=400) + + try: + with transaction.atomic(router.db_for_write(GroupLink)): + GroupLink.objects.create( + group_id=group.id, + project_id=group.project_id, + linked_type=GroupLink.LinkedType.issue, + linked_id=external_issue.id, + relationship=GroupLink.Relationship.references, + ) + except IntegrityError as exc: + lifecycle.record_halt(exc) + return Response({"non_field_errors": ["That issue is already linked"]}, status=400) self.create_issue_activity(request, group, installation, external_issue, new=False) diff --git a/src/sentry/api/endpoints/organization_dashboard_details.py 
b/src/sentry/api/endpoints/organization_dashboard_details.py index 20241d1612703c..595d04af937f4e 100644 --- a/src/sentry/api/endpoints/organization_dashboard_details.py +++ b/src/sentry/api/endpoints/organization_dashboard_details.py @@ -3,7 +3,6 @@ from django.db.models import F from django.utils import timezone from drf_spectacular.utils import extend_schema -from rest_framework.permissions import BasePermission from rest_framework.request import Request from rest_framework.response import Response @@ -31,32 +30,9 @@ READ_FEATURE = "organizations:dashboards-basic" -class DashboardPermissions(BasePermission): - """ - Django Permissions Class for managing Dashboard Edit - permissions defined in the DashboardPermissions Model - """ - - scope_map = { - "GET": ["org:read", "org:write", "org:admin"], - "POST": ["org:read", "org:write", "org:admin"], - "PUT": ["org:read", "org:write", "org:admin"], - "DELETE": ["org:read", "org:write", "org:admin"], - } - - def has_object_permission(self, request: Request, view, obj): - if isinstance(obj, Dashboard) and features.has( - "organizations:dashboards-edit-access", obj.organization, actor=request.user - ): - # Check if user has permissions to edit dashboard - if hasattr(obj, "permissions"): - return obj.permissions.has_edit_permissions(request.user.id) - return True - - class OrganizationDashboardBase(OrganizationEndpoint): owner = ApiOwner.PERFORMANCE - permission_classes = (OrganizationDashboardsPermission, DashboardPermissions) + permission_classes = (OrganizationDashboardsPermission,) def convert_args( self, request: Request, organization_id_or_slug, dashboard_id, *args, **kwargs diff --git a/src/sentry/api/endpoints/organization_dashboards.py b/src/sentry/api/endpoints/organization_dashboards.py index da0ffa59be03c5..e54d0aaf799e7f 100644 --- a/src/sentry/api/endpoints/organization_dashboards.py +++ b/src/sentry/api/endpoints/organization_dashboards.py @@ -48,25 +48,40 @@ def has_object_permission(self, request: Request, view, obj): return super().has_object_permission(request, view, obj) if isinstance(obj, Dashboard): - # 1. Dashboard contains certain projects - if obj.projects.exists(): - return request.access.has_projects_access(obj.projects.all()) + if features.has( + "organizations:dashboards-edit-access", obj.organization, actor=request.user + ): + # allow for Managers and Owners + if request.access.has_scope("org:write"): + return True + + # check if user is restricted from editing dashboard + if hasattr(obj, "permissions"): + return obj.permissions.has_edit_permissions(request.user.id) + + # if no permissions are assigned, it is considered accessible to all users + return True - # 2. Dashboard covers all projects or all my projects + else: + # 1. Dashboard contains certain projects + if obj.projects.exists(): + return request.access.has_projects_access(obj.projects.all()) - # allow when Open Membership - if obj.organization.flags.allow_joinleave: - return True + # 2. 
Dashboard covers all projects or all my projects - # allow for Managers and Owners - if request.access.has_scope("org:write"): - return True + # allow when Open Membership + if obj.organization.flags.allow_joinleave: + return True - # allow for creator - if request.user.id == obj.created_by_id: - return True + # allow for Managers and Owners + if request.access.has_scope("org:write"): + return True + + # allow for creator + if request.user.id == obj.created_by_id: + return True - return False + return False return True diff --git a/src/sentry/api/endpoints/organization_derive_code_mappings.py b/src/sentry/api/endpoints/organization_derive_code_mappings.py index 110654113c28c6..485dec0e3f3755 100644 --- a/src/sentry/api/endpoints/organization_derive_code_mappings.py +++ b/src/sentry/api/endpoints/organization_derive_code_mappings.py @@ -11,6 +11,7 @@ OrganizationIntegrationsLoosePermission, ) from sentry.api.serializers import serialize +from sentry.integrations.github.integration import GitHubIntegration from sentry.integrations.utils.code_mapping import ( CodeMapping, CodeMappingTreesHelper, @@ -45,7 +46,7 @@ def get(self, request: Request, organization: Organization) -> Response: return Response(status=status.HTTP_403_FORBIDDEN) stacktrace_filename = request.GET.get("stacktraceFilename") - installation, _ = get_installation(organization) + installation, _ = get_installation(organization) # only returns GitHub integrations if not installation: return self.respond( {"text": "Could not find this integration installed on your organization"}, @@ -53,7 +54,14 @@ def get(self, request: Request, organization: Organization) -> Response: ) # This method is specific to the GithubIntegration - trees = installation.get_trees_for_org() # type: ignore[attr-defined] + if not isinstance(installation, GitHubIntegration): + return self.respond( + { + "text": f"The {installation.model.provider} integration does not support derived code mappings" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + trees = installation.get_trees_for_org() trees_helper = CodeMappingTreesHelper(trees) possible_code_mappings: list[dict[str, str]] = [] resp_status: int = status.HTTP_204_NO_CONTENT diff --git a/src/sentry/api/endpoints/organization_details.py b/src/sentry/api/endpoints/organization_details.py index 22c97e291220b2..4ff95b7a5e7e1d 100644 --- a/src/sentry/api/endpoints/organization_details.py +++ b/src/sentry/api/endpoints/organization_details.py @@ -42,7 +42,6 @@ from sentry.auth.staff import is_active_staff from sentry.constants import ( ACCOUNT_RATE_LIMIT_DEFAULT, - AI_SUGGESTED_SOLUTION, ALERTS_MEMBER_WRITE_DEFAULT, ATTACHMENTS_ROLE_DEFAULT, DEBUG_FILES_ROLE_DEFAULT, @@ -180,12 +179,6 @@ ("relayPiiConfig", "sentry:relay_pii_config", str, None), ("allowJoinRequests", "sentry:join_requests", bool, JOIN_REQUESTS_DEFAULT), ("apdexThreshold", "sentry:apdex_threshold", int, None), - ( - "aiSuggestedSolution", - "sentry:ai_suggested_solution", - bool, - AI_SUGGESTED_SOLUTION, - ), ( "hideAiFeatures", "sentry:hide_ai_features", @@ -282,7 +275,6 @@ class OrganizationSerializer(BaseOrganizationSerializer): scrubIPAddresses = serializers.BooleanField(required=False) scrapeJavaScript = serializers.BooleanField(required=False) isEarlyAdopter = serializers.BooleanField(required=False) - aiSuggestedSolution = serializers.BooleanField(required=False) hideAiFeatures = serializers.BooleanField(required=False) codecovAccess = serializers.BooleanField(required=False) githubOpenPRBot = serializers.BooleanField(required=False) @@ -665,10 
+657,6 @@ class OrganizationDetailsPutSerializer(serializers.Serializer): help_text="Specify `true` to opt-in to new features before they're released to the public.", required=False, ) - aiSuggestedSolution = serializers.BooleanField( - help_text="Specify `true` to opt-in to [AI Suggested Solution](/product/issues/issue-details/ai-suggested-solution/) to get AI help on how to solve an issue.", - required=False, - ) hideAiFeatures = serializers.BooleanField( help_text="Specify `true` to hide AI features from the organization.", required=False, diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 867f0df65e27bb..4022ba905bf0a6 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -288,6 +288,21 @@ def _get_event_stats( comparison_delta: datetime | None, ) -> SnubaTSResult | dict[str, SnubaTSResult]: if top_events > 0: + if use_rpc and dataset == spans_eap: + return spans_rpc.run_top_events_timeseries_query( + params=snuba_params, + query_string=query, + y_axes=query_columns, + groupby=self.get_field_list(organization, request), + orderby=self.get_orderby(request), + limit=top_events, + referrer=referrer, + granularity_secs=rollup, + config=SearchResolverConfig( + auto_fields=False, + use_aggregate_conditions=False, + ), + ) return scoped_dataset.top_events_timeseries( timeseries_columns=query_columns, selected_columns=self.get_field_list(organization, request), diff --git a/src/sentry/api/endpoints/organization_index.py b/src/sentry/api/endpoints/organization_index.py index 0cf1493ab8776f..be6bac096edbc2 100644 --- a/src/sentry/api/endpoints/organization_index.py +++ b/src/sentry/api/endpoints/organization_index.py @@ -1,3 +1,5 @@ +import logging + from django.conf import settings from django.db import IntegrityError from django.db.models import Count, Q, Sum @@ -35,6 +37,8 @@ from sentry.signals import org_setup_complete, terms_accepted from sentry.users.services.user.service import user_service +logger = logging.getLogger(__name__) + class OrganizationPostSerializer(BaseOrganizationSerializer): defaultTeam = serializers.BooleanField(required=False) @@ -120,6 +124,20 @@ def get(self, request: Request) -> Response: "organization" ) ) + if request.auth and request.auth.organization_id is not None and queryset.count() > 1: + # TODO: @athena Remove the temporary logging + # If a token is limitted to one organization, this case should not happen + # So ideally here we should limit the query set to that one org + # Adding some logging to verify if this is going to be a breaking change + logger.info( + "organization_index.unexpected_results", + extra={ + "token_org": request.auth.organization_id, + "org_count": queryset.count(), + "user_id": request.auth.user_id, + "app_id": request.auth.application_id, + }, + ) query = request.GET.get("query") if query: diff --git a/src/sentry/api/endpoints/organization_member_unreleased_commits.py b/src/sentry/api/endpoints/organization_member_unreleased_commits.py deleted file mode 100644 index 4d49426ab48613..00000000000000 --- a/src/sentry/api/endpoints/organization_member_unreleased_commits.py +++ /dev/null @@ -1,92 +0,0 @@ -from django.db import connections - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases import OrganizationMemberEndpoint -from sentry.api.serializers import 
serialize -from sentry.models.commit import Commit -from sentry.models.repository import Repository -from sentry.users.services.user.service import user_service - -# TODO(dcramer): once LatestRepoReleaseEnvironment is backfilled, change this query to use the new -# schema [performance] -query = """ -select c1.* -from sentry_commit c1 -join ( - select max(c2.date_added) as date_added, c2.repository_id - from sentry_commit as c2 - join ( - select distinct commit_id from sentry_releasecommit - where organization_id = %%s - ) as rc2 - on c2.id = rc2.commit_id - group by c2.repository_id -) as cmax -on c1.repository_id = cmax.repository_id -where c1.date_added > cmax.date_added -and c1.author_id IN ( - select id - from sentry_commitauthor - where organization_id = %%s - and upper(email) IN (%s) -) -order by c1.date_added desc -""" - -quote_name = connections["default"].ops.quote_name - - -from rest_framework.request import Request -from rest_framework.response import Response - - -@region_silo_endpoint -class OrganizationMemberUnreleasedCommitsEndpoint(OrganizationMemberEndpoint): - owner = ApiOwner.UNOWNED - publish_status = { - "GET": ApiPublishStatus.UNKNOWN, - } - - def get(self, request: Request, organization, member) -> Response: - user = user_service.get_user(member.user_id) - if user is None: - email_list = [] - else: - email_list = [e.email for e in user.useremails if e.is_verified] - - if not email_list: - return self.respond( - {"commits": [], "repositories": {}, "errors": {"missing_emails": True}} - ) - - params = [organization.id, organization.id] - for e in email_list: - params.append(e.upper()) - - queryset = Commit.objects.raw(query % (", ".join("%s" for _ in email_list),), params) - - results = list(queryset) - - if results: - repos = list(Repository.objects.filter(id__in={r.repository_id for r in results})) - else: - repos = [] - - return self.respond( - { - "commits": [ - { - "id": c.key, - "message": c.message, - "dateCreated": c.date_added, - "repositoryID": str(c.repository_id), - } - for c in results - ], - "repositories": { - str(r.id): d for r, d in zip(repos, serialize(repos, request.user)) - }, - } - ) diff --git a/src/sentry/api/endpoints/organization_metrics_samples.py b/src/sentry/api/endpoints/organization_metrics_samples.py deleted file mode 100644 index f81916c0159c8d..00000000000000 --- a/src/sentry/api/endpoints/organization_metrics_samples.py +++ /dev/null @@ -1,107 +0,0 @@ -import sentry_sdk -from rest_framework import serializers -from rest_framework.exceptions import ParseError -from rest_framework.request import Request -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase -from sentry.api.paginator import GenericOffsetPaginator -from sentry.api.utils import handle_query_errors -from sentry.exceptions import InvalidSearchQuery -from sentry.models.organization import Organization -from sentry.sentry_metrics.querying.samples_list import get_sample_list_executor_cls -from sentry.snuba.metrics.naming_layer.mri import is_mri -from sentry.snuba.referrer import Referrer -from sentry.utils.dates import get_rollup_from_request -from sentry.utils.snuba import SnubaError - - -class MetricsSamplesSerializer(serializers.Serializer): - mri = serializers.CharField(required=True) - field = serializers.ListField(required=True, allow_empty=False, 
child=serializers.CharField()) - max = serializers.FloatField(required=False) - min = serializers.FloatField(required=False) - operation = serializers.CharField(required=False) - query = serializers.CharField(required=False) - referrer = serializers.CharField(required=False) - sort = serializers.CharField(required=False) - - def validate_mri(self, mri: str) -> str: - if not is_mri(mri): - raise serializers.ValidationError(f"Invalid MRI: {mri}") - - return mri - - -@region_silo_endpoint -class OrganizationMetricsSamplesEndpoint(OrganizationEventsV2EndpointBase): - publish_status = { - "GET": ApiPublishStatus.EXPERIMENTAL, - } - owner = ApiOwner.TELEMETRY_EXPERIENCE - snuba_methods = ["GET"] - - def get(self, request: Request, organization: Organization) -> Response: - try: - snuba_params = self.get_snuba_params(request, organization) - except NoProjects: - return Response(status=404) - - try: - rollup = get_rollup_from_request( - request, - snuba_params.end_date - snuba_params.start_date, - default_interval=None, - error=InvalidSearchQuery(), - ) - except InvalidSearchQuery: - rollup = 3600 # use a default of 1 hour - - serializer = MetricsSamplesSerializer(data=request.GET) - if not serializer.is_valid(): - return Response(serializer.errors, status=400) - - serialized = serializer.validated_data - - executor_cls = get_sample_list_executor_cls(serialized["mri"]) - if not executor_cls: - raise ParseError(f"Unsupported MRI: {serialized['mri']}") - - sort = serialized.get("sort") - if sort is not None: - column = sort[1:] if sort.startswith("-") else sort - if not executor_cls.supports_sort(column): - raise ParseError(f"Unsupported sort: {sort} for MRI") - - executor = executor_cls( - mri=serialized["mri"], - snuba_params=snuba_params, - fields=serialized["field"], - operation=serialized.get("operation"), - query=serialized.get("query", ""), - min=serialized.get("min"), - max=serialized.get("max"), - sort=serialized.get("sort"), - rollup=rollup, - referrer=Referrer.API_ORGANIZATION_METRICS_SAMPLES, - ) - - with handle_query_errors(): - try: - return self.paginate( - request=request, - paginator=GenericOffsetPaginator(data_fn=executor.get_matching_spans), - on_results=lambda results: self.handle_results_with_meta( - request, - organization, - snuba_params.project_ids, - results, - standard_meta=True, - ), - ) - except SnubaError as exc: - sentry_sdk.capture_exception(exc) - raise diff --git a/src/sentry/api/endpoints/organization_profiling_profiles.py b/src/sentry/api/endpoints/organization_profiling_profiles.py index 46fb9e0217b46f..4e9e20a41f489e 100644 --- a/src/sentry/api/endpoints/organization_profiling_profiles.py +++ b/src/sentry/api/endpoints/organization_profiling_profiles.py @@ -1,4 +1,3 @@ -import sentry_sdk from django.http import HttpResponse from rest_framework import serializers from rest_framework.exceptions import ParseError @@ -18,8 +17,6 @@ from sentry.profiles.flamegraph import ( FlamegraphExecutor, get_chunks_from_spans_metadata, - get_profile_ids, - get_profiles_with_function, get_spans_from_group, ) from sentry.profiles.profile_chunks import get_chunk_ids @@ -71,36 +68,6 @@ def get(self, request: Request, organization: Organization) -> HttpResponse: if not features.has("organizations:profiling", organization, actor=request.user): return Response(status=404) - if not features.has( - "organizations:continuous-profiling-compat", organization, actor=request.user - ): - snuba_params = self.get_snuba_params(request, organization) - - project_ids = snuba_params.project_ids - if 
len(project_ids) > 1: - raise ParseError(detail="You cannot get a flamegraph from multiple projects.") - - if request.query_params.get("fingerprint"): - sentry_sdk.set_tag("data source", "functions") - function_fingerprint = int(request.query_params["fingerprint"]) - - profile_ids = get_profiles_with_function( - organization.id, - project_ids[0], - function_fingerprint, - snuba_params, - request.GET.get("query", ""), - ) - else: - sentry_sdk.set_tag("data source", "profiles") - profile_ids = get_profile_ids(snuba_params, request.query_params.get("query", None)) - - return proxy_profiling_service( - method="POST", - path=f"/organizations/{organization.id}/projects/{project_ids[0]}/flamegraph", - json_data=profile_ids, - ) - try: snuba_params = self.get_snuba_params(request, organization) except NoProjects: diff --git a/src/sentry/api/endpoints/organization_tags.py b/src/sentry/api/endpoints/organization_tags.py index 197210992ecd40..ccaeeeaa10d658 100644 --- a/src/sentry/api/endpoints/organization_tags.py +++ b/src/sentry/api/endpoints/organization_tags.py @@ -1,16 +1,18 @@ +import datetime + import sentry_sdk from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response -from sentry import tagstore +from sentry import features, options, tagstore from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases import NoProjects from sentry.api.bases.organization import OrganizationEndpoint from sentry.api.serializers import serialize -from sentry.api.utils import handle_query_errors +from sentry.api.utils import clamp_date_range, handle_query_errors from sentry.snuba.dataset import Dataset from sentry.utils.numbers import format_grouped_length from sentry.utils.sdk import set_measurement @@ -39,11 +41,23 @@ def get(self, request: Request, organization) -> Response: with sentry_sdk.start_span(op="tagstore", name="get_tag_keys_for_projects"): with handle_query_errors(): + start = filter_params["start"] + end = filter_params["end"] + + if features.has("organizations:tag-key-sample-n", organization) and start and end: + # Tag queries longer than 14 days tend to time out for large customers. For getting a list of tags, clamping to 14 days is a reasonable compromise of speed vs. 
completeness + (start, end) = clamp_date_range( + (start, end), + datetime.timedelta( + days=options.get("visibility.tag-key-max-date-range.days") + ), + ) + results = tagstore.backend.get_tag_keys_for_projects( filter_params["project_id"], filter_params.get("environment"), - filter_params["start"], - filter_params["end"], + start, + end, use_cache=request.GET.get("use_cache", "0") == "1", dataset=dataset, tenant_ids={"organization_id": organization.id}, diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py index de02c38b752986..6ed0fe1ad71e00 100644 --- a/src/sentry/api/endpoints/organization_traces.py +++ b/src/sentry/api/endpoints/organization_traces.py @@ -12,7 +12,7 @@ from rest_framework.exceptions import ParseError, ValidationError from rest_framework.request import Request from rest_framework.response import Response -from snuba_sdk import And, BooleanCondition, BooleanOp, Column, Condition, Function, Op, Or +from snuba_sdk import BooleanCondition, BooleanOp, Column, Condition, Function, Op from urllib3.exceptions import ReadTimeoutError from sentry import features, options @@ -35,7 +35,6 @@ ) from sentry.search.events.constants import TIMEOUT_SPAN_ERROR_MESSAGE from sentry.search.events.types import QueryBuilderConfig, SnubaParams, WhereType -from sentry.sentry_metrics.querying.samples_list import SpanKey, get_sample_list_executor_cls from sentry.snuba import discover, spans_indexed from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer @@ -311,7 +310,9 @@ def get(self, request: Request, organization: Organization) -> Response: zerofill = not ( request.GET.get("withoutZerofill") == "1" and features.get( - "organizations:performance-chart-interpolation", organization, actor=request.user + "organizations:performance-chart-interpolation", + organization, + actor=request.user, ) ) @@ -499,76 +500,8 @@ def get_traces_matching_conditions( self, snuba_params: SnubaParams, ) -> tuple[datetime, datetime, list[str]]: - if self.mri is not None: - sentry_sdk.set_tag("mri", self.mri) - return self.get_traces_matching_metric_conditions(snuba_params) - return self.get_traces_matching_span_conditions(snuba_params) - def get_traces_matching_metric_conditions( - self, - snuba_params: SnubaParams, - ) -> tuple[datetime, datetime, list[str]]: - assert self.mri is not None - - executor_cls = get_sample_list_executor_cls(self.mri) - if executor_cls is None: - raise ParseError(detail=f"Unsupported MRI: {self.mri}") - - executor = executor_cls( - mri=self.mri, - snuba_params=snuba_params, - fields=["trace"], - max=self.metrics_max, - min=self.metrics_min, - operation=self.metrics_operation, - query=self.metrics_query, - referrer=Referrer.API_TRACE_EXPLORER_METRICS_SPANS_LIST, - ) - - trace_ids, timestamps = executor.get_matching_traces(MAX_SNUBA_RESULTS) - - min_timestamp = snuba_params.end - max_timestamp = snuba_params.start - assert min_timestamp is not None - assert max_timestamp is not None - - for timestamp in timestamps: - min_timestamp = min(min_timestamp, timestamp) - max_timestamp = max(max_timestamp, timestamp) - - if not trace_ids or min_timestamp > max_timestamp: - return min_timestamp, max_timestamp, [] - - self.refine_params(min_timestamp, max_timestamp) - - if self.user_queries: - # If there are user queries, further refine the trace ids by applying them - # leaving us with only traces where the metric exists and matches the user - # queries. 
- ( - min_timestamp, - max_timestamp, - trace_ids, - ) = self.get_traces_matching_span_conditions_in_traces(snuba_params, trace_ids) - - if not trace_ids: - return min_timestamp, max_timestamp, [] - else: - # No user queries so take the first N trace ids as our list - min_timestamp = snuba_params.end - max_timestamp = snuba_params.start - assert min_timestamp is not None - assert max_timestamp is not None - - trace_ids = trace_ids[: self.limit] - timestamps = timestamps[: self.limit] - for timestamp in timestamps: - min_timestamp = min(min_timestamp, timestamp) - max_timestamp = max(max_timestamp, timestamp) - - return min_timestamp, max_timestamp, trace_ids - def get_traces_matching_span_conditions( self, snuba_params: SnubaParams, @@ -1248,54 +1181,20 @@ def __init__( def execute(self, offset: int, limit: int): with handle_span_query_errors(): - span_keys = self.get_metrics_span_keys() - - with handle_span_query_errors(): - spans = self.get_user_spans( + return self.get_user_spans( self.snuba_params, - span_keys, offset=offset, limit=limit, ) - return spans - - def get_metrics_span_keys(self) -> list[SpanKey] | None: - if self.mri is None: - return None - - executor_cls = get_sample_list_executor_cls(self.mri) - if executor_cls is None: - raise ParseError(detail=f"Unsupported MRI: {self.mri}") - - executor = executor_cls( - mri=self.mri, - snuba_params=self.snuba_params, - fields=["trace"], - max=self.metrics_max, - min=self.metrics_min, - operation=self.metrics_operation, - query=self.metrics_query, - referrer=Referrer.API_TRACE_EXPLORER_METRICS_SPANS_LIST, - ) - - span_keys = executor.get_matching_spans_from_traces( - [self.trace_id], - MAX_SNUBA_RESULTS, - ) - - return span_keys - def get_user_spans( self, snuba_params: SnubaParams, - span_keys: list[SpanKey] | None, limit: int, offset: int, ): user_spans_query = self.get_user_spans_query( snuba_params, - span_keys, limit=limit, offset=offset, ) @@ -1313,7 +1212,6 @@ def get_user_spans( def get_user_spans_query( self, snuba_params: SnubaParams, - span_keys: list[SpanKey] | None, limit: int, offset: int, ) -> BaseQueryBuilder: @@ -1321,7 +1219,7 @@ def get_user_spans_query( # span_keys is not supported in EAP mode because that's a legacy # code path to support metrics that no longer exists return self.get_user_spans_query_eap(snuba_params, limit, offset) - return self.get_user_spans_query_indexed(snuba_params, span_keys, limit, offset) + return self.get_user_spans_query_indexed(snuba_params, limit, offset) def get_user_spans_query_eap( self, @@ -1383,7 +1281,6 @@ def get_user_spans_query_eap( def get_user_spans_query_indexed( self, snuba_params: SnubaParams, - span_keys: list[SpanKey] | None, limit: int, offset: int, ) -> BaseQueryBuilder: @@ -1411,69 +1308,30 @@ def get_user_spans_query_indexed( conditions = [] - if span_keys is None: - # Next we have to turn the user queries into the appropriate conditions in - # the SnQL that we produce. + # Next we have to turn the user queries into the appropriate conditions in + # the SnQL that we produce. - # There are multiple sets of user conditions that needs to be satisfied - # and if a span satisfy any of them, it should be considered. - # - # To handle this use case, we want to OR all the user specified - # conditions together in this query. 
- for where in user_conditions: - if len(where) > 1: - conditions.append(BooleanCondition(op=BooleanOp.AND, conditions=where)) - elif len(where) == 1: - conditions.append(where[0]) - - if len(conditions) > 1: - # More than 1 set of conditions were specified, we want to show - # spans that match any 1 of them so join the conditions with `OR`s. - user_spans_query.add_conditions( - [BooleanCondition(op=BooleanOp.OR, conditions=conditions)] - ) - elif len(conditions) == 1: - # Only 1 set of user conditions were specified, simply insert them into - # the final query. - user_spans_query.add_conditions([conditions[0]]) - else: - # Next if there are known span_keys, we only try to fetch those spans - # This are the additional conditions to better take advantage of the ORDER BY - # on the spans table. This creates a list of conditions to be `OR`ed together - # that can will be used by ClickHouse to narrow down the granules. - # - # The span ids are not in this condition because they are more effective when - # specified within the `PREWHERE` clause. So, it's in a separate condition. - conditions = [ - And( - [ - Condition(user_spans_query.column("span.group"), Op.EQ, key.group), - Condition( - user_spans_query.column("timestamp"), - Op.EQ, - datetime.fromisoformat(key.timestamp), - ), - ] - ) - for key in span_keys - ] + # There are multiple sets of user conditions that needs to be satisfied + # and if a span satisfy any of them, it should be considered. + # + # To handle this use case, we want to OR all the user specified + # conditions together in this query. + for where in user_conditions: + if len(where) > 1: + conditions.append(BooleanCondition(op=BooleanOp.AND, conditions=where)) + elif len(where) == 1: + conditions.append(where[0]) - if len(conditions) == 1: - order_by_condition = conditions[0] - else: - order_by_condition = Or(conditions) - - # Using `IN` combined with putting the list in a SnQL "tuple" triggers an optimizer - # in snuba where it - # 1. moves the condition into the `PREWHERE` clause - # 2. maps the ids to the underlying UInt64 and uses the bloom filter index - span_id_condition = Condition( - user_spans_query.column("id"), - Op.IN, - Function("tuple", [key.span_id for key in span_keys]), + if len(conditions) > 1: + # More than 1 set of conditions were specified, we want to show + # spans that match any 1 of them so join the conditions with `OR`s. + user_spans_query.add_conditions( + [BooleanCondition(op=BooleanOp.OR, conditions=conditions)] ) - - user_spans_query.add_conditions([order_by_condition, span_id_condition]) + elif len(conditions) == 1: + # Only 1 set of user conditions were specified, simply insert them into + # the final query. 
+ user_spans_query.add_conditions([conditions[0]]) return user_spans_query diff --git a/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py b/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py deleted file mode 100644 index b3bd015ccb08d5..00000000000000 --- a/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py +++ /dev/null @@ -1,143 +0,0 @@ -from collections import namedtuple -from datetime import datetime, timedelta, timezone - -import orjson -from django.conf import settings -from rest_framework.request import Request -from rest_framework.response import Response -from urllib3 import Retry - -from sentry import features -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases import OrganizationEventsEndpointBase -from sentry.api.utils import get_date_range_from_params, handle_query_errors -from sentry.net.http import connection_from_url -from sentry.snuba.metrics_enhanced_performance import timeseries_query - -ads_connection_pool = connection_from_url( - settings.SEER_ANOMALY_DETECTION_URL, - retries=Retry( - total=5, - status_forcelist=[408, 429, 502, 503, 504], - ), - timeout=settings.SEER_ANOMALY_DETECTION_TIMEOUT, -) - -MappedParams = namedtuple("MappedParams", ["query_start", "query_end", "granularity"]) - - -def get_anomalies(snuba_io): - response = ads_connection_pool.urlopen( - "POST", - "/anomaly/predict", - body=orjson.dumps(snuba_io, option=orjson.OPT_UTC_Z), - headers={"content-type": "application/json;charset=utf-8"}, - ) - return Response(orjson.loads(response.data), status=200) - - -def get_time_params(start: datetime, end: datetime) -> MappedParams: - """ - Takes visualization start/end timestamps - and returns the start/end/granularity - of the snuba query that we should execute - Attributes: - start: datetime representing start of visualization window - end: datetime representing end of visualization window - Returns: - results: namedtuple containing - query_start: datetime representing start of query window - query_end: datetime representing end of query window - granularity: granularity to use (in seconds) - """ - anomaly_detection_range = end - start - - if anomaly_detection_range > timedelta(days=14): - snuba_range = timedelta(days=90) - granularity = 3600 - - elif anomaly_detection_range > timedelta(days=1): - granularity = 1200 - snuba_range = timedelta(days=28) - - else: - snuba_range = timedelta(days=14) - granularity = 600 - - additional_time_needed = snuba_range - anomaly_detection_range - now = datetime.now(timezone.utc) - start_limit = now - timedelta(days=90) - end_limit = now - start = max(start, start_limit) - end = min(end, end_limit) - # By default, expand windows equally in both directions - window_increase = additional_time_needed / 2 - query_start, query_end = None, None - - # If window will go back farther than 90 days, use today - 90 as start - if start - window_increase < start_limit: - query_start = now - timedelta(days=90) - additional_time_needed -= start - query_start - window_increase = additional_time_needed - # If window extends beyond today, use today as end - if end + window_increase > end_limit: - query_end = now - additional_time_needed -= query_end - end - window_increase = additional_time_needed - - query_start = query_start or max(start - window_increase, start_limit) - query_end = query_end or min(end + window_increase, end_limit) - - return MappedParams( - query_start, - query_end, - granularity, 
- ) - - -@region_silo_endpoint -class OrganizationTransactionAnomalyDetectionEndpoint(OrganizationEventsEndpointBase): - publish_status = { - "GET": ApiPublishStatus.PRIVATE, - } - - def has_feature(self, organization, request): - return features.has( - "organizations:performance-anomaly-detection-ui", organization, actor=request.user - ) - - def get(self, request: Request, organization) -> Response: - if not self.has_feature(organization, request): - return Response(status=404) - - start, end = get_date_range_from_params(request.GET) - time_params = get_time_params(start, end) - snuba_params = self.get_snuba_params(request, organization) - query = request.GET.get("query") - query = f"{query} event.type:transaction" if query else "event.type:transaction" - - datetime_format = "%Y-%m-%d %H:%M:%S" - ads_request = { - "query": query, - "start": start.strftime(datetime_format), - "end": end.strftime(datetime_format), - "granularity": time_params.granularity, - } - - # overwrite relevant time params - snuba_params.start = time_params.query_start - snuba_params.end = time_params.query_end - - with handle_query_errors(): - snuba_response = timeseries_query( - selected_columns=["count()"], - query=query, - snuba_params=snuba_params, - rollup=time_params.granularity, - referrer="transaction-anomaly-detection", - zerofill_results=False, - ) - ads_request["data"] = snuba_response.data["data"] - - return get_anomalies(ads_request) diff --git a/src/sentry/api/endpoints/project_filters.py b/src/sentry/api/endpoints/project_filters.py index 899e28cac08d13..afeddac6afd1cb 100644 --- a/src/sentry/api/endpoints/project_filters.py +++ b/src/sentry/api/endpoints/project_filters.py @@ -1,26 +1,51 @@ +from typing import TypedDict + +from drf_spectacular.utils import extend_schema from rest_framework.request import Request from rest_framework.response import Response +from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.project import ProjectEndpoint +from sentry.apidocs.constants import RESPONSE_FORBIDDEN +from sentry.apidocs.examples.project_examples import ProjectExamples +from sentry.apidocs.parameters import GlobalParams +from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.ingest import inbound_filters +class ProjectFilterResponse(TypedDict): + id: str + active: bool | list[str] + + @region_silo_endpoint +@extend_schema(tags=["Projects"]) class ProjectFiltersEndpoint(ProjectEndpoint): + owner = ApiOwner.UNOWNED publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PUBLIC, } + @extend_schema( + operation_id="List a Project's Data Filters", + parameters=[ + GlobalParams.ORG_ID_OR_SLUG, + GlobalParams.PROJECT_ID_OR_SLUG, + ], + responses={ + 200: inline_sentry_response_serializer( + "ProjectFilterResponse", list[ProjectFilterResponse] + ), + 403: RESPONSE_FORBIDDEN, + }, + examples=ProjectExamples.GET_PROJECT_FILTERS, + ) def get(self, request: Request, project) -> Response: """ - List a project's filters - Retrieve a list of filters for a given project. - - {method} {path} - + `active` will be either a boolean or a list for the legacy browser filters. 
""" results = [] for flt in inbound_filters.get_all_filter_specs(): diff --git a/src/sentry/api/endpoints/team_projects.py b/src/sentry/api/endpoints/team_projects.py index c13586e89d3c22..697c447e00b047 100644 --- a/src/sentry/api/endpoints/team_projects.py +++ b/src/sentry/api/endpoints/team_projects.py @@ -13,6 +13,7 @@ from sentry.api.bases.team import TeamEndpoint, TeamPermission from sentry.api.fields.sentry_slug import SentrySerializerSlugField from sentry.api.helpers.default_inbound_filters import set_default_inbound_filters +from sentry.api.helpers.default_symbol_sources import set_default_symbol_sources from sentry.api.paginator import OffsetPaginator from sentry.api.serializers import ProjectSummarySerializer, serialize from sentry.api.serializers.models.project import OrganizationProjectResponse, ProjectSerializer @@ -203,6 +204,8 @@ def post(self, request: Request, team: Team) -> Response: if project.platform and project.platform.startswith("javascript"): set_default_inbound_filters(project, team.organization) + set_default_symbol_sources(project) + self.create_audit_entry( request=request, organization=team.organization, diff --git a/src/sentry/api/helpers/default_symbol_sources.py b/src/sentry/api/helpers/default_symbol_sources.py new file mode 100644 index 00000000000000..4615d9fd6cf443 --- /dev/null +++ b/src/sentry/api/helpers/default_symbol_sources.py @@ -0,0 +1,14 @@ +from sentry.models.project import Project +from sentry.projects.services.project import RpcProject + +DEFAULT_SYMBOL_SOURCES = { + "electron": ["ios", "microsoft", "electron"], + "javascript-electron": ["ios", "microsoft", "electron"], +} + + +def set_default_symbol_sources(project: Project | RpcProject): + if project.platform and project.platform in DEFAULT_SYMBOL_SOURCES: + project.update_option( + "sentry:builtin_symbol_sources", DEFAULT_SYMBOL_SOURCES[project.platform] + ) diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py index 94651042b5a7fc..e0f0a9db6c09a2 100644 --- a/src/sentry/api/helpers/group_index/update.py +++ b/src/sentry/api/helpers/group_index/update.py @@ -216,13 +216,7 @@ def update_groups( project_lookup = {p.id: p for p in projects} acting_user = user if user.is_authenticated else None - self_assign_issue = "0" - if acting_user: - user_options = user_option_service.get_many( - filter={"user_ids": [acting_user.id], "keys": ["self_assign_issue"]} - ) - if user_options: - self_assign_issue = user_options[0].value + if search_fn and not group_ids: try: cursor_result, _ = search_fn( @@ -237,8 +231,6 @@ def update_groups( group_list = list(cursor_result) group_ids = [g.id for g in group_list] - is_bulk = len(group_ids) > 1 - group_project_ids = {g.project_id for g in group_list} # filter projects down to only those that have groups in the search results projects = [p for p in projects if p.id in group_project_ids] @@ -251,11 +243,7 @@ def update_groups( status_details = result.pop("statusDetails", result) status = result.get("status") - release = None - commit = None res_type = None - activity_type = None - activity_data: MutableMapping[str, Any | None] | None = None if "priority" in result: handle_priority( priority=result["priority"], @@ -264,308 +252,20 @@ def update_groups( project_lookup=project_lookup, ) if status in ("resolved", "resolvedInNextRelease"): - res_status = None - if status == "resolvedInNextRelease" or status_details.get("inNextRelease"): - # TODO(jess): We may want to support this for multi project, but punting on it 
for now - if len(projects) > 1: - return Response( - {"detail": "Cannot set resolved in next release for multiple projects."}, - status=400, - ) - # may not be a release yet - release = status_details.get("inNextRelease") or get_release_to_resolve_by(projects[0]) - - activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value - activity_data = { - # no version yet - "version": "" - } - - serialized_user = user_service.serialize_many( - filter=dict(user_ids=[user.id]), as_user=user - ) - new_status_details = { - "inNextRelease": True, - } - if serialized_user: - new_status_details["actor"] = serialized_user[0] - res_type = GroupResolution.Type.in_next_release - res_type_str = "in_next_release" - res_status = GroupResolution.Status.pending - elif status_details.get("inUpcomingRelease"): - if len(projects) > 1: - return Response( - {"detail": "Cannot set resolved in upcoming release for multiple projects."}, - status=400, - ) - release = status_details.get("inUpcomingRelease") or most_recent_release(projects[0]) - activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value - activity_data = {"version": ""} - - serialized_user = user_service.serialize_many( - filter=dict(user_ids=[user.id]), as_user=user - ) - new_status_details = { - "inUpcomingRelease": True, - } - if serialized_user: - new_status_details["actor"] = serialized_user[0] - res_type = GroupResolution.Type.in_upcoming_release - res_type_str = "in_upcoming_release" - res_status = GroupResolution.Status.pending - elif status_details.get("inRelease"): - # TODO(jess): We could update validation to check if release - # applies to multiple projects, but I think we agreed to punt - # on this for now - if len(projects) > 1: - return Response( - {"detail": "Cannot set resolved in release for multiple projects."}, status=400 - ) - release = status_details["inRelease"] - activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value - activity_data = { - # no version yet - "version": release.version - } - - serialized_user = user_service.serialize_many( - filter=dict(user_ids=[user.id]), as_user=user - ) - new_status_details = { - "inRelease": release.version, - } - if serialized_user: - new_status_details["actor"] = serialized_user[0] - res_type = GroupResolution.Type.in_release - res_type_str = "in_release" - res_status = GroupResolution.Status.resolved - elif status_details.get("inCommit"): - # TODO(jess): Same here, this is probably something we could do, but - # punting for now. 
- if len(projects) > 1: - return Response( - {"detail": "Cannot set resolved in commit for multiple projects."}, status=400 - ) - commit = status_details["inCommit"] - activity_type = ActivityType.SET_RESOLVED_IN_COMMIT.value - activity_data = {"commit": commit.id} - serialized_user = user_service.serialize_many( - filter=dict(user_ids=[user.id]), as_user=user - ) - - new_status_details = { - "inCommit": serialize(commit, user), - } - if serialized_user: - new_status_details["actor"] = serialized_user[0] - res_type_str = "in_commit" - else: - res_type_str = "now" - activity_type = ActivityType.SET_RESOLVED.value - activity_data = {} - new_status_details = {} - - now = django_timezone.now() - metrics.incr("group.resolved", instance=res_type_str, skip_internal=True) - - # if we've specified a commit, let's see if its already been released - # this will allow us to associate the resolution to a release as if we - # were simply using 'inRelease' above - # Note: this is different than the way commit resolution works on deploy - # creation, as a given deploy is connected to an explicit release, and - # in this case we're simply choosing the most recent release which contains - # the commit. - if commit and not release: - # TODO(jess): If we support multiple projects for release / commit resolution, - # we need to update this to find the release for each project (we shouldn't assume - # it's the same) - try: - release = most_recent_release_matching_commit(projects, commit) - res_type = GroupResolution.Type.in_release - res_status = GroupResolution.Status.resolved - except IndexError: - release = None - for group in group_list: - with transaction.atomic(router.db_for_write(Group)): - resolution = None - created = None - if release: - resolution_params = { - "release": release, - "type": res_type, - "status": res_status, - "actor_id": user.id if user.is_authenticated else None, - } - - # We only set `current_release_version` if GroupResolution type is - # in_next_release, because we need to store information about the latest/most - # recent release that was associated with a group and that is required for - # release comparisons (i.e. handling regressions) - if res_type == GroupResolution.Type.in_next_release: - # Check if semver versioning scheme is followed - follows_semver = follows_semver_versioning_scheme( - org_id=group.organization.id, - project_id=group.project.id, - release_version=release.version, - ) - - current_release_version = get_current_release_version_of_group( - group, follows_semver - ) - - if current_release_version: - resolution_params.update( - {"current_release_version": current_release_version} - ) - - # Sets `current_release_version` for activity, since there is no point - # waiting for when a new release is created i.e. - # clear_expired_resolutions task to be run. - # Activity should look like "... resolved in version - # >current_release_version" in the UI - if follows_semver: - activity_data.update( - {"current_release_version": current_release_version} - ) - - # In semver projects, and thereby semver releases, we determine - # resolutions by comparing against an expression rather than a - # specific release (i.e. >current_release_version). 
Consequently, - # at this point we can consider this GroupResolution as resolved - # in release - resolution_params.update( - { - "type": GroupResolution.Type.in_release, - "status": GroupResolution.Status.resolved, - } - ) - else: - # If we already know the `next` release in date based ordering - # when clicking on `resolvedInNextRelease` because it is already - # been released, there is no point in setting GroupResolution to - # be of type in_next_release but rather in_release would suffice - - try: - # Get current release object from current_release_version - current_release_obj = Release.objects.get( - version=current_release_version, - organization_id=projects[0].organization_id, - ) - - date_order_q = Q( - date_added__gt=current_release_obj.date_added - ) | Q( - date_added=current_release_obj.date_added, - id__gt=current_release_obj.id, - ) - - # Find the next release after the current_release_version - # i.e. the release that resolves the issue - resolved_in_release = ( - Release.objects.filter( - date_order_q, - projects=projects[0], - organization_id=projects[0].organization_id, - ) - .extra( - select={"sort": "COALESCE(date_released, date_added)"} - ) - .order_by("sort", "id")[:1] - .get() - ) - - # If we get here, we assume it exists and so we update - # GroupResolution and Activity - resolution_params.update( - { - "release": resolved_in_release, - "type": GroupResolution.Type.in_release, - "status": GroupResolution.Status.resolved, - } - ) - activity_data.update({"version": resolved_in_release.version}) - except Release.DoesNotExist: - # If it gets here, it means we don't know the upcoming - # release yet because it does not exist, and so we should - # fall back to our current model - ... - - resolution, created = GroupResolution.objects.get_or_create( - group=group, defaults=resolution_params - ) - if not created: - resolution.update(datetime=django_timezone.now(), **resolution_params) - - if commit: - GroupLink.objects.create( - group_id=group.id, - project_id=group.project_id, - linked_type=GroupLink.LinkedType.commit, - relationship=GroupLink.Relationship.resolves, - linked_id=commit.id, - ) - - affected = Group.objects.filter(id=group.id).update( - status=GroupStatus.RESOLVED, resolved_at=now, substatus=None - ) - if not resolution: - created = affected - - group.status = GroupStatus.RESOLVED - group.substatus = None - group.resolved_at = now - if affected and not options.get("groups.enable-post-update-signal"): - post_save.send( - sender=Group, - instance=group, - created=False, - update_fields=["resolved_at", "status", "substatus"], - ) - remove_group_from_inbox( - group, action=GroupInboxRemoveAction.RESOLVED, user=acting_user - ) - result["inbox"] = None - - assigned_to = self_subscribe_and_assign_issue(acting_user, group, self_assign_issue) - if assigned_to is not None: - result["assignedTo"] = assigned_to - - if created: - activity = Activity.objects.create( - project=project_lookup[group.project_id], - group=group, - type=activity_type, - user_id=acting_user.id, - ident=resolution.id if resolution else None, - data=activity_data, - ) - record_group_history_from_activity_type(group, activity_type, actor=acting_user) - - # TODO(dcramer): we need a solution for activity rollups - # before sending notifications on bulk changes - if not is_bulk: - transaction.on_commit( - lambda: activity.send_notification(), router.db_for_write(Group) - ) - - issue_resolved.send_robust( - organization_id=organization_id, - user=(acting_user or user), - group=group, - 
project=project_lookup[group.project_id], - resolution_type=res_type_str, - sender=update_groups, - ) - - kick_off_status_syncs.apply_async( - kwargs={"project_id": group.project_id, "group_id": group.id} - ) - - result.update({"status": "resolved", "statusDetails": new_status_details}) - + result, res_type = handle_resolve_in_release( + status, + status_details, + group_list, + projects, + project_lookup, + acting_user, + user, + result, + ) + if isinstance(result, Response): + return result elif status: - # The previous if statement handles the resolved and resolvedInNextRelease status updates - activity_type, activity_data, result = handle_other_status_updates( + result = handle_other_status_updates( result, group_list, group_ids, @@ -589,6 +289,336 @@ def update_groups( ) +def handle_resolve_in_release( + status: str, + status_details: Mapping[str, Any], + group_list: Sequence[Group], + projects: Sequence[Project], + project_lookup: Mapping[int, Project], + acting_user: User | None, + user: User | RpcUser, + result: MutableMapping[str, Any], +) -> tuple[dict[str, Any], GroupResolution.Type | None] | Response: + res_type = None + release = None + commit = None + self_assign_issue = "0" + if acting_user: + user_options = user_option_service.get_many( + filter={"user_ids": [acting_user.id], "keys": ["self_assign_issue"]} + ) + if user_options: + self_assign_issue = user_options[0].value + res_status = None + if status == "resolvedInNextRelease" or status_details.get("inNextRelease"): + # TODO(jess): We may want to support this for multi project, but punting on it for now + if len(projects) > 1: + return Response( + {"detail": "Cannot set resolved in next release for multiple projects."}, + status=400, + ) + # may not be a release yet + release = status_details.get("inNextRelease") or get_release_to_resolve_by(projects[0]) + + activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value + activity_data = { + # no version yet + "version": "" + } + + serialized_user = user_service.serialize_many(filter=dict(user_ids=[user.id]), as_user=user) + new_status_details = { + "inNextRelease": True, + } + if serialized_user: + new_status_details["actor"] = serialized_user[0] + res_type = GroupResolution.Type.in_next_release + res_type_str = "in_next_release" + res_status = GroupResolution.Status.pending + elif status_details.get("inUpcomingRelease"): + if len(projects) > 1: + return Response( + {"detail": "Cannot set resolved in upcoming release for multiple projects."}, + status=400, + ) + release = status_details.get("inUpcomingRelease") or most_recent_release(projects[0]) + activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value + activity_data = {"version": ""} + + serialized_user = user_service.serialize_many(filter=dict(user_ids=[user.id]), as_user=user) + new_status_details = { + "inUpcomingRelease": True, + } + if serialized_user: + new_status_details["actor"] = serialized_user[0] + res_type = GroupResolution.Type.in_upcoming_release + res_type_str = "in_upcoming_release" + res_status = GroupResolution.Status.pending + elif status_details.get("inRelease"): + # TODO(jess): We could update validation to check if release + # applies to multiple projects, but I think we agreed to punt + # on this for now + if len(projects) > 1: + return Response( + {"detail": "Cannot set resolved in release for multiple projects."}, status=400 + ) + release = status_details["inRelease"] + activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value + activity_data = { + # no version yet + "version": 
release.version + } + + serialized_user = user_service.serialize_many(filter=dict(user_ids=[user.id]), as_user=user) + new_status_details = { + "inRelease": release.version, + } + if serialized_user: + new_status_details["actor"] = serialized_user[0] + res_type = GroupResolution.Type.in_release + res_type_str = "in_release" + res_status = GroupResolution.Status.resolved + elif status_details.get("inCommit"): + # TODO(jess): Same here, this is probably something we could do, but + # punting for now. + if len(projects) > 1: + return Response( + {"detail": "Cannot set resolved in commit for multiple projects."}, status=400 + ) + commit = status_details["inCommit"] + activity_type = ActivityType.SET_RESOLVED_IN_COMMIT.value + activity_data = {"commit": commit.id} + serialized_user = user_service.serialize_many(filter=dict(user_ids=[user.id]), as_user=user) + + new_status_details = { + "inCommit": serialize(commit, user), + } + if serialized_user: + new_status_details["actor"] = serialized_user[0] + res_type_str = "in_commit" + else: + res_type_str = "now" + activity_type = ActivityType.SET_RESOLVED.value + activity_data = {} + new_status_details = {} + + metrics.incr("group.resolved", instance=res_type_str, skip_internal=True) + + # if we've specified a commit, let's see if its already been released + # this will allow us to associate the resolution to a release as if we + # were simply using 'inRelease' above + # Note: this is different than the way commit resolution works on deploy + # creation, as a given deploy is connected to an explicit release, and + # in this case we're simply choosing the most recent release which contains + # the commit. + if commit and not release: + # TODO(jess): If we support multiple projects for release / commit resolution, + # we need to update this to find the release for each project (we shouldn't assume + # it's the same) + try: + release = most_recent_release_matching_commit(projects, commit) + res_type = GroupResolution.Type.in_release + res_status = GroupResolution.Status.resolved + except IndexError: + release = None + for group in group_list: + with transaction.atomic(router.db_for_write(Group)): + process_group_resolution( + group, + group_list, + release, + commit, + res_type, + res_status, + acting_user, + user, + self_assign_issue, + activity_type, + activity_data, + result, + ) + + issue_resolved.send_robust( + organization_id=projects[0].organization_id, + user=(acting_user or user), + group=group, + project=project_lookup[group.project_id], + resolution_type=res_type_str, + sender=update_groups, + ) + + kick_off_status_syncs.apply_async( + kwargs={"project_id": group.project_id, "group_id": group.id} + ) + + result.update({"status": "resolved", "statusDetails": new_status_details}) + + return result, res_type + + +def process_group_resolution( + group: Group, + group_list: Sequence[Group], + release: Release | None, + commit: Commit | None, + res_type: GroupResolution.Type, + res_status: GroupResolution.Status, + acting_user: User | None, + user: User | RpcUser, + self_assign_issue: str, + activity_type: ActivityType, + activity_data: Mapping[str, Any], + result: MutableMapping[str, Any], +): + now = django_timezone.now() + resolution = None + created = None + if release: + resolution_params = { + "release": release, + "type": res_type, + "status": res_status, + "actor_id": user.id if user.is_authenticated else None, + } + + # We only set `current_release_version` if GroupResolution type is + # in_next_release, because we need to store 
information about the latest/most + # recent release that was associated with a group and that is required for + # release comparisons (i.e. handling regressions) + if res_type == GroupResolution.Type.in_next_release: + # Check if semver versioning scheme is followed + follows_semver = follows_semver_versioning_scheme( + org_id=group.project.organization_id, + project_id=group.project_id, + release_version=release.version, + ) + + current_release_version = get_current_release_version_of_group(group, follows_semver) + + if current_release_version: + resolution_params.update({"current_release_version": current_release_version}) + + # Sets `current_release_version` for activity, since there is no point + # waiting for when a new release is created i.e. + # clear_expired_resolutions task to be run. + # Activity should look like "... resolved in version + # >current_release_version" in the UI + if follows_semver: + activity_data.update({"current_release_version": current_release_version}) + + # In semver projects, and thereby semver releases, we determine + # resolutions by comparing against an expression rather than a + # specific release (i.e. >current_release_version). Consequently, + # at this point we can consider this GroupResolution as resolved + # in release + resolution_params.update( + { + "type": GroupResolution.Type.in_release, + "status": GroupResolution.Status.resolved, + } + ) + else: + # If we already know the `next` release in date based ordering + # when clicking on `resolvedInNextRelease` because it is already + # been released, there is no point in setting GroupResolution to + # be of type in_next_release but rather in_release would suffice + + try: + # Get current release object from current_release_version + current_release_obj = Release.objects.get( + version=current_release_version, + organization_id=group.project.organization_id, + ) + + date_order_q = Q(date_added__gt=current_release_obj.date_added) | Q( + date_added=current_release_obj.date_added, + id__gt=current_release_obj.id, + ) + + # Find the next release after the current_release_version + # i.e. the release that resolves the issue + resolved_in_release = ( + Release.objects.filter( + date_order_q, + projects=group.project, + organization_id=group.project.organization_id, + ) + .extra(select={"sort": "COALESCE(date_released, date_added)"}) + .order_by("sort", "id")[:1] + .get() + ) + + # If we get here, we assume it exists and so we update + # GroupResolution and Activity + resolution_params.update( + { + "release": resolved_in_release, + "type": GroupResolution.Type.in_release, + "status": GroupResolution.Status.resolved, + } + ) + activity_data.update({"version": resolved_in_release.version}) + except Release.DoesNotExist: + # If it gets here, it means we don't know the upcoming + # release yet because it does not exist, and so we should + # fall back to our current model + ... 
+ + resolution, created = GroupResolution.objects.get_or_create( + group=group, defaults=resolution_params + ) + if not created: + resolution.update(datetime=django_timezone.now(), **resolution_params) + + if commit: + GroupLink.objects.create( + group_id=group.id, + project_id=group.project_id, + linked_type=GroupLink.LinkedType.commit, + relationship=GroupLink.Relationship.resolves, + linked_id=commit.id, + ) + + affected = Group.objects.filter(id=group.id).update( + status=GroupStatus.RESOLVED, resolved_at=now, substatus=None + ) + if not resolution: + created = affected + + group.status = GroupStatus.RESOLVED + group.substatus = None + group.resolved_at = now + if affected and not options.get("groups.enable-post-update-signal"): + post_save.send( + sender=Group, + instance=group, + created=False, + update_fields=["resolved_at", "status", "substatus"], + ) + remove_group_from_inbox(group, action=GroupInboxRemoveAction.RESOLVED, user=acting_user) + result["inbox"] = None + + assigned_to = self_subscribe_and_assign_issue(acting_user, group, self_assign_issue) + if assigned_to is not None: + result["assignedTo"] = assigned_to + + if created: + activity = Activity.objects.create( + project=group.project, + group=group, + type=activity_type, + user_id=acting_user.id, + ident=resolution.id if resolution else None, + data=activity_data, + ) + record_group_history_from_activity_type(group, activity_type, actor=acting_user) + + # TODO(dcramer): we need a solution for activity rollups + # before sending notifications on bulk changes + if not len(group_list) > 1: + transaction.on_commit(lambda: activity.send_notification(), router.db_for_write(Group)) + + def merge_groups( group_list: Sequence[Group], project_lookup: Mapping[int, Project], @@ -629,9 +659,7 @@ def handle_other_status_updates( status_details: Mapping[str, Any], acting_user: User, user: User, -): - activity_type = None - activity_data: MutableMapping[str, Any | None] | None = None +) -> dict[str, Any]: queryset = Group.objects.filter(id__in=group_ids) new_status = STATUS_UPDATE_CHOICES[result["status"]] new_substatus = ( @@ -661,7 +689,7 @@ def handle_other_status_updates( result["statusDetails"] = {} if group_list and status_updated: - activity_type, activity_data = handle_status_update( + handle_status_update( group_list=group_list, projects=projects, project_lookup=project_lookup, @@ -672,11 +700,11 @@ def handle_other_status_updates( status_details=result.get("statusDetails", {}), sender=update_groups, ) - return activity_type, activity_data, result + return result def prepare_response( - result: dict[str, Any], + result: Mapping[str, Any], group_list: Sequence[Group], group_ids: Sequence[Group], project_lookup: Mapping[int, Project], diff --git a/src/sentry/api/serializers/models/dashboard.py b/src/sentry/api/serializers/models/dashboard.py index 1b8731adb11c62..90d85c37c4468c 100644 --- a/src/sentry/api/serializers/models/dashboard.py +++ b/src/sentry/api/serializers/models/dashboard.py @@ -192,6 +192,7 @@ class DashboardListResponse(TypedDict): createdBy: UserSerializerResponse widgetDisplay: list[str] widgetPreview: list[dict[str, str]] + permissions: DashboardPermissionsResponse | None class DashboardListSerializer(Serializer): @@ -250,6 +251,7 @@ def serialize(self, obj, attrs, user, **kwargs) -> DashboardListResponse: "createdBy": attrs.get("created_by"), "widgetDisplay": attrs.get("widget_display", []), "widgetPreview": attrs.get("widget_preview", []), + "permissions": serialize(obj.permissions) if hasattr(obj, 
"permissions") else None, } return data diff --git a/src/sentry/api/serializers/models/event.py b/src/sentry/api/serializers/models/event.py index ccde2d14b2d637..1d2255fa2e3941 100644 --- a/src/sentry/api/serializers/models/event.py +++ b/src/sentry/api/serializers/models/event.py @@ -586,6 +586,7 @@ def serialize(self, obj, attrs, user, **kwargs): "platform": str, "dateCreated": datetime, "crashFile": str | None, + "metadata": dict[str, Any] | None, }, ) @@ -642,6 +643,7 @@ def serialize(self, obj: BaseEvent, attrs, user, **kwargs) -> SimpleEventSeriali "dateCreated": obj.datetime, # Needed to generate minidump links in UI "crashFile": attrs["crash_file"], + "metadata": obj.get_event_metadata(), } return response diff --git a/src/sentry/api/serializers/models/organization.py b/src/sentry/api/serializers/models/organization.py index d104484153a859..97b422d1d2bdbf 100644 --- a/src/sentry/api/serializers/models/organization.py +++ b/src/sentry/api/serializers/models/organization.py @@ -30,7 +30,6 @@ from sentry.auth.services.auth import RpcOrganizationAuthConfig, auth_service from sentry.constants import ( ACCOUNT_RATE_LIMIT_DEFAULT, - AI_SUGGESTED_SOLUTION, ALERTS_MEMBER_WRITE_DEFAULT, ATTACHMENTS_ROLE_DEFAULT, DATA_CONSENT_DEFAULT, @@ -482,7 +481,6 @@ class DetailedOrganizationSerializerResponse(_DetailedOrganizationSerializerResp pendingAccessRequests: int onboardingTasks: list[OnboardingTasksSerializerResponse] codecovAccess: bool - aiSuggestedSolution: bool hideAiFeatures: bool githubPRBot: bool githubOpenPRBot: bool @@ -599,9 +597,6 @@ def serialize( # type: ignore[explicit-override, override] ), "relayPiiConfig": str(obj.get_option("sentry:relay_pii_config") or "") or None, "codecovAccess": bool(obj.flags.codecov_access), - "aiSuggestedSolution": bool( - obj.get_option("sentry:ai_suggested_solution", AI_SUGGESTED_SOLUTION) - ), "hideAiFeatures": bool( obj.get_option("sentry:hide_ai_features", HIDE_AI_FEATURES_DEFAULT) ), diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py index 9574a289bfb6c1..840d4af67c0dce 100644 --- a/src/sentry/api/serializers/models/project.py +++ b/src/sentry/api/serializers/models/project.py @@ -78,6 +78,9 @@ "first-event-severity-calculation", "alert-filters", "servicehooks", + "similarity-embeddings", + "similarity-embeddings-delete-by-hash", + "similarity-embeddings-backfill", } @@ -770,14 +773,16 @@ def serialize( ) if not self._collapse(LATEST_DEPLOYS_KEY): context[LATEST_DEPLOYS_KEY] = attrs["deploys"] - if "stats" in attrs: - context.update(stats=attrs["stats"]) - if "transactionStats" in attrs: - context.update(transactionStats=attrs["transactionStats"]) - if "sessionStats" in attrs: - context.update(sessionStats=attrs["sessionStats"]) - if "options" in attrs: - context.update(options=attrs["options"]) + + if attrs["has_access"] or user.is_staff: + if "stats" in attrs: + context.update(stats=attrs["stats"]) + if "transactionStats" in attrs: + context.update(transactionStats=attrs["transactionStats"]) + if "sessionStats" in attrs: + context.update(sessionStats=attrs["sessionStats"]) + if "options" in attrs: + context.update(options=attrs["options"]) return context diff --git a/src/sentry/api/serializers/rest_framework/dashboard.py b/src/sentry/api/serializers/rest_framework/dashboard.py index 7d2fa91c3e9a9c..fe8743fc0d4c11 100644 --- a/src/sentry/api/serializers/rest_framework/dashboard.py +++ b/src/sentry/api/serializers/rest_framework/dashboard.py @@ -26,6 +26,7 @@ DashboardWidgetTypes, 
DatasetSourcesTypes, ) +from sentry.models.organization import Organization from sentry.models.team import Team from sentry.relay.config.metric_extraction import get_current_widget_specs, widget_exceeds_max_specs from sentry.search.events.builder.discover import UnresolvedQuery @@ -38,6 +39,7 @@ set_or_create_on_demand_state, ) from sentry.tasks.relay import schedule_invalidate_project_config +from sentry.users.models.user import User from sentry.utils.dates import parse_stats_period from sentry.utils.strings import oxfordize_list @@ -171,6 +173,33 @@ class DashboardWidgetQuerySerializer(CamelSnakeSerializer[Dashboard]): validate_id = validate_id + def get_metrics_features( + self, organization: Organization | None, user: User | None + ) -> dict[str, bool | None]: + if organization is None or user is None: + return {} + + feature_names = [ + "organizations:mep-rollout-flag", + "organizations:dynamic-sampling", + "organizations:performance-use-metrics", + "organizations:dashboards-mep", + ] + batch_features = features.batch_has( + feature_names, + organization=organization, + actor=user, + ) + + return ( + batch_features.get(f"organization:{organization.id}", {}) + if batch_features is not None + else { + feature_name: features.has(feature_name, organization=organization, actor=user) + for feature_name in feature_names + } + ) + def validate(self, data): if not data.get("id"): keys = set(data.keys()) @@ -185,7 +214,7 @@ def validate(self, data): # Validate the query that would be created when run. conditions = self._get_attr(data, "conditions", "") orderby = self._get_attr(data, "orderby", "") - is_table = is_table_display_type(self.context.get("displayType")) + is_table = is_table_display_type(self.context.get("display_type")) columns = self._get_attr(data, "columns", []).copy() aggregates = self._get_attr(data, "aggregates", []).copy() fields = columns + aggregates @@ -226,6 +255,17 @@ def validate(self, data): data["issue_query_error"] = {"conditions": [f"Invalid conditions: {err}"]} try: + batch_features = self.get_metrics_features( + self.context.get("organization"), self.context.get("user") + ) + use_metrics = bool( + ( + batch_features.get("organizations:mep-rollout-flag", False) + and batch_features.get("organizations:dynamic-sampling", False) + ) + or batch_features.get("organizations:performance-use-metrics", False) + or batch_features.get("organizations:dashboards-mep", False) + ) # When using the eps/epm functions, they require an interval argument # or to provide the start/end so that the interval can be computed. 
# This uses a hard coded start/end to ensure the validation succeeds @@ -239,6 +279,7 @@ def validate(self, data): "aggregates_only": not is_table, }, use_aggregate_conditions=True, + has_metrics=use_metrics, ), ) @@ -321,6 +362,20 @@ def validate_interval(self, interval): raise serializers.ValidationError("Invalid interval") return interval + def to_internal_value(self, data): + # Update the context for the queries serializer because the display type is + # required for validation of the queries + queries_serializer = self.fields["queries"] + additional_context = {} + + if data.get("display_type"): + additional_context["display_type"] = data.get("display_type") + if self.context.get("request") and self.context["request"].user: + additional_context["user"] = self.context["request"].user + + queries_serializer.context.update(additional_context) + return super().to_internal_value(data) + def validate(self, data): query_errors = [] all_columns: set[str] = set() @@ -567,7 +622,9 @@ def validate(self, data): permissions = data.get("permissions") if permissions and self.instance: currentUser = self.context["request"].user - if self.instance.created_by_id != currentUser.id: + # managers and owners + has_write_access = self.context["request"].access.has_scope("org:write") + if self.instance.created_by_id != currentUser.id and not has_write_access: raise serializers.ValidationError( "Only the Dashboard Creator may modify Dashboard Edit Access" ) diff --git a/src/sentry/api/serializers/rest_framework/notification_action.py b/src/sentry/api/serializers/rest_framework/notification_action.py index 5304efa6b040ab..a0df4108607086 100644 --- a/src/sentry/api/serializers/rest_framework/notification_action.py +++ b/src/sentry/api/serializers/rest_framework/notification_action.py @@ -1,3 +1,4 @@ +from collections.abc import Sequence from typing import TypedDict from django.db import router, transaction @@ -19,7 +20,7 @@ from sentry.utils.strings import oxfordize_list -def format_choices_text(choices: list[tuple[int, str]]): +def format_choices_text(choices: Sequence[tuple[int, str]]): choices_as_display_text = [f"'{display_text}'" for (_, display_text) in choices] return oxfordize_list(choices_as_display_text) @@ -33,7 +34,7 @@ def format_choices_text(choices: list[tuple[int, str]]): # Note the ordering of fields affects the Spike Protection API Documentation -class NotificationActionInputData(TypedDict): +class NotificationActionInputData(TypedDict, total=False): trigger_type: int service_type: int integration_id: int @@ -160,6 +161,11 @@ def validate_integration_and_service(self, data: NotificationActionInputData): } ) integration = integration_service.get_integration(integration_id=data.get("integration_id")) + if integration is None: + raise serializers.ValidationError( + f"Service type of '{service_provider}' requires having an active integration" + ) + if integration and service_provider != integration.provider: raise serializers.ValidationError( { @@ -251,7 +257,7 @@ def validate_discord_channel( ) -> NotificationActionInputData: """ Validates that SPECIFIC targets for DISCORD service have the following target data: - target_display: Discord channel id + target_display: Discord channel name target_identifier: Discord channel id NOTE: Reaches out to via discord integration to verify channel """ @@ -263,12 +269,12 @@ def validate_discord_channel( ): return data - channel_name = data.get("target_display") - channel_id = data.get("target_identifier") + channel_name = data.get("target_display", None) + 
channel_id = data.get("target_identifier", None) - if not channel_id and channel_name: + if channel_id is None or channel_name is None: raise serializers.ValidationError( - {"target_identifier": "Did not receive a discord channel id."} + {"target_identifier": "Did not receive a discord channel id or name."} ) try: @@ -280,7 +286,6 @@ def validate_discord_channel( except Exception as e: raise serializers.ValidationError({"target_identifier": str(e)}) - data["target_identifier"] = channel_id return data def validate_pagerduty_service( diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index d631b249dc97e6..b3e00d4222cfc9 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -337,6 +337,7 @@ from sentry.users.api.endpoints.user_roles import UserUserRolesEndpoint from sentry.users.api.endpoints.userroles_details import UserRoleDetailsEndpoint from sentry.users.api.endpoints.userroles_index import UserRolesEndpoint +from sentry.workflow_engine.endpoints import urls as workflow_urls from .endpoints.accept_organization_invite import AcceptOrganizationInvite from .endpoints.accept_project_transfer import AcceptProjectTransferEndpoint @@ -372,7 +373,6 @@ SourceMapsEndpoint, UnknownDebugFilesEndpoint, ) -from .endpoints.event_ai_suggested_fix import EventAiSuggestedFixEndpoint from .endpoints.event_apple_crash_report import EventAppleCrashReportEndpoint from .endpoints.event_attachment_details import EventAttachmentDetailsEndpoint from .endpoints.event_attachments import EventAttachmentsEndpoint @@ -488,9 +488,6 @@ OrganizationMemberIndexEndpoint, ) from .endpoints.organization_member.team_details import OrganizationMemberTeamDetailsEndpoint -from .endpoints.organization_member_unreleased_commits import ( - OrganizationMemberUnreleasedCommitsEndpoint, -) from .endpoints.organization_metrics_code_locations import OrganizationMetricsCodeLocationsEndpoint from .endpoints.organization_metrics_details import OrganizationMetricsDetailsEndpoint from .endpoints.organization_metrics_meta import ( @@ -498,7 +495,6 @@ OrganizationMetricsCompatibilitySums, ) from .endpoints.organization_metrics_query import OrganizationMetricsQueryEndpoint -from .endpoints.organization_metrics_samples import OrganizationMetricsSamplesEndpoint from .endpoints.organization_metrics_tag_details import OrganizationMetricsTagDetailsEndpoint from .endpoints.organization_metrics_tags import OrganizationMetricsTagsEndpoint from .endpoints.organization_on_demand_metrics_estimation_stats import ( @@ -561,9 +557,6 @@ OrganizationTraceSpansEndpoint, OrganizationTracesStatsEndpoint, ) -from .endpoints.organization_transaction_anomaly_detection import ( - OrganizationTransactionAnomalyDetectionEndpoint, -) from .endpoints.organization_user_details import OrganizationUserDetailsEndpoint from .endpoints.organization_user_reports import OrganizationUserReportsEndpoint from .endpoints.organization_user_teams import OrganizationUserTeamsEndpoint @@ -1767,11 +1760,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationMemberDetailsEndpoint.as_view(), name="sentry-api-0-organization-member-details", ), - re_path( - r"^(?P[^\/]+)/members/(?P[^\/]+)/unreleased-commits/$", - OrganizationMemberUnreleasedCommitsEndpoint.as_view(), - name="sentry-api-0-organization-member-unreleased-commits", - ), re_path( r"^(?P[^\/]+)/members/(?P[^\/]+)/teams/(?P[^\/]+)/$", OrganizationMemberTeamDetailsEndpoint.as_view(), @@ -1998,11 +1986,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | 
URLResolver]: OrganizationJoinRequestEndpoint.as_view(), name="sentry-api-0-organization-join-request", ), - re_path( - r"^(?P[^\/]+)/transaction-anomaly-detection/$", - OrganizationTransactionAnomalyDetectionEndpoint.as_view(), - name="sentry-api-0-organization-transaction-anomaly-detection", - ), # relay usage re_path( r"^(?P[^\/]+)/relay_usage/$", @@ -2113,11 +2096,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationMetricsQueryEndpoint.as_view(), name="sentry-api-0-organization-metrics-query", ), - re_path( - r"^(?P[^/]+)/metrics/samples/$", - OrganizationMetricsSamplesEndpoint.as_view(), - name="sentry-api-0-organization-metrics-samples", - ), re_path( r"^(?P[^/]+)/metrics/tags/$", OrganizationMetricsTagsEndpoint.as_view(), @@ -2284,11 +2262,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: EventGroupingInfoEndpoint.as_view(), name="sentry-api-0-event-grouping-info", ), - re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/ai-fix-suggest/$", - EventAiSuggestedFixEndpoint.as_view(), - name="sentry-api-0-event-ai-fix-suggest", - ), re_path( r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/apple-crash-report$", EventAppleCrashReportEndpoint.as_view(), @@ -2806,6 +2779,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ProjectUptimeAlertIndexEndpoint.as_view(), name="sentry-api-0-project-uptime-alert-index", ), + *workflow_urls.urlpatterns, ] TEAM_URLS = [ diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index fb0738df6b3ed3..ebfbf0dfac2cd4 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -234,6 +234,35 @@ def get_date_range_from_stats_period( return start, end +def clamp_date_range( + range: tuple[datetime.datetime, datetime.datetime], max_timedelta: datetime.timedelta +) -> tuple[datetime.datetime, datetime.datetime]: + """ + Accepts a date range and a maximum time delta. If the date range is shorter + than the max delta, returns the range as-is. If the date range is longer than the max delta, clamps the range range, anchoring to the end. + + If any of the inputs are invalid (e.g., a negative range) returns the range + without modifying it. + + :param range: A tuple of two `datetime.datetime` objects + :param max_timedelta: Maximum allowed range delta + :return: A tuple of two `datetime.datetime` objects + """ + + [start, end] = range + delta = end - start + + # Ignore negative max time deltas + if max_timedelta < datetime.timedelta(0): + return (start, end) + + # Ignore if delta is within acceptable range + if delta < max_timedelta: + return (start, end) + + return (end - max_timedelta, end) + + # The wide typing allows us to move towards RpcUserOrganizationContext in the future to save RPC calls. # If you can use the wider more correct type, please do. 
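A quick illustration of the clamp_date_range helper added above; the dates and deltas here are made up purely to show the anchoring behavior described in its docstring:

    import datetime
    from sentry.api.utils import clamp_date_range

    start = datetime.datetime(2024, 11, 1)
    end = datetime.datetime(2024, 11, 30)

    # A 29-day range clamped to a 7-day maximum keeps the end and moves the start forward
    assert clamp_date_range((start, end), datetime.timedelta(days=7)) == (
        datetime.datetime(2024, 11, 23),
        end,
    )

    # Ranges already within the maximum, and negative maximums, come back unchanged
    assert clamp_date_range((start, end), datetime.timedelta(days=60)) == (start, end)
    assert clamp_date_range((start, end), datetime.timedelta(days=-1)) == (start, end)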
def is_member_disabled_from_limit( diff --git a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py index 7f5ef84ac62f9d..230d3e1703f016 100644 --- a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py +++ b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py @@ -42,7 +42,6 @@ "/api/0/sentry-apps/{sentry_app_id_or_slug}/features/", "/api/0/organizations/{organization_id_or_slug}/monitors/", "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/filters/{filter_id}/", - "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/unreleased-commits/", "/api/0/sentry-apps/{sentry_app_id_or_slug}/api-tokens/", "/api/0/internal/quotas/", "/api/0/sentry-apps/{sentry_app_id_or_slug}/stats/", diff --git a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py index 6a7117f1c73097..f0ca4e6a4e1c67 100644 --- a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py +++ b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py @@ -406,9 +406,6 @@ "/api/0/organizations/{organization_id_or_slug}/sessions/": {"GET"}, "/api/0/organizations/{organization_id_or_slug}/releases/{version}/resolved/": {"GET"}, "/api/0/organizations/{organization_id_or_slug}/request-project-creation/": {"POST"}, - "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/unreleased-commits/": { - "GET" - }, "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/teams/{team_id_or_slug}/": { "GET", "PUT", diff --git a/src/sentry/apidocs/examples/dashboard_examples.py b/src/sentry/apidocs/examples/dashboard_examples.py index 20fdeaeaf8c920..8fbcad45f3ed3a 100644 --- a/src/sentry/apidocs/examples/dashboard_examples.py +++ b/src/sentry/apidocs/examples/dashboard_examples.py @@ -104,6 +104,7 @@ }, "widgetDisplay": [], "widgetPreview": [], + "permissions": {"isEditableByEveryone": True, "teamsWithEditAccess": []}, }, { "id": "2", @@ -134,6 +135,7 @@ }, "widgetDisplay": [], "widgetPreview": [], + "permissions": None, }, ] diff --git a/src/sentry/apidocs/examples/event_examples.py b/src/sentry/apidocs/examples/event_examples.py index 22797eafad0537..0dae9ea877e764 100644 --- a/src/sentry/apidocs/examples/event_examples.py +++ b/src/sentry/apidocs/examples/event_examples.py @@ -28,6 +28,7 @@ "location": "example.py:123", "culprit": "/books/new/", "projectID": "49271", + "metadata": None, } GROUP_EVENT: GroupEventDetailsResponse = { diff --git a/src/sentry/apidocs/examples/organization_examples.py b/src/sentry/apidocs/examples/organization_examples.py index 09f90d6ba36b71..676eee217115c6 100644 --- a/src/sentry/apidocs/examples/organization_examples.py +++ b/src/sentry/apidocs/examples/organization_examples.py @@ -310,7 +310,6 @@ class OrganizationExamples: "allowJoinRequests": True, "relayPiiConfig": None, "codecovAccess": False, - "aiSuggestedSolution": True, "hideAiFeatures": False, "githubPRBot": True, "githubOpenPRBot": True, diff --git a/src/sentry/apidocs/examples/project_examples.py b/src/sentry/apidocs/examples/project_examples.py index d46b805b9dc825..cc609bbc07168a 100644 --- a/src/sentry/apidocs/examples/project_examples.py +++ b/src/sentry/apidocs/examples/project_examples.py @@ -526,3 +526,30 @@ class ProjectExamples: response_only=True, ), ] + + GET_PROJECT_FILTERS = [ + OpenApiExample( + "List a project's filters", + value=[ + {"id": "browser-extensions", "active": False}, + {"id": "filtered-transaction", "active": True}, 
+ { + "id": "legacy-browsers", + "active": [ + "opera", + "edge", + "safari", + "chrome", + "ie", + "opera_mini", + "firefox", + "android", + ], + }, + {"id": "localhost", "active": False}, + {"id": "web-crawlers", "active": True}, + ], + status_codes=["200"], + response_only=True, + ), + ] diff --git a/src/sentry/auth/services/auth/model.py b/src/sentry/auth/services/auth/model.py index 86c918af04a68c..d0bbd928b60133 100644 --- a/src/sentry/auth/services/auth/model.py +++ b/src/sentry/auth/services/auth/model.py @@ -40,6 +40,7 @@ class RpcApiToken(RpcModel): expires_at: datetime.datetime | None = None allowed_origins: list[str] = Field(default_factory=list) scope_list: list[str] = Field(default_factory=list) + scoping_organization_id: int | None = None class RpcMemberSsoState(RpcModel): diff --git a/src/sentry/auth/services/auth/serial.py b/src/sentry/auth/services/auth/serial.py index 514b54ff1eb090..722649ed210584 100644 --- a/src/sentry/auth/services/auth/serial.py +++ b/src/sentry/auth/services/auth/serial.py @@ -85,6 +85,7 @@ def serialize_api_token(at: ApiToken) -> RpcApiToken: user_id=at.user_id, application_id=at.application_id, organization_id=at.organization_id, + scoping_organization_id=at.scoping_organization_id, application_is_active=at.application is None or at.application.is_active, token=at.token, hashed_token=at.hashed_token, diff --git a/src/sentry/auth/system.py b/src/sentry/auth/system.py index baa20a77f4bc04..6bc29df82b15e5 100644 --- a/src/sentry/auth/system.py +++ b/src/sentry/auth/system.py @@ -46,6 +46,7 @@ class SystemToken: token = "" application = None organization_id = None + scoping_organization_id = None @classmethod def from_request(cls, request: HttpRequest, token: str) -> SystemToken | None: diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index ea04a5061ef512..c6ae1aa9a05422 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -26,6 +26,7 @@ from sentry.conf.types.role_dict import RoleDict from sentry.conf.types.sdk_config import ServerSdkConfig from sentry.conf.types.sentry_config import SentryMode +from sentry.conf.types.service_options import ServiceOptions from sentry.utils import json # NOQA (used in getsentry config) from sentry.utils.celery import crontab_with_minute_jitter, make_split_task_queues from sentry.utils.types import Type, type_from_value @@ -2493,7 +2494,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: SENTRY_SELF_HOSTED_ERRORS_ONLY = False # only referenced in getsentry to provide the stable beacon version # updated with scripts/bump-version.sh -SELF_HOSTED_STABLE_VERSION = "24.11.0" +SELF_HOSTED_STABLE_VERSION = "24.11.1" # Whether we should look at X-Forwarded-For header or not # when checking REMOTE_ADDR ip addresses @@ -2882,6 +2883,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "transactions-subscription-results": "default", "generic-metrics-subscription-results": "default", "metrics-subscription-results": "default", + "eap-spans-subscription-results": "default", "ingest-events": "default", "ingest-feedback-events": "default", "ingest-feedback-events-dlq": "default", @@ -3161,12 +3163,12 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: SENTRY_SNOWFLAKE_EPOCH_START = datetime(2022, 8, 8, 0, 0).timestamp() SENTRY_USE_SNOWFLAKE = False -SENTRY_DEFAULT_LOCKS_BACKEND_OPTIONS = { +SENTRY_DEFAULT_LOCKS_BACKEND_OPTIONS: ServiceOptions = { "path": "sentry.utils.locking.backends.redis.RedisLockBackend", "options": {"cluster": "default"}, } 
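For reference, a small sketch of what the new ServiceOptions TypedDict buys for settings like the one above; the backend path is the Redis lock backend already used in this file, and the nested executor entry is illustrative only:

    from sentry.conf.types.service_options import ServiceOptions

    # total=False makes every key optional, so a path-only config still type-checks
    minimal: ServiceOptions = {"path": "sentry.utils.locking.backends.redis.RedisLockBackend"}

    # The recursive `executor` key lets one service config wrap another (illustrative)
    wrapped: ServiceOptions = {
        "path": "sentry.utils.locking.backends.redis.RedisLockBackend",
        "options": {"cluster": "default"},
        "executor": {"path": "sentry.utils.locking.backends.redis.RedisLockBackend"},
    }

    # Misspelled keys (e.g. "opts" instead of "options") now fail type checking instead of
    # silently passing through to the service loader.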
-SENTRY_POST_PROCESS_LOCKS_BACKEND_OPTIONS = { +SENTRY_POST_PROCESS_LOCKS_BACKEND_OPTIONS: ServiceOptions = { "path": "sentry.utils.locking.backends.redis.RedisLockBackend", "options": {"cluster": "default"}, } @@ -3244,7 +3246,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: # lost as a result of toggling this setting. SENTRY_REPLAYS_ATTEMPT_LEGACY_FILESTORE_LOOKUP = True -SENTRY_FEATURE_ADOPTION_CACHE_OPTIONS = { +SENTRY_FEATURE_ADOPTION_CACHE_OPTIONS: ServiceOptions = { "path": "sentry.models.featureadoption.FeatureAdoptionRedisBackend", "options": {"cluster": "default"}, } diff --git a/src/sentry/conf/types/kafka_definition.py b/src/sentry/conf/types/kafka_definition.py index 710d365852999e..59ae1228343494 100644 --- a/src/sentry/conf/types/kafka_definition.py +++ b/src/sentry/conf/types/kafka_definition.py @@ -26,6 +26,7 @@ class Topic(Enum): TRANSACTIONS_SUBSCRIPTIONS_RESULTS = "transactions-subscription-results" GENERIC_METRICS_SUBSCRIPTIONS_RESULTS = "generic-metrics-subscription-results" METRICS_SUBSCRIPTIONS_RESULTS = "metrics-subscription-results" + EAP_SPANS_SUBSCRIPTIONS_RESULTS = "eap-spans-subscription-results" INGEST_EVENTS = "ingest-events" INGEST_EVENTS_DLQ = "ingest-events-dlq" INGEST_FEEDBACK_EVENTS = "ingest-feedback-events" diff --git a/src/sentry/conf/types/service_options.py b/src/sentry/conf/types/service_options.py new file mode 100644 index 00000000000000..5c1bccb44dbbb6 --- /dev/null +++ b/src/sentry/conf/types/service_options.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +from typing import TypedDict + + +class ServiceOptions(TypedDict, total=False): + path: str + options: dict[str, object] + executor: ServiceOptions diff --git a/src/sentry/constants.py b/src/sentry/constants.py index 136a975fe4236b..4275b65316a62c 100644 --- a/src/sentry/constants.py +++ b/src/sentry/constants.py @@ -706,7 +706,6 @@ class InsightModules(Enum): SCRAPE_JAVASCRIPT_DEFAULT = True TRUSTED_RELAYS_DEFAULT = None JOIN_REQUESTS_DEFAULT = True -AI_SUGGESTED_SOLUTION = True HIDE_AI_FEATURES_DEFAULT = False GITHUB_COMMENT_BOT_DEFAULT = True ISSUE_ALERTS_THREAD_DEFAULT = True diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index 02c31010a58a91..bb21f85fadb759 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -301,6 +301,12 @@ def ingest_transactions_options() -> list[click.Option]: "click_options": multiprocessing_options(default_max_batch_size=100), "static_args": {"dataset": "metrics"}, }, + "eap-spans-subscription-results": { + "topic": Topic.EAP_SPANS_SUBSCRIPTIONS_RESULTS, + "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", + "click_options": multiprocessing_options(default_max_batch_size=100), + "static_args": {"dataset": "events_analytics_platform"}, + }, "ingest-events": { "topic": Topic.INGEST_EVENTS, "strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory", diff --git a/src/sentry/db/postgres/schema.py b/src/sentry/db/postgres/schema.py index e38c153cd77c70..385feff6591a91 100644 --- a/src/sentry/db/postgres/schema.py +++ b/src/sentry/db/postgres/schema.py @@ -88,23 +88,27 @@ def alter_db_table(self, model, old_db_table, new_db_table): "More info here: https://develop.sentry.dev/database-migrations/#renaming-tables" ) - def delete_model(self, model): + def delete_model(self, model, is_safe=False): """ It's never safe to delete a model using the standard migration process """ - raise UnsafeOperationException( - f"Deleting 
the {model.__name__} model is unsafe.\n" - "More info here: https://develop.sentry.dev/database-migrations/#deleting-tables" - ) + if not is_safe: + raise UnsafeOperationException( + f"Deleting the {model.__name__} model is unsafe.\n" + "More info here: https://develop.sentry.dev/database-migrations/#deleting-tables" + ) + super(DatabaseSchemaEditorMixin, self).delete_model(model) - def remove_field(self, model, field): + def remove_field(self, model, field, is_safe=False): """ It's never safe to remove a field using the standard migration process """ - raise UnsafeOperationException( - f"Removing the {model.__name__}.{field.name} field is unsafe.\n" - "More info here: https://develop.sentry.dev/database-migrations/#deleting-columns" - ) + if not is_safe: + raise UnsafeOperationException( + f"Removing the {model.__name__}.{field.name} field is unsafe.\n" + "More info here: https://develop.sentry.dev/database-migrations/#deleting-columns" + ) + super(DatabaseSchemaEditorMixin, self).remove_field(model, field) def execute(self, sql, params=()): if sql is DUMMY_SQL: diff --git a/src/sentry/db/router.py b/src/sentry/db/router.py index 5668c49b9ac3da..26cbd56d8d1969 100644 --- a/src/sentry/db/router.py +++ b/src/sentry/db/router.py @@ -71,6 +71,8 @@ class SiloRouter: "sentry_projectavatar": SiloMode.REGION, "sentry_pagerdutyservice": SiloMode.REGION, "sentry_notificationsetting": SiloMode.CONTROL, + "authprovider_duplicate": SiloMode.CONTROL, + "authidentity_duplicate": SiloMode.CONTROL, } """ When we remove models, we are no longer able to resolve silo assignments diff --git a/src/sentry/deletions/defaults/project.py b/src/sentry/deletions/defaults/project.py index fb06ba353c6fc5..d1d4874652e3df 100644 --- a/src/sentry/deletions/defaults/project.py +++ b/src/sentry/deletions/defaults/project.py @@ -93,7 +93,7 @@ def get_child_relations(self, instance: Project) -> list[BaseRelation]: relations.append( ModelRelation( AlertRule, - {"snuba_query__subscriptions__project": instance, "include_all_projects": False}, + {"snuba_query__subscriptions__project": instance}, ) ) diff --git a/src/sentry/discover/endpoints/discover_homepage_query.py b/src/sentry/discover/endpoints/discover_homepage_query.py index 44abebde7c399f..cbd3c68698b06f 100644 --- a/src/sentry/discover/endpoints/discover_homepage_query.py +++ b/src/sentry/discover/endpoints/discover_homepage_query.py @@ -71,7 +71,7 @@ def put(self, request: Request, organization) -> Response: serializer = DiscoverSavedQuerySerializer( # HACK: To ensure serializer data is valid, pass along a name temporarily data={**request.data, "name": "New Query"}, - context={"params": params}, + context={"params": params, "organization": organization, "user": request.user}, ) if not serializer.is_valid(): raise ParseError(serializer.errors) diff --git a/src/sentry/discover/endpoints/serializers.py b/src/sentry/discover/endpoints/serializers.py index 52bb22efdfe6ff..9349af9aaf5f02 100644 --- a/src/sentry/discover/endpoints/serializers.py +++ b/src/sentry/discover/endpoints/serializers.py @@ -6,6 +6,7 @@ from rest_framework import serializers from rest_framework.serializers import ListField +from sentry import features from sentry.api.fields.empty_integer import EmptyIntegerField from sentry.api.utils import get_date_range_from_params from sentry.constants import ALL_ACCESS_PROJECTS @@ -16,9 +17,12 @@ TeamKeyTransaction, ) from sentry.exceptions import InvalidParams, InvalidSearchQuery +from sentry.models.organization import Organization from sentry.models.team import 
Team from sentry.search.events.builder.discover import DiscoverQueryBuilder +from sentry.search.events.types import QueryBuilderConfig from sentry.snuba.dataset import Dataset +from sentry.users.models import User from sentry.utils.dates import parse_stats_period, validate_interval from sentry.utils.snuba import SENTRY_SNUBA_MAP @@ -253,6 +257,33 @@ class DiscoverSavedQuerySerializer(serializers.Serializer): 2: {"groupby", "rollup", "aggregations", "conditions", "limit"}, } + def get_metrics_features( + self, organization: Organization | None, user: User | None + ) -> dict[str, bool | None]: + if organization is None or user is None: + return {} + + feature_names = [ + "organizations:mep-rollout-flag", + "organizations:dynamic-sampling", + "organizations:performance-use-metrics", + "organizations:dashboards-mep", + ] + batch_features = features.batch_has( + feature_names, + organization=organization, + actor=user, + ) + + return ( + batch_features.get(f"organization:{organization.id}", {}) + if batch_features is not None + else { + feature_name: features.has(feature_name, organization=organization, actor=user) + for feature_name in feature_names + } + ) + def validate_projects(self, projects): from sentry.api.validators import validate_project_ids @@ -305,6 +336,18 @@ def validate(self, data): 0, ) try: + batch_features = self.get_metrics_features( + self.context.get("organization"), self.context.get("user") + ) + use_metrics = bool( + ( + batch_features.get("organizations:mep-rollout-flag", False) + and batch_features.get("organizations:dynamic-sampling", False) + ) + or batch_features.get("organizations:performance-use-metrics", False) + or batch_features.get("organizations:dashboards-mep", False) + ) + equations, columns = categorize_columns(query["fields"]) builder = DiscoverQueryBuilder( dataset=Dataset.Discover, @@ -313,6 +356,7 @@ def validate(self, data): selected_columns=columns, equations=equations, orderby=query.get("orderby"), + config=QueryBuilderConfig(has_metrics=use_metrics), ) builder.get_snql_query().validate() except (InvalidSearchQuery, ArithmeticError) as err: diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index f0cdbda79b8f12..1937e45b8f8a50 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -47,6 +47,7 @@ from sentry.eventtypes import EventType from sentry.eventtypes.transaction import TransactionEvent from sentry.exceptions import HashDiscarded +from sentry.features.rollout import in_rollout_group from sentry.grouping.api import ( NULL_GROUPHASH_INFO, GroupHashInfo, @@ -70,6 +71,9 @@ ) from sentry.grouping.variants import BaseVariant from sentry.ingest.inbound_filters import FilterStatKeys +from sentry.ingest.transaction_clusterer.datasource.redis import ( + record_transaction_name as record_transaction_name_for_clustering, +) from sentry.integrations.tasks.kick_off_status_syncs import kick_off_status_syncs from sentry.issues.grouptype import ErrorGroupType from sentry.issues.issue_occurrence import IssueOccurrence @@ -99,6 +103,8 @@ from sentry.net.http import connection_from_url from sentry.plugins.base import plugins from sentry.quotas.base import index_data_category +from sentry.receivers.features import record_event_processed +from sentry.receivers.onboarding import record_release_received, record_user_context_received from sentry.reprocessing2 import is_reprocessed_event from sentry.seer.signed_seer_api import make_signed_seer_api_request from sentry.signals import ( @@ -2512,6 +2518,34 @@ def 
_detect_performance_problems( ) +@sentry_sdk.tracing.trace +def _record_transaction_info(jobs: Sequence[Job], projects: ProjectsMapping) -> None: + """ + this function does what we do in post_process for transactions. if this option is + turned on, we do the actions here instead of in post_process, with the goal + eventually being to not run transactions through post_process + """ + for job in jobs: + try: + event = job["event"] + if not in_rollout_group("transactions.do_post_process_in_save", event.event_id): + continue + + project = event.project + with sentry_sdk.start_span(op="event_manager.record_transaction_name_for_clustering"): + record_transaction_name_for_clustering(project, event.data) + + # these are what the "transaction_processed" signal hooked into + # we should not use signals here, so call the recievers directly + # instead of sending a signal. we should consider potentially + # deleting these + record_event_processed(project, event) + record_user_context_received(project, event) + record_release_received(project, event) + except Exception: + sentry_sdk.capture_exception() + + class PerformanceJob(TypedDict, total=False): performance_problems: Sequence[PerformanceProblem] event: Event @@ -2637,6 +2671,9 @@ def save_transaction_events(jobs: Sequence[Job], projects: ProjectsMapping) -> S with metrics.timer("save_transaction_events.send_occurrence_to_platform"): _send_occurrence_to_platform(jobs, projects) + with metrics.timer("save_transaction_events.record_transaction_info"): + _record_transaction_info(jobs, projects) + return jobs diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py index bca5783584f736..67a49708ce2a03 100644 --- a/src/sentry/eventstore/models.py +++ b/src/sentry/eventstore/models.py @@ -572,6 +572,11 @@ def __getstate__(self) -> Mapping[str, Any]: state.pop("_groups_cache", None) return state + def __repr__(self): + return "".format( + id(self), self.event_id + ) + @property def data(self) -> NodeData: return self._data diff --git a/src/sentry/eventstore/processing/__init__.py b/src/sentry/eventstore/processing/__init__.py index 8666683109849c..04034c01126772 100644 --- a/src/sentry/eventstore/processing/__init__.py +++ b/src/sentry/eventstore/processing/__init__.py @@ -1,4 +1,3 @@ -import sentry_sdk from django.conf import settings from sentry.eventstore.processing.base import EventProcessingStore @@ -15,20 +14,11 @@ settings.SENTRY_TRANSACTION_PROCESSING_STORE and settings.SENTRY_TRANSACTION_PROCESSING_STORE_OPTIONS ): - try: - transaction_processing_store = LazyServiceWrapper( - EventProcessingStore, - settings.SENTRY_TRANSACTION_PROCESSING_STORE, - settings.SENTRY_TRANSACTION_PROCESSING_STORE_OPTIONS, - ) - except BaseException as e: - sentry_sdk.capture_exception(e) - transaction_processing_store = LazyServiceWrapper( - EventProcessingStore, - settings.SENTRY_EVENT_PROCESSING_STORE, - settings.SENTRY_EVENT_PROCESSING_STORE_OPTIONS, - ) - + transaction_processing_store = LazyServiceWrapper( + EventProcessingStore, + settings.SENTRY_TRANSACTION_PROCESSING_STORE, + settings.SENTRY_TRANSACTION_PROCESSING_STORE_OPTIONS, + ) else: transaction_processing_store = LazyServiceWrapper( EventProcessingStore, @@ -36,5 +26,4 @@ settings.SENTRY_EVENT_PROCESSING_STORE_OPTIONS, ) - __all__ = ["event_processing_store", "transaction_processing_store"] diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 94e032a1783e00..fd98aa35ac7c76 100644 --- a/src/sentry/features/temporary.py +++ 
b/src/sentry/features/temporary.py @@ -80,12 +80,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:continuous-profiling-beta", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable stopping the ingestion of continuous profile for non-beta orgs manager.add("organizations:continuous-profiling-beta-ingest", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) - # Enable continuous profiling ui - manager.add("organizations:continuous-profiling-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Display profile durations on the stats page manager.add("organizations:continuous-profiling-stats", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True) - # Enable the continuous profiling compatible redesign - manager.add("organizations:continuous-profiling-compat", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Delightful Developer Metrics (DDM): # Enables experimental WIP custom metrics related features manager.add("organizations:custom-metrics-experimental", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) @@ -102,14 +98,16 @@ def register_temporary_features(manager: FeatureManager): # Enable metrics enhanced performance for AM2+ customers as they transition from AM2 to AM3 manager.add("organizations:dashboards-metrics-transition", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) manager.add("organizations:dashboards-span-metrics", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False) - # Enable releases overlay on dashboard chart widgets - manager.add("organizations:dashboards-releases-on-charts", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable table view on dashboards landing page manager.add("organizations:dashboards-table-view", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable access protected editing of dashboards manager.add("organizations:dashboards-edit-access", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable share links for dashboards for sharing outside the org manager.add("organizations:dashboards-share", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enable favouriting dashboards + manager.add("organizations:dashboards-favourite", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enable the dashboard widget builder redesign UI + manager.add("organizations:dashboards-widget-builder-redesign", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable the dev toolbar PoC code for employees # Data Secrecy manager.add("organizations:data-secrecy", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) @@ -203,8 +201,6 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:messaging-integration-onboarding-project-creation", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable threshold period in metric alert rule builder manager.add("organizations:metric-alert-threshold-period", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) - # Enables the search bar for metrics samples list - manager.add("organizations:metrics-samples-list-search", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Migrate Orgs to new Azure DevOps Integration manager.add("organizations:migrate-azure-devops-integration", 
OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable Session Stats down to a minute resolution @@ -244,8 +240,6 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:ownership-size-limit-large", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Enable xlarge ownership rule file size limit manager.add("organizations:ownership-size-limit-xlarge", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) - # Enable views for anomaly detection - manager.add("organizations:performance-anomaly-detection-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable mobile performance score calculation for transactions in relay manager.add("organizations:performance-calculate-mobile-perf-score-relay", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Enable performance change explorer panel on trends page @@ -570,6 +564,8 @@ def register_temporary_features(manager: FeatureManager): # Enable alternative version of group creation that is supposed to be less racy. manager.add("projects:race-free-group-creation", ProjectFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=False) # Enable similarity embeddings API call + # This feature is only available on the frontend using project details since the handler gets + # project options and this is slow in the project index endpoint feature flag serialization manager.add("projects:similarity-embeddings", ProjectFeature, FeatureHandlerStrategy.INTERNAL, default=False, api_expose=True) manager.add("projects:similarity-embeddings-backfill", ProjectFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False) manager.add("projects:similarity-embeddings-delete-by-hash", ProjectFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False) diff --git a/src/sentry/grouping/component.py b/src/sentry/grouping/component.py index 517b98a6699707..255f202ec13c49 100644 --- a/src/sentry/grouping/component.py +++ b/src/sentry/grouping/component.py @@ -1,5 +1,6 @@ from __future__ import annotations +from collections import Counter from collections.abc import Generator, Iterator, Sequence from typing import Any, Self @@ -228,17 +229,29 @@ class SymbolGroupingComponent(BaseGroupingComponent[str]): id: str = "symbol" -class FrameGroupingComponent( - BaseGroupingComponent[ - ContextLineGroupingComponent - | FilenameGroupingComponent - | FunctionGroupingComponent - | LineNumberGroupingComponent # only in legacy config - | ModuleGroupingComponent - | SymbolGroupingComponent # only in legacy config - ] -): +FrameGroupingComponentChildren = ( + ContextLineGroupingComponent + | FilenameGroupingComponent + | FunctionGroupingComponent + | LineNumberGroupingComponent # only in legacy config + | ModuleGroupingComponent + | SymbolGroupingComponent # only in legacy config +) + + +class FrameGroupingComponent(BaseGroupingComponent[FrameGroupingComponentChildren]): id: str = "frame" + in_app: bool + + def __init__( + self, + values: Sequence[FrameGroupingComponentChildren], + in_app: bool, + hint: str | None = None, # only passed in legacy + contributes: bool | None = None, # only passed in legacy + ): + super().__init__(hint=hint, contributes=contributes, values=values) + self.in_app = in_app # Security-related inner components @@ -270,21 +283,55 @@ class MessageGroupingComponent(BaseGroupingComponent[str]): class StacktraceGroupingComponent(BaseGroupingComponent[FrameGroupingComponent]): id: str = "stacktrace" + frame_counts: 
Counter[str] + def __init__( + self, + values: Sequence[FrameGroupingComponent] | None = None, + hint: str | None = None, + contributes: bool | None = None, + frame_counts: Counter[str] | None = None, + ): + super().__init__(hint=hint, contributes=contributes, values=values) + self.frame_counts = frame_counts or Counter() -class ExceptionGroupingComponent( - BaseGroupingComponent[ - ErrorTypeGroupingComponent - | ErrorValueGroupingComponent - | NSErrorGroupingComponent - | StacktraceGroupingComponent - ] -): + +ExceptionGroupingComponentChildren = ( + ErrorTypeGroupingComponent + | ErrorValueGroupingComponent + | NSErrorGroupingComponent + | StacktraceGroupingComponent +) + + +class ExceptionGroupingComponent(BaseGroupingComponent[ExceptionGroupingComponentChildren]): id: str = "exception" + frame_counts: Counter[str] + + def __init__( + self, + values: Sequence[ExceptionGroupingComponentChildren] | None = None, + hint: str | None = None, + contributes: bool | None = None, + frame_counts: Counter[str] | None = None, + ): + super().__init__(hint=hint, contributes=contributes, values=values) + self.frame_counts = frame_counts or Counter() class ChainedExceptionGroupingComponent(BaseGroupingComponent[ExceptionGroupingComponent]): id: str = "chained-exception" + frame_counts: Counter[str] + + def __init__( + self, + values: Sequence[ExceptionGroupingComponent] | None = None, + hint: str | None = None, + contributes: bool | None = None, + frame_counts: Counter[str] | None = None, + ): + super().__init__(hint=hint, contributes=contributes, values=values) + self.frame_counts = frame_counts or Counter() class ThreadsGroupingComponent(BaseGroupingComponent[StacktraceGroupingComponent]): diff --git a/src/sentry/grouping/enhancer/__init__.py b/src/sentry/grouping/enhancer/__init__.py index 01177e42ea3aff..0499870a57e860 100644 --- a/src/sentry/grouping/enhancer/__init__.py +++ b/src/sentry/grouping/enhancer/__init__.py @@ -4,6 +4,7 @@ import logging import os import zlib +from collections import Counter from collections.abc import Sequence from typing import Any, Literal @@ -187,13 +188,21 @@ def assemble_stacktrace_component( match_frames, make_rust_exception_data(exception_data), rust_components ) + # Tally the number of each type of frame in the stacktrace. 
Later on, this will allow us to + # both collect metrics and use the information in decisions about whether to send the event + # to Seer + frame_counts: Counter[str] = Counter() + for py_component, rust_component in zip(components, rust_components): py_component.update(contributes=rust_component.contributes, hint=rust_component.hint) + key = f"{"in_app" if py_component.in_app else "system"}_{"contributing" if py_component.contributes else "non_contributing"}_frames" + frame_counts[key] += 1 component = StacktraceGroupingComponent( values=components, hint=rust_results.hint, contributes=rust_results.contributes, + frame_counts=frame_counts, ) return component, rust_results.invert_stacktrace diff --git a/src/sentry/grouping/grouping_info.py b/src/sentry/grouping/grouping_info.py index 92e4d60e6bc8ac..5e7ff6e9f695da 100644 --- a/src/sentry/grouping/grouping_info.py +++ b/src/sentry/grouping/grouping_info.py @@ -1,4 +1,5 @@ import logging +from collections.abc import Mapping from typing import Any from sentry.api.exceptions import ResourceDoesNotExist @@ -88,7 +89,7 @@ def _check_for_mismatched_hashes( def get_grouping_info_from_variants( - variants: dict[str, BaseVariant], + variants: Mapping[str, BaseVariant], ) -> dict[str, dict[str, Any]]: """ Given a dictionary of variant objects, create and return a copy of the dictionary in which each diff --git a/src/sentry/grouping/ingest/grouphash_metadata.py b/src/sentry/grouping/ingest/grouphash_metadata.py index 8df33bc3dfb8eb..8369b7bf298c7c 100644 --- a/src/sentry/grouping/ingest/grouphash_metadata.py +++ b/src/sentry/grouping/ingest/grouphash_metadata.py @@ -3,6 +3,8 @@ import logging from typing import Any, cast +from typing_extensions import TypeIs + from sentry.eventstore.models import Event from sentry.grouping.component import ( ChainedExceptionGroupingComponent, @@ -38,9 +40,12 @@ StacktraceHashingMetadata, TemplateHashingMetadata, ) +from sentry.utils import metrics +from sentry.utils.metrics import MutableTags logger = logging.getLogger(__name__) + GROUPING_METHODS_BY_DESCRIPTION = { # All frames from a stacktrace at the top level of the event, in `exception`, or in # `threads` (top-level stacktraces come, for example, from using `attach_stacktrace` @@ -75,8 +80,21 @@ "fallback": HashBasis.FALLBACK, } +# TODO: For now not including `csp_directive` and `csp_script_violation` - let's see if we end up +# wanting them +METRICS_TAGS_BY_HASH_BASIS = { + HashBasis.STACKTRACE: ["stacktrace_type", "stacktrace_location"], + HashBasis.MESSAGE: ["message_source", "message_parameterized"], + HashBasis.FINGERPRINT: ["fingerprint_source"], + HashBasis.SECURITY_VIOLATION: ["security_report_type"], + HashBasis.TEMPLATE: [], + HashBasis.CHECKSUM: [], + HashBasis.FALLBACK: ["fallback_reason"], + HashBasis.UNKNOWN: [], +} -def create_or_update_grouphash_metadata( + +def create_or_update_grouphash_metadata_if_needed( event: Event, project: Project, grouphash: GroupHash, @@ -88,7 +106,12 @@ def create_or_update_grouphash_metadata( # we'll have to override the metadata creation date for them. 
if created: - hash_basis, hashing_metadata = get_hash_basis_and_metadata(event, project, variants) + with metrics.timer( + "grouping.grouphashmetadata.get_hash_basis_and_metadata" + ) as metrics_timer_tags: + hash_basis, hashing_metadata = get_hash_basis_and_metadata( + event, project, variants, metrics_timer_tags + ) GroupHashMetadata.objects.create( grouphash=grouphash, @@ -104,7 +127,10 @@ def create_or_update_grouphash_metadata( def get_hash_basis_and_metadata( - event: Event, project: Project, variants: dict[str, BaseVariant] + event: Event, + project: Project, + variants: dict[str, BaseVariant], + metrics_timer_tags: MutableTags, ) -> tuple[HashBasis, HashingMetadata]: hashing_metadata: HashingMetadata = {} @@ -151,6 +177,8 @@ def get_hash_basis_and_metadata( ) return (HashBasis.UNKNOWN, {}) + metrics_timer_tags["hash_basis"] = hash_basis + # Gather different metadata depending on the grouping method if hash_basis == HashBasis.STACKTRACE: @@ -193,6 +221,51 @@ def get_hash_basis_and_metadata( return hash_basis, hashing_metadata +def record_grouphash_metadata_metrics(grouphash_metadata: GroupHashMetadata) -> None: + # TODO: Once https://peps.python.org/pep-0728 is a thing (still in draft but theoretically on + # track for 3.14), we can mark the various hashing metadata types as closed and that should + # narrow the types for the tag values such that we can stop stringifying everything + + # TODO: For now, until we backfill data for pre-existing hashes, these metrics are going + # to be somewhat skewed + + # Define a helper for this check so that it can double as a type guard + def is_stacktrace_hashing( + _hashing_metadata: HashingMetadata, + hash_basis: str, + ) -> TypeIs[StacktraceHashingMetadata]: + return hash_basis == HashBasis.STACKTRACE + + hash_basis = grouphash_metadata.hash_basis + hashing_metadata = grouphash_metadata.hashing_metadata + + if hash_basis: + hash_basis_tags: dict[str, str] = {"hash_basis": hash_basis} + if hashing_metadata: + hash_basis_tags["is_hybrid_fingerprint"] = str( + hashing_metadata.get("is_hybrid_fingerprint", False) + ) + metrics.incr( + "grouping.grouphashmetadata.event_hash_basis", sample_rate=1.0, tags=hash_basis_tags + ) + + if hashing_metadata: + hashing_metadata_tags: dict[str, str | bool] = { + tag: str(hashing_metadata.get(tag)) + for tag in METRICS_TAGS_BY_HASH_BASIS[hash_basis] + } + if is_stacktrace_hashing(hashing_metadata, hash_basis): + hashing_metadata_tags["chained_exception"] = str( + int(hashing_metadata.get("num_stacktraces", 1)) > 1 + ) + if hashing_metadata_tags: + metrics.incr( + f"grouping.grouphashmetadata.event_hashing_metadata.{hash_basis}", + sample_rate=1.0, + tags=hashing_metadata_tags, + ) + + def _get_stacktrace_hashing_metadata( contributing_variant: ComponentVariant, contributing_component: ( diff --git a/src/sentry/grouping/ingest/hashing.py b/src/sentry/grouping/ingest/hashing.py index 73620569b04bb3..93b0c6e4865f61 100644 --- a/src/sentry/grouping/ingest/hashing.py +++ b/src/sentry/grouping/ingest/hashing.py @@ -21,7 +21,10 @@ load_grouping_config, ) from sentry.grouping.ingest.config import is_in_transition -from sentry.grouping.ingest.grouphash_metadata import create_or_update_grouphash_metadata +from sentry.grouping.ingest.grouphash_metadata import ( + create_or_update_grouphash_metadata_if_needed, + record_grouphash_metadata_metrics, +) from sentry.grouping.variants import BaseVariant from sentry.models.grouphash import GroupHash from sentry.models.project import Project @@ -230,9 +233,23 @@ def 
get_or_create_grouphashes( if options.get("grouping.grouphash_metadata.ingestion_writes_enabled") and features.has( "organizations:grouphash-metadata-creation", project.organization ): - create_or_update_grouphash_metadata( - event, project, grouphash, created, grouping_config, variants - ) + try: + # We don't expect this to throw any errors, but collecting this metadata + # shouldn't ever derail ingestion, so better to be safe + create_or_update_grouphash_metadata_if_needed( + event, project, grouphash, created, grouping_config, variants + ) + except Exception as exc: + sentry_sdk.capture_exception(exc) + + if grouphash.metadata: + record_grouphash_metadata_metrics(grouphash.metadata) + else: + # Collect a temporary metric to get a sense of how often we would be adding metadata to an + # existing hash. (Yes, this is an overestimate, because this will fire every time we see a given + # non-backfilled grouphash, not the once per non-backfilled grouphash we'd actually be doing a + # backfill, but it will give us a ceiling from which we can work down.) + metrics.incr("grouping.grouphashmetadata.backfill_needed") grouphashes.append(grouphash) diff --git a/src/sentry/grouping/ingest/seer.py b/src/sentry/grouping/ingest/seer.py index e8b1b15bb7db06..234759e73ddd8a 100644 --- a/src/sentry/grouping/ingest/seer.py +++ b/src/sentry/grouping/ingest/seer.py @@ -1,4 +1,5 @@ import logging +from collections.abc import Mapping from dataclasses import asdict from typing import Any @@ -16,9 +17,10 @@ from sentry.seer.similarity.similar_issues import get_similarity_data_from_seer from sentry.seer.similarity.types import SimilarIssuesEmbeddingsRequest from sentry.seer.similarity.utils import ( + ReferrerOptions, event_content_is_seer_eligible, filter_null_from_string, - get_stacktrace_string, + get_stacktrace_string_with_metrics, killswitch_enabled, ) from sentry.utils import metrics @@ -28,7 +30,7 @@ logger = logging.getLogger("sentry.events.grouping") -def should_call_seer_for_grouping(event: Event, variants: dict[str, BaseVariant]) -> bool: +def should_call_seer_for_grouping(event: Event, variants: Mapping[str, BaseVariant]) -> bool: """ Use event content, feature flags, rate limits, killswitches, seer health, etc. to determine whether a call to Seer should be made. @@ -46,6 +48,10 @@ def should_call_seer_for_grouping(event: Event, variants: dict[str, BaseVariant] _has_customized_fingerprint(event, variants) or killswitch_enabled(project.id, event) or _circuit_breaker_broken(event, project) + # The rate limit check has to be last (see below) but rate-limiting aside, call this after other checks + # because it calculates the stacktrace string, which we only want to spend the time to do if we already + # know the other checks have passed. + or _has_empty_stacktrace_string(event, variants) # **Do not add any new checks after this.** The rate limit check MUST remain the last of all # the checks. # @@ -80,7 +86,7 @@ def _project_has_similarity_grouping_enabled(project: Project) -> bool: # combined with some other value). To the extent to which we're then using this function to decide # whether or not to call Seer, this means that the calculations giving rise to the default part of # the value never involve Seer input. In the long run, we probably want to change that. 
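Condensed, the gating order described in the comments above looks roughly like this. The helpers named are the ones in this module, except the final rate-limit helper, whose name is assumed here for illustration; the real function also performs other checks (e.g. event content eligibility) before this chain:

    def should_call_seer_for_grouping(event: Event, variants: Mapping[str, BaseVariant]) -> bool:
        # Abridged sketch of the short-circuit ordering only
        project = event.project
        return not (
            _has_customized_fingerprint(event, variants)
            or killswitch_enabled(project.id, event)
            or _circuit_breaker_broken(event, project)
            # Building the stacktrace string is comparatively expensive, so this check
            # only runs once the cheaper checks above have passed
            or _has_empty_stacktrace_string(event, variants)
            # The rate-limit check consumes quota and must stay last
            or _seer_rate_limit_hit(event, project)  # helper name assumed for this sketch
        )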
-def _has_customized_fingerprint(event: Event, variants: dict[str, BaseVariant]) -> bool: +def _has_customized_fingerprint(event: Event, variants: Mapping[str, BaseVariant]) -> bool: fingerprint = event.data.get("fingerprint", []) if "{{ default }}" in fingerprint: @@ -176,9 +182,30 @@ def _circuit_breaker_broken(event: Event, project: Project) -> bool: return circuit_broken +def _has_empty_stacktrace_string(event: Event, variants: Mapping[str, BaseVariant]) -> bool: + stacktrace_string = get_stacktrace_string_with_metrics( + get_grouping_info_from_variants(variants), event.platform, ReferrerOptions.INGEST + ) + if not stacktrace_string: + if stacktrace_string == "": + metrics.incr( + "grouping.similarity.did_call_seer", + sample_rate=options.get("seer.similarity.metrics_sample_rate"), + tags={ + "call_made": False, + "blocker": "empty-stacktrace-string", + }, + ) + return True + # Store the stacktrace string in the event so we only calculate it once. We need to pop it + # later so it isn't stored in the database. + event.data["stacktrace_string"] = stacktrace_string + return False + + def get_seer_similar_issues( event: Event, - variants: dict[str, BaseVariant], + variants: Mapping[str, BaseVariant], num_neighbors: int = 1, ) -> tuple[dict[str, Any], GroupHash | None]: """ @@ -187,9 +214,31 @@ def get_seer_similar_issues( should go in (if any), or None if no neighbor was near enough. """ event_hash = event.get_primary_hash() - stacktrace_string = get_stacktrace_string(get_grouping_info_from_variants(variants)) exception_type = get_path(event.data, "exception", "values", -1, "type") + stacktrace_string = event.data.get( + "stacktrace_string", + get_stacktrace_string_with_metrics( + get_grouping_info_from_variants(variants), event.platform, ReferrerOptions.INGEST + ), + ) + + if not stacktrace_string: + # TODO: remove this log once we've confirmed it isn't happening + logger.info( + "get_seer_similar_issues.empty_stacktrace", + extra={ + "event_id": event.event_id, + "project_id": event.project.id, + "stacktrace_string": stacktrace_string, + }, + ) + similar_issues_metadata_empty = { + "results": [], + "similarity_model_version": SEER_SIMILARITY_MODEL_VERSION, + } + return (similar_issues_metadata_empty, None) + request_data: SimilarIssuesEmbeddingsRequest = { "event_id": event.event_id, "hash": event_hash, @@ -200,6 +249,7 @@ def get_seer_similar_issues( "referrer": "ingest", "use_reranking": options.get("seer.similarity.ingest.use_reranking"), } + event.data.pop("stacktrace_string", None) # Similar issues are returned with the closest match first seer_results = get_similarity_data_from_seer(request_data) @@ -231,7 +281,7 @@ def get_seer_similar_issues( def maybe_check_seer_for_matching_grouphash( - event: Event, variants: dict[str, BaseVariant], all_grouphashes: list[GroupHash] + event: Event, variants: Mapping[str, BaseVariant], all_grouphashes: list[GroupHash] ) -> GroupHash | None: seer_matched_grouphash = None @@ -262,6 +312,7 @@ def maybe_check_seer_for_matching_grouphash( # Once those two problems are fixed, there will only be one hash passed to this function # and we won't have to do this search to find the right one to update. 
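In short, the stacktrace string computed during the empty-stacktrace eligibility check above is stashed on the event so it is built at most once, then dropped before the event is persisted. A compressed sketch of that lifecycle (call sites abridged):

    # 1. During the empty-stacktrace check: compute once and cache on the event
    event.data["stacktrace_string"] = stacktrace_string

    # 2. Later, in get_seer_similar_issues: reuse the cached value, falling back to recomputing
    stacktrace_string = event.data.get("stacktrace_string") or get_stacktrace_string_with_metrics(
        get_grouping_info_from_variants(variants), event.platform, ReferrerOptions.INGEST
    )

    # 3. Pop it again so the temporary value is never written to the database
    event.data.pop("stacktrace_string", None)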
primary_hash = event.get_primary_hash() + grouphash_sent = list( filter(lambda grouphash: grouphash.hash == primary_hash, all_grouphashes) )[0] diff --git a/src/sentry/grouping/strategies/legacy.py b/src/sentry/grouping/strategies/legacy.py index 41f4c814466eb1..4e6dc1581f8323 100644 --- a/src/sentry/grouping/strategies/legacy.py +++ b/src/sentry/grouping/strategies/legacy.py @@ -392,6 +392,7 @@ def frame_legacy( ], contributes=contributes, hint=hint, + in_app=interface.in_app, ) } diff --git a/src/sentry/grouping/strategies/newstyle.py b/src/sentry/grouping/strategies/newstyle.py index 683c60eb91469a..15daac601845bc 100644 --- a/src/sentry/grouping/strategies/newstyle.py +++ b/src/sentry/grouping/strategies/newstyle.py @@ -3,6 +3,7 @@ import itertools import logging import re +from collections import Counter from collections.abc import Generator from typing import Any @@ -341,7 +342,7 @@ def frame( if context_line_component is not None: values.append(context_line_component) - rv = FrameGroupingComponent(values=values) + rv = FrameGroupingComponent(values=values, in_app=frame.in_app) # if we are in javascript fuzzing mode we want to disregard some # frames consistently. These force common bad stacktraces together @@ -594,7 +595,9 @@ def single_exception( values.append(value_component) - rv[variant] = ExceptionGroupingComponent(values=values) + rv[variant] = ExceptionGroupingComponent( + values=values, frame_counts=stacktrace_component.frame_counts + ) return rv @@ -644,7 +647,16 @@ def chained_exception( rv = {} for name, component_list in by_name.items(): - rv[name] = ChainedExceptionGroupingComponent(values=component_list) + # Calculate an aggregate tally of the different types of frames (in-app vs system, + # contributing or not) across all of the exceptions in the chain + total_frame_counts: Counter[str] = Counter() + for exception_component in component_list: + total_frame_counts += exception_component.frame_counts + + rv[name] = ChainedExceptionGroupingComponent( + values=component_list, + frame_counts=total_frame_counts, + ) return rv diff --git a/src/sentry/hybridcloud/migrations/0003_add_scopes_to_api_key_replica.py b/src/sentry/hybridcloud/migrations/0003_add_scopes_to_api_key_replica.py index 320d680a87853f..092ed71bbc5cc9 100644 --- a/src/sentry/hybridcloud/migrations/0003_add_scopes_to_api_key_replica.py +++ b/src/sentry/hybridcloud/migrations/0003_add_scopes_to_api_key_replica.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("hybridcloud", "0002_add_slug_reservation_replica_model"), ] diff --git a/src/sentry/hybridcloud/migrations/0017_add_scoping_organization_apitokenreplica.py b/src/sentry/hybridcloud/migrations/0017_add_scoping_organization_apitokenreplica.py new file mode 100644 index 00000000000000..e70659d4095006 --- /dev/null +++ b/src/sentry/hybridcloud/migrations/0017_add_scoping_organization_apitokenreplica.py @@ -0,0 +1,36 @@ +# Generated by Django 5.1.1 on 2024-11-22 22:03 + +from django.db import migrations + +import sentry.db.models.fields.hybrid_cloud_foreign_key +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. 
So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("hybridcloud", "0016_add_control_cacheversion"), + ] + + operations = [ + migrations.AddField( + model_name="apitokenreplica", + name="scoping_organization_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.Organization", db_index=True, null=True, on_delete="CASCADE" + ), + ), + ] diff --git a/src/sentry/hybridcloud/models/apitokenreplica.py b/src/sentry/hybridcloud/models/apitokenreplica.py index 21e3982dc808c5..d9ee2be13040dc 100644 --- a/src/sentry/hybridcloud/models/apitokenreplica.py +++ b/src/sentry/hybridcloud/models/apitokenreplica.py @@ -24,6 +24,9 @@ class ApiTokenReplica(Model, HasApiScopes): expires_at = models.DateTimeField(null=True) allowed_origins = models.TextField(blank=True, null=True) date_added = models.DateTimeField(default=timezone.now) + scoping_organization_id = HybridCloudForeignKey( + "sentry.Organization", null=True, on_delete="CASCADE" + ) class Meta: app_label = "hybridcloud" diff --git a/src/sentry/hybridcloud/options.py b/src/sentry/hybridcloud/options.py index 21b32cf12fbc84..9f280aac25d822 100644 --- a/src/sentry/hybridcloud/options.py +++ b/src/sentry/hybridcloud/options.py @@ -1,5 +1,5 @@ from sentry.options import FLAG_AUTOMATOR_MODIFIABLE, register -from sentry.utils.types import Bool, Float, Int, Sequence +from sentry.utils.types import Bool, Int, Sequence register( "outbox_replication.sentry_organizationmember.replication_version", @@ -161,10 +161,3 @@ default=[], flags=FLAG_AUTOMATOR_MODIFIABLE, ) - -register( - "app_service.installations_for_org.cached", - type=Float, - default=0.0, - flags=FLAG_AUTOMATOR_MODIFIABLE, -) diff --git a/src/sentry/hybridcloud/services/replica/impl.py b/src/sentry/hybridcloud/services/replica/impl.py index ea45a5d32fd3cf..25d5936ebc740f 100644 --- a/src/sentry/hybridcloud/services/replica/impl.py +++ b/src/sentry/hybridcloud/services/replica/impl.py @@ -163,6 +163,7 @@ def upsert_replicated_api_token(self, *, api_token: RpcApiToken, region_name: st "\n".join(api_token.allowed_origins) if api_token.allowed_origins else None ), user_id=api_token.user_id, + scoping_organization_id=api_token.scoping_organization_id, ) handle_replication(ApiToken, destination) diff --git a/src/sentry/identity/vsts/provider.py b/src/sentry/identity/vsts/provider.py index bc17268b06d4a1..6cd3cfe481ed70 100644 --- a/src/sentry/identity/vsts/provider.py +++ b/src/sentry/identity/vsts/provider.py @@ -3,7 +3,8 @@ from rest_framework.request import Request from sentry import http, options -from sentry.identity.oauth2 import OAuth2CallbackView, OAuth2LoginView, OAuth2Provider +from sentry.identity.oauth2 import OAuth2CallbackView, OAuth2LoginView, OAuth2Provider, record_event +from sentry.integrations.utils.metrics import 
IntegrationPipelineViewType from sentry.utils.http import absolute_uri @@ -120,21 +121,27 @@ def exchange_token(self, request: Request, pipeline, code): from sentry.http import safe_urlopen, safe_urlread from sentry.utils.http import absolute_uri - req = safe_urlopen( - url=self.access_token_url, - headers={"Content-Type": "application/x-www-form-urlencoded", "Content-Length": "1322"}, - data={ - "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer", - "client_assertion": self.client_secret, - "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer", - "assertion": code, - "redirect_uri": absolute_uri(pipeline.redirect_url()), - }, - ) - body = safe_urlread(req) - if req.headers["Content-Type"].startswith("application/x-www-form-urlencoded"): - return dict(parse_qsl(body)) - return orjson.loads(body) + with record_event( + IntegrationPipelineViewType.TOKEN_EXCHANGE, pipeline.provider.key + ).capture(): + req = safe_urlopen( + url=self.access_token_url, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Content-Length": "1322", + }, + data={ + "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer", + "client_assertion": self.client_secret, + "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer", + "assertion": code, + "redirect_uri": absolute_uri(pipeline.redirect_url()), + }, + ) + body = safe_urlread(req) + if req.headers["Content-Type"].startswith("application/x-www-form-urlencoded"): + return dict(parse_qsl(body)) + return orjson.loads(body) # TODO(iamrajjoshi): Make this the default provider @@ -232,18 +239,24 @@ def exchange_token(self, request: Request, pipeline, code): from sentry.http import safe_urlopen, safe_urlread from sentry.utils.http import absolute_uri - req = safe_urlopen( - url=self.access_token_url, - headers={"Content-Type": "application/x-www-form-urlencoded", "Content-Length": "1322"}, - data={ - "grant_type": "authorization_code", - "client_id": self.client_id, - "client_secret": self.client_secret, - "code": code, - "redirect_uri": absolute_uri(pipeline.redirect_url()), - }, - ) - body = safe_urlread(req) - if req.headers["Content-Type"].startswith("application/x-www-form-urlencoded"): - return dict(parse_qsl(body)) - return orjson.loads(body) + with record_event( + IntegrationPipelineViewType.TOKEN_EXCHANGE, pipeline.provider.key + ).capture(): + req = safe_urlopen( + url=self.access_token_url, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Content-Length": "1322", + }, + data={ + "grant_type": "authorization_code", + "client_id": self.client_id, + "client_secret": self.client_secret, + "code": code, + "redirect_uri": absolute_uri(pipeline.redirect_url()), + }, + ) + body = safe_urlread(req) + if req.headers["Content-Type"].startswith("application/x-www-form-urlencoded"): + return dict(parse_qsl(body)) + return orjson.loads(body) diff --git a/src/sentry/incidents/charts.py b/src/sentry/incidents/charts.py index b9797c5cce07c6..db53f4668704b7 100644 --- a/src/sentry/incidents/charts.py +++ b/src/sentry/incidents/charts.py @@ -292,6 +292,11 @@ def build_metric_alert_chart( else: if query_type == SnubaQuery.Type.PERFORMANCE and dataset == Dataset.PerformanceMetrics: query_params["dataset"] = "metrics" + elif ( + query_type == SnubaQuery.Type.PERFORMANCE and dataset == Dataset.EventsAnalyticsPlatform + ): + query_params["dataset"] = "spans" + query_params["useRpc"] = "1" elif query_type == SnubaQuery.Type.ERROR: query_params["dataset"] = "errors" else: diff --git 
a/src/sentry/incidents/endpoints/organization_alert_rule_details.py b/src/sentry/incidents/endpoints/organization_alert_rule_details.py index 848c005af5eb94..e2a2225d1a0d40 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_details.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_details.py @@ -212,9 +212,6 @@ class OrganizationAlertRuleDetailsPutSerializer(serializers.Serializer): owner = ActorField( required=False, allow_null=True, help_text="The ID of the team or user that owns the rule." ) - excludedProjects = serializers.ListField( - child=ProjectField(scope="project:read"), required=False - ) thresholdPeriod = serializers.IntegerField(required=False, default=1, min_value=1, max_value=20) monitorType = serializers.IntegerField( required=False, diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_index.py b/src/sentry/incidents/endpoints/organization_alert_rule_index.py index 6a896cdb6bf22a..f2942cc73d12b1 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_index.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_index.py @@ -409,9 +409,6 @@ class OrganizationAlertRuleIndexPostSerializer(serializers.Serializer): owner = ActorField( required=False, allow_null=True, help_text="The ID of the team or user that owns the rule." ) - excludedProjects = serializers.ListField( - child=ProjectField(scope="project:read"), required=False - ) thresholdPeriod = serializers.IntegerField(required=False, default=1, min_value=1, max_value=20) monitorType = serializers.IntegerField( required=False, diff --git a/src/sentry/incidents/endpoints/serializers/alert_rule.py b/src/sentry/incidents/endpoints/serializers/alert_rule.py index 0ec511f596727c..9d6e31d3c65935 100644 --- a/src/sentry/incidents/endpoints/serializers/alert_rule.py +++ b/src/sentry/incidents/endpoints/serializers/alert_rule.py @@ -17,7 +17,6 @@ AlertRule, AlertRuleActivity, AlertRuleActivityType, - AlertRuleExcludedProjects, AlertRuleTrigger, AlertRuleTriggerAction, ) @@ -40,7 +39,6 @@ class AlertRuleSerializerResponseOptional(TypedDict, total=False): environment: str | None projects: list[str] | None - excludedProjects: list[dict] | None queryType: int | None resolveThreshold: float | None dataset: str | None @@ -63,8 +61,6 @@ class AlertRuleSerializerResponseOptional(TypedDict, total=False): "status", "resolution", "thresholdPeriod", - "includeAllProjects", - "excludedProjects", "weeklyAvg", "totalThisWeek", "latestIncident", @@ -89,7 +85,6 @@ class AlertRuleSerializerResponse(AlertRuleSerializerResponseOptional): resolution: float thresholdPeriod: int triggers: list[dict] - includeAllProjects: bool dateModified: datetime dateCreated: datetime createdBy: dict @@ -309,7 +304,6 @@ def serialize( "thresholdPeriod": obj.threshold_period, "triggers": attrs.get("triggers", []), "projects": sorted(attrs.get("projects", [])), - "includeAllProjects": obj.include_all_projects, "owner": attrs.get("owner", None), "originalAlertRuleId": attrs.get("originalAlertRuleId", None), "comparisonDelta": obj.comparison_delta / 60 if obj.comparison_delta else None, @@ -343,13 +337,6 @@ def get_attrs( self, item_list: Sequence[Any], user: User | RpcUser, **kwargs: Any ) -> defaultdict[AlertRule, Any]: result = super().get_attrs(item_list, user, **kwargs) - alert_rules = {item.id: item for item in item_list} - for alert_rule_id, project_slug in AlertRuleExcludedProjects.objects.filter( - alert_rule__in=item_list - ).values_list("alert_rule_id", "project__slug"): - exclusions = 
result[alert_rules[alert_rule_id]].setdefault("excluded_projects", []) - exclusions.append(project_slug) - query_to_alert_rule = {ar.snuba_query_id: ar for ar in item_list} for event_type in SnubaQueryEventType.objects.filter( @@ -366,7 +353,6 @@ def serialize( self, obj: AlertRule, attrs: Mapping[Any, Any], user: User | RpcUser, **kwargs ) -> AlertRuleSerializerResponse: data = super().serialize(obj, attrs, user) - data["excludedProjects"] = sorted(attrs.get("excluded_projects", [])) data["eventTypes"] = sorted(attrs.get("event_types", [])) data["snooze"] = False return data diff --git a/src/sentry/incidents/endpoints/serializers/alert_rule_trigger.py b/src/sentry/incidents/endpoints/serializers/alert_rule_trigger.py index c188c507c94349..66aca9a48658ae 100644 --- a/src/sentry/incidents/endpoints/serializers/alert_rule_trigger.py +++ b/src/sentry/incidents/endpoints/serializers/alert_rule_trigger.py @@ -5,11 +5,7 @@ from sentry.api.serializers import Serializer, register, serialize from sentry.incidents.endpoints.utils import translate_threshold -from sentry.incidents.models.alert_rule import ( - AlertRuleTrigger, - AlertRuleTriggerAction, - AlertRuleTriggerExclusion, -) +from sentry.incidents.models.alert_rule import AlertRuleTrigger, AlertRuleTriggerAction @register(AlertRuleTrigger) @@ -45,20 +41,3 @@ def serialize(self, obj, attrs, user, **kwargs): "dateCreated": obj.date_added, "actions": attrs.get("actions", []), } - - -class DetailedAlertRuleTriggerSerializer(AlertRuleTriggerSerializer): - def get_attrs(self, item_list, user, **kwargs): - triggers = {item.id: item for item in item_list} - result: dict[str, dict[str, list[str]]] = defaultdict(lambda: defaultdict(list)) - for trigger_id, project_slug in AlertRuleTriggerExclusion.objects.filter( - alert_rule_trigger__in=item_list - ).values_list("alert_rule_trigger_id", "query_subscription__project__slug"): - if project_slug is not None: - result[triggers[trigger_id]]["excludedProjects"].append(project_slug) - return result - - def serialize(self, obj, attrs, user, **kwargs): - data = super().serialize(obj, attrs, user, **kwargs) - data["excludedProjects"] = sorted(attrs.get("excludedProjects", [])) - return data diff --git a/src/sentry/incidents/logic.py b/src/sentry/incidents/logic.py index 7504fb9c5b7f91..2f75ab8add4654 100644 --- a/src/sentry/incidents/logic.py +++ b/src/sentry/incidents/logic.py @@ -60,12 +60,14 @@ from sentry.models.project import Project from sentry.notifications.models.notificationaction import ActionService, ActionTarget from sentry.relay.config.metric_extraction import on_demand_metrics_feature_flags +from sentry.search.eap.types import SearchResolverConfig from sentry.search.events.builder.base import BaseQueryBuilder from sentry.search.events.constants import ( METRICS_LAYER_UNSUPPORTED_TRANSACTION_METRICS_FUNCTIONS, SPANS_METRICS_FUNCTIONS, ) from sentry.search.events.fields import is_function, resolve_field +from sentry.search.events.types import SnubaParams from sentry.seer.anomaly_detection.delete_rule import delete_rule_in_seer from sentry.seer.anomaly_detection.store_data import send_new_rule_data, update_rule_data from sentry.sentry_apps.services.app import RpcSentryAppInstallation, app_service @@ -74,7 +76,8 @@ DuplicateDisplayNameError, IntegrationError, ) -from sentry.snuba.dataset import Dataset +from sentry.snuba import spans_rpc +from sentry.snuba.dataset import Dataset, EntityKey from sentry.snuba.entity_subscription import ( ENTITY_TIME_COLUMNS, EntitySubscription, @@ -85,6 +88,7 @@ from 
sentry.snuba.metrics.extraction import should_use_on_demand_metrics from sentry.snuba.metrics.naming_layer.mri import get_available_operations, is_mri, parse_mri from sentry.snuba.models import QuerySubscription, SnubaQuery, SnubaQueryEventType +from sentry.snuba.referrer import Referrer from sentry.snuba.subscriptions import ( bulk_delete_snuba_subscriptions, bulk_disable_snuba_subscriptions, @@ -417,20 +421,61 @@ def get_incident_aggregates( snuba_query, incident.organization_id, ) - query_builder = _build_incident_query_builder( - incident, entity_subscription, start, end, windowed_stats - ) - try: - results = query_builder.run_query(referrer="incidents.get_incident_aggregates") - except Exception: - metrics.incr( - "incidents.get_incident_aggregates.snql.query.error", - tags={ - "dataset": snuba_query.dataset, - "entity": get_entity_key_from_query_builder(query_builder).value, - }, + if entity_subscription.dataset == Dataset.EventsAnalyticsPlatform: + start, end = _calculate_incident_time_range( + incident, start, end, windowed_stats=windowed_stats + ) + + project_ids = list( + IncidentProject.objects.filter(incident=incident).values_list("project_id", flat=True) ) - raise + + params = SnubaParams( + environments=[snuba_query.environment], + projects=[Project.objects.get_from_cache(id=project_id) for project_id in project_ids], + organization=Organization.objects.get_from_cache(id=incident.organization_id), + start=start, + end=end, + ) + + try: + results = spans_rpc.run_table_query( + params, + query_string=snuba_query.query, + selected_columns=[entity_subscription.aggregate], + orderby=None, + offset=0, + limit=1, + referrer=Referrer.API_ALERTS_ALERT_RULE_CHART.value, + config=SearchResolverConfig( + auto_fields=True, + ), + ) + + except Exception: + metrics.incr( + "incidents.get_incident_aggregates.snql.query.error", + tags={ + "dataset": snuba_query.dataset, + "entity": EntityKey.EAPSpans.value, + }, + ) + raise + else: + query_builder = _build_incident_query_builder( + incident, entity_subscription, start, end, windowed_stats + ) + try: + results = query_builder.run_query(referrer="incidents.get_incident_aggregates") + except Exception: + metrics.incr( + "incidents.get_incident_aggregates.snql.query.error", + tags={ + "dataset": snuba_query.dataset, + "entity": get_entity_key_from_query_builder(query_builder).value, + }, + ) + raise aggregated_result = entity_subscription.aggregate_query_results(results["data"], alias="count") return aggregated_result[0] diff --git a/src/sentry/incidents/models/alert_rule.py b/src/sentry/incidents/models/alert_rule.py index eed2bec7379af3..75eb7696b6c8f6 100644 --- a/src/sentry/incidents/models/alert_rule.py +++ b/src/sentry/incidents/models/alert_rule.py @@ -241,26 +241,6 @@ def conditionally_subscribe_project_to_alert_rules( return [] -@region_silo_model -class AlertRuleExcludedProjects(Model): - """ - Excludes a specific project from an AlertRule - - NOTE: This feature is not currently utilized. 
- """ - - __relocation_scope__ = RelocationScope.Organization - - alert_rule = FlexibleForeignKey("sentry.AlertRule", db_index=False) - project = FlexibleForeignKey("sentry.Project", db_constraint=False) - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_alertruleexcludedprojects" - unique_together = (("alert_rule", "project"),) - - @region_silo_model class AlertRuleProjects(Model): """ @@ -301,15 +281,6 @@ class AlertRule(Model): user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL") team = FlexibleForeignKey("sentry.Team", null=True, on_delete=models.SET_NULL) - - excluded_projects = models.ManyToManyField( - "sentry.Project", related_name="alert_rule_exclusions", through=AlertRuleExcludedProjects - ) # NOTE: This feature is not currently utilized. - # Determines whether we include all current and future projects from this - # organization in this rule. - include_all_projects = models.BooleanField( - default=False - ) # NOTE: This feature is not currently utilized. name = models.TextField() status = models.SmallIntegerField(default=AlertRuleStatus.PENDING.value) threshold_type = models.SmallIntegerField(null=True) @@ -482,24 +453,6 @@ class Meta: unique_together = (("alert_rule", "label"),) -@region_silo_model -class AlertRuleTriggerExclusion(Model): - """ - Allows us to define a specific trigger to be excluded from a query subscription - """ - - __relocation_scope__ = RelocationScope.Organization - - alert_rule_trigger = FlexibleForeignKey("sentry.AlertRuleTrigger", related_name="exclusions") - query_subscription = FlexibleForeignKey("sentry.QuerySubscription") - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_alertruletriggerexclusion" - unique_together = (("alert_rule_trigger", "query_subscription"),) - - class AlertRuleTriggerActionMethod(StrEnum): FIRE = "fire" RESOLVE = "resolve" diff --git a/src/sentry/incidents/serializers/alert_rule.py b/src/sentry/incidents/serializers/alert_rule.py index 74d631d95d8ae5..4c0c6ddac49405 100644 --- a/src/sentry/incidents/serializers/alert_rule.py +++ b/src/sentry/incidents/serializers/alert_rule.py @@ -76,9 +76,6 @@ class AlertRuleSerializer(CamelSnakeModelSerializer[AlertRule]): required=False, max_length=1, ) - excluded_projects = serializers.ListField( - child=ProjectField(scope="project:read"), required=False - ) triggers = serializers.ListField(required=True) query_type = serializers.IntegerField(required=False) dataset = serializers.CharField(required=False) @@ -123,8 +120,6 @@ class Meta: "comparison_delta", "aggregate", "projects", - "include_all_projects", - "excluded_projects", "triggers", "event_types", "monitor_type", @@ -136,7 +131,6 @@ class Meta: ] extra_kwargs = { "name": {"min_length": 1, "max_length": 256}, - "include_all_projects": {"default": False}, "threshold_type": {"required": True}, "resolve_threshold": {"required": False}, } diff --git a/src/sentry/ingest/billing_metrics_consumer.py b/src/sentry/ingest/billing_metrics_consumer.py index 688d1e71a7e70f..471855aa356f6d 100644 --- a/src/sentry/ingest/billing_metrics_consumer.py +++ b/src/sentry/ingest/billing_metrics_consumer.py @@ -14,7 +14,6 @@ from django.core.cache import cache from sentry_kafka_schemas.schema_types.snuba_generic_metrics_v1 import GenericMetric -from sentry import options from sentry.constants import DataCategory from sentry.models.project import Project from 
sentry.sentry_metrics.indexer.strings import ( @@ -105,13 +104,6 @@ def _count_processed_items(self, generic_metric: GenericMetric) -> Mapping[DataC items = {data_category: quantity} - if not options.get("profiling.emit_outcomes_in_profiling_consumer.enabled"): - if self._has_profile(generic_metric): - # The bucket is tagged with the "has_profile" tag, - # so we also count the quantity of this bucket towards profiles. - # This assumes a "1 to 0..1" relationship between transactions / spans and profiles. - items[DataCategory.PROFILE] = quantity - return items def _has_profile(self, generic_metric: GenericMetric) -> bool: diff --git a/src/sentry/ingest/consumer/processors.py b/src/sentry/ingest/consumer/processors.py index 95e42756b48164..5efaa577677254 100644 --- a/src/sentry/ingest/consumer/processors.py +++ b/src/sentry/ingest/consumer/processors.py @@ -203,9 +203,9 @@ def process_event( else: with metrics.timer("ingest_consumer._store_event"): cache_key = processing_store.store(data) - if data.get("type") == "transaction": + if consumer_type == ConsumerType.Transactions: track_sampled_event( - data["event_id"], "transaction", TransactionStageStatus.REDIS_PUT + data["event_id"], ConsumerType.Transactions, TransactionStageStatus.REDIS_PUT ) save_attachments(attachments, cache_key) diff --git a/src/sentry/ingest/types.py b/src/sentry/ingest/types.py index d9ca4198dbc01b..4b0c4596d72796 100644 --- a/src/sentry/ingest/types.py +++ b/src/sentry/ingest/types.py @@ -1,4 +1,7 @@ -class ConsumerType: +from enum import StrEnum + + +class ConsumerType(StrEnum): """ Defines the types of ingestion consumers """ diff --git a/src/sentry/integrations/api/endpoints/doc_integration_details.py b/src/sentry/integrations/api/endpoints/doc_integration_details.py index a06c47481bd5ee..e6974c13876c38 100644 --- a/src/sentry/integrations/api/endpoints/doc_integration_details.py +++ b/src/sentry/integrations/api/endpoints/doc_integration_details.py @@ -22,9 +22,9 @@ class DocIntegrationDetailsEndpoint(DocIntegrationBaseEndpoint): owner = ApiOwner.INTEGRATIONS publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "GET": ApiPublishStatus.UNKNOWN, - "PUT": ApiPublishStatus.UNKNOWN, + "DELETE": ApiPublishStatus.PRIVATE, + "GET": ApiPublishStatus.PRIVATE, + "PUT": ApiPublishStatus.PRIVATE, } def get(self, request: Request, doc_integration: DocIntegration) -> Response: diff --git a/src/sentry/integrations/api/endpoints/doc_integrations_index.py b/src/sentry/integrations/api/endpoints/doc_integrations_index.py index 9358d0bc0d2b66..869079209a13c1 100644 --- a/src/sentry/integrations/api/endpoints/doc_integrations_index.py +++ b/src/sentry/integrations/api/endpoints/doc_integrations_index.py @@ -23,8 +23,8 @@ class DocIntegrationsEndpoint(DocIntegrationsBaseEndpoint): owner = ApiOwner.INTEGRATIONS publish_status = { - "GET": ApiPublishStatus.UNKNOWN, - "POST": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, + "POST": ApiPublishStatus.PRIVATE, } def get(self, request: Request): diff --git a/src/sentry/integrations/bitbucket/search.py b/src/sentry/integrations/bitbucket/search.py index 708341f88b8585..234229e3919646 100644 --- a/src/sentry/integrations/bitbucket/search.py +++ b/src/sentry/integrations/bitbucket/search.py @@ -9,6 +9,10 @@ from sentry.integrations.bitbucket.integration import BitbucketIntegration from sentry.integrations.models.integration import Integration from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from 
sentry.integrations.source_code_management.metrics import ( + SCMIntegrationInteractionType, + SourceCodeSearchEndpointHaltReason, +) from sentry.integrations.source_code_management.search import SourceCodeSearchEndpoint from sentry.shared_integrations.exceptions import ApiError @@ -37,32 +41,37 @@ def installation_class(self): return BitbucketIntegration def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: - assert repo + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_ISSUES + ).capture() as lifecycle: + assert repo - full_query = f'title~"{query}"' - try: - response = installation.search_issues(query=full_query, repo=repo) - except ApiError as e: - if "no issue tracker" in str(e): - logger.info( - "bitbucket.issue-search-no-issue-tracker", - extra={"installation_id": installation.model.id, "repo": repo}, - ) - return Response( - {"detail": "Bitbucket Repository has no issue tracker."}, status=400 - ) - raise + full_query = f'title~"{query}"' + try: + response = installation.search_issues(query=full_query, repo=repo) + except ApiError as e: + if "no issue tracker" in str(e): + lifecycle.record_halt(str(SourceCodeSearchEndpointHaltReason.NO_ISSUE_TRACKER)) + logger.info( + "bitbucket.issue-search-no-issue-tracker", + extra={"installation_id": installation.model.id, "repo": repo}, + ) + return Response( + {"detail": "Bitbucket Repository has no issue tracker."}, status=400 + ) + raise - assert isinstance(response, dict) - return Response( - [ - {"label": "#{} {}".format(i["id"], i["title"]), "value": i["id"]} - for i in response.get("values", []) - ] - ) + assert isinstance(response, dict) + return Response( + [ + {"label": "#{} {}".format(i["id"], i["title"]), "value": i["id"]} + for i in response.get("values", []) + ] + ) def handle_search_repositories( self, integration: Integration, installation: T, query: str ) -> Response: - result = installation.get_repositories(query) - return Response([{"label": i["name"], "value": i["name"]} for i in result]) + with self.record_event(SCMIntegrationInteractionType.HANDLE_SEARCH_REPOSITORIES).capture(): + result = installation.get_repositories(query) + return Response([{"label": i["name"], "value": i["name"]} for i in result]) diff --git a/src/sentry/integrations/discord/actions/metric_alert.py b/src/sentry/integrations/discord/actions/metric_alert.py index c21997f4f2d0fc..beff2896743e6f 100644 --- a/src/sentry/integrations/discord/actions/metric_alert.py +++ b/src/sentry/integrations/discord/actions/metric_alert.py @@ -38,7 +38,7 @@ def send_incident_alert_notification( # We can't send a message if we don't know the channel logger.warning( "discord.metric_alert.no_channel", - extra={"guild_id": incident.identifier}, + extra={"incident_id": incident.id}, ) return False @@ -56,7 +56,7 @@ def send_incident_alert_notification( except Exception as error: logger.warning( "discord.metric_alert.message_send_failure", - extra={"error": error, "guild_id": incident.identifier, "channel_id": channel}, + extra={"error": error, "incident_id": incident.id, "channel_id": channel}, ) return False else: diff --git a/src/sentry/integrations/discord/webhooks/command.py b/src/sentry/integrations/discord/webhooks/command.py index 5b7d63d0c262fd..3736d7ff9e55e9 100644 --- a/src/sentry/integrations/discord/webhooks/command.py +++ b/src/sentry/integrations/discord/webhooks/command.py @@ -80,7 +80,7 @@ def help_handler(self, input: CommandInput) -> IntegrationResponse[str]: def link_user_handler(self, _: 
CommandInput) -> IntegrationResponse[str]: if self.request.has_identity(): return IntegrationResponse( - interaction_result=EventLifecycleOutcome.HALTED, + interaction_result=EventLifecycleOutcome.SUCCESS, response=ALREADY_LINKED_MESSAGE.format(email=self.request.get_identity_str()), outcome_reason=str(MessageCommandHaltReason.ALREADY_LINKED), context_data={ @@ -120,7 +120,7 @@ def link_user_handler(self, _: CommandInput) -> IntegrationResponse[str]: def unlink_user_handler(self, input: CommandInput) -> IntegrationResponse[str]: if not self.request.has_identity(): return IntegrationResponse( - interaction_result=EventLifecycleOutcome.HALTED, + interaction_result=EventLifecycleOutcome.SUCCESS, response=NOT_LINKED_MESSAGE, outcome_reason=str(MessageCommandHaltReason.NOT_LINKED), ) diff --git a/src/sentry/integrations/github/search.py b/src/sentry/integrations/github/search.py index 523b9d61e4a5db..19d013fac4befd 100644 --- a/src/sentry/integrations/github/search.py +++ b/src/sentry/integrations/github/search.py @@ -7,6 +7,10 @@ from sentry.integrations.github_enterprise.integration import GitHubEnterpriseIntegration from sentry.integrations.models.integration import Integration from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from sentry.integrations.source_code_management.metrics import ( + SCMIntegrationInteractionType, + SourceCodeSearchEndpointHaltReason, +) from sentry.integrations.source_code_management.search import SourceCodeSearchEndpoint from sentry.shared_integrations.exceptions import ApiError @@ -30,42 +34,53 @@ def installation_class(self): return (GitHubIntegration, GitHubEnterpriseIntegration) def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: - assert repo + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_ISSUES + ).capture() as lifecycle: + assert repo - try: - response = installation.search_issues(query=f"repo:{repo} {query}") - except ApiError as err: - if err.code == 403: - return Response({"detail": "Rate limit exceeded"}, status=429) - raise + try: + response = installation.search_issues(query=f"repo:{repo} {query}") + except ApiError as err: + if err.code == 403: + lifecycle.record_halt(str(SourceCodeSearchEndpointHaltReason.RATE_LIMITED)) + return Response({"detail": "Rate limit exceeded"}, status=429) + raise - assert isinstance(response, dict) - return Response( - [ - {"label": "#{} {}".format(i["number"], i["title"]), "value": i["number"]} - for i in response.get("items", []) - ] - ) + assert isinstance(response, dict) + return Response( + [ + {"label": "#{} {}".format(i["number"], i["title"]), "value": i["number"]} + for i in response.get("items", []) + ] + ) def handle_search_repositories( self, integration: Integration, installation: T, query: str ) -> Response: - assert isinstance(installation, self.installation_class) + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_REPOSITORIES + ).capture() as lifecyle: + assert isinstance(installation, self.installation_class) - full_query = build_repository_query(integration.metadata, integration.name, query) - try: - response = installation.get_client().search_repositories(full_query) - except ApiError as err: - if err.code == 403: - return Response({"detail": "Rate limit exceeded"}, status=429) - if err.code == 422: - return Response( - { - "detail": "Repositories could not be searched because they do not exist, or you do not have access to them." 
- }, - status=404, - ) - raise - return Response( - [{"label": i["name"], "value": i["full_name"]} for i in response.get("items", [])] - ) + full_query = build_repository_query(integration.metadata, integration.name, query) + try: + response = installation.get_client().search_repositories(full_query) + except ApiError as err: + if err.code == 403: + lifecyle.record_halt(str(SourceCodeSearchEndpointHaltReason.RATE_LIMITED)) + return Response({"detail": "Rate limit exceeded"}, status=429) + if err.code == 422: + lifecyle.record_halt( + str(SourceCodeSearchEndpointHaltReason.MISSING_REPOSITORY_OR_NO_ACCESS) + ) + return Response( + { + "detail": "Repositories could not be searched because they do not exist, or you do not have access to them." + }, + status=404, + ) + raise + return Response( + [{"label": i["name"], "value": i["full_name"]} for i in response.get("items", [])] + ) diff --git a/src/sentry/integrations/gitlab/search.py b/src/sentry/integrations/gitlab/search.py index 45ef39b78a1496..5423110e0eef40 100644 --- a/src/sentry/integrations/gitlab/search.py +++ b/src/sentry/integrations/gitlab/search.py @@ -6,6 +6,7 @@ from sentry.integrations.gitlab.integration import GitlabIntegration from sentry.integrations.models.integration import Integration from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from sentry.integrations.source_code_management.metrics import SCMIntegrationInteractionType from sentry.integrations.source_code_management.search import SourceCodeSearchEndpoint from sentry.shared_integrations.exceptions import ApiError @@ -27,43 +28,51 @@ def installation_class(self): return GitlabIntegration def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: - assert repo + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_ISSUES + ).capture() as lifecycle: + assert repo - full_query: str | None = query + full_query: str | None = query - try: - iids = [int(query)] - full_query = None - except ValueError: - iids = None + try: + iids = [int(query)] + full_query = None + except ValueError: + iids = None - try: - response = installation.search_issues(query=full_query, project_id=repo, iids=iids) - except ApiError as e: - return Response({"detail": str(e)}, status=400) + try: + response = installation.search_issues(query=full_query, project_id=repo, iids=iids) + except ApiError as e: + lifecycle.record_failure(e) + return Response({"detail": str(e)}, status=400) - assert isinstance(response, list) - return Response( - [ - { - "label": "(#{}) {}".format(i["iid"], i["title"]), - "value": "{}#{}".format(i["project_id"], i["iid"]), - } - for i in response - ] - ) + assert isinstance(response, list) + return Response( + [ + { + "label": "(#{}) {}".format(i["iid"], i["title"]), + "value": "{}#{}".format(i["project_id"], i["iid"]), + } + for i in response + ] + ) def handle_search_repositories( self, integration: Integration, installation: T, query: str ) -> Response: - assert isinstance(installation, self.installation_class) - try: - response = installation.search_projects(query) - except ApiError as e: - return Response({"detail": str(e)}, status=400) - return Response( - [ - {"label": project["name_with_namespace"], "value": project["id"]} - for project in response - ] - ) + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_REPOSITORIES + ).capture() as lifecyle: + assert isinstance(installation, self.installation_class) + try: + response = installation.search_projects(query) + 
except ApiError as e: + lifecyle.record_failure(e) + return Response({"detail": str(e)}, status=400) + return Response( + [ + {"label": project["name_with_namespace"], "value": project["id"]} + for project in response + ] + ) diff --git a/src/sentry/integrations/jira/utils/api.py b/src/sentry/integrations/jira/utils/api.py index cdc0f2b37e6a51..a3701ad2d7c5a9 100644 --- a/src/sentry/integrations/jira/utils/api.py +++ b/src/sentry/integrations/jira/utils/api.py @@ -13,6 +13,11 @@ from sentry.shared_integrations.exceptions import ApiError from ...mixins.issues import IssueSyncIntegration +from ...project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, + ProjectManagementHaltReason, +) from ..client import JiraCloudClient logger = logging.getLogger(__name__) @@ -75,26 +80,45 @@ def handle_assignee_change( sync_group_assignee_inbound(integration, email, issue_key, assign=True) -def handle_status_change(integration, data): - issue_key = data["issue"]["key"] - status_changed = any(item for item in data["changelog"]["items"] if item["field"] == "status") - log_context = {"issue_key": issue_key, "integration_id": integration.id} - - if not status_changed: - logger.info("jira.handle_status_change.unchanged", extra=log_context) - return - - try: - changelog = next(item for item in data["changelog"]["items"] if item["field"] == "status") - except StopIteration: - logger.info("jira.missing-changelog-status", extra=log_context) - return - - result = integration_service.organization_contexts(integration_id=integration.id) - for oi in result.organization_integrations: - install = integration.get_installation(organization_id=oi.organization_id) - if isinstance(install, IssueSyncIntegration): - install.sync_status_inbound(issue_key, {"changelog": changelog, "issue": data["issue"]}) +# TODO(Gabe): Consolidate this with VSTS's implementation, create DTO for status +# changes. 
+def handle_status_change(integration: RpcIntegration, data: Mapping[str, Any]) -> None: + with ProjectManagementEvent( + action_type=ProjectManagementActionType.INBOUND_STATUS_SYNC, integration=integration + ).capture() as lifecycle: + issue_key = data["issue"]["key"] + status_changed = any( + item for item in data["changelog"]["items"] if item["field"] == "status" + ) + log_context = {"issue_key": issue_key, "integration_id": integration.id} + + if not status_changed: + logger.info("jira.handle_status_change.unchanged", extra=log_context) + return + + try: + changelog = next( + item for item in data["changelog"]["items"] if item["field"] == "status" + ) + except StopIteration: + lifecycle.record_halt( + ProjectManagementHaltReason.SYNC_INBOUND_MISSING_CHANGELOG_STATUS, extra=log_context + ) + logger.info("jira.missing-changelog-status", extra=log_context) + return + + result = integration_service.organization_contexts(integration_id=integration.id) + for oi in result.organization_integrations: + install = integration.get_installation(organization_id=oi.organization_id) + if isinstance(install, IssueSyncIntegration): + install.sync_status_inbound( + issue_key, {"changelog": changelog, "issue": data["issue"]} + ) + else: + lifecycle.record_halt( + ProjectManagementHaltReason.SYNC_NON_SYNC_INTEGRATION_PROVIDED, + extra=log_context, + ) def handle_jira_api_error(error: ApiError, message: str = "") -> Mapping[str, str] | None: diff --git a/src/sentry/integrations/jira/webhooks/installed.py b/src/sentry/integrations/jira/webhooks/installed.py index 421ed574fd93c4..7b6b33f13089c6 100644 --- a/src/sentry/integrations/jira/webhooks/installed.py +++ b/src/sentry/integrations/jira/webhooks/installed.py @@ -11,6 +11,9 @@ from sentry.integrations.utils.atlassian_connect import authenticate_asymmetric_jwt, verify_claims from sentry.utils import jwt +from ...base import IntegrationDomain +from ...project_management.metrics import ProjectManagementFailuresReason +from ...utils.metrics import IntegrationPipelineViewEvent, IntegrationPipelineViewType from ..integration import JiraIntegrationProvider from .base import JiraWebhookBase @@ -26,28 +29,34 @@ class JiraSentryInstalledWebhook(JiraWebhookBase): """ def post(self, request: Request, *args, **kwargs) -> Response: - token = self.get_token(request) - - state = request.data - if not state: - return self.respond(status=status.HTTP_400_BAD_REQUEST) - - key_id = jwt.peek_header(token).get("kid") - if key_id: - decoded_claims = authenticate_asymmetric_jwt(token, key_id) - verify_claims(decoded_claims, request.path, request.GET, method="POST") - - data = JiraIntegrationProvider().build_integration(state) - integration = ensure_integration(self.provider, data) - - # Note: Unlike in all other Jira webhooks, we don't call `bind_org_context_from_integration` - # here, because at this point the integration hasn't yet been bound to an organization. The - # best we can do at this point is to record the integration's id. - sentry_sdk.set_tag("integration_id", integration.id) - - # Sync integration metadata from Jira. This must be executed *after* - # the integration has been installed on Jira as the access tokens will - # not work until then. 
- sync_metadata.apply_async(kwargs={"integration_id": integration.id}, countdown=10) - - return self.respond() + with IntegrationPipelineViewEvent( + interaction_type=IntegrationPipelineViewType.VERIFY_INSTALLATION, + domain=IntegrationDomain.PROJECT_MANAGEMENT, + provider_key=self.provider, + ).capture() as lifecycle: + token = self.get_token(request) + + state = request.data + if not state: + lifecycle.record_failure(ProjectManagementFailuresReason.INSTALLATION_STATE_MISSING) + return self.respond(status=status.HTTP_400_BAD_REQUEST) + + key_id = jwt.peek_header(token).get("kid") + if key_id: + decoded_claims = authenticate_asymmetric_jwt(token, key_id) + verify_claims(decoded_claims, request.path, request.GET, method="POST") + + data = JiraIntegrationProvider().build_integration(state) + integration = ensure_integration(self.provider, data) + + # Note: Unlike in all other Jira webhooks, we don't call `bind_org_context_from_integration` + # here, because at this point the integration hasn't yet been bound to an organization. The + # best we can do at this point is to record the integration's id. + sentry_sdk.set_tag("integration_id", integration.id) + + # Sync integration metadata from Jira. This must be executed *after* + # the integration has been installed on Jira as the access tokens will + # not work until then. + sync_metadata.apply_async(kwargs={"integration_id": integration.id}, countdown=10) + + return self.respond() diff --git a/src/sentry/integrations/msteams/webhook.py b/src/sentry/integrations/msteams/webhook.py index e07e7b9ba4a1ad..c47087573fe4dc 100644 --- a/src/sentry/integrations/msteams/webhook.py +++ b/src/sentry/integrations/msteams/webhook.py @@ -669,7 +669,7 @@ def link_user_handler(self, input: CommandInput) -> IntegrationResponse[Adaptive if has_linked_identity: return IntegrationResponse( - interaction_result=EventLifecycleOutcome.HALTED, + interaction_result=EventLifecycleOutcome.SUCCESS, response=build_already_linked_identity_command_card(), outcome_reason=str(MessageCommandHaltReason.ALREADY_LINKED), context_data={ diff --git a/src/sentry/integrations/on_call/metrics.py b/src/sentry/integrations/on_call/metrics.py index 76f1b203c203b8..11f61a2ae666a4 100644 --- a/src/sentry/integrations/on_call/metrics.py +++ b/src/sentry/integrations/on_call/metrics.py @@ -1,4 +1,4 @@ -from enum import Enum +from enum import Enum, StrEnum from attr import dataclass @@ -56,3 +56,13 @@ def get_integration_name(self) -> str: def get_interaction_type(self) -> str: return str(self.interaction_type) + + +class OnCallIntegrationsHaltReason(StrEnum): + """ + Reasons why on on call integration method may halt without success/failure. 
+ """ + + INVALID_TEAM = "invalid_team" + INVALID_SERVICE = "invalid_service" + INVALID_KEY = "invalid_key" diff --git a/src/sentry/integrations/opsgenie/actions/form.py b/src/sentry/integrations/opsgenie/actions/form.py index a6a29d9d208145..05bbb4bf6797e4 100644 --- a/src/sentry/integrations/opsgenie/actions/form.py +++ b/src/sentry/integrations/opsgenie/actions/form.py @@ -6,7 +6,7 @@ from django import forms from django.utils.translation import gettext_lazy as _ -from sentry.integrations.on_call.metrics import OnCallInteractionType +from sentry.integrations.on_call.metrics import OnCallIntegrationsHaltReason, OnCallInteractionType from sentry.integrations.opsgenie.metrics import record_event from sentry.integrations.opsgenie.utils import get_team from sentry.integrations.services.integration import integration_service @@ -65,7 +65,7 @@ def _get_team_status( return VALID_TEAM def _validate_team(self, team_id: str | None, integration_id: int | None) -> None: - with record_event(OnCallInteractionType.VERIFY_TEAM).capture(): + with record_event(OnCallInteractionType.VERIFY_TEAM).capture() as lifecyle: params = { "account": dict(self.fields["account"].choices).get(integration_id), "team": dict(self.fields["team"].choices).get(team_id), @@ -78,6 +78,7 @@ def _validate_team(self, team_id: str | None, integration_id: int | None) -> Non organization_id=self.org_id, ) if integration is None or org_integration is None: + lifecyle.record_halt(OnCallIntegrationsHaltReason.INVALID_TEAM) raise forms.ValidationError( _("The Opsgenie integration does not exist."), code="invalid_integration", @@ -86,6 +87,7 @@ def _validate_team(self, team_id: str | None, integration_id: int | None) -> Non team_status = self._get_team_status(team_id=team_id, org_integration=org_integration) if team_status == INVALID_TEAM: + lifecyle.record_halt(OnCallIntegrationsHaltReason.INVALID_TEAM) raise forms.ValidationError( _('The team "%(team)s" does not belong to the %(account)s Opsgenie account.'), code="invalid_team", diff --git a/src/sentry/integrations/opsgenie/integration.py b/src/sentry/integrations/opsgenie/integration.py index 8842f608956130..77e968740449cc 100644 --- a/src/sentry/integrations/opsgenie/integration.py +++ b/src/sentry/integrations/opsgenie/integration.py @@ -20,7 +20,7 @@ ) from sentry.integrations.models.integration import Integration from sentry.integrations.models.organization_integration import OrganizationIntegration -from sentry.integrations.on_call.metrics import OnCallInteractionType +from sentry.integrations.on_call.metrics import OnCallIntegrationsHaltReason, OnCallInteractionType from sentry.integrations.opsgenie.metrics import record_event from sentry.integrations.opsgenie.tasks import migrate_opsgenie_plugin from sentry.organizations.services.organization import RpcOrganizationSummary @@ -183,7 +183,7 @@ def update_organization_config(self, data: MutableMapping[str, Any]) -> None: team["id"] = str(self.org_integration.id) + "-" + team["team"] invalid_keys = [] - with record_event(OnCallInteractionType.VERIFY_KEYS).capture(): + with record_event(OnCallInteractionType.VERIFY_KEYS).capture() as lifecycle: for team in teams: # skip if team, key pair already exist in config if (team["team"], team["integration_key"]) in existing_team_key_pairs: @@ -213,6 +213,10 @@ def update_organization_config(self, data: MutableMapping[str, Any]) -> None: raise if invalid_keys: + lifecycle.record_halt( + OnCallIntegrationsHaltReason.INVALID_KEY, + extra={"invalid_keys": invalid_keys, "integration_id": 
integration.id},
+            )
             raise ApiUnauthorized(f"Invalid integration key: {str(invalid_keys)}")
         return super().update_organization_config(data)
diff --git a/src/sentry/integrations/pagerduty/actions/form.py b/src/sentry/integrations/pagerduty/actions/form.py
index 3ac96179b54794..a6f468f7539d40 100644
--- a/src/sentry/integrations/pagerduty/actions/form.py
+++ b/src/sentry/integrations/pagerduty/actions/form.py
@@ -6,7 +6,7 @@
 from django import forms
 from django.utils.translation import gettext_lazy as _
-from sentry.integrations.on_call.metrics import OnCallInteractionType
+from sentry.integrations.on_call.metrics import OnCallIntegrationsHaltReason, OnCallInteractionType
 from sentry.integrations.pagerduty.metrics import record_event
 from sentry.integrations.services.integration import integration_service
 from sentry.integrations.types import ExternalProviders
@@ -47,7 +47,7 @@ def __init__(self, *args, **kwargs):
         self.fields["service"].widget.choices = self.fields["service"].choices
     def _validate_service(self, service_id: int, integration_id: int) -> None:
-        with record_event(OnCallInteractionType.VALIDATE_SERVICE).capture():
+        with record_event(OnCallInteractionType.VALIDATE_SERVICE).capture() as lifecycle:
             params = {
                 "account": dict(self.fields["account"].choices).get(integration_id),
                 "service": dict(self.fields["service"].choices).get(service_id),
@@ -66,6 +66,7 @@ def _validate_service(self, service_id: int, integration_id: int) -> None:
             ):
                 # We need to make sure that the service actually belongs to that integration,
                 # meaning that it belongs under the appropriate account in PagerDuty.
+                lifecycle.record_halt(OnCallIntegrationsHaltReason.INVALID_SERVICE)
                 raise forms.ValidationError(
                     _(
                         'The service "%(service)s" has not been granted access in the %(account)s Pagerduty account.'
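The same instrumentation pattern recurs across the integrations in this change: wrap the operation in record_event(...).capture(), and call record_halt() for expected dead-ends (bad user input, missing configuration) so they are not reported as errors. A minimal sketch of that pattern follows, using the PagerDuty names from the hunk above; the standalone helper and its service_exists flag are illustrative assumptions, not code from this diff:

from django import forms

from sentry.integrations.on_call.metrics import OnCallIntegrationsHaltReason, OnCallInteractionType
from sentry.integrations.pagerduty.metrics import record_event


def validate_service(service_exists: bool) -> None:
    # Hypothetical helper for illustration only.
    # capture() opens a lifecycle event for this interaction and closes it
    # out when the block exits, so the outcome is recorded automatically.
    with record_event(OnCallInteractionType.VALIDATE_SERVICE).capture() as lifecycle:
        if not service_exists:
            # Expected dead-end: mark the event as halted (not failed)
            # before raising the user-facing validation error, mirroring
            # the opsgenie and pagerduty hunks above.
            lifecycle.record_halt(OnCallIntegrationsHaltReason.INVALID_SERVICE)
            raise forms.ValidationError("The service has not been granted access.")

Where an error is swallowed rather than re-raised, the code in this change calls lifecycle.record_failure(e) explicitly instead (see the get_blame_for_files hunk later in this diff), so the outcome is still reported even though no exception escapes the block.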
diff --git a/src/sentry/integrations/project_management/__init__.py b/src/sentry/integrations/project_management/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry/integrations/project_management/metrics.py b/src/sentry/integrations/project_management/metrics.py new file mode 100644 index 00000000000000..782bb9c2f4021d --- /dev/null +++ b/src/sentry/integrations/project_management/metrics.py @@ -0,0 +1,45 @@ +from dataclasses import dataclass +from enum import StrEnum + +from sentry.integrations.base import IntegrationDomain +from sentry.integrations.models import Integration +from sentry.integrations.services.integration import RpcIntegration +from sentry.integrations.utils.metrics import IntegrationEventLifecycleMetric + + +class ProjectManagementActionType(StrEnum): + CREATE_EXTERNAL_ISSUE = "create_external_issue" + OUTBOUND_ASSIGNMENT_SYNC = "outbound_assignment_sync" + INBOUND_ASSIGNMENT_SYNC = "inbound_assignment_sync" + COMMENT_SYNC = "comment_sync" + OUTBOUND_STATUS_SYNC = "outbound_status_sync" + INBOUND_STATUS_SYNC = "inbound_status_sync" + LINK_EXTERNAL_ISSUE = "link_external_issue" + + def __str__(self): + return self.value.lower() + + +class ProjectManagementHaltReason(StrEnum): + SYNC_INBOUND_ASSIGNEE_NOT_FOUND = "inbound-assignee-not-found" + SYNC_NON_SYNC_INTEGRATION_PROVIDED = "sync-non-sync-integration-provided" + SYNC_INBOUND_MISSING_CHANGELOG_STATUS = "missing-changelog-status" + + +class ProjectManagementFailuresReason(StrEnum): + INSTALLATION_STATE_MISSING = "installation-state-missing" + + +@dataclass +class ProjectManagementEvent(IntegrationEventLifecycleMetric): + action_type: ProjectManagementActionType + integration: Integration | RpcIntegration + + def get_integration_name(self) -> str: + return self.integration.provider + + def get_integration_domain(self) -> IntegrationDomain: + return IntegrationDomain.PROJECT_MANAGEMENT + + def get_interaction_type(self) -> str: + return str(self.action_type) diff --git a/src/sentry/integrations/slack/utils/notifications.py b/src/sentry/integrations/slack/utils/notifications.py index 5535275a3a4a09..2fce90fc3443db 100644 --- a/src/sentry/integrations/slack/utils/notifications.py +++ b/src/sentry/integrations/slack/utils/notifications.py @@ -140,6 +140,8 @@ def send_incident_alert_notification( "incident_id": incident.id, "incident_status": new_status, "attachments": attachments, + "channel_id": channel, + "channel_name": action.target_display, } _logger.info("slack.metric_alert.error", exc_info=True, extra=log_params) metrics.incr( diff --git a/src/sentry/integrations/slack/webhooks/base.py b/src/sentry/integrations/slack/webhooks/base.py index 286d3a1a0ef3d6..fba29ed49d3b62 100644 --- a/src/sentry/integrations/slack/webhooks/base.py +++ b/src/sentry/integrations/slack/webhooks/base.py @@ -166,7 +166,7 @@ def link_user_handler(self, input: CommandInput) -> IntegrationResponse[Response response = self.endpoint.link_user(self.request) if ALREADY_LINKED_MESSAGE.format(username=self.request.identity_str) in str(response.data): return IntegrationResponse( - interaction_result=EventLifecycleOutcome.HALTED, + interaction_result=EventLifecycleOutcome.SUCCESS, response=response, outcome_reason=str(MessageCommandHaltReason.ALREADY_LINKED), context_data={ @@ -182,7 +182,7 @@ def unlink_user_handler(self, input: CommandInput) -> IntegrationResponse[Respon response = self.endpoint.unlink_user(self.request) if NOT_LINKED_MESSAGE in str(response.data): return IntegrationResponse( - 
interaction_result=EventLifecycleOutcome.HALTED, + interaction_result=EventLifecycleOutcome.SUCCESS, response=response, outcome_reason=str(MessageCommandHaltReason.NOT_LINKED), context_data={ @@ -200,7 +200,7 @@ def link_team_handler(self, input: CommandInput) -> IntegrationResponse[Response for message, reason in self.TEAM_HALT_MAPPINGS.items(): if message in str(response.data): return IntegrationResponse( - interaction_result=EventLifecycleOutcome.HALTED, + interaction_result=EventLifecycleOutcome.SUCCESS, response=response, outcome_reason=str(reason), ) @@ -215,7 +215,7 @@ def unlink_team_handler(self, input: CommandInput) -> IntegrationResponse[Respon for message, reason in self.TEAM_HALT_MAPPINGS.items(): if message in str(response.data): return IntegrationResponse( - interaction_result=EventLifecycleOutcome.HALTED, + interaction_result=EventLifecycleOutcome.SUCCESS, response=response, outcome_reason=str(reason), ) diff --git a/src/sentry/integrations/source_code_management/commit_context.py b/src/sentry/integrations/source_code_management/commit_context.py index 590431ef4d4c70..564a7c5eeb9c35 100644 --- a/src/sentry/integrations/source_code_management/commit_context.py +++ b/src/sentry/integrations/source_code_management/commit_context.py @@ -13,6 +13,11 @@ from sentry import analytics from sentry.auth.exceptions import IdentityNotValid from sentry.integrations.models.repository_project_path_config import RepositoryProjectPathConfig +from sentry.integrations.source_code_management.metrics import ( + CommitContextHaltReason, + CommitContextIntegrationInteractionEvent, + SCMIntegrationInteractionType, +) from sentry.locks import locks from sentry.models.commit import Commit from sentry.models.group import Group @@ -26,6 +31,7 @@ PullRequestCommit, ) from sentry.models.repository import Repository +from sentry.shared_integrations.exceptions import ApiRateLimitedError from sentry.users.models.identity import Identity from sentry.utils import metrics from sentry.utils.cache import cache @@ -94,16 +100,28 @@ def get_blame_for_files( files: list of FileBlameInfo objects """ - try: - client = self.get_client() - except Identity.DoesNotExist: - return [] - try: - response = client.get_blame_for_files(files, extra) - except IdentityNotValid: - return [] - - return response + with CommitContextIntegrationInteractionEvent( + interaction_type=SCMIntegrationInteractionType.GET_BLAME_FOR_FILES, + provider_key=self.integration_name, + ).capture() as lifecycle: + try: + client = self.get_client() + except Identity.DoesNotExist as e: + lifecycle.record_failure(e) + sentry_sdk.capture_exception(e) + return [] + try: + response = client.get_blame_for_files(files, extra) + except IdentityNotValid as e: + lifecycle.record_failure(e) + sentry_sdk.capture_exception(e) + return [] + # Swallow rate limited errors so we don't log them as exceptions + except ApiRateLimitedError as e: + sentry_sdk.capture_exception(e) + lifecycle.record_halt(e) + return [] + return response def get_commit_context_all_frames( self, files: Sequence[SourceLineInfo], extra: Mapping[str, Any] @@ -120,114 +138,137 @@ def queue_comment_task_if_needed( group_owner: GroupOwner, group_id: int, ) -> None: - if not OrganizationOption.objects.get_value( + with CommitContextIntegrationInteractionEvent( + interaction_type=SCMIntegrationInteractionType.QUEUE_COMMENT_TASK, + provider_key=self.integration_name, organization=project.organization, - key="sentry:github_pr_bot", - default=True, - ): - logger.info( - 
_pr_comment_log(integration_name=self.integration_name, suffix="disabled"), - extra={"organization_id": project.organization_id}, - ) - return - - repo_query = Repository.objects.filter(id=commit.repository_id).order_by("-date_added") - group = Group.objects.get_from_cache(id=group_id) - if not ( - group.level is not logging.INFO and repo_query.exists() - ): # Don't comment on info level issues - logger.info( - _pr_comment_log( - integration_name=self.integration_name, suffix="incorrect_repo_config" - ), - extra={"organization_id": project.organization_id}, - ) - return - - repo: Repository = repo_query.get() - - logger.info( - _pr_comment_log(integration_name=self.integration_name, suffix="queue_comment_check"), - extra={"organization_id": commit.organization_id, "merge_commit_sha": commit.key}, - ) - from sentry.integrations.github.tasks.pr_comment import github_comment_workflow + project=project, + commit=commit, + ).capture() as lifecycle: + if not OrganizationOption.objects.get_value( + organization=project.organization, + key="sentry:github_pr_bot", + default=True, + ): + # TODO: remove logger in favor of the log recorded in lifecycle.record_halt + logger.info( + _pr_comment_log(integration_name=self.integration_name, suffix="disabled"), + extra={"organization_id": project.organization_id}, + ) + lifecycle.record_halt(CommitContextHaltReason.PR_BOT_DISABLED) + return + + repo_query = Repository.objects.filter(id=commit.repository_id).order_by("-date_added") + group = Group.objects.get_from_cache(id=group_id) + if not ( + group.level is not logging.INFO and repo_query.exists() + ): # Don't comment on info level issues + logger.info( + _pr_comment_log( + integration_name=self.integration_name, suffix="incorrect_repo_config" + ), + extra={"organization_id": project.organization_id}, + ) + lifecycle.record_halt(CommitContextHaltReason.INCORRECT_REPO_CONFIG) + return - # client will raise an Exception if the request is not successful - try: - client = self.get_client() - merge_commit_sha = client.get_merge_commit_sha_from_commit( - repo=repo.name, sha=commit.key - ) - except Exception as e: - sentry_sdk.capture_exception(e) - return + repo: Repository = repo_query.get() + lifecycle.add_extra("repository_id", repo.id) - if merge_commit_sha is None: logger.info( _pr_comment_log( - integration_name=self.integration_name, - suffix="queue_comment_workflow.commit_not_in_default_branch", + integration_name=self.integration_name, suffix="queue_comment_check" ), - extra={ - "organization_id": commit.organization_id, - "repository_id": repo.id, - "commit_sha": commit.key, - }, + extra={"organization_id": commit.organization_id, "merge_commit_sha": commit.key}, ) - return + scope = sentry_sdk.Scope.get_isolation_scope() + scope.set_tag("queue_comment_check.merge_commit_sha", commit.key) + scope.set_tag("queue_comment_check.organization_id", commit.organization_id) + from sentry.integrations.github.tasks.pr_comment import github_comment_workflow + + # client will raise an Exception if the request is not successful + try: + client = self.get_client() + merge_commit_sha = client.get_merge_commit_sha_from_commit( + repo=repo.name, sha=commit.key + ) + except Exception as e: + sentry_sdk.capture_exception(e) + lifecycle.record_halt(e) + return - pr_query = PullRequest.objects.filter( - organization_id=commit.organization_id, - repository_id=commit.repository_id, - merge_commit_sha=merge_commit_sha, - ) - if not pr_query.exists(): - logger.info( - _pr_comment_log( - integration_name=self.integration_name, 
- suffix="queue_comment_workflow.missing_pr", - ), - extra={ - "organization_id": commit.organization_id, - "repository_id": repo.id, - "commit_sha": commit.key, - }, - ) - return + if merge_commit_sha is None: + logger.info( + _pr_comment_log( + integration_name=self.integration_name, + suffix="queue_comment_workflow.commit_not_in_default_branch", + ), + extra={ + "organization_id": commit.organization_id, + "repository_id": repo.id, + "commit_sha": commit.key, + }, + ) + lifecycle.record_halt(CommitContextHaltReason.COMMIT_NOT_IN_DEFAULT_BRANCH) + return - pr = pr_query.first() - assert pr is not None - # need to query explicitly for merged PR comments since we can have multiple comments per PR - merged_pr_comment_query = PullRequestComment.objects.filter( - pull_request_id=pr.id, comment_type=CommentType.MERGED_PR - ) - if pr.date_added >= datetime.now(tz=timezone.utc) - timedelta(days=PR_COMMENT_WINDOW) and ( - not merged_pr_comment_query.exists() - or group_owner.group_id not in merged_pr_comment_query[0].group_ids - ): - lock = locks.get( - _debounce_pr_comment_lock_key(pr.id), duration=10, name="queue_comment_task" + pr_query = PullRequest.objects.filter( + organization_id=commit.organization_id, + repository_id=commit.repository_id, + merge_commit_sha=merge_commit_sha, ) - with lock.acquire(): - cache_key = _debounce_pr_comment_cache_key(pullrequest_id=pr.id) - if cache.get(cache_key) is not None: - return - - # create PR commit row for suspect commit and PR - PullRequestCommit.objects.get_or_create(commit=commit, pull_request=pr) - + if not pr_query.exists(): logger.info( _pr_comment_log( - integration_name=self.integration_name, suffix="queue_comment_workflow" + integration_name=self.integration_name, + suffix="queue_comment_workflow.missing_pr", ), - extra={"pullrequest_id": pr.id, "project_id": group_owner.project_id}, + extra={ + "organization_id": commit.organization_id, + "repository_id": repo.id, + "commit_sha": commit.key, + }, ) + lifecycle.record_halt(CommitContextHaltReason.MISSING_PR) + return + + pr = pr_query.first() + lifecycle.add_extra("pull_request_id", pr.id if pr else None) + assert pr is not None + # need to query explicitly for merged PR comments since we can have multiple comments per PR + merged_pr_comment_query = PullRequestComment.objects.filter( + pull_request_id=pr.id, comment_type=CommentType.MERGED_PR + ) + if pr.date_added >= datetime.now(tz=timezone.utc) - timedelta( + days=PR_COMMENT_WINDOW + ) and ( + not merged_pr_comment_query.exists() + or group_owner.group_id not in merged_pr_comment_query[0].group_ids + ): + lock = locks.get( + _debounce_pr_comment_lock_key(pr.id), duration=10, name="queue_comment_task" + ) + with lock.acquire(): + cache_key = _debounce_pr_comment_cache_key(pullrequest_id=pr.id) + if cache.get(cache_key) is not None: + lifecycle.record_halt(CommitContextHaltReason.ALREADY_QUEUED) + return - cache.set(cache_key, True, PR_COMMENT_TASK_TTL) + # create PR commit row for suspect commit and PR + PullRequestCommit.objects.get_or_create(commit=commit, pull_request=pr) - github_comment_workflow.delay( - pullrequest_id=pr.id, project_id=group_owner.project_id - ) + logger.info( + _pr_comment_log( + integration_name=self.integration_name, suffix="queue_comment_workflow" + ), + extra={"pullrequest_id": pr.id, "project_id": group_owner.project_id}, + ) + + cache.set(cache_key, True, PR_COMMENT_TASK_TTL) + + github_comment_workflow.delay( + pullrequest_id=pr.id, project_id=group_owner.project_id + ) def create_or_update_comment( self, @@ 
-248,70 +289,81 @@ def create_or_update_comment( ) pr_comment = pr_comment_query[0] if pr_comment_query.exists() else None - # client will raise ApiError if the request is not successful - if pr_comment is None: - resp = client.create_comment( - repo=repo.name, - issue_id=str(pr_key), - data=( - { - "body": comment_body, - "actions": github_copilot_actions, - } - if github_copilot_actions - else {"body": comment_body} - ), - ) + interaction_type = ( + SCMIntegrationInteractionType.CREATE_COMMENT + if not pr_comment + else SCMIntegrationInteractionType.UPDATE_COMMENT + ) - current_time = django_timezone.now() - comment = PullRequestComment.objects.create( - external_id=resp.body["id"], - pull_request_id=pullrequest_id, - created_at=current_time, - updated_at=current_time, - group_ids=issue_list, - comment_type=comment_type, - ) - metrics.incr( - metrics_base.format(integration=self.integration_name, key="comment_created") - ) + with CommitContextIntegrationInteractionEvent( + interaction_type=interaction_type, + provider_key=self.integration_name, + repository=repo, + pull_request_id=pullrequest_id, + ).capture(): + if pr_comment is None: + resp = client.create_comment( + repo=repo.name, + issue_id=str(pr_key), + data=( + { + "body": comment_body, + "actions": github_copilot_actions, + } + if github_copilot_actions + else {"body": comment_body} + ), + ) - if comment_type == CommentType.OPEN_PR: - analytics.record( - "open_pr_comment.created", - comment_id=comment.id, - org_id=repo.organization_id, - pr_id=pullrequest_id, - language=(language or "not found"), + current_time = django_timezone.now() + comment = PullRequestComment.objects.create( + external_id=resp.body["id"], + pull_request_id=pullrequest_id, + created_at=current_time, + updated_at=current_time, + group_ids=issue_list, + comment_type=comment_type, ) - else: - resp = client.update_comment( - repo=repo.name, - issue_id=str(pr_key), - comment_id=pr_comment.external_id, - data=( - { - "body": comment_body, - "actions": github_copilot_actions, - } - if github_copilot_actions - else {"body": comment_body} - ), + metrics.incr( + metrics_base.format(integration=self.integration_name, key="comment_created") + ) + + if comment_type == CommentType.OPEN_PR: + analytics.record( + "open_pr_comment.created", + comment_id=comment.id, + org_id=repo.organization_id, + pr_id=pullrequest_id, + language=(language or "not found"), + ) + else: + resp = client.update_comment( + repo=repo.name, + issue_id=str(pr_key), + comment_id=pr_comment.external_id, + data=( + { + "body": comment_body, + "actions": github_copilot_actions, + } + if github_copilot_actions + else {"body": comment_body} + ), + ) + metrics.incr( + metrics_base.format(integration=self.integration_name, key="comment_updated") + ) + pr_comment.updated_at = django_timezone.now() + pr_comment.group_ids = issue_list + pr_comment.save() + + logger_event = metrics_base.format( + integration=self.integration_name, key="create_or_update_comment" ) - metrics.incr( - metrics_base.format(integration=self.integration_name, key="comment_updated") + logger.info( + logger_event, + extra={"new_comment": pr_comment is None, "pr_key": pr_key, "repo": repo.name}, ) - pr_comment.updated_at = django_timezone.now() - pr_comment.group_ids = issue_list - pr_comment.save() - - logger_event = metrics_base.format( - integration=self.integration_name, key="create_or_update_comment" - ) - logger.info( - logger_event, - extra={"new_comment": pr_comment is None, "pr_key": pr_key, "repo": repo.name}, - ) class 
CommitContextClient(ABC): diff --git a/src/sentry/integrations/source_code_management/metrics.py b/src/sentry/integrations/source_code_management/metrics.py index c5b821347c5de6..054390dd954e0a 100644 --- a/src/sentry/integrations/source_code_management/metrics.py +++ b/src/sentry/integrations/source_code_management/metrics.py @@ -1,5 +1,5 @@ from collections.abc import Mapping -from enum import Enum, StrEnum +from enum import StrEnum from typing import Any from attr import dataclass @@ -8,11 +8,14 @@ from sentry.integrations.models.organization_integration import OrganizationIntegration from sentry.integrations.services.integration import RpcOrganizationIntegration from sentry.integrations.utils.metrics import IntegrationEventLifecycleMetric +from sentry.models.commit import Commit from sentry.models.organization import Organization +from sentry.models.project import Project +from sentry.models.repository import Repository from sentry.organizations.services.organization import RpcOrganization -class SCMIntegrationInteractionType(Enum): +class SCMIntegrationInteractionType(StrEnum): """ SCM integration features """ @@ -25,21 +28,28 @@ class SCMIntegrationInteractionType(Enum): # SourceCodeIssueIntegration (SCM only) GET_REPOSITORY_CHOICES = "GET_REPOSITORY_CHOICES" + # SourceCodeSearchEndpoint + HANDLE_SEARCH_ISSUES = "HANDLE_SEARCH_ISSUES" + HANDLE_SEARCH_REPOSITORIES = "HANDLE_SEARCH_REPOSITORIES" + GET = "GET" + # CommitContextIntegration + GET_BLAME_FOR_FILES = "GET_BLAME_FOR_FILES" CREATE_COMMENT = "CREATE_COMMENT" UPDATE_COMMENT = "UPDATE_COMMENT" + QUEUE_COMMENT_TASK = "QUEUE_COMMENT_TASK" # Tasks LINK_ALL_REPOS = "LINK_ALL_REPOS" - def __str__(self) -> str: - return self.value.lower() + # GitHub only + DERIVE_CODEMAPPINGS = "DERIVE_CODEMAPPINGS" @dataclass class SCMIntegrationInteractionEvent(IntegrationEventLifecycleMetric): """ - An instance to be recorded of a RepositoryIntegration feature call. + An instance to be recorded of an SCM integration feature call. """ interaction_type: SCMIntegrationInteractionType @@ -65,10 +75,58 @@ def get_extras(self) -> Mapping[str, Any]: } +@dataclass +class CommitContextIntegrationInteractionEvent(SCMIntegrationInteractionEvent): + """ + An instance to be recorded of a CommitContextIntegration feature call. + """ + + project: Project | None = None + commit: Commit | None = None + repository: Repository | None = None + pull_request_id: int | None = None + + def get_extras(self) -> Mapping[str, Any]: + parent_extras = super().get_extras() + return { + **parent_extras, + "project_id": (self.project.id if self.project else None), + "commit_id": (self.commit.id if self.commit else None), + "repository_id": (self.repository.id if self.repository else None), + "pull_request_id": self.pull_request_id, + } + + +class CommitContextHaltReason(StrEnum): + """Common reasons why a commit context integration may halt without success/failure.""" + + PR_BOT_DISABLED = "pr_bot_disabled" + INCORRECT_REPO_CONFIG = "incorrect_repo_config" + COMMIT_NOT_IN_DEFAULT_BRANCH = "commit_not_in_default_branch" + MISSING_PR = "missing_pr" + ALREADY_QUEUED = "already_queued" + + class LinkAllReposHaltReason(StrEnum): - """Common reasons why a link all repos task may halt without success/failure.""" + """ + Common reasons why a link all repos task may halt without success/failure. 
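One behavioural consequence of switching SCMIntegrationInteractionType from Enum (with a lowercase __str__) to StrEnum, shown with a self-contained snippet (Python 3.11+; the class names here are illustrative, not from the codebase): str() now returns the member value verbatim, so anything that previously relied on the lowercase form now sees the uppercase value. Whether any metric or log keys depended on the lowercase form is not shown in this diff.

from enum import Enum, StrEnum

class Old(Enum):
    CREATE_COMMENT = "CREATE_COMMENT"
    def __str__(self) -> str:
        return self.value.lower()

class New(StrEnum):
    CREATE_COMMENT = "CREATE_COMMENT"

assert str(Old.CREATE_COMMENT) == "create_comment"
assert str(New.CREATE_COMMENT) == "CREATE_COMMENT"
assert New.CREATE_COMMENT == "CREATE_COMMENT"   # StrEnum members are also plain strings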
+ """ MISSING_INTEGRATION = "missing_integration" MISSING_ORGANIZATION = "missing_organization" RATE_LIMITED = "rate_limited" REPOSITORY_NOT_CREATED = "repository_not_created" + + +class SourceCodeSearchEndpointHaltReason(StrEnum): + """ + Reasons why a SourceCodeSearchEndpoint method (handle_search_issues, + handle_search_repositories, or get) may halt without success/failure. + """ + + NO_ISSUE_TRACKER = "no_issue_tracker" + RATE_LIMITED = "rate_limited" + MISSING_REPOSITORY_OR_NO_ACCESS = "missing_repository_or_no_access" + MISSING_INTEGRATION = "missing_integration" + SERIALIZER_ERRORS = "serializer_errors" + MISSING_REPOSITORY_FIELD = "missing_repository_field" diff --git a/src/sentry/integrations/source_code_management/search.py b/src/sentry/integrations/source_code_management/search.py index d6619aaa287291..c3d90350538c95 100644 --- a/src/sentry/integrations/source_code_management/search.py +++ b/src/sentry/integrations/source_code_management/search.py @@ -13,6 +13,11 @@ from sentry.integrations.api.bases.integration import IntegrationEndpoint from sentry.integrations.models.integration import Integration from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from sentry.integrations.source_code_management.metrics import ( + SCMIntegrationInteractionEvent, + SCMIntegrationInteractionType, + SourceCodeSearchEndpointHaltReason, +) from sentry.organizations.services.organization import RpcOrganization T = TypeVar("T", bound=SourceCodeIssueIntegration) @@ -55,6 +60,15 @@ def installation_class( def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: raise NotImplementedError + def record_event(self, event: SCMIntegrationInteractionType): + # XXX (mifu67): self.integration_provider is None for the GithubSharedSearchEndpoint, + # which is used by both GitHub and GitHub Enterprise. 
+ provider_name = "github" if self.integration_provider is None else self.integration_provider + return SCMIntegrationInteractionEvent( + interaction_type=event, + provider_key=provider_name, + ) + # not used in VSTS def handle_search_repositories( self, integration: Integration, installation: T, query: str @@ -64,41 +78,50 @@ def handle_search_repositories( def get( self, request: Request, organization: RpcOrganization, integration_id: int, **kwds: Any ) -> Response: - integration_query = Q( - organizationintegration__organization_id=organization.id, id=integration_id - ) - - if self.integration_provider: - integration_query &= Q(provider=self.integration_provider) - try: - integration: Integration = Integration.objects.get(integration_query) - except Integration.DoesNotExist: - return Response(status=404) - - serializer = SourceCodeSearchSerializer(data=request.query_params) - if not serializer.is_valid(): - return self.respond(serializer.errors, status=400) - - field = serializer.validated_data["field"] - query = serializer.validated_data["query"] - - installation = integration.get_installation(organization.id) - if not isinstance(installation, self.installation_class): - raise NotFound(f"Integration by that id is not of type {self.integration_provider}.") - - if field == self.issue_field: - repo = None - - if self.repository_field: # only fetch repository - repo = request.GET.get(self.repository_field) - if repo is None: - return Response( - {"detail": f"{self.repository_field} is a required parameter"}, status=400 - ) - - return self.handle_search_issues(installation, query, repo) - - if self.repository_field and field == self.repository_field: - return self.handle_search_repositories(integration, installation, query) - - return Response({"detail": "Invalid field"}, status=400) + with self.record_event(SCMIntegrationInteractionType.GET).capture() as lifecycle: + integration_query = Q( + organizationintegration__organization_id=organization.id, id=integration_id + ) + + if self.integration_provider: + integration_query &= Q(provider=self.integration_provider) + try: + integration: Integration = Integration.objects.get(integration_query) + except Integration.DoesNotExist: + lifecycle.record_halt(str(SourceCodeSearchEndpointHaltReason.MISSING_INTEGRATION)) + return Response(status=404) + + serializer = SourceCodeSearchSerializer(data=request.query_params) + if not serializer.is_valid(): + lifecycle.record_halt(str(SourceCodeSearchEndpointHaltReason.SERIALIZER_ERRORS)) + return self.respond(serializer.errors, status=400) + + field = serializer.validated_data["field"] + query = serializer.validated_data["query"] + + installation = integration.get_installation(organization.id) + if not isinstance(installation, self.installation_class): + raise NotFound( + f"Integration by that id is not of type {self.integration_provider}." 
+ ) + + if field == self.issue_field: + repo = None + + if self.repository_field: # only fetch repository + repo = request.GET.get(self.repository_field) + if repo is None: + lifecycle.record_halt( + str(SourceCodeSearchEndpointHaltReason.MISSING_REPOSITORY_FIELD) + ) + return Response( + {"detail": f"{self.repository_field} is a required parameter"}, + status=400, + ) + + return self.handle_search_issues(installation, query, repo) + + if self.repository_field and field == self.repository_field: + return self.handle_search_repositories(integration, installation, query) + + return Response({"detail": "Invalid field"}, status=400) diff --git a/src/sentry/integrations/tasks/sync_assignee_outbound.py b/src/sentry/integrations/tasks/sync_assignee_outbound.py index 78b24fe9273a28..749113771cbd15 100644 --- a/src/sentry/integrations/tasks/sync_assignee_outbound.py +++ b/src/sentry/integrations/tasks/sync_assignee_outbound.py @@ -4,6 +4,10 @@ from sentry.constants import ObjectStatus from sentry.integrations.models.external_issue import ExternalIssue from sentry.integrations.models.integration import Integration +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, +) from sentry.integrations.services.assignment_source import AssignmentSource from sentry.integrations.services.integration import integration_service from sentry.models.organization import Organization @@ -48,23 +52,28 @@ def sync_assignee_outbound( return installation = integration.get_installation(organization_id=external_issue.organization_id) - if not ( - hasattr(installation, "should_sync") and hasattr(installation, "sync_assignee_outbound") - ): - return - parsed_assignment_source = ( - AssignmentSource.from_dict(assignment_source_dict) if assignment_source_dict else None - ) - if installation.should_sync("outbound_assignee", parsed_assignment_source): - # Assume unassign if None. - user = user_service.get_user(user_id) if user_id else None - installation.sync_assignee_outbound( - external_issue, user, assign=assign, assignment_source=parsed_assignment_source - ) - analytics.record( - "integration.issue.assignee.synced", - provider=integration.provider, - id=integration.id, - organization_id=external_issue.organization_id, + with ProjectManagementEvent( + action_type=ProjectManagementActionType.OUTBOUND_ASSIGNMENT_SYNC, integration=integration + ).capture() as lifecycle: + lifecycle.add_extra("sync_task", "sync_assignee_outbound") + if not ( + hasattr(installation, "should_sync") and hasattr(installation, "sync_assignee_outbound") + ): + return + + parsed_assignment_source = ( + AssignmentSource.from_dict(assignment_source_dict) if assignment_source_dict else None ) + if installation.should_sync("outbound_assignee", parsed_assignment_source): + # Assume unassign if None. 
+ user = user_service.get_user(user_id) if user_id else None + installation.sync_assignee_outbound( + external_issue, user, assign=assign, assignment_source=parsed_assignment_source + ) + analytics.record( + "integration.issue.assignee.synced", + provider=integration.provider, + id=integration.id, + organization_id=external_issue.organization_id, + ) diff --git a/src/sentry/integrations/tasks/sync_status_outbound.py b/src/sentry/integrations/tasks/sync_status_outbound.py index 7aa1fb3afcc9d9..cf6dfe157a1ad3 100644 --- a/src/sentry/integrations/tasks/sync_status_outbound.py +++ b/src/sentry/integrations/tasks/sync_status_outbound.py @@ -2,6 +2,10 @@ from sentry.constants import ObjectStatus from sentry.integrations.models.external_issue import ExternalIssue from sentry.integrations.models.integration import Integration +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, +) from sentry.integrations.services.integration import integration_service from sentry.models.group import Group, GroupStatus from sentry.silo.base import SiloMode @@ -43,14 +47,19 @@ def sync_status_outbound(group_id: int, external_issue_id: int) -> bool | None: installation = integration.get_installation(organization_id=external_issue.organization_id) if not (hasattr(installation, "should_sync") and hasattr(installation, "sync_status_outbound")): return None - if installation.should_sync("outbound_status"): - installation.sync_status_outbound( - external_issue, group.status == GroupStatus.RESOLVED, group.project_id - ) - analytics.record( - "integration.issue.status.synced", - provider=integration.provider, - id=integration.id, - organization_id=external_issue.organization_id, - ) + + with ProjectManagementEvent( + action_type=ProjectManagementActionType.OUTBOUND_STATUS_SYNC, integration=integration + ).capture() as lifecycle: + lifecycle.add_extra("sync_task", "sync_status_outbound") + if installation.should_sync("outbound_status"): + installation.sync_status_outbound( + external_issue, group.status == GroupStatus.RESOLVED, group.project_id + ) + analytics.record( + "integration.issue.status.synced", + provider=integration.provider, + id=integration.id, + organization_id=external_issue.organization_id, + ) return None diff --git a/src/sentry/integrations/utils/metrics.py b/src/sentry/integrations/utils/metrics.py index 0b4962c61a77a4..4977ee7d744594 100644 --- a/src/sentry/integrations/utils/metrics.py +++ b/src/sentry/integrations/utils/metrics.py @@ -113,6 +113,10 @@ def add_extra(self, name: str, value: Any) -> None: """ self._extra[name] = value + def add_extras(self, extras: Mapping[str, int | str]) -> None: + """Add multiple values to logged "extra" data.""" + self._extra.update(extras) + def record_event( self, outcome: EventLifecycleOutcome, outcome_reason: BaseException | str | None = None ) -> None: diff --git a/src/sentry/integrations/utils/sync.py b/src/sentry/integrations/utils/sync.py index a672dc4daee464..86bb58330748ae 100644 --- a/src/sentry/integrations/utils/sync.py +++ b/src/sentry/integrations/utils/sync.py @@ -6,6 +6,11 @@ from sentry import features from sentry.integrations.models.integration import Integration +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, + ProjectManagementHaltReason, +) from sentry.integrations.services.assignment_source import AssignmentSource from sentry.integrations.services.integration import integration_service from 
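A small usage sketch for the add_extras() helper added to the lifecycle metric utilities above. Since its body is self._extra.update(extras), the batched form should be equivalent to repeated add_extra() calls; lifecycle and integration below stand for the objects already in scope in the surrounding hunks.

# equivalent ways to attach logged "extra" data to a lifecycle, per the helper above
lifecycle.add_extra("sync_task", "sync_assignee_outbound")
lifecycle.add_extra("integration_id", integration.id)

lifecycle.add_extras(
    {"sync_task": "sync_assignee_outbound", "integration_id": integration.id}
)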
sentry.integrations.tasks.sync_assignee_outbound import sync_assignee_outbound @@ -77,48 +82,54 @@ def sync_group_assignee_inbound( logger = logging.getLogger(f"sentry.integrations.{integration.provider}") - orgs_with_sync_enabled = where_should_sync(integration, "inbound_assignee") - affected_groups = Group.objects.get_groups_by_external_issue( - integration, - orgs_with_sync_enabled, - external_issue_key, - ) - log_context = { - "integration_id": integration.id, - "email": email, - "issue_key": external_issue_key, - } - if not affected_groups: - logger.info("no-affected-groups", extra=log_context) - return [] - - if not assign: + with ProjectManagementEvent( + action_type=ProjectManagementActionType.INBOUND_ASSIGNMENT_SYNC, integration=integration + ).capture() as lifecycle: + orgs_with_sync_enabled = where_should_sync(integration, "inbound_assignee") + affected_groups = Group.objects.get_groups_by_external_issue( + integration, + orgs_with_sync_enabled, + external_issue_key, + ) + log_context = { + "integration_id": integration.id, + "email": email, + "issue_key": external_issue_key, + } + if not affected_groups: + logger.info("no-affected-groups", extra=log_context) + return [] + + if not assign: + for group in affected_groups: + GroupAssignee.objects.deassign( + group, + assignment_source=AssignmentSource.from_integration(integration), + ) + + return affected_groups + + users = user_service.get_many_by_email(emails=[email], is_verified=True) + users_by_id = {user.id: user for user in users} + projects_by_user = Project.objects.get_by_users(users) + + groups_assigned = [] for group in affected_groups: - GroupAssignee.objects.deassign( - group, - assignment_source=AssignmentSource.from_integration(integration), - ) - - return affected_groups - - users = user_service.get_many_by_email(emails=[email], is_verified=True) - users_by_id = {user.id: user for user in users} - projects_by_user = Project.objects.get_by_users(users) - - groups_assigned = [] - for group in affected_groups: - user_id = get_user_id(projects_by_user, group) - user = users_by_id.get(user_id) - if user: - GroupAssignee.objects.assign( - group, - user, - assignment_source=AssignmentSource.from_integration(integration), - ) - groups_assigned.append(group) - else: - logger.info("assignee-not-found-inbound", extra=log_context) - return groups_assigned + user_id = get_user_id(projects_by_user, group) + user = users_by_id.get(user_id) + if user: + GroupAssignee.objects.assign( + group, + user, + assignment_source=AssignmentSource.from_integration(integration), + ) + groups_assigned.append(group) + else: + lifecycle.record_halt( + ProjectManagementHaltReason.SYNC_INBOUND_ASSIGNEE_NOT_FOUND, extra=log_context + ) + logger.info("inbound-assignee-not-found", extra=log_context) + return groups_assigned def sync_group_assignee_outbound( diff --git a/src/sentry/integrations/vsts/integration.py b/src/sentry/integrations/vsts/integration.py index b6b84563f5921b..8db5fe37936af1 100644 --- a/src/sentry/integrations/vsts/integration.py +++ b/src/sentry/integrations/vsts/integration.py @@ -20,6 +20,7 @@ from sentry.identity.vsts.provider import get_user_info from sentry.integrations.base import ( FeatureDescription, + IntegrationDomain, IntegrationFeatures, IntegrationMetadata, IntegrationProvider, @@ -31,6 +32,10 @@ from sentry.integrations.services.repository import RpcRepository, repository_service from sentry.integrations.source_code_management.repository import RepositoryIntegration from sentry.integrations.tasks.migrate_repo import 
migrate_repo +from sentry.integrations.utils.metrics import ( + IntegrationPipelineViewEvent, + IntegrationPipelineViewType, +) from sentry.integrations.vsts.issues import VstsIssuesSpec from sentry.models.apitoken import generate_token from sentry.models.repository import Repository @@ -631,43 +636,46 @@ def setup(self) -> None: class AccountConfigView(PipelineView): def dispatch(self, request: HttpRequest, pipeline: Pipeline) -> HttpResponseBase: - account_id = request.POST.get("account") - if account_id is not None: - state_accounts: Sequence[Mapping[str, Any]] | None = pipeline.fetch_state( - key="accounts" - ) - account = self.get_account_from_id(account_id, state_accounts or []) - if account is not None: - pipeline.bind_state("account", account) - return pipeline.next_step() - - state: Mapping[str, Any] | None = pipeline.fetch_state(key="identity") - access_token = (state or {}).get("data", {}).get("access_token") - user = get_user_info(access_token) - - accounts = self.get_accounts(access_token, user["uuid"]) - logger.info( - "vsts.get_accounts", - extra={ + with IntegrationPipelineViewEvent( + IntegrationPipelineViewType.ACCOUNT_CONFIG, + IntegrationDomain.SOURCE_CODE_MANAGEMENT, + VstsIntegrationProvider.key, + ).capture() as lifecycle: + account_id = request.POST.get("account") + if account_id is not None: + state_accounts: Sequence[Mapping[str, Any]] | None = pipeline.fetch_state( + key="accounts" + ) + account = self.get_account_from_id(account_id, state_accounts or []) + if account is not None: + pipeline.bind_state("account", account) + return pipeline.next_step() + + state: Mapping[str, Any] | None = pipeline.fetch_state(key="identity") + access_token = (state or {}).get("data", {}).get("access_token") + user = get_user_info(access_token) + + accounts = self.get_accounts(access_token, user["uuid"]) + extra = { "organization_id": pipeline.organization.id if pipeline.organization else None, "user_id": request.user.id, "accounts": accounts, - }, - ) - if not accounts or not accounts.get("value"): + } + if not accounts or not accounts.get("value"): + lifecycle.record_failure("no_accounts", extra=extra) + return render_to_response( + template="sentry/integrations/vsts-config.html", + context={"no_accounts": True}, + request=request, + ) + accounts = accounts["value"] + pipeline.bind_state("accounts", accounts) + account_form = AccountForm(accounts) return render_to_response( template="sentry/integrations/vsts-config.html", - context={"no_accounts": True}, + context={"form": account_form, "no_accounts": False}, request=request, ) - accounts = accounts["value"] - pipeline.bind_state("accounts", accounts) - account_form = AccountForm(accounts) - return render_to_response( - template="sentry/integrations/vsts-config.html", - context={"form": account_form, "no_accounts": False}, - request=request, - ) def get_account_from_id( self, account_id: int, accounts: Sequence[Mapping[str, Any]] diff --git a/src/sentry/integrations/vsts/search.py b/src/sentry/integrations/vsts/search.py index dfad6424b2ad7b..05b56eff862294 100644 --- a/src/sentry/integrations/vsts/search.py +++ b/src/sentry/integrations/vsts/search.py @@ -4,6 +4,7 @@ from sentry.api.base import control_silo_endpoint from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from sentry.integrations.source_code_management.metrics import SCMIntegrationInteractionType from sentry.integrations.source_code_management.search import SourceCodeSearchEndpoint from sentry.integrations.vsts.integration 
import VstsIntegration @@ -21,17 +22,18 @@ def installation_class(self): return VstsIntegration def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: - if not query: - return Response([]) - - assert isinstance(installation, self.installation_class) - resp = installation.search_issues(query=query) - return Response( - [ - { - "label": f'({i["fields"]["system.id"]}) {i["fields"]["system.title"]}', - "value": i["fields"]["system.id"], - } - for i in resp.get("results", []) - ] - ) + with self.record_event(SCMIntegrationInteractionType.HANDLE_SEARCH_ISSUES).capture(): + if not query: + return Response([]) + + assert isinstance(installation, self.installation_class) + resp = installation.search_issues(query=query) + return Response( + [ + { + "label": f'({i["fields"]["system.id"]}) {i["fields"]["system.title"]}', + "value": i["fields"]["system.id"], + } + for i in resp.get("results", []) + ] + ) diff --git a/src/sentry/integrations/vsts/webhooks.py b/src/sentry/integrations/vsts/webhooks.py index d0d6ea877fd48f..23cc73eaf18cef 100644 --- a/src/sentry/integrations/vsts/webhooks.py +++ b/src/sentry/integrations/vsts/webhooks.py @@ -14,6 +14,11 @@ from sentry.api.base import Endpoint, region_silo_endpoint from sentry.constants import ObjectStatus from sentry.integrations.mixins.issues import IssueSyncIntegration +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, + ProjectManagementHaltReason, +) from sentry.integrations.services.integration import integration_service from sentry.integrations.utils.sync import sync_group_assignee_inbound from sentry.utils.email import parse_email @@ -122,31 +127,48 @@ def handle_assign_to( ) +# TODO(Gabe): Consolidate this with Jira's implementation, create DTO for status +# changes. 
def handle_status_change( integration: RpcIntegration, external_issue_key: str, status_change: Mapping[str, str] | None, project: str | None, ) -> None: - if status_change is None: - return - - org_integrations = integration_service.get_organization_integrations( - integration_id=integration.id - ) + with ProjectManagementEvent( + action_type=ProjectManagementActionType.INBOUND_STATUS_SYNC, integration=integration + ).capture() as lifecycle: + if status_change is None: + return + + org_integrations = integration_service.get_organization_integrations( + integration_id=integration.id + ) - for org_integration in org_integrations: - installation = integration.get_installation(organization_id=org_integration.organization_id) - if isinstance(installation, IssueSyncIntegration): - installation.sync_status_inbound( - external_issue_key, - { - "new_state": status_change["newValue"], - # old_state is None when the issue is New - "old_state": status_change.get("oldValue"), - "project": project, - }, + logging_context = { + "org_integration_ids": [oi.id for oi in org_integrations], + "integration_id": integration.id, + "status_change": status_change, + } + for org_integration in org_integrations: + installation = integration.get_installation( + organization_id=org_integration.organization_id ) + if isinstance(installation, IssueSyncIntegration): + installation.sync_status_inbound( + external_issue_key, + { + "new_state": status_change["newValue"], + # old_state is None when the issue is New + "old_state": status_change.get("oldValue"), + "project": project, + }, + ) + else: + lifecycle.record_halt( + ProjectManagementHaltReason.SYNC_NON_SYNC_INTEGRATION_PROVIDED, + extra=logging_context, + ) def handle_updated_workitem(data: Mapping[str, Any], integration: RpcIntegration) -> None: diff --git a/src/sentry/issues/endpoints/group_hashes.py b/src/sentry/issues/endpoints/group_hashes.py index 696a304bdf4dfd..73c5104ec47d8d 100644 --- a/src/sentry/issues/endpoints/group_hashes.py +++ b/src/sentry/issues/endpoints/group_hashes.py @@ -8,7 +8,7 @@ from sentry.api.base import region_silo_endpoint from sentry.api.bases import GroupEndpoint from sentry.api.paginator import GenericOffsetPaginator -from sentry.api.serializers import EventSerializer, serialize +from sentry.api.serializers import EventSerializer, SimpleEventSerializer, serialize from sentry.models.grouphash import GroupHash from sentry.tasks.unmerge import unmerge from sentry.utils import metrics @@ -31,8 +31,10 @@ def get(self, request: Request, group) -> Response: checksums used to aggregate individual events. :pparam string issue_id: the ID of the issue to retrieve. + :pparam bool full: If this is set to true, the event payload will include the full event body, including the stacktrace. 
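A hedged usage sketch for the new full flag on the group hashes endpoint. The host, issue id, and URL path are assumptions for illustration only; also note that request.GET returns raw strings, so with the code as written any non-empty value (including "false") is truthy, and only an empty value falls through to the lighter SimpleEventSerializer payload.

import requests  # illustrative HTTP client; not part of the change above

resp = requests.get(
    "https://sentry.example.com/api/0/issues/12345/hashes/",  # hypothetical path and issue id
    params={"full": ""},  # empty value is falsy here, selecting SimpleEventSerializer
    headers={"Authorization": "Bearer <auth-token>"},
)
for grouphash in resp.json():
    # each entry carries the primary hash and the serialized latest event, per the handler above
    print(grouphash["id"], sorted(grouphash["latestEvent"]))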
:auth: required """ + full = request.GET.get("full", True) data_fn = partial( lambda *args, **kwargs: raw_query(*args, **kwargs)["data"], @@ -47,7 +49,9 @@ def get(self, request: Request, group) -> Response: tenant_ids={"organization_id": group.project.organization_id}, ) - handle_results = partial(self.__handle_results, group.project_id, group.id, request.user) + handle_results = partial( + self.__handle_results, group.project_id, group.id, request.user, full + ) return self.paginate( request=request, @@ -90,13 +94,16 @@ def put(self, request: Request, group) -> Response: return Response(status=202) - def __handle_results(self, project_id, group_id, user, results): - return [self.__handle_result(user, project_id, group_id, result) for result in results] + def __handle_results(self, project_id, group_id, user, full, results): + return [ + self.__handle_result(user, project_id, group_id, full, result) for result in results + ] - def __handle_result(self, user, project_id, group_id, result): + def __handle_result(self, user, project_id, group_id, full, result): event = eventstore.backend.get_event_by_id(project_id, result["event_id"]) + serializer = EventSerializer if full else SimpleEventSerializer return { "id": result["primary_hash"], - "latestEvent": serialize(event, user, EventSerializer()), + "latestEvent": serialize(event, user, serializer()), } diff --git a/src/sentry/issues/endpoints/group_similar_issues_embeddings.py b/src/sentry/issues/endpoints/group_similar_issues_embeddings.py index 55cdda74aa4f19..6d5e17476dff92 100644 --- a/src/sentry/issues/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/issues/endpoints/group_similar_issues_embeddings.py @@ -18,6 +18,7 @@ from sentry.seer.similarity.similar_issues import get_similarity_data_from_seer from sentry.seer.similarity.types import SeerSimilarIssueData, SimilarIssuesEmbeddingsRequest from sentry.seer.similarity.utils import ( + TooManyOnlySystemFramesException, event_content_has_stacktrace, get_stacktrace_string, killswitch_enabled, @@ -82,9 +83,14 @@ def get(self, request: Request, group) -> Response: stacktrace_string = "" if latest_event and event_content_has_stacktrace(latest_event): grouping_info = get_grouping_info(None, project=group.project, event=latest_event) - stacktrace_string = get_stacktrace_string(grouping_info) - - if stacktrace_string == "" or not latest_event: + try: + stacktrace_string = get_stacktrace_string( + grouping_info, platform=latest_event.platform + ) + except TooManyOnlySystemFramesException: + stacktrace_string = "" + + if not stacktrace_string or not latest_event: return Response([]) # No exception, stacktrace or in-app frames, or event similar_issues_params: SimilarIssuesEmbeddingsRequest = { diff --git a/src/sentry/issues/status_change.py b/src/sentry/issues/status_change.py index cfda68a6ef8384..94f2a614da7ce1 100644 --- a/src/sentry/issues/status_change.py +++ b/src/sentry/issues/status_change.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from collections import defaultdict, namedtuple +from collections import defaultdict from collections.abc import Sequence from datetime import datetime, timedelta, timezone from typing import Any @@ -25,7 +25,6 @@ from sentry.utils import json logger = logging.getLogger(__name__) -ActivityInfo = namedtuple("ActivityInfo", ("activity_type", "activity_data")) def infer_substatus( @@ -74,12 +73,10 @@ def handle_status_update( status_details: dict[str, Any], acting_user: User | None, sender: Any, -) -> ActivityInfo: +) -> None: 
""" Update the status for a list of groups and create entries for Activity and GroupHistory. This currently handles unresolving or ignoring groups. - - Returns a tuple of (activity_type, activity_data) for the activity that was created. """ activity_data = {} activity_type = ( @@ -173,5 +170,3 @@ def handle_status_update( created=False, update_fields=["status", "substatus"], ) - - return ActivityInfo(activity_type, activity_data) diff --git a/src/sentry/management/commands/makemigrations.py b/src/sentry/management/commands/makemigrations.py index 500e35b5e7ceff..e701094a4efea2 100644 --- a/src/sentry/management/commands/makemigrations.py +++ b/src/sentry/management/commands/makemigrations.py @@ -71,7 +71,7 @@ def handle(self, *app_labels, **options): if options.get("check_changes"): validate(migrations_filepath, latest_migration_by_app) else: - result = "\n".join( + result = "\n\n".join( f"{app_label}: {name}" for app_label, name in sorted(latest_migration_by_app.items()) ) diff --git a/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py b/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py index 5592f4a25a27a5..16ef15658ea34a 100644 --- a/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py +++ b/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py @@ -54,6 +54,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + replaces = [ ("sentry", "0001_squashed_0200_release_indices"), ("sentry", "0201_semver_package"), diff --git a/src/sentry/migrations/0490_add_is_test_to_org.py b/src/sentry/migrations/0490_add_is_test_to_org.py index 5ea600ad4cf024..398394ba1409cb 100644 --- a/src/sentry/migrations/0490_add_is_test_to_org.py +++ b/src/sentry/migrations/0490_add_is_test_to_org.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0489_index_checkin_timeout"), ] diff --git a/src/sentry/migrations/0491_remove_orgmemmap_unique_constraints.py b/src/sentry/migrations/0491_remove_orgmemmap_unique_constraints.py index a239fb98237803..4e30ad2812ad4c 100644 --- a/src/sentry/migrations/0491_remove_orgmemmap_unique_constraints.py +++ b/src/sentry/migrations/0491_remove_orgmemmap_unique_constraints.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0490_add_is_test_to_org"), ] diff --git a/src/sentry/migrations/0505_debugfile_date_accessed.py b/src/sentry/migrations/0505_debugfile_date_accessed.py index c39b0755a95d94..ff60d15fe6bbad 100644 --- a/src/sentry/migrations/0505_debugfile_date_accessed.py +++ b/src/sentry/migrations/0505_debugfile_date_accessed.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0504_add_artifact_bundle_index"), ] diff --git a/src/sentry/migrations/0526_pr_comment_type_column.py b/src/sentry/migrations/0526_pr_comment_type_column.py index 3bfa0f8575e774..1791094fdecd7d 100644 --- a/src/sentry/migrations/0526_pr_comment_type_column.py +++ b/src/sentry/migrations/0526_pr_comment_type_column.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0525_add_next_checkin_latest"), ] diff --git a/src/sentry/migrations/0535_add_created_date_to_outbox_model.py b/src/sentry/migrations/0535_add_created_date_to_outbox_model.py index 179daa393d1816..abff5766528167 100644 --- a/src/sentry/migrations/0535_add_created_date_to_outbox_model.py +++ b/src/sentry/migrations/0535_add_created_date_to_outbox_model.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0534_add_notification_uuid_to_rule_fire_history"), ] diff --git a/src/sentry/migrations/0544_remove_groupsubscription_columns.py b/src/sentry/migrations/0544_remove_groupsubscription_columns.py index 10f92624209879..bc6ad90444e2d2 100644 --- a/src/sentry/migrations/0544_remove_groupsubscription_columns.py +++ b/src/sentry/migrations/0544_remove_groupsubscription_columns.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0543_add_team_id_to_groupsubscription"), ] diff --git a/src/sentry/migrations/0545_add_last_verified_auth_ident_replica.py b/src/sentry/migrations/0545_add_last_verified_auth_ident_replica.py index b24ed739818100..71a4ebafc20939 100644 --- a/src/sentry/migrations/0545_add_last_verified_auth_ident_replica.py +++ b/src/sentry/migrations/0545_add_last_verified_auth_ident_replica.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0544_remove_groupsubscription_columns"), ] diff --git a/src/sentry/migrations/0548_add_is_unclaimed_boolean_to_user.py b/src/sentry/migrations/0548_add_is_unclaimed_boolean_to_user.py index 324603794f7859..6e2ae095940622 100644 --- a/src/sentry/migrations/0548_add_is_unclaimed_boolean_to_user.py +++ b/src/sentry/migrations/0548_add_is_unclaimed_boolean_to_user.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0547_add_commitfilechange_language_column"), ] diff --git a/src/sentry/migrations/0549_re_add_groupsubscription_columns.py b/src/sentry/migrations/0549_re_add_groupsubscription_columns.py index f124f9337c4f8f..4468a981c603bf 100644 --- a/src/sentry/migrations/0549_re_add_groupsubscription_columns.py +++ b/src/sentry/migrations/0549_re_add_groupsubscription_columns.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = True + allow_run_sql = True + dependencies = [ ("sentry", "0548_add_is_unclaimed_boolean_to_user"), ] diff --git a/src/sentry/migrations/0556_organizationmapping_replicate_require_2fa.py b/src/sentry/migrations/0556_organizationmapping_replicate_require_2fa.py index e249bd0d688fd4..3185e6373b831d 100644 --- a/src/sentry/migrations/0556_organizationmapping_replicate_require_2fa.py +++ b/src/sentry/migrations/0556_organizationmapping_replicate_require_2fa.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0555_set_neglectedrule_email_date_columns_nullable"), ] diff --git a/src/sentry/migrations/0564_commitfilechange_delete_language_column.py b/src/sentry/migrations/0564_commitfilechange_delete_language_column.py index c29268441b68d4..9a5cc11a0d9ee6 100644 --- a/src/sentry/migrations/0564_commitfilechange_delete_language_column.py +++ b/src/sentry/migrations/0564_commitfilechange_delete_language_column.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0563_commitfilechange_drop_language_column"), ] diff --git a/src/sentry/migrations/0570_repository_add_languages_column.py b/src/sentry/migrations/0570_repository_add_languages_column.py index 8d61069af4bbdb..9b7cd95383156f 100644 --- a/src/sentry/migrations/0570_repository_add_languages_column.py +++ b/src/sentry/migrations/0570_repository_add_languages_column.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0569_dashboard_widgets_indicator"), ] diff --git a/src/sentry/migrations/0583_add_early_adopter_to_organization_mapping.py b/src/sentry/migrations/0583_add_early_adopter_to_organization_mapping.py index 8d87de590a53d8..5641c4e1d47a1d 100644 --- a/src/sentry/migrations/0583_add_early_adopter_to_organization_mapping.py +++ b/src/sentry/migrations/0583_add_early_adopter_to_organization_mapping.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0582_add_status_indexes_checkins"), ] diff --git a/src/sentry/migrations/0590_add_metadata_to_sentry_app.py b/src/sentry/migrations/0590_add_metadata_to_sentry_app.py index 70467066f7a0b4..17c605b63b648b 100644 --- a/src/sentry/migrations/0590_add_metadata_to_sentry_app.py +++ b/src/sentry/migrations/0590_add_metadata_to_sentry_app.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0589_add_commit_date_added_indices"), ] diff --git a/src/sentry/migrations/0591_remove_relocation_hybrid_cloud_foreign_keys.py b/src/sentry/migrations/0591_remove_relocation_hybrid_cloud_foreign_keys.py index 9504b752d61bef..d4702555a72981 100644 --- a/src/sentry/migrations/0591_remove_relocation_hybrid_cloud_foreign_keys.py +++ b/src/sentry/migrations/0591_remove_relocation_hybrid_cloud_foreign_keys.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0590_add_metadata_to_sentry_app"), ] diff --git a/src/sentry/migrations/0592_delete_relocation_hybrid_cloud_foreign_keys.py b/src/sentry/migrations/0592_delete_relocation_hybrid_cloud_foreign_keys.py index f184baa6f83908..6ce84ad9f211e1 100644 --- a/src/sentry/migrations/0592_delete_relocation_hybrid_cloud_foreign_keys.py +++ b/src/sentry/migrations/0592_delete_relocation_hybrid_cloud_foreign_keys.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0591_remove_relocation_hybrid_cloud_foreign_keys"), ] diff --git a/src/sentry/migrations/0607_drop_externalactor_actorid.py b/src/sentry/migrations/0607_drop_externalactor_actorid.py index 6f7bf3fd118686..6052d52bb1aa2f 100644 --- a/src/sentry/migrations/0607_drop_externalactor_actorid.py +++ b/src/sentry/migrations/0607_drop_externalactor_actorid.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0606_update_user_to_optional_organization_slug_reservation"), ] diff --git a/src/sentry/migrations/0610_remove_notification_setting_table.py b/src/sentry/migrations/0610_remove_notification_setting_table.py index 47dfb155e353ac..e88cf1a990ec60 100644 --- a/src/sentry/migrations/0610_remove_notification_setting_table.py +++ b/src/sentry/migrations/0610_remove_notification_setting_table.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0609_remove_notification_setting_model"), ] diff --git a/src/sentry/migrations/0617_monitor_boolean_fields_muted_disabled.py b/src/sentry/migrations/0617_monitor_boolean_fields_muted_disabled.py index fe25887609f43f..040d95c6bfcce5 100644 --- a/src/sentry/migrations/0617_monitor_boolean_fields_muted_disabled.py +++ b/src/sentry/migrations/0617_monitor_boolean_fields_muted_disabled.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0616_drop_event_user_id_from_userreport_table_step_1"), ] diff --git a/src/sentry/migrations/0618_drop_event_user_id_from_userreport_table_step_2.py b/src/sentry/migrations/0618_drop_event_user_id_from_userreport_table_step_2.py index 0a2d77ed6ca01d..60c5c7d3c100c6 100644 --- a/src/sentry/migrations/0618_drop_event_user_id_from_userreport_table_step_2.py +++ b/src/sentry/migrations/0618_drop_event_user_id_from_userreport_table_step_2.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0617_monitor_boolean_fields_muted_disabled"), ] diff --git a/src/sentry/migrations/0624_add_is_muted_monitorenvironment.py b/src/sentry/migrations/0624_add_is_muted_monitorenvironment.py index 9d36e7039b1533..28437b53203c1b 100644 --- a/src/sentry/migrations/0624_add_is_muted_monitorenvironment.py +++ b/src/sentry/migrations/0624_add_is_muted_monitorenvironment.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0623_increase_regression_fingerprint_length"), ] diff --git a/src/sentry/migrations/0631_add_priority_columns_to_groupedmessage.py b/src/sentry/migrations/0631_add_priority_columns_to_groupedmessage.py index fdad116dc7ee45..6dffdce9d51f16 100644 --- a/src/sentry/migrations/0631_add_priority_columns_to_groupedmessage.py +++ b/src/sentry/migrations/0631_add_priority_columns_to_groupedmessage.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0630_better_monitor_latest_index"), ] diff --git a/src/sentry/migrations/0633_add_priority_locked_at_to_groupedmessage.py b/src/sentry/migrations/0633_add_priority_locked_at_to_groupedmessage.py index f0639df01f5e08..25c185d4151429 100644 --- a/src/sentry/migrations/0633_add_priority_locked_at_to_groupedmessage.py +++ b/src/sentry/migrations/0633_add_priority_locked_at_to_groupedmessage.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0632_apitoken_backfill_last_chars"), ] diff --git a/src/sentry/migrations/0637_remove_pr_comment_pr_id_constraint.py b/src/sentry/migrations/0637_remove_pr_comment_pr_id_constraint.py index 2c7a45b552bbf6..c06a425026ca2f 100644 --- a/src/sentry/migrations/0637_remove_pr_comment_pr_id_constraint.py +++ b/src/sentry/migrations/0637_remove_pr_comment_pr_id_constraint.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0636_monitor_incident_env_resolving_index"), ] diff --git a/src/sentry/migrations/0638_add_date_added_to_dashboard_widget_on_demand.py b/src/sentry/migrations/0638_add_date_added_to_dashboard_widget_on_demand.py index dc5e9246bdb970..b759ff0f296f58 100644 --- a/src/sentry/migrations/0638_add_date_added_to_dashboard_widget_on_demand.py +++ b/src/sentry/migrations/0638_add_date_added_to_dashboard_widget_on_demand.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0637_remove_pr_comment_pr_id_constraint"), ] diff --git a/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py b/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py index 1c6498a4817afc..09068d38850701 100644 --- a/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py +++ b/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0642_index_together_release"), ] diff --git a/src/sentry/migrations/0651_enable_activated_alert_rules.py b/src/sentry/migrations/0651_enable_activated_alert_rules.py index 60408e2a6f4779..4e1d2640b9edc3 100644 --- a/src/sentry/migrations/0651_enable_activated_alert_rules.py +++ b/src/sentry/migrations/0651_enable_activated_alert_rules.py @@ -23,6 +23,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0650_create_sentryshot"), ] diff --git a/src/sentry/migrations/0657_add_status_column_for_alert_rule_trigger_action.py b/src/sentry/migrations/0657_add_status_column_for_alert_rule_trigger_action.py index d1d5dc69bbc3d6..31fbdf83276a70 100644 --- a/src/sentry/migrations/0657_add_status_column_for_alert_rule_trigger_action.py +++ b/src/sentry/migrations/0657_add_status_column_for_alert_rule_trigger_action.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0656_add_discover_dataset_split_dashboard"), ] diff --git a/src/sentry/migrations/0658_projectkey_usecase.py b/src/sentry/migrations/0658_projectkey_usecase.py index 721f564704bce8..84964091d4aa0d 100644 --- a/src/sentry/migrations/0658_projectkey_usecase.py +++ b/src/sentry/migrations/0658_projectkey_usecase.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0657_add_status_column_for_alert_rule_trigger_action"), ] diff --git a/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py b/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py index 70ec590c959b64..670b5d9aa5bb6f 100644 --- a/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py +++ b/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0662_monitor_drop_last_state_change"), ] diff --git a/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py b/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py index dbef628771474b..56e9a741d8020a 100644 --- a/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py +++ b/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0664_create_new_broken_monitor_detection_table"), ] diff --git a/src/sentry/migrations/0678_add_is_hidden_dashboard_widget_query.py b/src/sentry/migrations/0678_add_is_hidden_dashboard_widget_query.py index d45b47230af02f..6f6fb4a07250d5 100644 --- a/src/sentry/migrations/0678_add_is_hidden_dashboard_widget_query.py +++ b/src/sentry/migrations/0678_add_is_hidden_dashboard_widget_query.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0677_unpickle_project_options_again"), ] diff --git a/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py b/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py index c6a78343712ecf..1bf094a1582093 100644 --- a/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py +++ b/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = True + allow_run_sql = True + dependencies = [ ("sentry", "0681_unpickle_authenticator_again"), ] diff --git a/src/sentry/migrations/0689_drop_config_from_cron_checkin.py b/src/sentry/migrations/0689_drop_config_from_cron_checkin.py index 724035f46302cf..e2165fe3b044b8 100644 --- a/src/sentry/migrations/0689_drop_config_from_cron_checkin.py +++ b/src/sentry/migrations/0689_drop_config_from_cron_checkin.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0688_add_project_flag_high_priority_alerts"), ] diff --git a/src/sentry/migrations/0697_remove_monitor_owner_actor_id_db.py b/src/sentry/migrations/0697_remove_monitor_owner_actor_id_db.py index 6f09d09c5c55af..837d74f0dd3f44 100644 --- a/src/sentry/migrations/0697_remove_monitor_owner_actor_id_db.py +++ b/src/sentry/migrations/0697_remove_monitor_owner_actor_id_db.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0696_remove_monitor_owner_actor_id"), ] diff --git a/src/sentry/migrations/0700_drop_fileid_controlavatar.py b/src/sentry/migrations/0700_drop_fileid_controlavatar.py index 0df209ea2d5de8..2e0607e567795c 100644 --- a/src/sentry/migrations/0700_drop_fileid_controlavatar.py +++ b/src/sentry/migrations/0700_drop_fileid_controlavatar.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0699_update_monitor_owner_team_id_cascsade"), ] diff --git a/src/sentry/migrations/0707_alert_rule_activations_incidents_fk.py b/src/sentry/migrations/0707_alert_rule_activations_incidents_fk.py index ff7a700e1aa93b..084f37f4732178 100644 --- a/src/sentry/migrations/0707_alert_rule_activations_incidents_fk.py +++ b/src/sentry/migrations/0707_alert_rule_activations_incidents_fk.py @@ -22,6 +22,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0706_grouphistory_userteam_backfill"), ] diff --git a/src/sentry/migrations/0709_alertrule_remove_owner_state.py b/src/sentry/migrations/0709_alertrule_remove_owner_state.py index 4eb13999d671c1..8123ffc5f80607 100644 --- a/src/sentry/migrations/0709_alertrule_remove_owner_state.py +++ b/src/sentry/migrations/0709_alertrule_remove_owner_state.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0708_rule_remove_owner_state"), ] diff --git a/src/sentry/migrations/0710_grouphistory_remove_actor_state.py b/src/sentry/migrations/0710_grouphistory_remove_actor_state.py index b011bcbde62c45..59e4bdb5806122 100644 --- a/src/sentry/migrations/0710_grouphistory_remove_actor_state.py +++ b/src/sentry/migrations/0710_grouphistory_remove_actor_state.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0709_alertrule_remove_owner_state"), ] diff --git a/src/sentry/migrations/0713_team_remove_actor_state.py b/src/sentry/migrations/0713_team_remove_actor_state.py index 84a80d3fe1d029..f4c773750cfd7f 100644 --- a/src/sentry/migrations/0713_team_remove_actor_state.py +++ b/src/sentry/migrations/0713_team_remove_actor_state.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0712_create_tombstone_compound_indexes"), ] diff --git a/src/sentry/migrations/0714_drop_project_team_avatar.py b/src/sentry/migrations/0714_drop_project_team_avatar.py index 73e0438f7c58bb..7e3a5b6623364f 100644 --- a/src/sentry/migrations/0714_drop_project_team_avatar.py +++ b/src/sentry/migrations/0714_drop_project_team_avatar.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0713_team_remove_actor_state"), ] diff --git 
a/src/sentry/migrations/0715_remove_actormodel_constraints.py b/src/sentry/migrations/0715_remove_actormodel_constraints.py index cd87515e7ed464..9b9eb125d0a456 100644 --- a/src/sentry/migrations/0715_remove_actormodel_constraints.py +++ b/src/sentry/migrations/0715_remove_actormodel_constraints.py @@ -22,6 +22,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0714_drop_project_team_avatar"), ] diff --git a/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py b/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py index 59ca6933a64ed6..718648afa84e14 100644 --- a/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py +++ b/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0718_delete_timebox_columns"), ] diff --git a/src/sentry/migrations/0720_remove_actor_columns.py b/src/sentry/migrations/0720_remove_actor_columns.py index 6b098b9ebddbcb..add6252e7b4805 100644 --- a/src/sentry/migrations/0720_remove_actor_columns.py +++ b/src/sentry/migrations/0720_remove_actor_columns.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0719_querysubscription_timebox_column_deletion_db"), ] diff --git a/src/sentry/migrations/0722_drop_sentryfunctions.py b/src/sentry/migrations/0722_drop_sentryfunctions.py index 8e0bcd9ece8416..bf0357c3fb98d3 100644 --- a/src/sentry/migrations/0722_drop_sentryfunctions.py +++ b/src/sentry/migrations/0722_drop_sentryfunctions.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0721_delete_sentryfunctions"), ] diff --git a/src/sentry/migrations/0724_discover_saved_query_dataset.py b/src/sentry/migrations/0724_discover_saved_query_dataset.py index c8439d7e2261d2..0dbf432449a084 100644 --- a/src/sentry/migrations/0724_discover_saved_query_dataset.py +++ b/src/sentry/migrations/0724_discover_saved_query_dataset.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0723_project_template_models"), ] diff --git a/src/sentry/migrations/0725_create_sentry_groupsearchview_table.py b/src/sentry/migrations/0725_create_sentry_groupsearchview_table.py index 40120a0f0db007..eecb7007bcafa9 100644 --- a/src/sentry/migrations/0725_create_sentry_groupsearchview_table.py +++ b/src/sentry/migrations/0725_create_sentry_groupsearchview_table.py @@ -26,6 +26,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0724_discover_saved_query_dataset"), ] diff --git a/src/sentry/migrations/0733_relocation_provenance.py b/src/sentry/migrations/0733_relocation_provenance.py index 9600cfa7b7f2c8..72626606c5ca6e 100644 --- a/src/sentry/migrations/0733_relocation_provenance.py +++ b/src/sentry/migrations/0733_relocation_provenance.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0732_add_span_attribute_extraction_rules"), ] diff --git a/src/sentry/migrations/0737_add_discover_saved_query_dataset_source.py b/src/sentry/migrations/0737_add_discover_saved_query_dataset_source.py index d3d14ff0930995..02ea18a44e3410 100644 
--- a/src/sentry/migrations/0737_add_discover_saved_query_dataset_source.py +++ b/src/sentry/migrations/0737_add_discover_saved_query_dataset_source.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0736_rm_reprocessing_step2"), ] diff --git a/src/sentry/migrations/0738_rm_reprocessing_step3.py b/src/sentry/migrations/0738_rm_reprocessing_step3.py index ed23fac78edb47..de7cde8e7701bb 100644 --- a/src/sentry/migrations/0738_rm_reprocessing_step3.py +++ b/src/sentry/migrations/0738_rm_reprocessing_step3.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0737_add_discover_saved_query_dataset_source"), ] diff --git a/src/sentry/migrations/0741_metric_alert_anomaly_detection.py b/src/sentry/migrations/0741_metric_alert_anomaly_detection.py index 3e8d7f3aeda955..86eb1ba26927a5 100644 --- a/src/sentry/migrations/0741_metric_alert_anomaly_detection.py +++ b/src/sentry/migrations/0741_metric_alert_anomaly_detection.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0740_one_relocation_file_kind_per_relocation"), ] diff --git a/src/sentry/migrations/0744_add_dataset_source_field_to_dashboards.py b/src/sentry/migrations/0744_add_dataset_source_field_to_dashboards.py index 47159802485e5d..de09cf3ff0abee 100644 --- a/src/sentry/migrations/0744_add_dataset_source_field_to_dashboards.py +++ b/src/sentry/migrations/0744_add_dataset_source_field_to_dashboards.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0743_backfill_broken_monitor_notification_setting_option"), ] diff --git a/src/sentry/migrations/0746_add_bitflags_to_hybrid_cloud.py b/src/sentry/migrations/0746_add_bitflags_to_hybrid_cloud.py index 0c577fe5630a9e..9dad268ac936f7 100644 --- a/src/sentry/migrations/0746_add_bitflags_to_hybrid_cloud.py +++ b/src/sentry/migrations/0746_add_bitflags_to_hybrid_cloud.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0745_add_prevent_superuser_access_bitflag"), ] diff --git a/src/sentry/migrations/0750_disable_member_invite_in_hybrid_cloud.py b/src/sentry/migrations/0750_disable_member_invite_in_hybrid_cloud.py index fcb366e642b1df..3eb750ff9d496b 100644 --- a/src/sentry/migrations/0750_disable_member_invite_in_hybrid_cloud.py +++ b/src/sentry/migrations/0750_disable_member_invite_in_hybrid_cloud.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0749_disable_member_invite"), ] diff --git a/src/sentry/migrations/0757_add_scopes_to_apiapplication.py b/src/sentry/migrations/0757_add_scopes_to_apiapplication.py index e0851898bf17ab..074c9ce950dc69 100644 --- a/src/sentry/migrations/0757_add_scopes_to_apiapplication.py +++ b/src/sentry/migrations/0757_add_scopes_to_apiapplication.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0756_grouprelease_represented_in_django"), ] diff --git a/src/sentry/migrations/0759_remove_spanattributeextraction_tables.py b/src/sentry/migrations/0759_remove_spanattributeextraction_tables.py index d778dbf7def58d..aba74fb7c586e3 100644 --- 
a/src/sentry/migrations/0759_remove_spanattributeextraction_tables.py +++ b/src/sentry/migrations/0759_remove_spanattributeextraction_tables.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0758_remove_spanattributeextraction_models"), ] diff --git a/src/sentry/migrations/0760_remove_appstore_connect_integration_tables.py b/src/sentry/migrations/0760_remove_appstore_connect_integration_tables.py index 3d62e30b52a786..5383ebc03a650c 100644 --- a/src/sentry/migrations/0760_remove_appstore_connect_integration_tables.py +++ b/src/sentry/migrations/0760_remove_appstore_connect_integration_tables.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0759_remove_spanattributeextraction_tables"), ] diff --git a/src/sentry/migrations/0776_drop_group_score_in_database.py b/src/sentry/migrations/0776_drop_group_score_in_database.py index 53ed5f75b6f0d2..2a9f36dd7e98f8 100644 --- a/src/sentry/migrations/0776_drop_group_score_in_database.py +++ b/src/sentry/migrations/0776_drop_group_score_in_database.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = True + allow_run_sql = True + dependencies = [ ("sentry", "0775_add_dashboard_permissions_model"), ] diff --git a/src/sentry/migrations/0785_add_new_field_to_dashboard_permissions.py b/src/sentry/migrations/0785_add_new_field_to_dashboard_permissions.py index 32912ed3a81cfc..bae928a544df7d 100644 --- a/src/sentry/migrations/0785_add_new_field_to_dashboard_permissions.py +++ b/src/sentry/migrations/0785_add_new_field_to_dashboard_permissions.py @@ -23,6 +23,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0784_remove_broadcasts_cta_column"), ] diff --git a/src/sentry/migrations/0786_drop_broadcasts_cta_column.py b/src/sentry/migrations/0786_drop_broadcasts_cta_column.py index fd14d73769726a..c8cac50728a022 100644 --- a/src/sentry/migrations/0786_drop_broadcasts_cta_column.py +++ b/src/sentry/migrations/0786_drop_broadcasts_cta_column.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0785_add_new_field_to_dashboard_permissions"), ] diff --git a/src/sentry/migrations/0790_delete_dashboard_perms_col.py b/src/sentry/migrations/0790_delete_dashboard_perms_col.py index 5349d41fd14706..38e17ab6fda1da 100644 --- a/src/sentry/migrations/0790_delete_dashboard_perms_col.py +++ b/src/sentry/migrations/0790_delete_dashboard_perms_col.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0789_add_unique_constraint_to_rollbackorganization"), ] diff --git a/src/sentry/migrations/0793_remove_db_constraint_alert_rule_exclusion.py b/src/sentry/migrations/0793_remove_db_constraint_alert_rule_exclusion.py new file mode 100644 index 00000000000000..8c673571a87f4d --- /dev/null +++ b/src/sentry/migrations/0793_remove_db_constraint_alert_rule_exclusion.py @@ -0,0 +1,59 @@ +# Generated by Django 5.1.1 on 2024-11-22 17:43 + +import django.db.models.deletion +from django.db import migrations + +import sentry.db.models.fields.foreignkey +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. 
+ # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0792_add_unique_index_apiauthorization"), + ] + + operations = [ + migrations.AlterField( + model_name="alertruleexcludedprojects", + name="alert_rule", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + db_constraint=False, + db_index=False, + on_delete=django.db.models.deletion.CASCADE, + to="sentry.alertrule", + ), + ), + migrations.AlterField( + model_name="alertruletriggerexclusion", + name="alert_rule_trigger", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + db_constraint=False, + on_delete=django.db.models.deletion.CASCADE, + related_name="exclusions", + to="sentry.alertruletrigger", + ), + ), + migrations.AlterField( + model_name="alertruletriggerexclusion", + name="query_subscription", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + db_constraint=False, + on_delete=django.db.models.deletion.CASCADE, + to="sentry.querysubscription", + ), + ), + ] diff --git a/src/sentry/migrations/0794_rm_excluded_included_projects_alertrule.py b/src/sentry/migrations/0794_rm_excluded_included_projects_alertrule.py new file mode 100644 index 00000000000000..0add9ea8dd47c6 --- /dev/null +++ b/src/sentry/migrations/0794_rm_excluded_included_projects_alertrule.py @@ -0,0 +1,45 @@ +# Generated by Django 5.1.1 on 2024-11-22 19:12 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0793_remove_db_constraint_alert_rule_exclusion"), + ] + + operations = [ + SafeRemoveField( + model_name="alertrule", + name="excluded_projects", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + migrations.AlterField( + model_name="alertrule", + name="include_all_projects", + field=models.BooleanField(default=False, null=True), + ), + SafeRemoveField( + model_name="alertrule", + name="include_all_projects", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/src/sentry/migrations/0795_drop_included_excluded_projects.py b/src/sentry/migrations/0795_drop_included_excluded_projects.py new file mode 100644 index 00000000000000..772097d94199f3 --- /dev/null +++ b/src/sentry/migrations/0795_drop_included_excluded_projects.py @@ -0,0 +1,36 @@ +# Generated by Django 5.1.1 on 2024-11-25 17:33 + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0794_rm_excluded_included_projects_alertrule"), + ] + + operations = [ + SafeRemoveField( + model_name="alertrule", name="excluded_projects", deletion_action=DeletionAction.DELETE + ), + SafeRemoveField( + model_name="alertrule", + name="include_all_projects", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/src/sentry/migrations/0796_rm_excluded_projects_triggers.py b/src/sentry/migrations/0796_rm_excluded_projects_triggers.py new file mode 100644 index 00000000000000..710cea5d0cc5f7 --- /dev/null +++ b/src/sentry/migrations/0796_rm_excluded_projects_triggers.py @@ -0,0 +1,36 @@ +# Generated by Django 5.1.1 on 2024-11-25 20:06 + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. 
Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0795_drop_included_excluded_projects"), + ] + + operations = [ + SafeDeleteModel( + name="AlertRuleExcludedProjects", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + SafeDeleteModel( + name="AlertRuleTriggerExclusion", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/src/sentry/migrations/0797_drop_excluded_project_triggers.py b/src/sentry/migrations/0797_drop_excluded_project_triggers.py new file mode 100644 index 00000000000000..d410dcf3729a51 --- /dev/null +++ b/src/sentry/migrations/0797_drop_excluded_project_triggers.py @@ -0,0 +1,30 @@ +# Generated by Django 5.1.1 on 2024-11-26 18:34 + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0796_rm_excluded_projects_triggers"), + ] + + operations = [ + SafeDeleteModel(name="AlertRuleExcludedProjects", deletion_action=DeletionAction.DELETE), + SafeDeleteModel(name="AlertRuleTriggerExclusion", deletion_action=DeletionAction.DELETE), + ] diff --git a/src/sentry/models/dashboard.py b/src/sentry/models/dashboard.py index d4d2ee7c38a753..e135160f1b972d 100644 --- a/src/sentry/models/dashboard.py +++ b/src/sentry/models/dashboard.py @@ -156,6 +156,7 @@ def get_prebuilt_dashboards(organization, user) -> list[dict[str, Any]]: "title": "General", "dateCreated": "", "createdBy": "", + "permissions": {"isEditableByEveryone": True, "teamsWithEditAccess": []}, "widgets": [ { "title": "Number of Errors", diff --git a/src/sentry/models/debugfile.py b/src/sentry/models/debugfile.py index ea0d64793ac8b8..c7572ddccfeb48 100644 --- a/src/sentry/models/debugfile.py +++ b/src/sentry/models/debugfile.py @@ -338,7 +338,10 @@ def create_dif_from_id( return dif, True -def _analyze_progard_filename(filename: str) -> str | None: +def _analyze_progard_filename(filename: str | None) -> str | None: + if filename is None: + return None + match = _proguard_file_re.search(filename) if match is None: return None @@ -474,9 +477,9 @@ def detect_dif_from_path( :raises BadDif: If the file is not a valid DIF. """ - # proguard files (proguard/UUID.txt) or + # Proguard files have a path or a name like (proguard/UUID.txt) or # (proguard/mapping-UUID.txt). - proguard_id = _analyze_progard_filename(path) + proguard_id = _analyze_progard_filename(path) or _analyze_progard_filename(name) if proguard_id is not None: data = {"features": ["mapping"]} return [ diff --git a/src/sentry/models/grouphashmetadata.py b/src/sentry/models/grouphashmetadata.py index be72988e8de326..f8ad100527d748 100644 --- a/src/sentry/models/grouphashmetadata.py +++ b/src/sentry/models/grouphashmetadata.py @@ -57,7 +57,9 @@ class GroupHashMetadata(Model): # Most recent config to produce this hash latest_grouping_config = models.CharField(null=True) # The primary grouping method (message, stacktrace, fingerprint, etc.) - hash_basis = models.CharField(choices=HashBasis, null=True) + hash_basis: models.Field[HashBasis | None, HashBasis | None] = models.CharField( + choices=HashBasis, null=True + ) # Metadata about the inputs to the hashing process and the hashing process itself (what # fingerprinting rules were matched? did we parameterize the message? etc.). For the specific # data stored, see the class definitions of the `HashingMetadata` subtypes. diff --git a/src/sentry/monitors/system_incidents.py b/src/sentry/monitors/system_incidents.py index cd56ff2619d9c3..4b7531802b90e7 100644 --- a/src/sentry/monitors/system_incidents.py +++ b/src/sentry/monitors/system_incidents.py @@ -22,7 +22,7 @@ from sentry import options from sentry.utils import metrics, redis -logger = logging.getLogger("sentry") +logger = logging.getLogger(__name__) # This key is used to record historical date about the volume of check-ins. 
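Taken together, migrations 0793-0797 above show the deletion flow that the new SafeRemoveField/SafeDeleteModel operations enforce: first drop the FK db constraints (0793), then move the columns/models to a pending-deletion state (0794, 0796), and only then perform the actual drop (0795, 0797). A minimal sketch of that two-step sequence for a hypothetical model; the app label, model, field, and migration names are illustrative only, not part of this change:

    from sentry.new_migrations.migrations import CheckedMigration
    from sentry.new_migrations.monkey.fields import SafeRemoveField
    from sentry.new_migrations.monkey.models import SafeDeleteModel
    from sentry.new_migrations.monkey.state import DeletionAction


    # Step 1 (one migration): detach the column and model from Django's state.
    # This only succeeds once any FK db constraint has been dropped (as in 0793)
    # and the column is nullable or has a db_default.
    class Migration(CheckedMigration):
        is_post_deployment = False

        dependencies = [("sentry", "0xxx_remove_db_constraints")]

        operations = [
            SafeRemoveField(
                model_name="mymodel",
                name="legacy_flag",
                deletion_action=DeletionAction.MOVE_TO_PENDING,
            ),
            SafeDeleteModel(
                name="MyLegacyModel",
                deletion_action=DeletionAction.MOVE_TO_PENDING,
            ),
        ]


    # Step 2 (a follow-up migration file): actually drop the column and table.
    # SentryProjectState raises UnsafeOperationException if DELETE is attempted
    # for anything that was never moved to pending deletion first.
    class Migration(CheckedMigration):
        is_post_deployment = False

        dependencies = [("sentry", "0xxx_move_to_pending")]

        operations = [
            SafeRemoveField(
                model_name="mymodel",
                name="legacy_flag",
                deletion_action=DeletionAction.DELETE,
            ),
            SafeDeleteModel(name="MyLegacyModel", deletion_action=DeletionAction.DELETE),
        ]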
MONITOR_VOLUME_HISTORY = "sentry.monitors.volume_history:{ts}" @@ -90,7 +90,7 @@ def process_clock_tick_for_system_incidents(tick: datetime) -> DecisionResult: result = make_clock_tick_decision(tick) logger.info( - "monitors.system_incidents.process_clock_tick", + "process_clock_tick", extra={"decision": result.decision, "transition": result.transition}, ) @@ -116,7 +116,7 @@ def process_clock_tick_for_system_incidents(tick: datetime) -> DecisionResult: if start := get_last_incident_ts(): prune_incident_check_in_volume(start, result.ts) else: - logger.error("monitors.system_incidents.recovered_without_start_ts") + logger.error("recovered_without_start_ts") return result @@ -207,10 +207,12 @@ def record_clock_tick_volume_metric(tick: datetime) -> None: # Can't make any decisions if we didn't have data for the past minute if past_minute_volume is None: + logger.info("past_minute_volume_missing", extra={"reference_datetime": tick}) return # We need AT LEAST two data points to calculate standard deviation if len(historic_volume) < 2: + logger.info("history_volume_low", extra={"reference_datetime": tick}) return # Record some statistics about the past_minute_volume volume in comparison @@ -242,7 +244,7 @@ def record_clock_tick_volume_metric(tick: datetime) -> None: metrics.gauge("monitors.task.volume_history.pct_deviation", pct_deviation, sample_rate=1.0) logger.info( - "monitors.system_incidents.volume_history", + "volume_history", extra={ "reference_datetime": str(tick), "evaluation_minute": past_ts.strftime("%H:%M"), @@ -511,7 +513,7 @@ def make_decision( pipeline.execute() logger.info( - "monitors.system_incidents.decision", + "clock_tick_decision", extra={ "reference_datetime": str(tick), "decision": decision, @@ -631,7 +633,7 @@ def _make_backfill(start: datetime, until_not: TickAnomalyDecision) -> Generator # If we've iterated through the entire BACKFILL_CUTOFF we have a # "decision runaway" and should report this as an error - logger.error("sentry.system_incidents.decision_backfill_runaway") + logger.error("decision_backfill_runaway") def _backfill_decisions( diff --git a/src/sentry/new_migrations/migrations.py b/src/sentry/new_migrations/migrations.py index ecb9968ebf677b..9dc3a5128e2807 100644 --- a/src/sentry/new_migrations/migrations.py +++ b/src/sentry/new_migrations/migrations.py @@ -1,4 +1,5 @@ -from django.db.migrations import Migration +from django.db.migrations import Migration, RunSQL +from django_zero_downtime_migrations.backends.postgres.schema import UnsafeOperationException class CheckedMigration(Migration): @@ -16,7 +17,20 @@ class CheckedMigration(Migration): # the `owners-migrations` team. checked = True + # This determines whether we allow `RunSQL` to be used in migrations. We want to discourage this going forward, + # because it's hard for our framework to determine whether SQL is safe. It can also cause problems with setting + # lock/statement timeouts appropriately. + allow_run_sql = False + def apply(self, project_state, schema_editor, collect_sql=False): if self.checked: schema_editor.safe = True + for op in self.operations: + if not self.allow_run_sql and type(op) is RunSQL: + raise UnsafeOperationException( + "Using RunSQL is unsafe because our migrations safety framework can't detect problems with the " + "migration. If you need to use RunSQL, set `allow_run_sql = True` and get approval from " + "`owners-migrations` to make sure that it's safe." 
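For reference, opting a migration into raw SQL under the new CheckedMigration.apply check looks roughly like the following; the table and statement are placeholders rather than anything in this change, and real usage still needs owners-migrations approval as the error message above says:

    from django.db import migrations

    from sentry.new_migrations.migrations import CheckedMigration


    class Migration(CheckedMigration):
        # Without this flag, apply() raises UnsafeOperationException on the
        # first RunSQL operation it encounters.
        allow_run_sql = True

        is_post_deployment = False

        dependencies = [("sentry", "0xxx_previous_migration")]

        operations = [
            migrations.RunSQL(
                sql='ALTER TABLE "sentry_example" DROP COLUMN "legacy_col";',  # placeholder SQL
                reverse_sql=migrations.RunSQL.noop,
                hints={"tables": ["sentry_example"]},
            ),
        ]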
+ ) + return super().apply(project_state, schema_editor, collect_sql) diff --git a/src/sentry/new_migrations/monkey/__init__.py b/src/sentry/new_migrations/monkey/__init__.py index a6b52294b510c1..1f581c9e0a6323 100644 --- a/src/sentry/new_migrations/monkey/__init__.py +++ b/src/sentry/new_migrations/monkey/__init__.py @@ -1,5 +1,4 @@ from django import VERSION -from django.db import models from sentry.new_migrations.monkey.executor import SentryMigrationExecutor from sentry.new_migrations.monkey.fields import deconstruct @@ -19,6 +18,10 @@ is copied and modified from `Queryset.update()` to add `RETURNING ` to the update query. Verify that the `update` code hasn't significantly changed, and if it has update as needed. + - We monkeypatch `SentryProjectState` over `ProjectState` in a few places. Check where + Django is importing it and make sure that we're still patching correctly. + We also need to verify that the patched `SentryProjectState` isn't missing new + features added by Django. When you're happy that these changes are good to go, update `LAST_VERIFIED_DJANGO_VERSION` to the version of Django you're upgrading to. If the @@ -77,6 +80,8 @@ class Migration(CheckedMigration): def monkey_migrations(): + from django.db import models + # This import needs to be below the other imports for `executor` and `writer` so # that we can successfully monkeypatch them. from django.db.migrations import executor, migration, writer @@ -86,3 +91,11 @@ def monkey_migrations(): migration.Migration.initial = None writer.MIGRATION_TEMPLATE = SENTRY_MIGRATION_TEMPLATE models.Field.deconstruct = deconstruct # type: ignore[method-assign] + + from django.db.migrations import graph, state + + from sentry.new_migrations.monkey.state import SentryProjectState + + state.ProjectState = SentryProjectState # type: ignore[misc] + graph.ProjectState = SentryProjectState # type: ignore[attr-defined] + executor.ProjectState = SentryProjectState # type: ignore[attr-defined] diff --git a/src/sentry/new_migrations/monkey/fields.py b/src/sentry/new_migrations/monkey/fields.py index d45d426bbe6c0f..b11e363dfb68ca 100644 --- a/src/sentry/new_migrations/monkey/fields.py +++ b/src/sentry/new_migrations/monkey/fields.py @@ -1,4 +1,10 @@ -from django.db.models import Field +from django.db.migrations import RemoveField +from django.db.models import Field, ManyToManyField +from django.db.models.fields import NOT_PROVIDED +from django_zero_downtime_migrations.backends.postgres.schema import UnsafeOperationException + +from sentry.db.postgres.schema import SafePostgresDatabaseSchemaEditor +from sentry.new_migrations.monkey.state import DeletionAction, SentryProjectState IGNORED_ATTRS = ["verbose_name", "help_text", "choices"] original_deconstruct = Field.deconstruct @@ -14,3 +20,66 @@ def deconstruct(self): for attr in IGNORED_ATTRS: kwargs.pop(attr, None) return name, path, args, kwargs + + +class SafeRemoveField(RemoveField): + def __init__(self, *args, deletion_action: DeletionAction, **kwargs): + super().__init__(*args, **kwargs) + self.deletion_action = deletion_action + + def state_forwards(self, app_label: str, state: SentryProjectState) -> None: # type: ignore[override] + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + field = state.apps.get_model(app_label, self.model_name_lower)._meta.get_field( + self.name_lower + ) + if getattr(field, "db_constraint", False): + raise UnsafeOperationException( + f"Foreign key db constraint must be removed before dropping {app_label}.{self.model_name_lower}.{self.name}. 
" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + if ( + not isinstance(field, ManyToManyField) + and not field.null + and field.db_default is NOT_PROVIDED + ): + raise UnsafeOperationException( + f"Field {app_label}.{self.model_name_lower}.{self.name} must either be nullable or have a db_default before dropping. " + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + + state.remove_field( + app_label, self.model_name_lower, self.name_lower, deletion_action=self.deletion_action + ) + + def database_forwards( + self, + app_label: str, + schema_editor: SafePostgresDatabaseSchemaEditor, # type: ignore[override] + from_state: SentryProjectState, # type: ignore[override] + to_state: SentryProjectState, # type: ignore[override] + ) -> None: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return + + field = from_state.get_pending_deletion_field(app_label, self.model_name, self.name) + if self.allow_migrate_model(schema_editor.connection.alias, field.model): + schema_editor.remove_field(field.model, field, is_safe=True) + + def database_backwards( + self, + app_label: str, + schema_editor: SafePostgresDatabaseSchemaEditor, # type: ignore[override] + from_state: SentryProjectState, # type: ignore[override] + to_state: SentryProjectState, # type: ignore[override] + ) -> None: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return + field = to_state.get_pending_deletion_field(app_label, self.model_name, self.name) + if self.allow_migrate_model(schema_editor.connection.alias, field.model): + schema_editor.add_field(field.model, field) + + def describe(self) -> str: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return f"Moved {self.model_name}.{self.name} field to pending deletion state" + else: + return super().describe() diff --git a/src/sentry/new_migrations/monkey/models.py b/src/sentry/new_migrations/monkey/models.py new file mode 100644 index 00000000000000..e744e11b356e70 --- /dev/null +++ b/src/sentry/new_migrations/monkey/models.py @@ -0,0 +1,58 @@ +from django.db.migrations import DeleteModel +from django_zero_downtime_migrations.backends.postgres.schema import UnsafeOperationException + +from sentry.db.postgres.schema import SafePostgresDatabaseSchemaEditor +from sentry.new_migrations.monkey.state import DeletionAction, SentryProjectState + + +class SafeDeleteModel(DeleteModel): + def __init__(self, *args, deletion_action: DeletionAction, **kwargs): + super().__init__(*args, **kwargs) + self.deletion_action = deletion_action + + def state_forwards(self, app_label: str, state: SentryProjectState) -> None: # type: ignore[override] + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + model = state.apps.get_model(app_label, self.name) + fields_with_constraints = [ + f.name for f in model._meta.fields if getattr(f, "db_constraint", False) + ] + if fields_with_constraints: + raise UnsafeOperationException( + "Foreign key db constraints must be removed before dropping " + f"{app_label}.{self.name}. 
Fields with constraints: {fields_with_constraints}" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-tables" + ) + state.remove_model(app_label, self.name_lower, deletion_action=self.deletion_action) + + def database_forwards( + self, + app_label: str, + schema_editor: SafePostgresDatabaseSchemaEditor, # type: ignore[override] + from_state: SentryProjectState, # type: ignore[override] + to_state: SentryProjectState, # type: ignore[override] + ) -> None: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return + + model = from_state.get_pending_deletion_model(app_label, self.name) + if self.allow_migrate_model(schema_editor.connection.alias, model): + schema_editor.delete_model(model, is_safe=True) + + def database_backwards( + self, + app_label: str, + schema_editor: SafePostgresDatabaseSchemaEditor, # type: ignore[override] + from_state: SentryProjectState, # type: ignore[override] + to_state: SentryProjectState, # type: ignore[override] + ) -> None: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return + model = to_state.get_pending_deletion_model(app_label, self.name) + if self.allow_migrate_model(schema_editor.connection.alias, model): + schema_editor.create_model(model) + + def describe(self) -> str: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return f"Moved model {self.name} to pending deletion state" + else: + return super().describe() diff --git a/src/sentry/new_migrations/monkey/state.py b/src/sentry/new_migrations/monkey/state.py new file mode 100644 index 00000000000000..aa78cf1b3569a1 --- /dev/null +++ b/src/sentry/new_migrations/monkey/state.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +from copy import deepcopy +from enum import Enum + +from django.db.migrations.state import ProjectState +from django.db.models import Field, Model +from django_zero_downtime_migrations.backends.postgres.schema import UnsafeOperationException + + +class DeletionAction(Enum): + MOVE_TO_PENDING = 0 + DELETE = 1 + + +class SentryProjectState(ProjectState): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.pending_deletion_models: dict[tuple[str, str], type[Model]] = {} + self.pending_deletion_fields: dict[tuple[str, str, str], type[Field]] = {} + + def get_pending_deletion_model(self, app_label: str, model_name: str) -> type[Model]: + model_key = (app_label.lower(), model_name.lower()) + if model_key not in self.pending_deletion_models: + raise UnsafeOperationException( + "Model must be in the pending deletion state before full deletion. " + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-tables" + ) + return self.pending_deletion_models[model_key] + + def get_pending_deletion_field( + self, app_label: str, model_name: str, field_name: str + ) -> type[Field]: + field_key = (app_label.lower(), model_name.lower(), field_name.lower()) + if field_key not in self.pending_deletion_fields: + raise UnsafeOperationException( + "Field must be in the pending deletion state before full deletion. 
" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + return self.pending_deletion_fields[field_key] + + def remove_model( + self, app_label: str, model_name: str, deletion_action: DeletionAction | None = None + ) -> None: + model_key = (app_label.lower(), model_name.lower()) + if deletion_action == DeletionAction.DELETE: + if model_key not in self.pending_deletion_models: + raise UnsafeOperationException( + "Model must be in the pending deletion state before full deletion. " + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-tables" + ) + del self.pending_deletion_models[model_key] + return + if deletion_action == DeletionAction.MOVE_TO_PENDING: + if model_key in self.pending_deletion_models: + raise UnsafeOperationException( + f"{app_label}.{model_name} is already pending deletion. Use DeletionAction.DELETE to delete" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-tables" + ) + self.pending_deletion_models[model_key] = self.apps.get_model(app_label, model_name) + super().remove_model(app_label, model_name) + + def remove_field( + self, + app_label: str, + model_name: str, + name: str, + deletion_action: DeletionAction | None = None, + ): + field_key = app_label.lower(), model_name.lower(), name.lower() + if deletion_action == DeletionAction.DELETE: + if field_key not in self.pending_deletion_fields: + raise UnsafeOperationException( + "Field must be in the pending deletion state before full deletion. " + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + del self.pending_deletion_fields[field_key] + return + + if deletion_action == DeletionAction.MOVE_TO_PENDING: + if field_key in self.pending_deletion_fields: + raise UnsafeOperationException( + f"{app_label}.{model_name}.{name} is already pending deletion. 
Use DeletionAction.DELETE to delete" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + self.pending_deletion_fields[field_key] = self.apps.get_model( + app_label, model_name + )._meta.get_field(name) + + super().remove_field(app_label, model_name, name) + + def clone(self) -> SentryProjectState: + new_state = super().clone() + new_state.pending_deletion_models = deepcopy(self.pending_deletion_models) # type: ignore[attr-defined] + new_state.pending_deletion_fields = deepcopy(self.pending_deletion_fields) # type: ignore[attr-defined] + return new_state # type: ignore[return-value] diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index de06fc7fa684d1..aa1a5797285204 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -2897,6 +2897,13 @@ flags=FLAG_AUTOMATOR_MODIFIABLE, ) +# option for clamping project tag key date range +register( + "visibility.tag-key-max-date-range.days", + default=14, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) + # option used to enable/disable applying # stack trace rules in profiles register( @@ -2916,7 +2923,28 @@ flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE, ) register( - "performance.event-tracker.sample-rate.transaction", + "performance.event-tracker.sample-rate.transactions", + default=0.0, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) + +# migrating send_alert_event task to not pass Event +register( + "sentryapps.send_alert_event.use-eventid", + type=Float, + default=0.0, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) +register( + "transactions.do_post_process_in_save", default=0.0, + flags=FLAG_AUTOMATOR_MODIFIABLE | FLAG_RATE, +) + +# allows us to disable indexing during maintenance events +register( + "sentry.similarity.indexing.enabled", + default=True, + type=Bool, flags=FLAG_AUTOMATOR_MODIFIABLE, ) diff --git a/src/sentry/profiles/flamegraph.py b/src/sentry/profiles/flamegraph.py index 1a46b8ea67df72..6dc8dd8842c2e2 100644 --- a/src/sentry/profiles/flamegraph.py +++ b/src/sentry/profiles/flamegraph.py @@ -26,7 +26,6 @@ from sentry.search.events.builder.profile_functions import ProfileFunctionsQueryBuilder from sentry.search.events.fields import resolve_datetime64 from sentry.search.events.types import QueryBuilderConfig, SnubaParams -from sentry.snuba import functions from sentry.snuba.dataset import Dataset, EntityKey, StorageKey from sentry.snuba.referrer import Referrer from sentry.utils.iterators import chunked @@ -42,70 +41,6 @@ class ProfileIds(TypedDict): profile_ids: list[str] -def get_profile_ids( - snuba_params: SnubaParams, - query: str | None = None, -) -> ProfileIds: - builder = DiscoverQueryBuilder( - dataset=Dataset.Discover, - params={}, - snuba_params=snuba_params, - query=query, - selected_columns=["profile.id"], - limit=options.get("profiling.flamegraph.profile-set.size"), - ) - - builder.add_conditions( - [ - Condition(Column("type"), Op.EQ, "transaction"), - Condition(Column("profile_id"), Op.IS_NOT_NULL), - ] - ) - - result = builder.run_query(Referrer.API_PROFILING_PROFILE_FLAMEGRAPH.value) - - return {"profile_ids": [row["profile.id"] for row in result["data"]]} - - -def get_profiles_with_function( - organization_id: int, - project_id: int, - function_fingerprint: int, - snuba_params: SnubaParams, - query: str, -) -> ProfileIds: - conditions = [query, f"fingerprint:{function_fingerprint}"] - - result = functions.query( - selected_columns=["timestamp", "unique_examples()"], - query=" ".join(cond for cond in conditions if cond), 
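A usage sketch for the new visibility.tag-key-max-date-range.days option registered above; the endpoint code that consumes it is not part of this diff, so the helper below is purely illustrative:

    from datetime import timedelta

    from sentry import options


    def clamp_tag_key_date_range(start, end):
        # Illustrative only: clamp a requested [start, end) window to the
        # configured maximum number of days for project tag key value queries.
        max_days = options.get("visibility.tag-key-max-date-range.days")
        if end - start > timedelta(days=max_days):
            start = end - timedelta(days=max_days)
        return start, end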
- snuba_params=snuba_params, - limit=100, - orderby=["-timestamp"], - referrer=Referrer.API_PROFILING_FUNCTION_SCOPED_FLAMEGRAPH.value, - auto_aggregations=True, - use_aggregate_conditions=True, - transform_alias_to_input_format=True, - ) - - def extract_profile_ids() -> list[str]: - max_profiles = options.get("profiling.flamegraph.profile-set.size") - profile_ids = [] - - for i in range(5): - for row in result["data"]: - examples = row["unique_examples()"] - if i < len(examples): - profile_ids.append(examples[i]) - - if len(profile_ids) >= max_profiles: - return profile_ids - - return profile_ids - - return {"profile_ids": extract_profile_ids()} - - class IntervalMetadata(TypedDict): start: str end: str diff --git a/src/sentry/profiles/task.py b/src/sentry/profiles/task.py index 254d5f5555ec36..9bde153f739cbf 100644 --- a/src/sentry/profiles/task.py +++ b/src/sentry/profiles/task.py @@ -205,22 +205,15 @@ def process_profile_task( except Exception as e: sentry_sdk.capture_exception(e) if "profiler_id" not in profile: - if options.get("profiling.emit_outcomes_in_profiling_consumer.enabled"): - _track_outcome( - profile=profile, - project=project, - outcome=Outcome.ACCEPTED, - categories=[DataCategory.PROFILE, DataCategory.PROFILE_INDEXED], - ) - else: - _track_outcome_legacy( - profile=profile, project=project, outcome=Outcome.ACCEPTED - ) + _track_outcome( + profile=profile, + project=project, + outcome=Outcome.ACCEPTED, + categories=[DataCategory.PROFILE, DataCategory.PROFILE_INDEXED], + ) + else: - if ( - options.get("profiling.emit_outcomes_in_profiling_consumer.enabled") - and "profiler_id" not in profile - ): + if "profiler_id" not in profile: _track_outcome( profile=profile, project=project, @@ -960,28 +953,20 @@ def _track_outcome( def _track_failed_outcome(profile: Profile, project: Project, reason: str) -> None: - if options.get("profiling.emit_outcomes_in_profiling_consumer.enabled"): - categories = [] - if "profiler_id" not in profile: - categories.append(DataCategory.PROFILE) - if profile.get("sampled"): - categories.append(DataCategory.PROFILE_INDEXED) - else: - categories.append(DataCategory.PROFILE_CHUNK) - _track_outcome( - profile=profile, - project=project, - outcome=Outcome.INVALID, - categories=categories, - reason=reason, - ) + categories = [] + if "profiler_id" not in profile: + categories.append(DataCategory.PROFILE) + if profile.get("sampled"): + categories.append(DataCategory.PROFILE_INDEXED) else: - _track_outcome_legacy( - profile=profile, - project=project, - outcome=Outcome.INVALID, - reason=reason, - ) + categories.append(DataCategory.PROFILE_CHUNK) + _track_outcome( + profile=profile, + project=project, + outcome=Outcome.INVALID, + categories=categories, + reason=reason, + ) @metrics.wraps("process_profile.insert_vroom_profile") diff --git a/src/sentry/projects/services/project/impl.py b/src/sentry/projects/services/project/impl.py index b5d73ae6e28153..e935baa96ed331 100644 --- a/src/sentry/projects/services/project/impl.py +++ b/src/sentry/projects/services/project/impl.py @@ -2,6 +2,7 @@ from django.db import router, transaction +from sentry.api.helpers.default_symbol_sources import set_default_symbol_sources from sentry.api.serializers import ProjectSerializer from sentry.auth.services.auth import AuthenticationContext from sentry.constants import ObjectStatus @@ -126,6 +127,8 @@ def create_project_for_organization( if team: project.add_team(team) + set_default_symbol_sources(project) + project_created.send( project=project, default_rules=True, diff --git 
a/src/sentry/remote_subscriptions/migrations/0003_drop_remote_subscription.py b/src/sentry/remote_subscriptions/migrations/0003_drop_remote_subscription.py index 650fd61c8b7e04..32b828597f44fa 100644 --- a/src/sentry/remote_subscriptions/migrations/0003_drop_remote_subscription.py +++ b/src/sentry/remote_subscriptions/migrations/0003_drop_remote_subscription.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("remote_subscriptions", "0002_remove_separate_remote_subscription"), ("uptime", "0003_drop_remote_subscription"), diff --git a/src/sentry/rules/actions/integrations/create_ticket/utils.py b/src/sentry/rules/actions/integrations/create_ticket/utils.py index 4d9476c8cb377c..b31a80d9760fd1 100644 --- a/src/sentry/rules/actions/integrations/create_ticket/utils.py +++ b/src/sentry/rules/actions/integrations/create_ticket/utils.py @@ -9,9 +9,14 @@ from sentry.eventstore.models import GroupEvent from sentry.integrations.base import IntegrationInstallation from sentry.integrations.models.external_issue import ExternalIssue +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, +) from sentry.integrations.services.integration.model import RpcIntegration from sentry.integrations.services.integration.service import integration_service from sentry.models.grouplink import GroupLink +from sentry.shared_integrations.exceptions import IntegrationFormError from sentry.silo.base import region_silo_function from sentry.types.rules import RuleFuture from sentry.utils import metrics @@ -114,27 +119,34 @@ def create_issue(event: GroupEvent, futures: Sequence[RuleFuture]) -> None: }, ) return - try: - response = installation.create_issue(data) - except Exception as e: - logger.info( - "%s.rule_trigger.create_ticket.failure", - provider, - extra={ - "rule_id": rule_id, - "provider": provider, - "integration_id": integration.id, - "error_message": str(e), - "exception_type": type(e).__name__, - }, - ) - metrics.incr( - f"{provider}.rule_trigger.create_ticket.failure", - tags={ - "provider": provider, - }, - ) - raise + with ProjectManagementEvent( + action_type=ProjectManagementActionType.CREATE_EXTERNAL_ISSUE, + integration=integration, + ).capture() as lifecycle: + lifecycle.add_extra("provider", provider) + lifecycle.add_extra("integration_id", integration.id) + lifecycle.add_extra("rule_id", rule_id) + + try: + response = installation.create_issue(data) + except Exception as e: + if isinstance(e, IntegrationFormError): + # Most of the time, these aren't explicit failures, they're + # some misconfiguration of an issue field - typically Jira. 
+ lifecycle.record_halt(str(e)) + + metrics.incr( + f"{provider}.rule_trigger.create_ticket.failure", + tags={ + "provider": provider, + }, + ) + + # Don't pass the full exception here, as it can contain a + # massive request response along with its stacktrace + lifecycle.record_failure(str(e)) + + raise create_link(integration, installation, event, response) diff --git a/src/sentry/rules/conditions/event_attribute.py b/src/sentry/rules/conditions/event_attribute.py index e5fb5699a1707b..0c630c418ead2f 100644 --- a/src/sentry/rules/conditions/event_attribute.py +++ b/src/sentry/rules/conditions/event_attribute.py @@ -63,8 +63,8 @@ def _handle(cls, path: list[str], event: GroupEvent) -> list[str]: "stacktrace.package": Columns.STACK_PACKAGE, "unreal.crashtype": Columns.UNREAL_CRASH_TYPE, "app.in_foreground": Columns.APP_IN_FOREGROUND, - "os.distribution.name": Columns.OS_DISTRIBUTION_NAME, - "os.distribution.version": Columns.OS_DISTRIBUTION_VERSION, + "os.distribution_name": Columns.OS_DISTRIBUTION_NAME, + "os.distribution_version": Columns.OS_DISTRIBUTION_VERSION, } @@ -418,21 +418,14 @@ def _handle(cls, path: list[str], event: GroupEvent) -> list[str]: @attribute_registry.register("os") class OsAttributeHandler(AttributeHandler): - minimum_path_length = 3 + minimum_path_length = 2 @classmethod def _handle(cls, path: list[str], event: GroupEvent) -> list[str]: - if path[1] in ("distribution"): - if path[2] in ("name", "version"): - contexts = event.data.get("contexts", {}) - os_context = contexts.get("os") - if os_context is None: - os_context = {} - - distribution = os_context.get(path[1]) - if distribution is None: - distribution = {} - - return [distribution.get(path[2])] - return [] + if path[1] in ("distribution_name", "distribution_version"): + contexts = event.data.get("contexts", {}) + os_context = contexts.get("os") + if os_context is None: + os_context = {} + return [os_context.get(path[1])] return [] diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py index ce55cc8a3e7902..5a2b9a947a8f4a 100644 --- a/src/sentry/runner/commands/devserver.py +++ b/src/sentry/runner/commands/devserver.py @@ -37,6 +37,7 @@ "transactions-subscription-results", "generic-metrics-subscription-results", "metrics-subscription-results", + "eap-spans-subscription-results", ] @@ -353,7 +354,10 @@ def devserver( # Create all topics if the Kafka eventstream is selected if kafka_consumers: - if "sentry_kafka" not in containers and "shared-kafka-kafka-1" not in containers: + kafka_container_name = ( + "kafka-kafka-1" if os.environ.get("USE_NEW_DEVSERVICES") == "1" else "sentry_kafka" + ) + if kafka_container_name not in containers: raise click.ClickException( f""" Devserver is configured to start some kafka consumers, but Kafka diff --git a/src/sentry/runner/commands/openai.py b/src/sentry/runner/commands/openai.py deleted file mode 100644 index 2152d8c894d555..00000000000000 --- a/src/sentry/runner/commands/openai.py +++ /dev/null @@ -1,29 +0,0 @@ -# The sentry utils json cannot pretty print -import json # noqa: S003 -from typing import IO - -import click - - -@click.command("openai") -@click.option("--event", type=click.File("r")) -@click.option("--model", default="gpt-3.5-turbo") -@click.option("--dump-prompt", is_flag=True) -def openai(event: IO[str], model: str, dump_prompt: bool) -> None: - """ - Runs the OpenAI assistent against a JSON event payload. 
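Condensed, the lifecycle handling added to create_issue above distinguishes user-fixable field misconfigurations (IntegrationFormError, typically Jira) from real failures. A sketch of the same pattern with the rule/metrics plumbing omitted and the installation/integration objects assumed to exist:

    from sentry.integrations.project_management.metrics import (
        ProjectManagementActionType,
        ProjectManagementEvent,
    )
    from sentry.shared_integrations.exceptions import IntegrationFormError


    def create_ticket_with_lifecycle(installation, integration, data):
        with ProjectManagementEvent(
            action_type=ProjectManagementActionType.CREATE_EXTERNAL_ISSUE,
            integration=integration,
        ).capture() as lifecycle:
            try:
                return installation.create_issue(data)
            except Exception as e:
                if isinstance(e, IntegrationFormError):
                    # Misconfigured issue fields: recorded as a halt, not an outage.
                    lifecycle.record_halt(str(e))
                # Keep only the message; the full exception can carry a huge
                # request/response payload.
                lifecycle.record_failure(str(e))
                raise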
- """ - from sentry.runner import configure - - configure() - - from sentry.api.endpoints.event_ai_suggested_fix import describe_event_for_ai, suggest_fix - - event_data = json.load(event) - if dump_prompt: - click.echo(json.dumps(describe_event_for_ai(event_data, model=model), indent=2)) - else: - resp = suggest_fix(event_data, stream=True, model=model) - for chunk in resp: - click.echo(chunk, nl=False) - click.echo() diff --git a/src/sentry/runner/commands/run.py b/src/sentry/runner/commands/run.py index 1c17cbf8d85712..41a2acbd5d97e7 100644 --- a/src/sentry/runner/commands/run.py +++ b/src/sentry/runner/commands/run.py @@ -253,6 +253,53 @@ def taskworker(rpc_host: str, max_task_count: int, **options: Any) -> None: raise SystemExit(exitcode) +@run.command() +@log_options() +@configuration +@click.option( + "--repeat", + type=int, + help="Number of messages to send to the kafka topic", + default=1, + show_default=True, +) +@click.option( + "--kwargs", + type=str, + help="Task function keyword arguments", +) +@click.option( + "--args", + type=str, + help="Task function arguments", +) +@click.option( + "--task-function-path", + type=str, + help="The path to the function name of the task to execute", + required=True, +) +def taskbroker_send_tasks( + task_function_path: str, + args: str, + kwargs: str, + repeat: int, +) -> None: + from sentry.utils.imports import import_string + + try: + func = import_string(task_function_path) + except Exception as e: + click.echo(f"Error: {e}") + raise click.Abort() + task_args = [] if not args else eval(args) + task_kwargs = {} if not kwargs else eval(kwargs) + + for _ in range(repeat): + func.delay(*task_args, **task_kwargs) + click.echo(message=f"Successfully sent {repeat} messages.") + + @run.command() @click.option( "--pidfile", diff --git a/src/sentry/runner/main.py b/src/sentry/runner/main.py index a24249045386f8..2c3517b94945ae 100644 --- a/src/sentry/runner/main.py +++ b/src/sentry/runner/main.py @@ -68,7 +68,6 @@ def cli(config: str) -> None: "sentry.runner.commands.performance.performance", "sentry.runner.commands.spans.spans", "sentry.runner.commands.spans.write_hashes", - "sentry.runner.commands.openai.openai", "sentry.runner.commands.llm.llm", "sentry.runner.commands.workstations.workstations", ), diff --git a/src/sentry/search/eap/columns.py b/src/sentry/search/eap/columns.py index dce4ba8a3a42df..7b82fefef61258 100644 --- a/src/sentry/search/eap/columns.py +++ b/src/sentry/search/eap/columns.py @@ -255,6 +255,16 @@ def simple_measurements_field(field) -> ResolvedColumn: internal_name="sentry.segment_name", search_type="string", ), + ResolvedColumn( + public_alias="transaction.span_id", + internal_name="sentry.segment_id", + search_type="string", + ), + ResolvedColumn( + public_alias="profile.id", + internal_name="sentry.profile_id", + search_type="string", + ), ResolvedColumn( public_alias="replay.id", internal_name="sentry.replay_id", @@ -285,8 +295,10 @@ def simple_measurements_field(field) -> ResolvedColumn: simple_sentry_field("sdk.version"), simple_sentry_field("span.status_code"), simple_sentry_field("span_id"), + simple_sentry_field("timestamp"), simple_sentry_field("trace.status"), simple_sentry_field("transaction.method"), + simple_sentry_field("transaction.op"), simple_sentry_field("user"), simple_sentry_field("user.email"), simple_sentry_field("user.geo.country_code"), @@ -334,7 +346,6 @@ def simple_measurements_field(field) -> ResolvedColumn: simple_measurements_field("messaging.message.body.size"), 
simple_measurements_field("messaging.message.receive.latency"), simple_measurements_field("messaging.message.retry.count"), - simple_measurements_field("http.response_content_length"), ] } diff --git a/src/sentry/search/eap/spans.py b/src/sentry/search/eap/spans.py index 9a7a50f98faecd..d066d25746e004 100644 --- a/src/sentry/search/eap/spans.py +++ b/src/sentry/search/eap/spans.py @@ -210,44 +210,7 @@ def _resolve_terms(self, terms: event_filter.ParsedTerms) -> TraceItemFilter | N parsed_terms = [] for item in terms: if isinstance(item, event_search.SearchFilter): - resolved_column, context = self.resolve_column(item.key.name) - raw_value = item.value.raw_value - if item.value.is_wildcard(): - if item.operator == "=": - operator = ComparisonFilter.OP_LIKE - elif item.operator == "!=": - operator = ComparisonFilter.OP_NOT_LIKE - else: - raise InvalidSearchQuery( - f"Cannot use a wildcard with a {item.operator} filter" - ) - # Slashes have to be double escaped so they are - # interpreted as a string literal. - raw_value = ( - str(item.value.raw_value) - .replace("\\", "\\\\") - .replace("%", "\\%") - .replace("_", "\\_") - .replace("*", "%") - ) - elif item.operator in constants.OPERATOR_MAP: - operator = constants.OPERATOR_MAP[item.operator] - else: - raise InvalidSearchQuery(f"Unknown operator: {item.operator}") - if isinstance(resolved_column.proto_definition, AttributeKey): - parsed_terms.append( - TraceItemFilter( - comparison_filter=ComparisonFilter( - key=resolved_column.proto_definition, - op=operator, - value=self._resolve_search_value( - resolved_column, item.operator, raw_value - ), - ) - ) - ) - else: - raise NotImplementedError("Can't filter on aggregates yet") + parsed_terms.append(self.resolve_term(cast(event_search.SearchFilter, item))) else: if self.config.use_aggregate_conditions: raise NotImplementedError("Can't filter on aggregates yet") @@ -259,6 +222,40 @@ def _resolve_terms(self, terms: event_filter.ParsedTerms) -> TraceItemFilter | N else: return None + def resolve_term(self, term: event_search.SearchFilter) -> TraceItemFilter: + resolved_column, context = self.resolve_column(term.key.name) + raw_value = term.value.raw_value + if term.value.is_wildcard(): + if term.operator == "=": + operator = ComparisonFilter.OP_LIKE + elif term.operator == "!=": + operator = ComparisonFilter.OP_NOT_LIKE + else: + raise InvalidSearchQuery(f"Cannot use a wildcard with a {term.operator} filter") + # Slashes have to be double escaped so they are + # interpreted as a string literal. 
+ raw_value = ( + str(term.value.raw_value) + .replace("\\", "\\\\") + .replace("%", "\\%") + .replace("_", "\\_") + .replace("*", "%") + ) + elif term.operator in constants.OPERATOR_MAP: + operator = constants.OPERATOR_MAP[term.operator] + else: + raise InvalidSearchQuery(f"Unknown operator: {term.operator}") + if isinstance(resolved_column.proto_definition, AttributeKey): + return TraceItemFilter( + comparison_filter=ComparisonFilter( + key=resolved_column.proto_definition, + op=operator, + value=self._resolve_search_value(resolved_column, term.operator, raw_value), + ) + ) + else: + raise NotImplementedError("Can't filter on aggregates yet") + def _resolve_search_value( self, column: ResolvedColumn, diff --git a/src/sentry/search/events/builder/errors.py b/src/sentry/search/events/builder/errors.py index 4869fe65ac191f..650d4963513009 100644 --- a/src/sentry/search/events/builder/errors.py +++ b/src/sentry/search/events/builder/errors.py @@ -88,7 +88,13 @@ def aliased_column(self, name: str) -> SelectType: aliased_col, exp=self._apply_column_entity(aliased_col.exp.name) ) elif isinstance(aliased_col, Column): - return self._apply_column_entity(aliased_col.name) + if self.config.use_entity_prefix_for_fields: + return self._apply_column_entity(aliased_col.name) + + # Map the column with the entity name back to the original resolved name + return AliasedExpression( + self._apply_column_entity(aliased_col.name), alias=aliased_col.name + ) raise NotImplementedError(f"{type(aliased_col)} not implemented in aliased_column") diff --git a/src/sentry/search/events/builder/metrics_summaries.py b/src/sentry/search/events/builder/metrics_summaries.py deleted file mode 100644 index 5d2968ab2ba104..00000000000000 --- a/src/sentry/search/events/builder/metrics_summaries.py +++ /dev/null @@ -1,38 +0,0 @@ -from snuba_sdk import Entity, Flags, Query, Request - -from sentry.search.events.builder.base import BaseQueryBuilder -from sentry.search.events.datasets.metrics_summaries import MetricsSummariesDatasetConfig -from sentry.snuba.dataset import Dataset - - -class MetricsSummariesQueryBuilder(BaseQueryBuilder): - requires_organization_condition = False - config_class = MetricsSummariesDatasetConfig - - def get_field_type(self, field: str) -> str | None: - if field in ["min_metric", "max_metric", "sum_metric", "count_metric"]: - return "number" - return None - - def get_snql_query(self) -> Request: - self.validate_having_clause() - - return Request( - # the metrics summaries entity exists within the spans indexed dataset - dataset=Dataset.SpansIndexed.value, - app_id="default", - query=Query( - match=Entity(self.dataset.value, sample=self.sample_rate), - select=self.columns, - array_join=self.array_join, - where=self.where, - having=self.having, - groupby=self.groupby, - orderby=self.orderby, - limit=self.limit, - offset=self.offset, - limitby=self.limitby, - ), - flags=Flags(turbo=self.turbo), - tenant_ids=self.tenant_ids, - ) diff --git a/src/sentry/search/events/datasets/discover.py b/src/sentry/search/events/datasets/discover.py index f422ad4361b918..9178ae61de05cd 100644 --- a/src/sentry/search/events/datasets/discover.py +++ b/src/sentry/search/events/datasets/discover.py @@ -107,6 +107,7 @@ class DiscoverDatasetConfig(DatasetConfig): "user_misery()", } non_nullable_keys = {"event.type"} + use_entity_prefix_for_fields: bool = False def __init__(self, builder: BaseQueryBuilder): self.builder = builder diff --git a/src/sentry/search/events/datasets/spans_indexed.py 
b/src/sentry/search/events/datasets/spans_indexed.py index 34c4cc0317d3ae..f5cf34511336c8 100644 --- a/src/sentry/search/events/datasets/spans_indexed.py +++ b/src/sentry/search/events/datasets/spans_indexed.py @@ -44,7 +44,7 @@ def __init__(self, builder: BaseQueryBuilder): @property def search_filter_converter( self, - ) -> Mapping[str, Callable[[SearchFilter], WhereType | None]]: + ) -> dict[str, Callable[[SearchFilter], WhereType | None]]: return { "message": self._message_filter_converter, constants.PROJECT_ALIAS: self._project_slug_filter_converter, @@ -916,6 +916,14 @@ def field_alias_converter(self) -> Mapping[str, Callable[[str], SelectType]]: existing_field_aliases.update(field_alias_converter) return existing_field_aliases + @property + def search_filter_converter( + self, + ) -> dict[str, Callable[[SearchFilter], WhereType | None]]: + existing_search_filters = super().search_filter_converter + del existing_search_filters[constants.SPAN_STATUS] + return existing_search_filters + def _resolve_sum_weighted( self, args: Mapping[str, str | SelectType | int | float], diff --git a/src/sentry/search/events/types.py b/src/sentry/search/events/types.py index 81992445893f47..f4b8e4e4672311 100644 --- a/src/sentry/search/events/types.py +++ b/src/sentry/search/events/types.py @@ -231,6 +231,8 @@ class QueryBuilderConfig: skip_field_validation_for_entity_subscription_deletion: bool = False allow_metric_aggregates: bool | None = False insights_metrics_override_metric_layer: bool = False + # Allow the errors query builder to use the entity prefix for fields + use_entity_prefix_for_fields: bool = False @dataclass(frozen=True) diff --git a/src/sentry/seer/similarity/utils.py b/src/sentry/seer/similarity/utils.py index 29c8b3810c28dd..b92fd1858d81df 100644 --- a/src/sentry/seer/similarity/utils.py +++ b/src/sentry/seer/similarity/utils.py @@ -1,4 +1,5 @@ import logging +from enum import StrEnum from typing import Any, TypeVar from sentry import options @@ -26,15 +27,15 @@ ) SEER_ELIGIBLE_PLATFORMS = frozenset( [ - # "android", - # "android-profiling-onboarding-1-install", - # "android-profiling-onboarding-3-configure-profiling", - # "android-profiling-onboarding-4-upload", + "android", + "android-profiling-onboarding-1-install", + "android-profiling-onboarding-3-configure-profiling", + "android-profiling-onboarding-4-upload", "bun", - # "dart", + "dart", "deno", "django", - # "flutter", + "flutter", "go", "go-echo", "go-fasthttp", @@ -44,16 +45,16 @@ "go-iris", "go-martini", "go-negroni", - # "groovy", + "groovy", "java", "java-android", - # "java-appengine", - # "java-log4j", - # "java-log4j2", - # "java-logging", + "java-appengine", + "java-log4j", + "java-log4j2", + "java-logging", "java-logback", - # "java-spring", - # "java-spring-boot", + "java-spring", + "java-spring-boot", "javascript", "javascript-angular", "javascript-angularjs", @@ -143,6 +144,19 @@ "ruby-rails", ] ) +SYSTEM_FRAME_CHECK_PLATFORMS = frozenset( + [ + "java", + "java-android", + "java-appengine", + "java-log4j", + "java-log4j2", + "java-logging", + "java-logback", + "java-spring", + "java-spring-boot", + ] +) BASE64_ENCODED_PREFIXES = [ "data:text/html;base64", "data:text/javascript;base64", @@ -151,11 +165,24 @@ ] +class ReferrerOptions(StrEnum): + INGEST = "ingest" + BACKFILL = "backfill" + + +class TooManyOnlySystemFramesException(Exception): + pass + + +class NoFilenameOrModuleException(Exception): + pass + + def _get_value_if_exists(exception_value: dict[str, Any]) -> str: return exception_value["values"][0] if 
exception_value.get("values") else "" -def get_stacktrace_string(data: dict[str, Any]) -> str: +def get_stacktrace_string(data: dict[str, Any], platform: str | None = None) -> str: """Format a stacktrace string from the grouping information.""" app_hash = get_path(data, "app", "hash") app_component = get_path(data, "app", "component", "values") @@ -177,6 +204,8 @@ def get_stacktrace_string(data: dict[str, Any]) -> str: frame_count = 0 html_frame_count = 0 # for a temporary metric + is_frames_truncated = False + has_no_filename_or_module = False stacktrace_str = "" found_non_snipped_context_line = False @@ -185,19 +214,23 @@ def get_stacktrace_string(data: dict[str, Any]) -> str: def _process_frames(frames: list[dict[str, Any]]) -> list[str]: nonlocal frame_count nonlocal html_frame_count + nonlocal is_frames_truncated + nonlocal has_no_filename_or_module nonlocal found_non_snipped_context_line frame_strings = [] contributing_frames = [ frame for frame in frames if frame.get("id") == "frame" and frame.get("contributes") ] + if len(contributing_frames) + frame_count > MAX_FRAME_COUNT: + is_frames_truncated = True contributing_frames = _discard_excess_frames( contributing_frames, MAX_FRAME_COUNT, frame_count ) frame_count += len(contributing_frames) for frame in contributing_frames: - frame_dict = {"filename": "", "function": "", "context-line": ""} + frame_dict = {"filename": "", "function": "", "context-line": "", "module": ""} for frame_values in frame.get("values", []): if frame_values.get("id") in frame_dict: frame_dict[frame_values["id"]] = _get_value_if_exists(frame_values) @@ -205,6 +238,11 @@ def _process_frames(frames: list[dict[str, Any]]) -> list[str]: if not _is_snipped_context_line(frame_dict["context-line"]): found_non_snipped_context_line = True + if frame_dict["filename"] == "" and frame_dict["module"] == "": + has_no_filename_or_module = True + elif frame_dict["filename"] == "": + frame_dict["filename"] = frame_dict["module"] + # Not an exhaustive list of tests we could run to detect HTML, but this is only # meant to be a temporary, quick-and-dirty metric # TODO: Don't let this, and the metric below, hang around forever. 
It's only to @@ -255,6 +293,10 @@ def _process_frames(frames: list[dict[str, Any]]) -> list[str]: exc_value = _get_value_if_exists(exception_value) elif exception_value.get("id") == "stacktrace" and frame_count < MAX_FRAME_COUNT: frame_strings = _process_frames(exception_value["values"]) + if platform in SYSTEM_FRAME_CHECK_PLATFORMS and is_frames_truncated and not app_hash: + raise TooManyOnlySystemFramesException + if has_no_filename_or_module: + raise NoFilenameOrModuleException # Only exceptions have the type and value properties, so we don't need to handle the threads # case here header = f"{exc_type}: {exc_value}\n" if exception["id"] == "exception" else "" @@ -290,6 +332,42 @@ def _process_frames(frames: list[dict[str, Any]]) -> list[str]: return stacktrace_str.strip() +def get_stacktrace_string_with_metrics( + data: dict[str, Any], platform: str | None, referrer: ReferrerOptions +) -> str | None: + try: + stacktrace_string = get_stacktrace_string(data, platform) + except TooManyOnlySystemFramesException: + platform = platform if platform else "unknown" + metrics.incr( + "grouping.similarity.over_threshold_only_system_frames", + sample_rate=options.get("seer.similarity.metrics_sample_rate"), + tags={"platform": platform, "referrer": referrer}, + ) + if referrer == ReferrerOptions.INGEST: + metrics.incr( + "grouping.similarity.did_call_seer", + sample_rate=options.get("seer.similarity.metrics_sample_rate"), + tags={ + "call_made": False, + "blocker": "over-threshold-only-system-frames", + }, + ) + stacktrace_string = None + except NoFilenameOrModuleException: + if referrer == ReferrerOptions.INGEST: + metrics.incr( + "grouping.similarity.did_call_seer", + sample_rate=options.get("seer.similarity.metrics_sample_rate"), + tags={ + "call_made": False, + "blocker": "no-module-or-filename", + }, + ) + stacktrace_string = None + return stacktrace_string + + def event_content_has_stacktrace(event: Event) -> bool: # If an event has no stacktrace, there's no data for Seer to analyze, so no point in making the # API call. If we ever start analyzing message-only events, we'll need to add `event.title in diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py b/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py index d0ad44c5f50932..7f8aae8ad5f161 100644 --- a/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py +++ b/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py @@ -16,7 +16,7 @@ class SentryAppsStatsEndpoint(SentryAppsBaseEndpoint): owner = ApiOwner.INTEGRATIONS publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, } permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,) diff --git a/src/sentry/sentry_apps/services/app/impl.py b/src/sentry/sentry_apps/services/app/impl.py index 218d010f8436af..e266283516b35e 100644 --- a/src/sentry/sentry_apps/services/app/impl.py +++ b/src/sentry/sentry_apps/services/app/impl.py @@ -97,12 +97,6 @@ def get_sentry_app_by_slug(self, *, slug: str) -> RpcSentryApp | None: except SentryApp.DoesNotExist: return None - def get_installed_for_organization( - self, *, organization_id: int - ) -> list[RpcSentryAppInstallation]: - # Deprecated. Use get_installations_for_organization instead. 
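# A rough sketch (hypothetical caller, not part of this patch) of how the new
# get_stacktrace_string_with_metrics wrapper introduced above is meant to be used:
# it catches TooManyOnlySystemFramesException and NoFilenameOrModuleException,
# records the corresponding grouping.similarity.* metrics, and returns None so the
# caller can simply skip the Seer call.
from sentry.seer.similarity.utils import (
    ReferrerOptions,
    get_stacktrace_string_with_metrics,
)

def stacktrace_for_seer(grouping_info: dict, platform: str | None) -> str | None:
    # None means "do not call Seer"; the wrapper has already emitted the
    # metrics explaining which guard fired.
    return get_stacktrace_string_with_metrics(
        grouping_info, platform, ReferrerOptions.INGEST
    )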
- return self.get_installations_for_organization(organization_id=organization_id) - def get_installations_for_organization( self, *, organization_id: int ) -> list[RpcSentryAppInstallation]: diff --git a/src/sentry/sentry_apps/services/app/service.py b/src/sentry/sentry_apps/services/app/service.py index 731c2f6573fbc6..7d8d8f466fcc60 100644 --- a/src/sentry/sentry_apps/services/app/service.py +++ b/src/sentry/sentry_apps/services/app/service.py @@ -8,7 +8,6 @@ from typing import Any from sentry.auth.services.auth import AuthenticationContext -from sentry.features.rollout import in_random_rollout from sentry.hybridcloud.rpc.caching.service import back_with_silo_cache, back_with_silo_cache_list from sentry.hybridcloud.rpc.filter_query import OpaqueSerializedResponse from sentry.hybridcloud.rpc.service import RpcService, rpc_method @@ -61,16 +60,6 @@ def find_installation_by_proxy_user( ) -> RpcSentryAppInstallation | None: pass - @rpc_method - @abc.abstractmethod - def get_installed_for_organization( - self, - *, - organization_id: int, - ) -> list[RpcSentryAppInstallation]: - # Deprecated use installations_for_organization instead. - pass - def installations_for_organization( self, *, organization_id: int ) -> list[RpcSentryAppInstallation]: @@ -79,10 +68,7 @@ def installations_for_organization( This is a cached wrapper around get_installations_for_organization """ - if in_random_rollout("app_service.installations_for_org.cached"): - return get_installations_for_organization(organization_id) - else: - return self.get_installed_for_organization(organization_id=organization_id) + return get_installations_for_organization(organization_id) @rpc_method @abc.abstractmethod diff --git a/src/sentry/sentry_apps/tasks/__init__.py b/src/sentry/sentry_apps/tasks/__init__.py index 62d7eee5405b7d..26909deb270a05 100644 --- a/src/sentry/sentry_apps/tasks/__init__.py +++ b/src/sentry/sentry_apps/tasks/__init__.py @@ -5,6 +5,7 @@ installation_webhook, process_resource_change_bound, send_alert_event, + send_alert_webhook, send_resource_change_webhook, workflow_notification, ) @@ -20,4 +21,5 @@ "send_resource_change_webhook", "workflow_notification", "process_service_hook", + "send_alert_webhook", ) diff --git a/src/sentry/sentry_apps/tasks/sentry_apps.py b/src/sentry/sentry_apps/tasks/sentry_apps.py index 02ead328930772..62e35c5197cdaf 100644 --- a/src/sentry/sentry_apps/tasks/sentry_apps.py +++ b/src/sentry/sentry_apps/tasks/sentry_apps.py @@ -2,7 +2,7 @@ import logging from collections import defaultdict -from collections.abc import Mapping +from collections.abc import Mapping, Sequence from typing import Any from celery import Task, current_task @@ -13,8 +13,10 @@ from sentry.api.serializers import serialize from sentry.constants import SentryAppInstallationStatus from sentry.db.models.base import Model -from sentry.eventstore.models import Event, GroupEvent +from sentry.eventstore.models import BaseEvent, Event, GroupEvent +from sentry.features.rollout import in_random_rollout from sentry.hybridcloud.rpc.caching import region_caching_service +from sentry.issues.issue_occurrence import IssueOccurrence from sentry.models.activity import Activity from sentry.models.group import Group from sentry.models.organization import Organization @@ -34,6 +36,7 @@ from sentry.shared_integrations.exceptions import ApiHostError, ApiTimeoutError, ClientError from sentry.silo.base import SiloMode from sentry.tasks.base import instrumented_task, retry +from sentry.types.rules import RuleFuture from 
sentry.users.services.user.model import RpcUser from sentry.users.services.user.service import user_service from sentry.utils import metrics @@ -75,6 +78,8 @@ def _webhook_event_data( event: Event | GroupEvent, group_id: int, project_id: int ) -> dict[str, Any]: + from sentry.api.serializers.rest_framework import convert_dict_key_case, snake_to_camel_case + project = Project.objects.get_from_cache(id=project_id) organization = Organization.objects.get_from_cache(id=project.organization_id) @@ -91,6 +96,10 @@ def _webhook_event_data( "sentry-organization-event-detail", args=[organization.slug, group_id, event.event_id] ) ) + if hasattr(event, "occurrence") and event.occurrence is not None: + event_context["occurrence"] = convert_dict_key_case( + event.occurrence.to_dict(), snake_to_camel_case + ) # The URL has a regex OR in it ("|") which means `reverse` cannot generate # a valid URL (it can't know which option to pick). We have to manually @@ -100,6 +109,104 @@ def _webhook_event_data( return event_context +@instrumented_task(name="sentry.sentry_apps.tasks.sentry_apps.send_alert_webhook", **TASK_OPTIONS) +@retry_decorator +def send_alert_webhook( + rule: str, + sentry_app_id: int, + instance_id: str, + group_id: int, + occurrence_id: str, + additional_payload_key: str | None = None, + additional_payload: Mapping[str, Any] | None = None, + **kwargs: Any, +): + group = Group.objects.get_from_cache(id=group_id) + assert group, "Group must exist to get related attributes" + project = Project.objects.get_from_cache(id=group.project_id) + organization = Organization.objects.get_from_cache(id=project.organization_id) + extra = { + "sentry_app_id": sentry_app_id, + "project_slug": project.slug, + "organization_slug": organization.slug, + "rule": rule, + } + + sentry_app = app_service.get_sentry_app_by_id(id=sentry_app_id) + if sentry_app is None: + logger.info("event_alert_webhook.missing_sentry_app", extra=extra) + return + + installations = app_service.get_many( + filter=dict( + organization_id=organization.id, + app_ids=[sentry_app.id], + status=SentryAppInstallationStatus.INSTALLED, + ) + ) + if not installations: + logger.info("event_alert_webhook.missing_installation", extra=extra) + return + (install,) = installations + + nodedata = nodestore.backend.get( + BaseEvent.generate_node_id(project_id=project.id, event_id=instance_id) + ) + + if not nodedata: + extra = { + "event_id": instance_id, + "occurrence_id": occurrence_id, + "rule": rule, + "sentry_app": sentry_app.slug, + "group_id": group_id, + } + logger.info("send_alert_event.missing_event", extra=extra) + return + + occurrence = None + if occurrence_id: + occurrence = IssueOccurrence.fetch(occurrence_id, project_id=project.id) + + if not occurrence: + logger.info( + "send_alert_event.missing_occurrence", + extra={"occurrence_id": occurrence_id, "project_id": project.id}, + ) + return + + group_event = GroupEvent( + project_id=project.id, + event_id=instance_id, + group=group, + data=nodedata, + occurrence=occurrence, + ) + + event_context = _webhook_event_data(group_event, group.id, project.id) + + data = {"event": event_context, "triggered_rule": rule} + + # Attach extra payload to the webhook + if additional_payload_key and additional_payload: + data[additional_payload_key] = additional_payload + + request_data = AppPlatformEvent( + resource="event_alert", action="triggered", install=install, data=data + ) + + send_and_save_webhook_request(sentry_app, request_data) + + # On success, record analytic event for Alert Rule UI Component 
+ if request_data.data.get("issue_alert"): + analytics.record( + "alert_rule_ui_component_webhook.sent", + organization_id=organization.id, + sentry_app_id=sentry_app_id, + event=f"{request_data.resource}.{request_data.action}", + ) + + @instrumented_task(name="sentry.sentry_apps.tasks.sentry_apps.send_alert_event", **TASK_OPTIONS) @retry_decorator def send_alert_event( @@ -426,7 +533,7 @@ def send_resource_change_webhook( metrics.incr("resource_change.processed", sample_rate=1.0, tags={"change_event": event}) -def notify_sentry_app(event: Event | GroupEvent, futures): +def notify_sentry_app(event: GroupEvent, futures: Sequence[RuleFuture]): for f in futures: if not f.kwargs.get("sentry_app"): continue @@ -446,12 +553,22 @@ def notify_sentry_app(event: Event | GroupEvent, futures): "settings": settings, } - send_alert_event.delay( - event=event, - rule=f.rule.label, - sentry_app_id=f.kwargs["sentry_app"].id, - **extra_kwargs, - ) + if in_random_rollout("sentryapps.send_alert_event.use-eventid"): + send_alert_webhook.delay( + instance_id=event.event_id, + group_id=event.group_id, + occurrence_id=event.occurrence_id if hasattr(event, "occurrence_id") else None, + rule=f.rule.label, + sentry_app_id=f.kwargs["sentry_app"].id, + **extra_kwargs, + ) + else: + send_alert_event.delay( + event=event, + rule=f.rule.label, + sentry_app_id=f.kwargs["sentry_app"].id, + **extra_kwargs, + ) def send_webhooks(installation: RpcSentryAppInstallation, event: str, **kwargs: Any) -> None: diff --git a/src/sentry/sentry_metrics/querying/samples_list.py b/src/sentry/sentry_metrics/querying/samples_list.py deleted file mode 100644 index ef305548c71a71..00000000000000 --- a/src/sentry/sentry_metrics/querying/samples_list.py +++ /dev/null @@ -1,1225 +0,0 @@ -from abc import ABC, abstractmethod -from bisect import bisect -from collections.abc import Callable -from dataclasses import dataclass -from datetime import datetime -from typing import Any, Literal, TypedDict, cast - -from snuba_sdk import And, Column, Condition, Function, Op, Or - -from sentry import options -from sentry.api.event_search import SearchFilter, SearchKey, SearchValue -from sentry.search.events.builder.base import BaseQueryBuilder -from sentry.search.events.builder.discover import DiscoverQueryBuilder -from sentry.search.events.builder.metrics_summaries import MetricsSummariesQueryBuilder -from sentry.search.events.builder.spans_indexed import SpansIndexedQueryBuilder -from sentry.search.events.types import QueryBuilderConfig, SelectType, SnubaParams -from sentry.snuba.dataset import Dataset -from sentry.snuba.metrics.naming_layer.mri import ( - SpanMRI, - TransactionMRI, - is_custom_metric, - is_measurement, - parse_mri, -) -from sentry.snuba.referrer import Referrer -from sentry.utils.numbers import clip - - -@dataclass(frozen=True) -class SpanKey: - group: str - timestamp: str - span_id: str - - -class Summary(TypedDict): - min: float - max: float - sum: float - count: int - - -class AbstractSamplesListExecutor(ABC): - # picking 30 samples gives a decent chance to surface a few samples from the higher percentiles - num_samples = 30 - - sortable_columns: set[str] - - def __init__( - self, - *, - mri: str, - snuba_params: SnubaParams, - referrer: Referrer, - fields: list[str], - operation: str | None = None, - query: str | None = None, - min: float | None = None, - max: float | None = None, - sort: str | None = None, - rollup: int | None = None, - ): - self.mri = mri - self.snuba_params = snuba_params - self.fields = fields - self.operation = 
operation - self.query = query - self.min = min - self.max = max - self.sort = sort - self.rollup = rollup - self.referrer = referrer - - @classmethod - @abstractmethod - def supports_mri(cls, mri: str) -> bool: - raise NotImplementedError - - @classmethod - def supports_sort(cls, column: str) -> bool: - return column in cls.sortable_columns - - @abstractmethod - def get_matching_traces(self, limit: int) -> tuple[list[str], list[datetime]]: - raise NotImplementedError - - @abstractmethod - def get_matching_spans_from_traces( - self, - trace_ids: list[str], - max_spans_per_trace: int, - ) -> list[SpanKey]: - raise NotImplementedError - - def get_matching_spans(self, offset, limit): - assert self.rollup is not None - - if self.sort is None: - execute_fn = self.get_matching_spans_unsorted - else: - execute_fn = self.get_matching_spans_sorted - return execute_fn(offset, limit) - - @abstractmethod - def get_matching_spans_sorted(self, offset, limit): - raise NotImplementedError - - @abstractmethod - def get_matching_spans_unsorted(self, offset, limit): - raise NotImplementedError - - def get_spans_by_key( - self, - span_keys: list[SpanKey], - additional_fields: list[str] | None = None, - ): - if not span_keys: - return {"data": []} - - fields = self.fields[:] - if additional_fields is not None: - fields.extend(additional_fields) - - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - selected_columns=fields, - limit=len(span_keys), - offset=0, - ) - - # This are the additional conditions to better take advantage of the ORDER BY - # on the spans table. This creates a list of conditions to be `OR`ed together - # that can will be used by ClickHouse to narrow down the granules. - # - # The span ids are not in this condition because they are more effective when - # specified within the `PREWHERE` clause. So, it's in a separate condition. - conditions = [ - And( - [ - Condition(builder.column("span.group"), Op.EQ, key.group), - Condition( - builder.column("timestamp"), Op.EQ, datetime.fromisoformat(key.timestamp) - ), - ] - ) - for key in span_keys - ] - - if len(conditions) == 1: - order_by_condition = conditions[0] - else: - order_by_condition = Or(conditions) - - # Using `IN` combined with putting the list in a SnQL "tuple" triggers an optimizer - # in snuba where it - # 1. moves the condition into the `PREWHERE` clause - # 2. maps the ids to the underlying UInt64 and uses the bloom filter index - # - # NOTE: the "tuple" here is critical as without it, snuba will not correctly - # rewrite the condition and keep it in the WHERE and as a hexidecimal. 
- span_id_condition = Condition( - builder.column("id"), - Op.IN, - Function("tuple", [key.span_id for key in span_keys]), - ) - - builder.add_conditions([order_by_condition, span_id_condition]) - - query_results = builder.run_query(self.referrer.value) - return builder.process_results(query_results) - - -class SegmentsSamplesListExecutor(AbstractSamplesListExecutor): - sortable_columns = {"timestamp", "span.duration", "summary"} - - SORT_MAPPING = { - "span.duration": "transaction.duration", - "timestamp": "timestamp", - } - - @classmethod - @abstractmethod - def mri_to_column(cls, mri: str) -> str | None: - raise NotImplementedError - - @classmethod - def convert_sort(cls, sort: str, mri: str) -> tuple[Literal["", "-"], str] | None: - direction: Literal["", "-"] = "" - - if sort.startswith("-"): - direction = "-" - sort = sort[1:] - - if sort in cls.SORT_MAPPING: - return direction, cls.SORT_MAPPING[sort] - - if sort == "summary": - column = cls.mri_to_column(mri) - if column is not None: - return direction, column - - return None - - @classmethod - def supports_mri(cls, mri: str) -> bool: - return cls.mri_to_column(mri) is not None - - def get_matching_traces(self, limit: int) -> tuple[list[str], list[datetime]]: - column = self.mri_to_column(self.mri) - assert column - - builder = SpansIndexedQueryBuilder( - Dataset.Transactions, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["trace", "timestamp"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. - orderby=None, - limit=limit, - limitby=("trace", 1), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - trace_ids = [row["trace"] for row in results["data"]] - timestamps = [datetime.fromisoformat(row["timestamp"]) for row in results["data"]] - return trace_ids, timestamps - - def get_matching_spans_from_traces( - self, - trace_ids: list[str], - max_spans_per_trace: int, - ) -> list[SpanKey]: - column = self.mri_to_column(self.mri) - assert column is not None - - builder = SpansIndexedQueryBuilder( - Dataset.Transactions, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["timestamp", "span_id"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. 
- orderby=None, - limit=len(trace_ids) * max_spans_per_trace, - limitby=("trace", max_spans_per_trace), - ) - - trace_id_condition = Condition(Column("trace_id"), Op.IN, trace_ids) - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - builder.add_conditions( - [ - trace_id_condition, - *additional_conditions, - *min_max_conditions, - ] - ) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - return [ - SpanKey( - group="00", # all segments have a group of `00` currently - timestamp=row["timestamp"], - span_id=row["span_id"], - ) - for row in results["data"] - ] - - def _get_spans( - self, - span_keys: list[SpanKey], - summaries: dict[str, Summary], - ): - result = self.get_spans_by_key( - span_keys, - # force `id` to be one of the fields - additional_fields=["id"], - ) - - # if there is a sort, we want to preserve the result in the same - # order as the span keys which we can do by checking the span ids - if self.sort: - order = {key.span_id: i for i, key in enumerate(span_keys)} - result["data"].sort(key=lambda row: order[row["id"]]) - - # if `id` wasn't initially there, we should remove it - should_pop_id = "id" not in self.fields - - for row in result["data"]: - span_id = row.pop("id") if should_pop_id else row["id"] - row["summary"] = summaries[span_id] - - return result - - def get_matching_spans_sorted(self, offset, limit): - span_keys, summaries = self.get_sorted_span_keys(offset, limit) - return self._get_spans(span_keys, summaries) - - def get_sorted_span_keys( - self, - offset: int, - limit: int, - ) -> tuple[list[SpanKey], dict[str, Summary]]: - """ - When getting examples for a segment, it's actually much faster to read it - from the transactions dataset compared to the spans dataset as it's a much - smaller dataset. - - One consideration here is that there is an one to one mapping between a - transaction to a segment today. If this relationship changes, we'll have to - rethink how to fetch segment samples a little as the transactions dataset - may not contain all the necessary data. - """ - assert self.sort - sort = self.convert_sort(self.sort, self.mri) - assert sort is not None - direction, sort_column = sort - - mri_column = self.mri_to_column(self.mri) - assert mri_column is not None - - fields = ["span_id", "timestamp"] - if sort_column not in fields: - fields.append(sort_column) - if mri_column not in fields: - fields.append(mri_column) - - builder = DiscoverQueryBuilder( - Dataset.Transactions, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=fields, - orderby=f"{direction}{sort_column}", - limit=limit, - offset=offset, - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.column(mri_column)) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - span_keys = [ - SpanKey( - group="00", # all segments have a group of `00` currently - timestamp=row["timestamp"], - span_id=row["span_id"], - ) - for row in result["data"] - ] - - """ - Because transaction level measurements currently do not get - propagated to the spans dataset, we have to query them here, - generate the summary for it here, and propagate it to the - results of the next stage. 
- - Once we start writing transaction level measurements to the - indexed spans dataset, we can stop doing this and read the - value directly from the indexed spans dataset. - - For simplicity, all transaction based metrics use this approach. - """ - summaries = { - cast(str, row["span_id"]): cast( - Summary, - { - "min": row[mri_column], - "max": row[mri_column], - "sum": row[mri_column], - "count": 1, - }, - ) - for row in result["data"] - } - - return span_keys, summaries - - def get_matching_spans_unsorted(self, offset, limit): - span_keys, summaries = self.get_unsorted_span_keys(offset, limit) - return self._get_spans(span_keys, summaries) - - def get_unsorted_span_keys( - self, - offset: int, - limit: int, - ) -> tuple[list[SpanKey], dict[str, Summary]]: - """ - When getting examples for a segment, it's actually much faster to read it - from the transactions dataset compared to the spans dataset as it's a much - smaller dataset. - - One consideration here is that there is an one to one mapping between a - transaction to a segment today. If this relationship changes, we'll have to - rethink how to fetch segment samples a little as the transactions dataset - may not contain all the necessary data. - """ - column = self.mri_to_column(self.mri) - assert column is not None - - builder = DiscoverQueryBuilder( - Dataset.Transactions, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=[ - f"rounded_timestamp({self.rollup})", - f"examples({column}, {self.num_samples}) AS examples", - ], - limit=limit, - offset=offset, - sample_rate=options.get("metrics.sample-list.sample-rate"), - config=QueryBuilderConfig(functions_acl=["rounded_timestamp", "examples"]), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.column(column)) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - metric_key = lambda example: example[2] # sort by metric - for row in result["data"]: - row["examples"] = pick_samples(row["examples"], metric_key=metric_key) - - span_keys = [ - SpanKey( - group="00", # all segments have a group of `00` currently - timestamp=example[0], - span_id=example[1], - ) - for row in result["data"] - for example in row["examples"] - ][:limit] - - """ - Because transaction level measurements currently do not get - propagated to the spans dataset, we have to query them here, - generate the summary for it here, and propagate it to the - results of the next stage. - - Once we start writing transaction level measurements to the - indexed spans dataset, we can stop doing this and read the - value directly from the indexed spans dataset. - - For simplicity, all transaction based metrics use this approach. 
- """ - summaries = { - cast(str, example[1]): cast( - Summary, - { - "min": example[2], - "max": example[2], - "sum": example[2], - "count": 1, - }, - ) - for row in result["data"] - for example in row["examples"] - } - - return span_keys, summaries - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - raise NotImplementedError - - def get_min_max_conditions(self, column: Column) -> list[Condition]: - conditions = [] - - if self.min is not None: - conditions.append(Condition(column, Op.GTE, self.min)) - if self.max is not None: - conditions.append(Condition(column, Op.LTE, self.max)) - - return conditions - - -class TransactionDurationSamplesListExecutor(SegmentsSamplesListExecutor): - @classmethod - def mri_to_column(cls, mri: str) -> str | None: - if mri == TransactionMRI.DURATION.value: - # Because we read this from the transactions dataset, - # we use the name for the transactions dataset instead. - return "transaction.duration" - return None - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - return [] - - -class TransactionMeasurementsSamplesListExecutor(SegmentsSamplesListExecutor): - @classmethod - def mri_to_column(cls, mri) -> str | None: - name = cls.mri_to_measurement_name(mri) - if name is not None: - return f"measurements.{name}" - - return None - - @classmethod - def mri_to_measurement_name(cls, mri) -> str | None: - parsed_mri = parse_mri(mri) - if parsed_mri is not None and is_measurement(parsed_mri): - return parsed_mri.name[len("measurements:") :] - return None - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - name = self.mri_to_measurement_name(self.mri) - return [Condition(Function("has", [Column("measurements.key"), name]), Op.EQ, 1)] - - -class SpansSamplesListExecutor(AbstractSamplesListExecutor): - sortable_columns = {"timestamp", "span.duration", "span.self_time", "summary"} - - @classmethod - @abstractmethod - def mri_to_column(cls, mri) -> str | None: - raise NotImplementedError - - @classmethod - def convert_sort(cls, sort: str, mri: str) -> tuple[Literal["", "-"], str] | None: - direction: Literal["", "-"] = "" - - if sort.startswith("-"): - direction = "-" - sort = sort[1:] - - if sort == "summary": - column = cls.mri_to_column(mri) - if column is not None: - return direction, column - - if sort in cls.sortable_columns: - return direction, sort - - return None - - @classmethod - def supports_mri(cls, mri: str) -> bool: - return cls.mri_to_column(mri) is not None - - def get_matching_traces(self, limit: int) -> tuple[list[str], list[datetime]]: - column = self.mri_to_column(self.mri) - assert column is not None - - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["trace", "timestamp"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. 
- orderby=None, - limit=limit, - limitby=("trace", 1), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - trace_ids = [row["trace"] for row in results["data"]] - timestamps = [datetime.fromisoformat(row["timestamp"]) for row in results["data"]] - return trace_ids, timestamps - - def get_matching_spans_from_traces( - self, - trace_ids: list[str], - max_spans_per_trace: int, - ) -> list[SpanKey]: - column = self.mri_to_column(self.mri) - assert column is not None - - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["span.group", "timestamp", "id"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. - orderby=None, - limit=len(trace_ids) * max_spans_per_trace, - limitby=("trace", max_spans_per_trace), - ) - - trace_id_condition = Condition(Column("trace_id"), Op.IN, trace_ids) - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - builder.add_conditions( - [ - trace_id_condition, - *additional_conditions, - *min_max_conditions, - ] - ) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - return [ - SpanKey( - group=row["span.group"], - timestamp=row["timestamp"], - span_id=row["id"], - ) - for row in results["data"] - ] - - def get_matching_spans_sorted(self, offset, limit): - """ - Since we're already querying the spans table sorted on some column, - there's no reason to split this into 2 queries. We can go ahead and - just do it all in a single query. 
- """ - assert self.sort - sort = self.convert_sort(self.sort, self.mri) - assert sort is not None - direction, sort_column = sort - - fields = self.fields[:] - if sort_column not in fields: - fields.append(sort_column) - - column = self.mri_to_column(self.mri) - assert column is not None - if column not in fields: - fields.append(column) - - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - selected_columns=fields, - orderby=f"{direction}{sort_column}", - limit=limit, - offset=0, - ) - - additional_conditions = self.get_additional_conditions(builder) - - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - should_pop_column = column not in self.fields - - for row in result["data"]: - value = row.pop(column) if should_pop_column else row[column] - row["summary"] = { - "min": value, - "max": value, - "sum": value, - "count": 1, - } - - return result - - def get_matching_spans_unsorted(self, offset, limit): - span_keys = self.get_unsorted_span_keys(offset, limit) - - column = self.mri_to_column(self.mri) - assert column is not None # should always resolve to a column here - - result = self.get_spans_by_key(span_keys, additional_fields=[column]) - - should_pop_column = column not in self.fields - - for row in result["data"]: - value = row.pop(column) if should_pop_column else row[column] - row["summary"] = { - "min": value, - "max": value, - "sum": value, - "count": 1, - } - - return result - - def get_unsorted_span_keys(self, offset: int, limit: int) -> list[SpanKey]: - column = self.mri_to_column(self.mri) - - for dataset_segmentation_condition_fn in self.dataset_segmentation_conditions(): - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=[ - f"rounded_timestamp({self.rollup})", - f"examples({column}, {self.num_samples}) AS examples", - ], - limit=limit, - offset=offset, - sample_rate=options.get("metrics.sample-list.sample-rate"), - config=QueryBuilderConfig(functions_acl=["rounded_timestamp", "examples"]), - ) - - segmentation_conditions = dataset_segmentation_condition_fn(builder) - - additional_conditions = self.get_additional_conditions(builder) - - assert column is not None - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - - builder.add_conditions( - [ - *segmentation_conditions, - *additional_conditions, - *min_max_conditions, - ] - ) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - if not result["data"]: - continue - - metric_key = lambda example: example[3] # sort by metric - for row in result["data"]: - row["examples"] = pick_samples(row["examples"], metric_key=metric_key) - - return [ - SpanKey( - group=example[0], - timestamp=example[1], - span_id=example[2], - ) - for row in result["data"] - for example in row["examples"] - ][:limit] - - return [] - - @abstractmethod - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - raise NotImplementedError - - def dataset_segmentation_conditions( - self, - ) -> list[Callable[[BaseQueryBuilder], list[Condition]]]: - return [lambda builder: []] - - def get_min_max_conditions(self, column: SelectType) -> list[Condition]: - conditions = [] - - if 
self.min is not None: - conditions.append(Condition(column, Op.GTE, self.min)) - if self.max is not None: - conditions.append(Condition(column, Op.LTE, self.max)) - - return conditions - - -class SpansTimingsSamplesListExecutor(SpansSamplesListExecutor): - MRI_MAPPING = { - SpanMRI.DURATION.value: "span.duration", - SpanMRI.SELF_TIME.value: "span.self_time", - } - - @classmethod - def mri_to_column(cls, mri) -> str | None: - return cls.MRI_MAPPING.get(mri) - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - return [] - - def dataset_segmentation_conditions( - self, - ) -> list[Callable[[BaseQueryBuilder], list[Condition]]]: - return [ - # This grouping makes the assumption that spans are divided into 2 groups right now. - # Those that are classified with a non zero group, and those that are unclassified - # with a zero group. - # - # In the future, if all span groups are classified, this segmentation should change - # to reflect that. - lambda builder: [ - # The `00` group is used for spans not used within the - # new starfish experience. It's effectively the group - # for other. It is a massive group, so we've chosen - # to exclude it here. - Condition(builder.column("span.group"), Op.NEQ, "00"), - ], - lambda builder: [ - # If the previous query contained no results, we'll - # have to search the `00` group which is slower but - # unfortunately necessary here. - Condition(builder.column("span.group"), Op.EQ, "00"), - ], - ] - - -class SpansMeasurementsSamplesListExecutor(SpansSamplesListExecutor): - # These are some hard coded metrics in the spans name space that can be - # queried in the measurements of the indexed spans dataset - MRI_MAPPING = { - SpanMRI.RESPONSE_CONTENT_LENGTH.value: "http.response_content_length", - SpanMRI.DECODED_RESPONSE_CONTENT_LENGTH.value: "http.decoded_response_content_length", - SpanMRI.RESPONSE_TRANSFER_SIZE.value: "http.response_transfer_size", - SpanMRI.AI_TOTAL_TOKENS.value: "ai_total_tokens_used", - SpanMRI.AI_TOTAL_COST.value: "ai_total_cost", - SpanMRI.CACHE_ITEM_SIZE.value: "cache.item_size", - SpanMRI.MOBILE_SLOW_FRAMES.value: "frames.slow", - SpanMRI.MOBILE_FROZEN_FRAMES.value: "frames.frozen", - SpanMRI.MOBILE_TOTAL_FRAMES.value: "frames.total", - SpanMRI.MOBILE_FRAMES_DELAY.value: "frames.delay", - SpanMRI.MESSAGE_RECEIVE_LATENCY.value: "messaging.message.receive.latency", - } - - @classmethod - def mri_to_column(cls, mri) -> str | None: - name = cls.mri_measurement_name(mri) - if name is not None: - return f"measurements.{name}" - - return None - - @classmethod - def mri_measurement_name(cls, mri) -> str | None: - if name := cls.MRI_MAPPING.get(mri): - return name - - # some web vitals exist on spans - parsed_mri = parse_mri(mri) - if ( - parsed_mri is not None - and parsed_mri.namespace == "spans" - and parsed_mri.name.startswith("webvital.") - ): - return parsed_mri.name[len("webvital:") :] - - return None - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - name = self.mri_measurement_name(self.mri) - return [Condition(Function("has", [Column("measurements.key"), name]), Op.EQ, 1)] - - -class CustomSamplesListExecutor(AbstractSamplesListExecutor): - sortable_columns = {"timestamp", "span.duration", "summary"} - - SORT_MAPPING = { - "span.duration": "span.duration", - "timestamp": "timestamp", - } - - OPERATION_COLUMN_MAPPING = { - "min": "min_metric", - "max": "max_metric", - "count": "count_metric", - } - - # refer to the definition of `examples()` in the metrics summary 
dataset - EXAMPLES_SORT_KEY = { - "min": 3, - "max": 4, - "count": 6, - } - - @classmethod - def convert_sort(cls, sort: str, operation: str | None) -> tuple[Literal["", "-"], str] | None: - direction: Literal["", "-"] = "" - - if sort.startswith("-"): - direction = "-" - sort = sort[1:] - - if sort in cls.SORT_MAPPING: - return direction, cls.SORT_MAPPING[sort] - - if sort == "summary": - return direction, cls.OPERATION_COLUMN_MAPPING.get(operation or "", "avg_metric") - - return None - - @classmethod - def supports_mri(cls, mri: str) -> bool: - parsed_mri = parse_mri(mri) - if parsed_mri is not None and is_custom_metric(parsed_mri): - return True - return False - - def get_matching_traces(self, limit: int) -> tuple[list[str], list[datetime]]: - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["trace", "timestamp"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. - orderby=None, - limit=limit, - limitby=("trace", 1), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - trace_ids = [row["trace"] for row in results["data"]] - timestamps = [datetime.fromisoformat(row["timestamp"]) for row in results["data"]] - return trace_ids, timestamps - - def get_matching_spans_from_traces( - self, - trace_ids: list[str], - max_spans_per_trace: int, - ) -> list[SpanKey]: - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["span.group", "timestamp", "id"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. 
- orderby=None, - limit=len(trace_ids) * max_spans_per_trace, - limitby=("trace", max_spans_per_trace), - ) - - trace_id_condition = Condition(Column("trace_id"), Op.IN, trace_ids) - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder) - builder.add_conditions( - [ - trace_id_condition, - *additional_conditions, - *min_max_conditions, - ] - ) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - return [ - SpanKey( - group=row["span.group"], - timestamp=row["timestamp"], - span_id=row["id"], - ) - for row in results["data"] - ] - - def _get_spans( - self, - span_keys: list[SpanKey], - summaries: dict[str, Summary], - ): - result = self.get_spans_by_key(span_keys, additional_fields=["id"]) - - # if there is a sort, we want to preserve the result in the same - # order as the span keys which we can do by checking the span ids - if self.sort: - order = {key.span_id: i for i, key in enumerate(span_keys)} - result["data"].sort(key=lambda row: order[row["id"]]) - - should_pop_id = "id" not in self.fields - - for row in result["data"]: - span_id = row.pop("id") if should_pop_id else row["id"] - row["summary"] = summaries[span_id] - - return result - - def get_matching_spans_sorted(self, offset, limit): - span_keys, summaries = self.get_sorted_span_keys(offset, limit) - return self._get_spans(span_keys, summaries) - - def get_sorted_span_keys( - self, - offset: int, - limit: int, - ) -> tuple[list[SpanKey], dict[str, Summary]]: - assert self.sort - sort = self.convert_sort(self.sort, self.operation) - assert sort is not None - direction, sort_column = sort - - fields = [ - "id", - "timestamp", - "span.group", - "min_metric", - "max_metric", - "sum_metric", - "count_metric", - ] - if sort_column not in fields: - fields.append(sort_column) - - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=fields, - orderby=f"{direction}{sort_column}", - limit=limit, - offset=offset, - # This table has a poor SAMPLE BY so DO NOT use it for now - # sample_rate=options.get("metrics.sample-list.sample-rate"), - config=QueryBuilderConfig(functions_acl=["rounded_timestamp", "example"]), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - span_keys = [ - SpanKey( - group=row["span.group"], - timestamp=row["timestamp"], - span_id=row["id"], - ) - for row in result["data"] - ] - - """ - The indexed spans dataset does not contain any metric related - data. To propagate these values, we read it from the metric - summaries table, and copy them to the results in the next step. 
- """ - summaries = { - cast(str, row["id"]): cast( - Summary, - { - "min": row["min_metric"], - "max": row["max_metric"], - "sum": row["sum_metric"], - "count": row["count_metric"], - }, - ) - for row in result["data"] - } - - return span_keys, summaries - - def get_matching_spans_unsorted(self, offset, limit): - span_keys, summaries = self.get_unsorted_span_keys(offset, limit) - return self._get_spans(span_keys, summaries) - - def get_unsorted_span_keys( - self, - offset: int, - limit: int, - ) -> tuple[list[SpanKey], dict[str, Summary]]: - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=[ - f"rounded_timestamp({self.rollup})", - f"examples({self.num_samples}) AS examples", - ], - limit=limit, - offset=offset, - # This table has a poor SAMPLE BY so DO NOT use it for now - # sample_rate=options.get("metrics.sample-list.sample-rate"), - config=QueryBuilderConfig(functions_acl=["rounded_timestamp", "examples"]), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - # 7 here refers to the avg value which is the default - # if the operaton doesn't have metric it should sort by - index = self.EXAMPLES_SORT_KEY.get(self.operation or "", 7) # sort by metric - metric_key = lambda example: example[index] - - for row in result["data"]: - row["examples"] = pick_samples(row["examples"], metric_key=metric_key) - - span_keys = [ - SpanKey( - group=example[0], - timestamp=example[1], - span_id=example[2], - ) - for row in result["data"] - for example in row["examples"] - ][:limit] - - """ - The indexed spans dataset does not contain any metric related - data. To propagate these values, we read it from the metric - summaries table, and copy them to the results in the next step. 
- """ - summaries = { - cast(str, example[2]): cast( - Summary, - { - "min": example[3], - "max": example[4], - "sum": example[5], - "count": example[6], - }, - ) - for row in result["data"] - for example in row["examples"] - } - - return span_keys, summaries - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - return [ - builder.convert_search_filter_to_condition( - SearchFilter(SearchKey("metric"), "=", SearchValue(self.mri)), - ) - ] - - def get_min_max_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - conditions = [] - - column = builder.resolve_column( - self.OPERATION_COLUMN_MAPPING.get(self.operation or "", "avg_metric") - ) - - if self.min is not None: - conditions.append(Condition(column, Op.GTE, self.min)) - if self.max is not None: - conditions.append(Condition(column, Op.LTE, self.max)) - - return conditions - - -SAMPLE_LIST_EXECUTORS = [ - TransactionDurationSamplesListExecutor, - TransactionMeasurementsSamplesListExecutor, - SpansTimingsSamplesListExecutor, - SpansMeasurementsSamplesListExecutor, - CustomSamplesListExecutor, -] - - -def get_sample_list_executor_cls(mri) -> type[AbstractSamplesListExecutor] | None: - for executor_cls in SAMPLE_LIST_EXECUTORS: - if executor_cls.supports_mri(mri): - return executor_cls - return None - - -def pick_samples( - samples: list[Any], - metric_key: Callable[[Any], float], -) -> list[Any]: - # if there are at most 3 samples, there's no picking needed - # as we want to return at most 3 from the list provided - if len(samples) <= 3: - return samples - - samples.sort(key=metric_key) - - keys = [metric_key(sample) for sample in samples] - - # first element is the one near the average - # but must not be the first or last element - avg_m = sum(keys) / len(keys) - idx_m = bisect(keys, avg_m) - # ensure there is at least 1 element on both sides - # of the middle element we just picked - # i.e. should not pick index 0 and len(keys) - 1 - idx_m = clip(idx_m, 1, len(keys) - 2) - - # second element is near the average of first - # split, but must not be the split element - avg_l = sum(keys[:idx_m]) / idx_m - idx_l = bisect(keys, avg_l, hi=idx_m - 1) - idx_l += 1 # push it closer to the middle - # ensure this is not the same as middle element - idx_l = clip(idx_l, 0, idx_m - 1) - - # third element is near the average of second - # split, but must not be the split element - avg_r = sum(keys[idx_m + 1 :]) / (len(keys) - idx_m - 1) - idx_r = bisect(keys, avg_r, lo=idx_m + 1) - idx_r -= 1 # push it closer to the middle - # ensure this is not the same as middle element - idx_r = clip(idx_r, idx_m + 1, len(keys) - 1) - - return [samples[idx_m], samples[idx_l], samples[idx_r]] diff --git a/src/sentry/snuba/dataset.py b/src/sentry/snuba/dataset.py index 799029ea0e7b19..e2ab7d47fd87f5 100644 --- a/src/sentry/snuba/dataset.py +++ b/src/sentry/snuba/dataset.py @@ -54,12 +54,6 @@ class Dataset(Enum): EventsAnalyticsPlatform = "events_analytics_platform" - MetricsSummaries = "metrics_summaries" - """ - Summaries of all metrics within a span. Used to correlate indexed - spans to a metric. 
- """ - @unique class EntityKey(Enum): @@ -79,7 +73,6 @@ class EntityKey(Enum): GenericOrgMetricsCounters = "generic_org_metrics_counters" IssuePlatform = "search_issues" Functions = "functions" - MetricsSummaries = "metrics_summaries" @unique diff --git a/src/sentry/snuba/entity_subscription.py b/src/sentry/snuba/entity_subscription.py index e4731e6baf73ef..08c7319179d86f 100644 --- a/src/sentry/snuba/entity_subscription.py +++ b/src/sentry/snuba/entity_subscription.py @@ -213,6 +213,7 @@ def build_query_builder( skip_time_conditions=True, parser_config_overrides=parser_config_overrides, skip_field_validation_for_entity_subscription_deletion=skip_field_validation_for_entity_subscription_deletion, + use_entity_prefix_for_fields=True, ), ) @@ -671,6 +672,9 @@ def get_entity_key_from_snuba_query( project_id: int, skip_field_validation_for_entity_subscription_deletion: bool = False, ) -> EntityKey: + query_dataset = Dataset(snuba_query.dataset) + if query_dataset == Dataset.EventsAnalyticsPlatform: + return EntityKey.EAPSpans entity_subscription = get_entity_subscription_from_snuba_query( snuba_query, organization_id, diff --git a/src/sentry/snuba/events.py b/src/sentry/snuba/events.py index 3b201dc4dcf1d8..24718ed5a41150 100644 --- a/src/sentry/snuba/events.py +++ b/src/sentry/snuba/events.py @@ -601,20 +601,20 @@ class Columns(Enum): alias="app.in_foreground", ) OS_DISTRIBUTION_NAME = Column( - group_name="events.contexts[os.distribution.name]", - event_name="contexts[os.distribution.name]", - transaction_name="contexts[os.distribution.name]", - discover_name="contexts[os.distribution.name]", - issue_platform_name="contexts[os.distribution.name]", - alias="os.distribution.name", + group_name="events.contexts[os.distribution_name]", + event_name="contexts[os.distribution_name]", + transaction_name="contexts[os.distribution_name]", + discover_name="contexts[os.distribution_name]", + issue_platform_name="contexts[os.distribution_name]", + alias="os.distribution_name", ) OS_DISTRIBUTION_VERSION = Column( - group_name="events.contexts[os.distribution.version]", - event_name="contexts[os.distribution.version]", - transaction_name="contexts[os.distribution.version]", - discover_name="contexts[os.distribution.version]", - issue_platform_name="contexts[os.distribution.version]", - alias="os.distribution.version", + group_name="events.contexts[os.distribution_version]", + event_name="contexts[os.distribution_version]", + transaction_name="contexts[os.distribution_version]", + discover_name="contexts[os.distribution_version]", + issue_platform_name="contexts[os.distribution_version]", + alias="os.distribution_version", ) # Transactions specific columns TRANSACTION_OP = Column( diff --git a/src/sentry/snuba/metrics_summaries.py b/src/sentry/snuba/metrics_summaries.py deleted file mode 100644 index 3abff797de7237..00000000000000 --- a/src/sentry/snuba/metrics_summaries.py +++ /dev/null @@ -1,60 +0,0 @@ -from sentry.search.events.builder.metrics_summaries import MetricsSummariesQueryBuilder -from sentry.search.events.types import QueryBuilderConfig -from sentry.snuba.dataset import Dataset -from sentry.snuba.metrics.extraction import MetricSpecType -from sentry.snuba.query_sources import QuerySource - - -def query( - selected_columns, - query, - params, - snuba_params=None, - equations=None, - orderby=None, - offset=None, - limit=50, - referrer=None, - auto_fields=False, - auto_aggregations=False, - include_equation_fields=False, - allow_metric_aggregates=False, - use_aggregate_conditions=False, - 
conditions=None, - functions_acl=None, - transform_alias_to_input_format=False, - sample=None, - has_metrics=False, - use_metrics_layer=False, - skip_tag_resolution=False, - extra_columns=None, - on_demand_metrics_enabled=False, - on_demand_metrics_type: MetricSpecType | None = None, - fallback_to_transactions=False, - query_source: QuerySource | None = None, -): - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params, - snuba_params=snuba_params, - query=query, - selected_columns=selected_columns, - equations=equations, - orderby=orderby, - limit=limit, - offset=offset, - sample_rate=sample, - config=QueryBuilderConfig( - has_metrics=has_metrics, - transform_alias_to_input_format=transform_alias_to_input_format, - skip_tag_resolution=skip_tag_resolution, - equation_config={"auto_add": include_equation_fields}, - auto_fields=auto_fields, - auto_aggregations=auto_aggregations, - use_aggregate_conditions=use_aggregate_conditions, - functions_acl=functions_acl, - ), - ) - - result = builder.process_results(builder.run_query(referrer, query_source=query_source)) - return result diff --git a/src/sentry/snuba/query_subscriptions/constants.py b/src/sentry/snuba/query_subscriptions/constants.py index b04cf11f619fc6..f692db46d6d9c5 100644 --- a/src/sentry/snuba/query_subscriptions/constants.py +++ b/src/sentry/snuba/query_subscriptions/constants.py @@ -7,6 +7,7 @@ Dataset.Transactions: "transactions-subscription-results", Dataset.PerformanceMetrics: "generic-metrics-subscription-results", Dataset.Metrics: "metrics-subscription-results", + Dataset.EventsAnalyticsPlatform: "eap-spans-subscription-results", } topic_to_dataset = { diff --git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py index bd9141627d5a99..b19de6e0795b07 100644 --- a/src/sentry/snuba/referrer.py +++ b/src/sentry/snuba/referrer.py @@ -165,12 +165,8 @@ class Referrer(Enum): API_ORGANIZATION_METRICS_DATA = "api.organization.metrics-data" API_ORGANIZATION_METRICS_ESTIMATION_STATS = "api.organization-metrics-estimation-stats" API_ORGANIZATION_METRICS_METADATA_FETCH_SPANS = "api.organization.metrics-metadata.fetch-spans" - API_ORGANIZATION_METRICS_METADATA_FETCH_METRICS_SUMMARIES = ( - "api.organization.metrics-metadata.fetch-metrics-summaries" - ) API_ORGANIZATION_METRICS_QUERY = "api.organization.metrics-query" API_ORGANIZATION_METRICS_EAP_QUERY = "api.organization.metrics-eap-query" - API_ORGANIZATION_METRICS_SAMPLES = "api.organization.metrics-samples" API_ORGANIZATION_ISSUE_REPLAY_COUNT = "api.organization-issue-replay-count" API_ORGANIZATION_SDK_UPDATES = "api.organization-sdk-updates" API_ORGANIZATION_SPANS_HISTOGRAM_MIN_MAX = "api.organization-spans-histogram-min-max" @@ -410,7 +406,6 @@ class Referrer(Enum): API_PROFILING_PROFILE_SUMMARY_TOTALS = "api.profiling.profile-summary-totals" API_PROFILING_PROFILE_SUMMARY_TABLE = "api.profiling.profile-summary-table" API_PROFILING_PROFILE_SUMMARY_FUNCTIONS_TABLE = "api.profiling.profile-summary-functions-table" - API_PROFILING_PROFILE_FLAMEGRAPH = "api.profiling.profile-flamegraph" API_PROFILING_PROFILE_FLAMEGRAPH_TRANSACTION_CANDIDATES = ( "api.profiling.profile-flamegraph-transaction-candidates" ) diff --git a/src/sentry/snuba/spans_rpc.py b/src/sentry/snuba/spans_rpc.py index 232f3dc554f150..bd87a9a9311f3e 100644 --- a/src/sentry/snuba/spans_rpc.py +++ b/src/sentry/snuba/spans_rpc.py @@ -1,16 +1,19 @@ import logging from typing import Any -from sentry_protos.snuba.v1.endpoint_time_series_pb2 import TimeSeriesRequest, TimeSeriesResponse +from 
sentry_protos.snuba.v1.endpoint_time_series_pb2 import TimeSeries, TimeSeriesRequest from sentry_protos.snuba.v1.endpoint_trace_item_table_pb2 import Column, TraceItemTableRequest from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeAggregation, AttributeKey +from sentry_protos.snuba.v1.trace_item_filter_pb2 import AndFilter, OrFilter, TraceItemFilter +from sentry.api.event_search import SearchFilter, SearchKey, SearchValue from sentry.search.eap.columns import ResolvedColumn, ResolvedFunction from sentry.search.eap.constants import FLOAT, INT, STRING from sentry.search.eap.spans import SearchResolver from sentry.search.eap.types import SearchResolverConfig -from sentry.search.events.fields import get_function_alias +from sentry.search.events.fields import get_function_alias, is_function from sentry.search.events.types import EventsMeta, EventsResponse, SnubaData, SnubaParams +from sentry.snuba.discover import OTHER_KEY, create_result_key from sentry.utils import snuba_rpc from sentry.utils.snuba import SnubaTSResult @@ -33,24 +36,40 @@ def run_table_query( limit: int, referrer: str, config: SearchResolverConfig, + search_resolver: SearchResolver | None = None, ) -> EventsResponse: """Make the query""" - resolver = SearchResolver(params=params, config=config) + resolver = ( + SearchResolver(params=params, config=config) if search_resolver is None else search_resolver + ) meta = resolver.resolve_meta(referrer=referrer) query = resolver.resolve_query(query_string) columns, contexts = resolver.resolve_columns(selected_columns) + # We allow orderby function_aliases if they're a selected_column + # eg. can orderby sum_span_self_time, assuming sum(span.self_time) is selected + orderby_aliases = { + get_function_alias(column_name): resolved_column + for resolved_column, column_name in zip(columns, selected_columns) + } # Orderby is only applicable to TraceItemTableRequest - resolved_orderby = ( - [ + resolved_orderby = [] + orderby_columns = orderby if orderby is not None else [] + for orderby_column in orderby_columns: + stripped_orderby = orderby_column.lstrip("-") + if stripped_orderby in orderby_aliases: + resolved_column = orderby_aliases[stripped_orderby] + else: + resolved_column = resolver.resolve_column(stripped_orderby)[0] + resolved_orderby.append( TraceItemTableRequest.OrderBy( - column=categorize_column(resolver.resolve_column(orderby_column.lstrip("-"))[0]), + column=categorize_column(resolved_column), descending=orderby_column.startswith("-"), ) - for orderby_column in orderby - ] - if orderby - else [] + ) + has_aggregations = any( + col for col in columns if isinstance(col.proto_definition, AttributeAggregation) ) + labeled_columns = [categorize_column(col) for col in columns] """Run the query""" @@ -58,12 +77,17 @@ def run_table_query( meta=meta, filter=query, columns=labeled_columns, - group_by=[ - col.proto_definition - for col in columns - if isinstance(col.proto_definition, AttributeKey) - ], + group_by=( + [ + col.proto_definition + for col in columns + if isinstance(col.proto_definition, AttributeKey) + ] + if has_aggregations + else [] + ), order_by=resolved_orderby, + limit=limit, virtual_column_contexts=[context for context in contexts if context is not None], ) rpc_response = snuba_rpc.table_rpc(rpc_request) @@ -109,12 +133,18 @@ def get_timeseries_query( referrer: str, config: SearchResolverConfig, granularity_secs: int, + extra_conditions: TraceItemFilter | None = None, ) -> TimeSeriesRequest: resolver = SearchResolver(params=params, config=config) 
meta = resolver.resolve_meta(referrer=referrer) query = resolver.resolve_query(query_string) (aggregations, _) = resolver.resolve_aggregates(y_axes) (groupbys, _) = resolver.resolve_columns(groupby) + if extra_conditions is not None: + if query is not None: + query = TraceItemFilter(and_filter=AndFilter(filters=[query, extra_conditions])) + else: + query = extra_conditions return TimeSeriesRequest( meta=meta, @@ -150,7 +180,50 @@ def run_timeseries_query( rpc_response = snuba_rpc.timeseries_rpc(rpc_request) """Process the results""" - return _process_timeseries(rpc_response, params, granularity_secs) + result: list[dict[str, Any]] = [] + for timeseries in rpc_response.result_timeseries: + processed = _process_timeseries(timeseries, params, granularity_secs) + if len(result) == 0: + result = processed + else: + for existing, new in zip(result, processed): + existing.update(new) + return SnubaTSResult({"data": result}, params.start, params.end, granularity_secs) + + +def build_top_event_conditions( + resolver: SearchResolver, top_events: EventsResponse, groupby_columns: list[str] +) -> Any: + conditions = [] + other_conditions = [] + for event in top_events["data"]: + row_conditions = [] + other_row_conditions = [] + for key in groupby_columns: + resolved_term = resolver.resolve_term( + SearchFilter( + key=SearchKey(name=key), + operator="=", + value=SearchValue(raw_value=event[key]), + ) + ) + if resolved_term is not None: + row_conditions.append(resolved_term) + other_term = resolver.resolve_term( + SearchFilter( + key=SearchKey(name=key), + operator="!=", + value=SearchValue(raw_value=event[key]), + ) + ) + if other_term is not None: + other_row_conditions.append(other_term) + conditions.append(TraceItemFilter(and_filter=AndFilter(filters=row_conditions))) + other_conditions.append(TraceItemFilter(or_filter=OrFilter(filters=other_row_conditions))) + return ( + TraceItemFilter(or_filter=OrFilter(filters=conditions)), + TraceItemFilter(and_filter=AndFilter(filters=other_conditions)), + ) def run_top_events_timeseries_query( @@ -158,40 +231,115 @@ def run_top_events_timeseries_query( query_string: str, y_axes: list[str], groupby: list[str], - orderby: list[str], + orderby: list[str] | None, + limit: int, + referrer: str, + granularity_secs: int, + config: SearchResolverConfig, ) -> Any: """We intentionally duplicate run_timeseries_query code here to reduce the complexity of needing multiple helper functions that both would call This is because at time of writing, the query construction is very straightforward, if that changes perhaps we can change this""" - pass + """Make a table query first to get what we need to filter by""" + search_resolver = SearchResolver(params, config) + top_events = run_table_query( + params, + query_string, + groupby + y_axes, + orderby, + 0, + limit, + referrer, + config, + search_resolver=search_resolver, + ) + groupby_columns = [col for col in groupby if not is_function(col)] + top_conditions, other_conditions = build_top_event_conditions( + search_resolver, top_events, groupby_columns + ) """Make the query""" - # maker = SearchResolver(params) - # top_events = run_table_query() with process_results off - # new_conditions = construct conditions based on top_events - # resolved_query = And(new_conditions, maker.resolve_query(query_string)) - # groupby, contexts = maker.resolve_columns(groupby) - # yaxes = maker.resolve_aggregate(y_axes) + rpc_request = get_timeseries_query( + params, + query_string, + y_axes, + groupby, + referrer, + config, + granularity_secs, + 
extra_conditions=top_conditions, + ) + other_request = get_timeseries_query( + params, + query_string, + y_axes, + groupby, + referrer, + config, + granularity_secs, + extra_conditions=other_conditions, + ) """Run the query""" - # rpc = timeseries_RPC(columns=[column.proto_definition for column in groupby], query=query) + rpc_response = snuba_rpc.timeseries_rpc(rpc_request) + other_response = snuba_rpc.timeseries_rpc(other_request) """Process the results""" - # result = rpc.run() - # return _process_timeseries(result, columns) + map_result_key_to_timeseries = {} + for timeseries in rpc_response.result_timeseries: + groupby_attributes = timeseries.group_by_attributes + remapped_groupby = {} + # Remap internal attrs back to public ones + for col in groupby_columns: + resolved_groupby, _ = search_resolver.resolve_attribute(col) + remapped_groupby[resolved_groupby.public_alias] = groupby_attributes[ + resolved_groupby.internal_name + ] + result_key = create_result_key(remapped_groupby, groupby_columns, {}) + map_result_key_to_timeseries[result_key] = timeseries + final_result = {} + # Top Events actually has the order, so we need to iterate through it, regenerate the result keys + for index, row in enumerate(top_events["data"]): + result_key = create_result_key(row, groupby_columns, {}) + final_result[result_key] = SnubaTSResult( + { + "data": _process_timeseries( + map_result_key_to_timeseries[result_key], + params, + granularity_secs, + ), + "order": index, + }, + params.start, + params.end, + granularity_secs, + ) + final_result[OTHER_KEY] = SnubaTSResult( + { + "data": _process_timeseries( + other_response.result_timeseries[0], + params, + granularity_secs, + ), + "order": limit, + }, + params.start, + params.end, + granularity_secs, + ) + return final_result def _process_timeseries( - rpc_response: TimeSeriesResponse, params: SnubaParams, granularity_secs: int -) -> SnubaTSResult: + timeseries: TimeSeries, params: SnubaParams, granularity_secs: int, order: int | None = None +) -> list[dict[str, Any]]: result: SnubaData = [] - for timeseries in rpc_response.result_timeseries: - # Timeseries serialization expects the function alias (eg. `count` not `count()`) - label = get_function_alias(timeseries.label) - if len(result) < len(timeseries.buckets): - for bucket in timeseries.buckets: - result.append({"time": bucket.seconds}) - for index, data_point in enumerate(timeseries.data_points): - result[index][label] = data_point.data + # Timeseries serialization expects the function alias (eg. 
`count` not `count()`) + label = get_function_alias(timeseries.label) + if len(result) < len(timeseries.buckets): + for bucket in timeseries.buckets: + result.append({"time": bucket.seconds}) + for index, data_point in enumerate(timeseries.data_points): + result[index][label] = data_point.data - return SnubaTSResult({"data": result}, params.start, params.end, granularity_secs) + return result diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py index 8b87ffe1a84f5f..8f0bf74cb0c826 100644 --- a/src/sentry/tagstore/snuba/backend.py +++ b/src/sentry/tagstore/snuba/backend.py @@ -6,6 +6,7 @@ from datetime import timedelta, timezone from typing import Any +import sentry_sdk from dateutil.parser import parse as parse_datetime from django.core.cache import cache from sentry_relay.consts import SPAN_STATUS_CODE_TO_NAME @@ -309,11 +310,21 @@ def __get_tag_keys_for_projects( # Cause there's rounding to create this cache suffix, we want to update the query end so results match end = snuba.quantize_time(end, key_hash) cache_key += f":{duration}@{end.isoformat()}" - result = cache.get(cache_key, None) - if result is not None: - metrics.incr("testing.tagstore.cache_tag_key.hit") - else: - metrics.incr("testing.tagstore.cache_tag_key.miss") + + with sentry_sdk.start_span( + op="cache.get", name="sentry.tagstore.cache.__get_tag_keys_for_projects" + ) as span: + result = cache.get(cache_key, None) + + span.set_data("cache.key", [cache_key]) + + if result is not None: + span.set_data("cache.hit", True) + span.set_data("cache.item_size", len(str(result))) + metrics.incr("testing.tagstore.cache_tag_key.hit") + else: + span.set_data("cache.hit", False) + metrics.incr("testing.tagstore.cache_tag_key.miss") if result is None: result = snuba.query( @@ -330,8 +341,13 @@ def __get_tag_keys_for_projects( **kwargs, ) if should_cache: - cache.set(cache_key, result, 300) - metrics.incr("testing.tagstore.cache_tag_key.len", amount=len(result)) + with sentry_sdk.start_span( + op="cache.put", name="sentry.tagstore.cache.__get_tag_keys_for_projects" + ) as span: + cache.set(cache_key, result, 300) + span.set_data("cache.key", [cache_key]) + span.set_data("cache.item_size", len(str(result))) + metrics.incr("testing.tagstore.cache_tag_key.len", amount=len(result)) if group is None: ctor = TagKey @@ -435,13 +451,14 @@ def get_tag_keys_for_projects( # We want to disable FINAL in the snuba query to reduce load. optimize_kwargs = {"turbo": True} - # Add static sample amount to the query. Turbo will sample at 10% by - # default, but organizations with many events still get timeouts. A - # static sample creates more consistent performance. organization_id = get_organization_id_from_project_ids(projects) organization = Organization.objects.get_from_cache(id=organization_id) if features.has("organizations:tag-key-sample-n", organization): + # Add static sample amount to the query. Turbo will sample at 10% by + # default, but organizations with many events still get timeouts. A + # static sample creates more consistent performance. optimize_kwargs["sample"] = options.get("visibility.tag-key-sample-size") + # If we are fetching less than max_unsampled_projects, then disable # the sampling that turbo enables so that we get more accurate results. 
# We only want sampling when we have a large number of projects, so diff --git a/src/sentry/tasks/derive_code_mappings.py b/src/sentry/tasks/derive_code_mappings.py index 009d8793bb83d1..ca79f41e311a65 100644 --- a/src/sentry/tasks/derive_code_mappings.py +++ b/src/sentry/tasks/derive_code_mappings.py @@ -2,6 +2,7 @@ import logging from collections.abc import Mapping +from enum import StrEnum from typing import TYPE_CHECKING, Any from sentry_sdk import set_tag, set_user @@ -9,8 +10,13 @@ from sentry import features from sentry.constants import ObjectStatus from sentry.db.models.fields.node import NodeData +from sentry.integrations.github.integration import GitHubIntegration from sentry.integrations.models.repository_project_path_config import RepositoryProjectPathConfig from sentry.integrations.services.integration import RpcOrganizationIntegration, integration_service +from sentry.integrations.source_code_management.metrics import ( + SCMIntegrationInteractionEvent, + SCMIntegrationInteractionType, +) from sentry.integrations.utils.code_mapping import CodeMapping, CodeMappingTreesHelper from sentry.locks import locks from sentry.models.organization import Organization @@ -29,6 +35,12 @@ from sentry.integrations.base import IntegrationInstallation +class DeriveCodeMappingsErrorReason(StrEnum): + UNEXPECTED_ERROR = "Unexpected error type while calling `get_trees_for_org()`." + LOCK_FAILED = "Failed to acquire lock" + EMPTY_TREES = "The trees are empty." + + def process_error(error: ApiError, extra: dict[str, str]) -> None: """Log known issues and report unknown ones""" if error.json: @@ -115,24 +127,30 @@ def derive_code_mappings( # Acquire the lock for a maximum of 10 minutes lock = locks.get(key=f"get_trees_for_org:{org.slug}", duration=60 * 10, name="process_pending") - try: - with lock.acquire(): - # This method is specific to the GithubIntegration - trees = installation.get_trees_for_org() # type: ignore[attr-defined] - except ApiError as error: - process_error(error, extra) - return - except UnableToAcquireLock as error: - extra["error"] = error - logger.warning("derive_code_mappings.getting_lock_failed", extra=extra) - return - except Exception: - logger.exception("Unexpected error type while calling `get_trees_for_org()`.", extra=extra) - return + with SCMIntegrationInteractionEvent( + SCMIntegrationInteractionType.DERIVE_CODEMAPPINGS, provider_key=installation.model.provider + ).capture() as lifecycle: + try: + with lock.acquire(): + # This method is specific to the GithubIntegration + if not isinstance(installation, GitHubIntegration): + return + trees = installation.get_trees_for_org() + except ApiError as error: + process_error(error, extra) + lifecycle.record_halt(error, extra) + return + except UnableToAcquireLock as error: + extra["error"] = error + lifecycle.record_failure(error, extra) + return + except Exception: + lifecycle.record_failure(DeriveCodeMappingsErrorReason.UNEXPECTED_ERROR, extra=extra) + return - if not trees: - logger.warning("The trees are empty.", extra=extra) - return + if not trees: + lifecycle.record_halt(DeriveCodeMappingsErrorReason.EMPTY_TREES, extra=extra) + return trees_helper = CodeMappingTreesHelper(trees) code_mappings = trees_helper.generate_code_mappings(stacktrace_paths) diff --git a/src/sentry/tasks/embeddings_grouping/utils.py b/src/sentry/tasks/embeddings_grouping/utils.py index 7dd7b95c129981..b4606837d64c9e 100644 --- a/src/sentry/tasks/embeddings_grouping/utils.py +++ b/src/sentry/tasks/embeddings_grouping/utils.py @@ -32,9 +32,10 @@ 
SimilarHashNotFoundError, ) from sentry.seer.similarity.utils import ( + ReferrerOptions, event_content_has_stacktrace, filter_null_from_string, - get_stacktrace_string, + get_stacktrace_string_with_metrics, ) from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer @@ -355,8 +356,10 @@ def get_events_from_nodestore( event._project_cache = project if event and event.data and event_content_has_stacktrace(event): grouping_info = get_grouping_info(None, project=project, event=event) - stacktrace_string = get_stacktrace_string(grouping_info) - if stacktrace_string == "": + stacktrace_string = get_stacktrace_string_with_metrics( + grouping_info, event.platform, ReferrerOptions.BACKFILL + ) + if not stacktrace_string: invalid_event_group_ids.append(group_id) continue primary_hash = event.get_primary_hash() diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index ca527bb730c7dc..9077b64ca6bd03 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -13,9 +13,10 @@ from django.utils import timezone from google.api_core.exceptions import ServiceUnavailable -from sentry import features, projectoptions +from sentry import features, options, projectoptions from sentry.eventstream.types import EventStreamEventType from sentry.exceptions import PluginError +from sentry.features.rollout import in_rollout_group from sentry.issues.grouptype import GroupCategory from sentry.issues.issue_occurrence import IssueOccurrence from sentry.killswitches import killswitch_matches_context @@ -480,6 +481,17 @@ def should_update_escalating_metrics(event: Event, is_transaction_event: bool) - ) +def _get_event_id_from_cache_key(cache_key: str) -> str | None: + """ + format is "e:{}:{}",event_id,project_id + """ + + try: + return cache_key.split(":")[1] + except IndexError: + return None + + @instrumented_task( name="sentry.tasks.post_process.post_process_group", time_limit=120, @@ -527,6 +539,18 @@ def post_process_group( # need to rewind history. 
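# --- Editor's note: hypothetical illustration, not part of this changeset ---
# The helper added above relies on processing-store cache keys having the
# shape "e:{event_id}:{project_id}", so splitting on ":" and taking index 1
# yields the event id. For example (values are made up):
#     _get_event_id_from_cache_key("e:5e9c0df1a8f94d2c:42")  # -> "5e9c0df1a8f94d2c"
#     _get_event_id_from_cache_key("unexpected-key")         # -> None (IndexError path)
# -----------------------------------------------------------------------------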
data = processing_store.get(cache_key) if not data: + event_id = _get_event_id_from_cache_key(cache_key) + if event_id: + if in_rollout_group( + "transactions.do_post_process_in_save", + event_id, + ): + # if we're doing the work for transactions in save_event_transaction + # instead of here, this is expected, so simply increment a metric + # instead of logging + metrics.incr("post_process.skipped_do_post_process_in_save") + return + logger.info( "post_process.skipped", extra={"cache_key": cache_key, "reason": "missing_cache"}, @@ -1205,6 +1229,8 @@ def process_plugins(job: PostProcessJob) -> None: def process_similarity(job: PostProcessJob) -> None: + if not options.get("sentry.similarity.indexing.enabled"): + return if job["is_reprocessed"] or job["event"].group.project.get_option( "sentry:similarity_backfill_completed" ): diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py index 64e030b597bb0f..ca3c24aac30793 100644 --- a/src/sentry/tasks/store.py +++ b/src/sentry/tasks/store.py @@ -16,6 +16,7 @@ from sentry.constants import DEFAULT_STORE_NORMALIZER_ARGS from sentry.datascrubbing import scrub_data from sentry.eventstore import processing +from sentry.features.rollout import in_rollout_group from sentry.feedback.usecases.create_feedback import FeedbackCreationSource, create_feedback_issue from sentry.ingest.types import ConsumerType from sentry.killswitches import killswitch_matches_context @@ -582,6 +583,16 @@ def _do_save_event( raise finally: + if ( + consumer_type == ConsumerType.Transactions + and event_id + and in_rollout_group("transactions.do_post_process_in_save", event_id) + ): + # we won't use the transaction data in post_process + # so we can delete it from the cache now. + if cache_key: + processing_store.delete_by_key(cache_key) + reprocessing2.mark_event_reprocessed(data) if cache_key and has_attachments: attachment_cache.delete(cache_key) diff --git a/src/sentry/taskworker/registry.py b/src/sentry/taskworker/registry.py index 96aae140d095b1..7d93fdef05fbc5 100644 --- a/src/sentry/taskworker/registry.py +++ b/src/sentry/taskworker/registry.py @@ -71,6 +71,7 @@ def register( retry: Retry | None = None, expires: int | datetime.timedelta | None = None, processing_deadline_duration: int | datetime.timedelta | None = None, + at_most_once: bool = False, ) -> Callable[[Callable[P, R]], Task[P, R]]: """register a task, used as a decorator""" @@ -84,6 +85,7 @@ def wrapped(func: Callable[P, R]) -> Task[P, R]: processing_deadline_duration=( processing_deadline_duration or self.default_processing_deadline_duration ), + at_most_once=at_most_once, ) # TODO(taskworker) tasks should be registered into the registry # so that we can ensure task names are globally unique diff --git a/src/sentry/taskworker/task.py b/src/sentry/taskworker/task.py index 0d60884d850406..e44d8aa59b61b5 100644 --- a/src/sentry/taskworker/task.py +++ b/src/sentry/taskworker/task.py @@ -32,6 +32,7 @@ def __init__( retry: Retry | None = None, expires: int | datetime.timedelta | None = None, processing_deadline_duration: int | datetime.timedelta | None = None, + at_most_once: bool = False, ): # TODO(taskworker) Implement task execution deadlines self.name = name @@ -42,6 +43,7 @@ def __init__( self._processing_deadline_duration = ( processing_deadline_duration or DEFAULT_PROCESSING_DEADLINE ) + self.at_most_once = at_most_once update_wrapper(self, func) @property @@ -92,8 +94,11 @@ def _create_retry_state(self) -> RetryState: attempts=0, kind="sentry.taskworker.retry.Retry", discard_after_attempt=1, + 
at_most_once=self.at_most_once, ) - return retry.initial_state() + retry_state = retry.initial_state() + retry_state.at_most_once = self.at_most_once + return retry_state def should_retry(self, state: RetryState, exc: Exception) -> bool: # No retry policy means no retries. diff --git a/src/sentry/taskworker/tasks/examples.py b/src/sentry/taskworker/tasks/examples.py new file mode 100644 index 00000000000000..2e077d70e9b88b --- /dev/null +++ b/src/sentry/taskworker/tasks/examples.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +import logging + +from sentry.taskworker.registry import taskregistry + +logger = logging.getLogger(__name__) +exampletasks = taskregistry.create_namespace(name="examples") + + +@exampletasks.register(name="examples.say_hello") +def say_hello(name: str) -> None: + print(f"Hello {name}") # noqa diff --git a/src/sentry/taskworker/worker.py b/src/sentry/taskworker/worker.py index 6b2f57cb0ee83a..bef131a61298de 100644 --- a/src/sentry/taskworker/worker.py +++ b/src/sentry/taskworker/worker.py @@ -11,6 +11,7 @@ import grpc import orjson from django.conf import settings +from django.core.cache import cache from sentry_protos.sentry.v1.taskworker_pb2 import ( TASK_ACTIVATION_STATUS_COMPLETE, TASK_ACTIVATION_STATUS_FAILURE, @@ -20,6 +21,7 @@ from sentry.taskworker.client import TaskworkerClient from sentry.taskworker.registry import taskregistry +from sentry.taskworker.task import Task from sentry.utils import metrics logger = logging.getLogger("sentry.taskworker.worker") @@ -35,6 +37,14 @@ def _process_activation( taskregistry.get(namespace).get(task_name)(*args, **kwargs) +AT_MOST_ONCE_TIMEOUT = 60 * 60 * 24 # 1 day + + +def get_at_most_once_key(namespace: str, taskname: str, task_id: str) -> str: + # tw:amo -> taskworker:at_most_once + return f"tw:amo:{namespace}:{taskname}:{task_id}" + + class TaskWorker: """ A TaskWorker fetches tasks from a taskworker RPC host and handles executing task activations. 
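# --- Editor's note: hypothetical usage sketch, not part of this changeset ----
# With the `at_most_once` flag introduced above, a task opts into at-most-once
# delivery at registration time; the worker below then uses `cache.add()` so
# only the first worker to claim an activation id actually runs it.
# A minimal sketch, assuming a namespace like the `examples` one registered in
# examples.py (the task name here is invented):
#
#     @exampletasks.register(name="examples.send_receipt", at_most_once=True)
#     def send_receipt(order_id: int) -> None:
#         ...
# ------------------------------------------------------------------------------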
@@ -128,13 +138,13 @@ def fetch_task(self) -> TaskActivation | None: metrics.incr("taskworker.worker.get_task.success") return activation - def _known_task(self, activation: TaskActivation) -> bool: + def _get_known_task(self, activation: TaskActivation) -> Task[Any, Any] | None: if not taskregistry.contains(activation.namespace): logger.error( "taskworker.invalid_namespace", extra={"namespace": activation.namespace, "taskname": activation.taskname}, ) - return False + return None namespace = taskregistry.get(activation.namespace) if not namespace.contains(activation.taskname): @@ -142,12 +152,13 @@ def _known_task(self, activation: TaskActivation) -> bool: "taskworker.invalid_taskname", extra={"namespace": activation.namespace, "taskname": activation.taskname}, ) - return False - return True + return None + return namespace.get(activation.taskname) def process_task(self, activation: TaskActivation) -> TaskActivation | None: assert self._pool - if not self._known_task(activation): + task = self._get_known_task(activation) + if not task: metrics.incr( "taskworker.worker.unknown_task", tags={"namespace": activation.namespace, "taskname": activation.taskname}, @@ -158,7 +169,18 @@ def process_task(self, activation: TaskActivation) -> TaskActivation | None: status=TASK_ACTIVATION_STATUS_FAILURE, ) - # TODO(taskworker): Add at_most_once checks + if task.at_most_once: + key = get_at_most_once_key(activation.namespace, activation.taskname, activation.id) + if cache.add(key, "1", timeout=AT_MOST_ONCE_TIMEOUT): # The key didn't exist + metrics.incr( + "taskworker.task.at_most_once.executed", tags={"task": activation.taskname} + ) + else: + metrics.incr( + "taskworker.worker.at_most_once.skipped", tags={"task": activation.taskname} + ) + return None + processing_timeout = activation.processing_deadline_duration namespace = taskregistry.get(activation.namespace) next_state = TASK_ACTIVATION_STATUS_FAILURE diff --git a/src/sentry/templates/sentry/toolbar/iframe.html b/src/sentry/templates/sentry/toolbar/iframe.html index 83ecbedba7f457..f35185ab3311a1 100644 --- a/src/sentry/templates/sentry/toolbar/iframe.html +++ b/src/sentry/templates/sentry/toolbar/iframe.html @@ -20,7 +20,7 @@ {% endscript %} diff --git a/src/sentry/templates/sentry/toolbar/login-success.html b/src/sentry/templates/sentry/toolbar/login-success.html index f78c1ee5a05660..83af6518d00f51 100644 --- a/src/sentry/templates/sentry/toolbar/login-success.html +++ b/src/sentry/templates/sentry/toolbar/login-success.html @@ -1,45 +1,57 @@ -{# Auth redirect template for Dev Toolbar. Returned after successfully logging in to a requested organization. #} +{% extends "sentry/bases/auth.html" %} + {% load sentry_assets %} - - - - Sentry - Login Success - - - -
-      You are logged in!
-      This window will automatically close after {{ delay_sec }} seconds. If not then check the console for errors.
- - {% script %} - - {% endscript %} - - + } + })(); + + {% endscript %} +{% endblock %} diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 773e54eb058519..882f67be437c6d 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -1407,43 +1407,6 @@ def store_issues(self, issues): == 200 ) - def store_metrics_summary(self, span): - common_fields = { - "duration_ms": span["duration_ms"], - "end_timestamp": (span["start_timestamp_ms"] + span["duration_ms"]) / 1000, - "group": span["sentry_tags"].get("group", "0"), - "is_segment": span["is_segment"], - "project_id": span["project_id"], - "received": span["received"], - "retention_days": span["retention_days"], - "segment_id": span.get("segment_id", "0"), - "span_id": span["span_id"], - "trace_id": span["trace_id"], - } - rows = [] - for mri, summaries in span.get("_metrics_summary", {}).items(): - for summary in summaries: - rows.append( - { - **common_fields, - **{ - "count": summary.get("count", 0), - "max": summary.get("max", 0.0), - "mri": mri, - "min": summary.get("min", 0.0), - "sum": summary.get("sum", 0.0), - "tags": summary.get("tags", {}), - }, - } - ) - assert ( - requests.post( - settings.SENTRY_SNUBA + "/tests/entities/metrics_summaries/insert", - data=json.dumps(rows), - ).status_code - == 200 - ) - def to_snuba_time_format(self, datetime_value): date_format = "%Y-%m-%d %H:%M:%S%z" return datetime_value.strftime(date_format) @@ -1531,7 +1494,6 @@ def store_segment( tags: Mapping[str, Any] | None = None, measurements: Mapping[str, int | float] | None = None, timestamp: datetime | None = None, - store_metrics_summary: Mapping[str, Sequence[Mapping[str, Any]]] | None = None, sdk_name: str | None = None, op: str | None = None, status: str | None = None, @@ -1570,8 +1532,6 @@ def store_segment( payload["measurements"] = { measurement: {"value": value} for measurement, value in measurements.items() } - if store_metrics_summary: - payload["_metrics_summary"] = store_metrics_summary if parent_span_id: payload["parent_span_id"] = parent_span_id if sdk_name is not None: @@ -1583,9 +1543,6 @@ def store_segment( self.store_span(payload, is_eap=is_eap) - if "_metrics_summary" in payload: - self.store_metrics_summary(payload) - def store_indexed_span( self, project_id: int, @@ -1602,7 +1559,6 @@ def store_indexed_span( measurements: Mapping[str, int | float] | None = None, timestamp: datetime | None = None, store_only_summary: bool = False, - store_metrics_summary: Mapping[str, Sequence[Mapping[str, Any]]] | None = None, group: str = "00", category: str | None = None, organization_id: int = 1, @@ -1644,8 +1600,6 @@ def store_indexed_span( payload["segment_id"] = transaction_id[:16] if profile_id: payload["profile_id"] = profile_id - if store_metrics_summary: - payload["_metrics_summary"] = store_metrics_summary if parent_span_id: payload["parent_span_id"] = parent_span_id if category is not None: @@ -1656,9 +1610,6 @@ def store_indexed_span( if not store_only_summary: self.store_span(payload, is_eap=is_eap) - if "_metrics_summary" in payload: - self.store_metrics_summary(payload) - class BaseMetricsTestCase(SnubaTestCase): ENTITY_SHORTHANDS = { diff --git a/src/sentry/testutils/factories.py b/src/sentry/testutils/factories.py index 4afe946574b83d..9dc7d5513e3c29 100644 --- a/src/sentry/testutils/factories.py +++ b/src/sentry/testutils/factories.py @@ -2126,21 +2126,14 @@ def create_data_source( @staticmethod @assume_test_silo_mode(SiloMode.REGION) def create_detector( - organization: 
Organization | None = None, name: str | None = None, - owner_user_id: int | None = None, - owner_team: Team | None = None, **kwargs, ) -> Detector: - if organization is None: - organization = Factories.create_organization() if name is None: name = petname.generate(2, " ", letters=10).title() + return Detector.objects.create( - organization=organization, name=name, - owner_user_id=owner_user_id, - owner_team=owner_team, **kwargs, ) diff --git a/src/sentry/testutils/fixtures.py b/src/sentry/testutils/fixtures.py index 7cb89c175ff15d..26413e962a48c4 100644 --- a/src/sentry/testutils/fixtures.py +++ b/src/sentry/testutils/fixtures.py @@ -634,8 +634,16 @@ def create_data_condition( condition_group=condition_group, ) - def create_detector(self, *args, **kwargs) -> Detector: - return Factories.create_detector(*args, **kwargs) + def create_detector( + self, + *args, + project=None, + **kwargs, + ) -> Detector: + if project is None: + project = self.create_project(organization=self.organization) + + return Factories.create_detector(*args, project=project, **kwargs) def create_detector_state(self, *args, **kwargs) -> DetectorState: return Factories.create_detector_state(*args, **kwargs) diff --git a/src/sentry/testutils/helpers/backups.py b/src/sentry/testutils/helpers/backups.py index ce1bd24a36e1b1..9261c144b304ab 100644 --- a/src/sentry/testutils/helpers/backups.py +++ b/src/sentry/testutils/helpers/backups.py @@ -44,11 +44,7 @@ from sentry.backup.validate import validate from sentry.data_secrecy.models import DataSecrecyWaiver from sentry.db.models.paranoia import ParanoidModel -from sentry.incidents.models.alert_rule import ( - AlertRuleExcludedProjects, - AlertRuleMonitorTypeInt, - AlertRuleTriggerExclusion, -) +from sentry.incidents.models.alert_rule import AlertRuleMonitorTypeInt from sentry.incidents.models.incident import ( IncidentActivity, IncidentSnapshot, @@ -511,20 +507,15 @@ def create_exhaustive_organization( ) # AlertRule* - other_project = self.create_project(name=f"other-project-{slug}", teams=[team]) alert = self.create_alert_rule( organization=org, projects=[project], user=owner, ) - AlertRuleExcludedProjects.objects.create(alert_rule=alert, project=other_project) alert.user_id = owner_id alert.save() trigger = self.create_alert_rule_trigger(alert_rule=alert) assert alert.snuba_query is not None - AlertRuleTriggerExclusion.objects.create( - alert_rule_trigger=trigger, query_subscription=alert.snuba_query.subscriptions.get() - ) self.create_alert_rule_trigger_action(alert_rule_trigger=trigger) activated_alert = self.create_alert_rule( organization=org, @@ -659,7 +650,7 @@ def create_exhaustive_organization( # Setup a test 'Issue Rule' and 'Automation' workflow = self.create_workflow(organization=org) - detector = self.create_detector(organization=org) + detector = self.create_detector(project=project) self.create_detector_workflow(detector=detector, workflow=workflow) self.create_detector_state(detector=detector) diff --git a/src/sentry/toolbar/views/iframe_view.py b/src/sentry/toolbar/views/iframe_view.py index ba0ff1c5115fd9..58abcb5439cff1 100644 --- a/src/sentry/toolbar/views/iframe_view.py +++ b/src/sentry/toolbar/views/iframe_view.py @@ -47,7 +47,7 @@ def get( allowed_origins: list[str] = project.get_option("sentry:toolbar_allowed_origins") if referrer and is_origin_allowed(referrer, allowed_origins): - return self._respond_with_state("success") + return self._respond_with_state("logged-in") return self._respond_with_state("invalid-domain") diff --git 
a/src/sentry/toolbar/views/login_success_view.py b/src/sentry/toolbar/views/login_success_view.py index 55f37a385f7490..c4d4c256017796 100644 --- a/src/sentry/toolbar/views/login_success_view.py +++ b/src/sentry/toolbar/views/login_success_view.py @@ -18,6 +18,7 @@ def get(self, request: HttpRequest, organization, project_id_or_slug): TEMPLATE, status=200, context={ + "organization_slug": organization.slug, "delay_sec": int(delay_ms / 1000), "delay_ms": delay_ms, "cookie": f"{session_cookie_name}={request.COOKIES.get(session_cookie_name)}", diff --git a/src/sentry/uptime/endpoints/validators.py b/src/sentry/uptime/endpoints/validators.py index 3b857709e87587..9d29d964776f97 100644 --- a/src/sentry/uptime/endpoints/validators.py +++ b/src/sentry/uptime/endpoints/validators.py @@ -69,7 +69,7 @@ class UptimeMonitorValidator(CamelSnakeSerializer): name = serializers.CharField( required=True, max_length=128, - help_text="Name of the uptime monitor", + help_text="Name of the uptime monitor.", ) owner = ActorField( required=False, @@ -80,24 +80,40 @@ class UptimeMonitorValidator(CamelSnakeSerializer): max_length=64, required=False, allow_null=True, - help_text="Name of the environment", + help_text="Name of the environment to create uptime issues in.", ) url = URLField(required=True, max_length=255) interval_seconds = serializers.ChoiceField( - required=True, choices=UptimeSubscription.IntervalSeconds.choices + required=True, + choices=UptimeSubscription.IntervalSeconds.choices, + help_text="Time in seconds between uptime checks.", ) timeout_ms = serializers.IntegerField( required=True, min_value=1000, max_value=30_000, + help_text="The number of milliseconds the request will wait for a response before timing out.", ) mode = serializers.IntegerField(required=False) method = serializers.ChoiceField( - required=False, choices=UptimeSubscription.SupportedHTTPMethods.choices + required=False, + choices=UptimeSubscription.SupportedHTTPMethods.choices, + help_text="The HTTP method used to make the check request.", + ) + headers = serializers.JSONField( + required=False, + help_text="Additional headers to send with the check request.", + ) + trace_sampling = serializers.BooleanField( + required=False, + default=False, + help_text="When enabled, allows check requests to be considered for downstream performance tracing.", + ) + body = serializers.CharField( + required=False, + allow_null=True, + help_text="The body to send with the check request.", ) - headers = serializers.JSONField(required=False) - trace_sampling = serializers.BooleanField(required=False, default=False) - body = serializers.CharField(required=False, allow_null=True) def validate(self, attrs): headers = [] diff --git a/src/sentry/uptime/migrations/0002_remove_separate_remote_subscription.py b/src/sentry/uptime/migrations/0002_remove_separate_remote_subscription.py index bd79fb509bbbc4..cee3dbc032f816 100644 --- a/src/sentry/uptime/migrations/0002_remove_separate_remote_subscription.py +++ b/src/sentry/uptime/migrations/0002_remove_separate_remote_subscription.py @@ -22,6 +22,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0001_uptime_subscriptions"), ("remote_subscriptions", "0001_remote_subscription"), diff --git a/src/sentry/uptime/migrations/0003_drop_remote_subscription.py b/src/sentry/uptime/migrations/0003_drop_remote_subscription.py index 77cf78803a6b1b..baaa9f57d0b69b 100644 --- a/src/sentry/uptime/migrations/0003_drop_remote_subscription.py +++
b/src/sentry/uptime/migrations/0003_drop_remote_subscription.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0002_remove_separate_remote_subscription"), ] diff --git a/src/sentry/uptime/migrations/0004_projectuptimesubscription_mode.py b/src/sentry/uptime/migrations/0004_projectuptimesubscription_mode.py index 0dec267cd7a3bc..7c45752c8c0ea7 100644 --- a/src/sentry/uptime/migrations/0004_projectuptimesubscription_mode.py +++ b/src/sentry/uptime/migrations/0004_projectuptimesubscription_mode.py @@ -22,6 +22,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0731_add_insight_project_flags"), ("uptime", "0003_drop_remote_subscription"), diff --git a/src/sentry/uptime/migrations/0005_uptime_status.py b/src/sentry/uptime/migrations/0005_uptime_status.py index b2e5c67d86a5d1..5402acf8352eb2 100644 --- a/src/sentry/uptime/migrations/0005_uptime_status.py +++ b/src/sentry/uptime/migrations/0005_uptime_status.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0004_projectuptimesubscription_mode"), ] diff --git a/src/sentry/uptime/migrations/0006_projectuptimesubscription_name_owner.py b/src/sentry/uptime/migrations/0006_projectuptimesubscription_name_owner.py index 700265fef0eb70..61a26021ead98c 100644 --- a/src/sentry/uptime/migrations/0006_projectuptimesubscription_name_owner.py +++ b/src/sentry/uptime/migrations/0006_projectuptimesubscription_name_owner.py @@ -23,6 +23,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0742_backfill_alertrule_detection_type"), ("uptime", "0005_uptime_status"), diff --git a/src/sentry/uptime/migrations/0008_uptime_url_suffix.py b/src/sentry/uptime/migrations/0008_uptime_url_suffix.py index f1ea789154cca2..57ffe779e502ed 100644 --- a/src/sentry/uptime/migrations/0008_uptime_url_suffix.py +++ b/src/sentry/uptime/migrations/0008_uptime_url_suffix.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0007_update_detected_subscription_interval"), ] diff --git a/src/sentry/uptime/migrations/0011_remove_uptime_whois_columns_db.py b/src/sentry/uptime/migrations/0011_remove_uptime_whois_columns_db.py index fe4996ed21c1dc..529f7f7731c993 100644 --- a/src/sentry/uptime/migrations/0011_remove_uptime_whois_columns_db.py +++ b/src/sentry/uptime/migrations/0011_remove_uptime_whois_columns_db.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0010_remove_uptime_whois_columns_state"), ] diff --git a/src/sentry/uptime/migrations/0012_uptime_subscription_request_fields.py b/src/sentry/uptime/migrations/0012_uptime_subscription_request_fields.py index 87b20132abb29c..882fdca1fe9154 100644 --- a/src/sentry/uptime/migrations/0012_uptime_subscription_request_fields.py +++ b/src/sentry/uptime/migrations/0012_uptime_subscription_request_fields.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0011_remove_uptime_whois_columns_db"), ] diff --git a/src/sentry/uptime/migrations/0018_add_trace_sampling_field_to_uptime.py b/src/sentry/uptime/migrations/0018_add_trace_sampling_field_to_uptime.py index 84ab364596b37a..d16374639f1e48 
100644 --- a/src/sentry/uptime/migrations/0018_add_trace_sampling_field_to_uptime.py +++ b/src/sentry/uptime/migrations/0018_add_trace_sampling_field_to_uptime.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0017_unique_on_timeout"), ] diff --git a/src/sentry/utils/concurrent.py b/src/sentry/utils/concurrent.py index 88e13294c17ef9..82e1b706d2fa47 100644 --- a/src/sentry/utils/concurrent.py +++ b/src/sentry/utils/concurrent.py @@ -9,17 +9,15 @@ from contextlib import contextmanager from queue import Full, PriorityQueue from time import time -from typing import Generic, NamedTuple, TypeVar +from typing import Any, NamedTuple import sentry_sdk import sentry_sdk.scope logger = logging.getLogger(__name__) -T = TypeVar("T") - -def execute(function: Callable[..., T], daemon=True): +def execute[T](function: Callable[..., T], daemon=True) -> Future[T]: future: Future[T] = Future() def run(): @@ -41,7 +39,7 @@ def run(): @functools.total_ordering -class PriorityTask(NamedTuple, Generic[T]): +class PriorityTask[T](NamedTuple): priority: int item: tuple[sentry_sdk.Scope, sentry_sdk.Scope, Callable[[], T], Future[T]] @@ -52,7 +50,7 @@ def __lt__(self, b): return self.priority < b.priority -class TimedFuture(Future[T]): +class TimedFuture[T](Future[T]): _condition: threading.Condition _state: str @@ -124,7 +122,7 @@ def set_exception(self, *args, **kwargs): return super().set_exception(*args, **kwargs) -class Executor(Generic[T]): +class Executor: """ This class provides an API for executing tasks in different contexts (immediately, or asynchronously.) @@ -136,9 +134,15 @@ class Executor(Generic[T]): to allow controlling whether or not queue insertion should be blocking. """ - Future = TimedFuture - - def submit(self, callable, priority=0, block=True, timeout=None) -> TimedFuture[T]: + def submit[ + T + ]( + self, + callable: Callable[[], T], + priority: int = 0, + block: bool = True, + timeout=None, + ) -> TimedFuture[T]: """ Enqueue a task to be executed, returning a ``TimedFuture``. @@ -149,7 +153,7 @@ def submit(self, callable, priority=0, block=True, timeout=None) -> TimedFuture[ raise NotImplementedError -class SynchronousExecutor(Executor[T]): +class SynchronousExecutor(Executor): """ This executor synchronously executes callables in the current thread. @@ -160,11 +164,11 @@ class SynchronousExecutor(Executor[T]): # TODO: The ``Future`` implementation here could be replaced with a # lock-free future for efficiency. - def submit(self, callable, *args, **kwargs): + def submit[T](self, callable: Callable[[], T], *args, **kwargs) -> TimedFuture[T]: """ Immediately execute a callable, returning a ``TimedFuture``. """ - future: Future[T] = self.Future() + future: TimedFuture[T] = TimedFuture() assert future.set_running_or_notify_cancel() try: result = callable() @@ -175,7 +179,7 @@ def submit(self, callable, *args, **kwargs): return future -class ThreadedExecutor(Executor[T]): +class ThreadedExecutor(Executor): """\ This executor provides a method of executing callables in a threaded worker pool. 
The number of outstanding requests can be limited by the ``maxsize`` @@ -190,7 +194,7 @@ def __init__(self, worker_count=1, maxsize=0): self.__worker_count = worker_count self.__workers = set() self.__started = False - self.__queue: PriorityQueue[PriorityTask[T]] = PriorityQueue(maxsize) + self.__queue: PriorityQueue[PriorityTask[Any]] = PriorityQueue(maxsize) self.__lock = threading.Lock() def __worker(self): @@ -223,7 +227,9 @@ def start(self): self.__started = True - def submit(self, callable, priority=0, block=True, timeout=None): + def submit[ + T + ](self, callable: Callable[[], T], priority=0, block=True, timeout=None) -> TimedFuture[T]: """\ Enqueue a task to be executed, returning a ``TimedFuture``. @@ -237,7 +243,7 @@ def submit(self, callable, priority=0, block=True, timeout=None): if not self.__started: self.start() - future: Future[T] = self.Future() + future: TimedFuture[T] = TimedFuture() task = PriorityTask( priority, ( diff --git a/src/sentry/utils/event_tracker.py b/src/sentry/utils/event_tracker.py index 8ac0287512bc26..5ff2f66a69b023 100644 --- a/src/sentry/utils/event_tracker.py +++ b/src/sentry/utils/event_tracker.py @@ -2,11 +2,7 @@ from enum import StrEnum from sentry import options - - -class EventType(StrEnum): - TRANSACTION = "transaction" - ERROR = "error" +from sentry.ingest.types import ConsumerType class TransactionStageStatus(StrEnum): @@ -14,10 +10,10 @@ class TransactionStageStatus(StrEnum): REDIS_PUT = "redis_put" # a save_transactions task is kicked off - SAVE_TRX_STARTED = "save_trx_started" + SAVE_TXN_STARTED = "save_txn_started" # a save_transactions task is finished - SAVE_TRX_FINISHED = "save_trx_finished" + SAVE_TXN_FINISHED = "save_txn_finished" # the transaction is published to the `events` topic for snuba/sbc consumers to consume SNUBA_TOPIC_PUT = "snuba_topic_put" @@ -35,13 +31,15 @@ class TransactionStageStatus(StrEnum): logger = logging.getLogger("EventTracker") -def track_sampled_event(event_id: str, event_type: str, status: TransactionStageStatus) -> None: +def track_sampled_event( + event_id: str, consumer_type: ConsumerType, status: TransactionStageStatus +) -> None: """ Records how far an event has made it through the ingestion pipeline. Each event type will pick up its sampling rate from its registered option. 
""" - sample_rate = options.get(f"performance.event-tracker.sample-rate.{event_type}") + sample_rate = options.get(f"performance.event-tracker.sample-rate.{consumer_type}") if sample_rate == 0: return @@ -49,7 +47,7 @@ def track_sampled_event(event_id: str, event_type: str, status: TransactionStage if event_float < sample_rate: extra = { "event_id": event_id, - "event_type": getattr(EventType, event_type.upper(), None), + "consumer_type": consumer_type, "status": status, } _do_record(extra) diff --git a/src/sentry/utils/locking/backends/migration.py b/src/sentry/utils/locking/backends/migration.py index c7b80c92c01f2c..10fa6329c38460 100644 --- a/src/sentry/utils/locking/backends/migration.py +++ b/src/sentry/utils/locking/backends/migration.py @@ -1,6 +1,7 @@ -from collections.abc import Callable, Mapping -from typing import Any, Optional, Union +from collections.abc import Callable +from typing import Optional, Union +from sentry.conf.types.service_options import ServiceOptions from sentry.utils.locking.backends import LockBackend from sentry.utils.services import build_instance_from_options_of_type, resolve_callable @@ -53,8 +54,8 @@ def selector_func(key, routing_key, backend_new, backend_old): def __init__( self, - backend_new_config: Mapping[str, Any], - backend_old_config: Mapping[str, Any], + backend_new_config: ServiceOptions, + backend_old_config: ServiceOptions, selector_func_path: str | SelectorFncType | None = None, ): self.backend_new = build_instance_from_options_of_type(LockBackend, backend_new_config) diff --git a/src/sentry/utils/services.py b/src/sentry/utils/services.py index 5a21d7c6318d70..b959b0f7984a82 100644 --- a/src/sentry/utils/services.py +++ b/src/sentry/utils/services.py @@ -6,11 +6,12 @@ import logging import threading from collections.abc import Callable, Mapping, Sequence -from typing import Any, TypeVar +from typing import Any -from rest_framework.request import Request +from django.http.request import HttpRequest from sentry import options +from sentry.conf.types.service_options import ServiceOptions from sentry.utils.concurrent import Executor, FutureSet, ThreadedExecutor, TimedFuture # TODO: adjust modules to import from new location -- the weird `as` syntax is for mypy @@ -18,26 +19,21 @@ from sentry.utils.lazy_service_wrapper import Service as Service from .imports import import_string -from .types import AnyCallable logger = logging.getLogger(__name__) -T = TypeVar("T") -CallableT = TypeVar("CallableT", bound=Callable[..., object]) - - -def resolve_callable(value: str | CallableT) -> CallableT: - if callable(value): - return value - elif isinstance(value, str): +def resolve_callable[CallableT: Callable[..., object]](value: str | CallableT) -> CallableT: + if isinstance(value, str): return import_string(value) + elif callable(value): + return value else: raise TypeError("Expected callable or string") class Context: - def __init__(self, request: Request, backends: dict[type[Service | None], Service]): + def __init__(self, request: HttpRequest | None, backends: dict[type[Service | None], Service]): self.request = request self.backends = backends @@ -51,7 +47,7 @@ def copy(self) -> Context: ] Callback = Callable[ - [Context, str, Mapping[str, Any], Sequence[str], Sequence[TimedFuture]], + [Context, str, Mapping[str, Any], Sequence[str], Sequence[TimedFuture[Any] | None]], None, ] @@ -253,7 +249,7 @@ def call_backend_method(context: Context, backend: Service, is_primary: bool) -> # executed before the primary request is queued. 
This is such a # strange usage pattern that I don't think it's worth optimizing # for.) - results = [None] * len(selected_backend_names) + results: list[TimedFuture[Any] | None] = [None] * len(selected_backend_names) for i, backend_name in enumerate(selected_backend_names[1:], 1): try: backend, executor = self.backends[backend_name] @@ -276,7 +272,7 @@ def call_backend_method(context: Context, backend: Service, is_primary: bool) -> # calling thread. (We don't have to protect this from ``KeyError`` # since we already ensured that the primary backend exists.) backend, executor = self.backends[selected_backend_names[0]] - results[0] = executor.submit( + result = results[0] = executor.submit( functools.partial(call_backend_method, context.copy(), backend, is_primary=True), priority=0, block=True, @@ -289,14 +285,13 @@ def call_backend_method(context: Context, backend: Service, is_primary: bool) -> ) ) - result: TimedFuture = results[0] return result.result() return execute def build_instance_from_options( - options: Mapping[str, object], + options: ServiceOptions, *, default_constructor: Callable[..., object] | None = None, ) -> object: @@ -313,9 +308,11 @@ def build_instance_from_options( return constructor(**options.get("options", {})) -def build_instance_from_options_of_type( +def build_instance_from_options_of_type[ + T +]( tp: type[T], - options: Mapping[str, object], + options: ServiceOptions, *, default_constructor: Callable[..., T] | None = None, ) -> T: @@ -364,17 +361,17 @@ class ServiceDelegator(Delegator, Service): def __init__( self, backend_base: str, - backends: Mapping[str, Mapping[str, Any]], - selector_func: str | AnyCallable, - callback_func: str | AnyCallable | None = None, + backends: Mapping[str, ServiceOptions], + selector_func: str | Selector, + callback_func: str | Callback | None = None, ): super().__init__( import_string(backend_base), { name: ( - build_instance_from_options(options), - build_instance_from_options( - options.get("executor", {}), default_constructor=ThreadedExecutor + build_instance_from_options_of_type(Service, options), + build_instance_from_options_of_type( + Executor, options.get("executor", {}), default_constructor=ThreadedExecutor ), ) for name, options in backends.items() @@ -435,9 +432,7 @@ def selector(context: Context, method: str, callargs: Mapping[str, Any]) -> list else: intkey = key - if not isinstance(intkey, int): - logger.error("make_writebehind_selector.invalid", extra={"received_type": type(intkey)}) - return [move_from] + assert isinstance(intkey, int), intkey if rollout_rate < 0: if (intkey % 10000) / 10000 < rollout_rate * -1.0: diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index cb8d772d13000b..c01a1e3fc34ea4 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -165,6 +165,7 @@ def log_snuba_info(content): "origin.transaction": "sentry_tags[transaction]", "is_transaction": "is_segment", "sdk.name": "sentry_tags[sdk.name]", + "sdk.version": "sentry_tags[sdk.version]", "trace.status": "sentry_tags[trace.status]", "messaging.destination.name": "sentry_tags[messaging.destination.name]", "messaging.message.id": "sentry_tags[messaging.message.id]", @@ -201,6 +202,7 @@ def log_snuba_info(content): "timestamp": "timestamp", "trace": "trace_id", "transaction": "segment_name", + "transaction.op": "attr_str[sentry.transaction.op]", # `transaction.id` and `segment.id` is going to be replaced by `transaction.span_id` please do not use # transaction.id is "wrong", its pointing to segment_id to return 
something for the transistion, but represents the # txn event id(32 char uuid). EAP will no longer be storing this. @@ -215,6 +217,7 @@ def log_snuba_info(content): "messaging.destination.name": "attr_str[sentry.messaging.destination.name]", "messaging.message.id": "attr_str[sentry.messaging.message.id]", "span.status_code": "attr_str[sentry.status_code]", + "profile.id": "attr_str[sentry.profile_id]", "replay.id": "attr_str[sentry.replay_id]", "span.ai.pipeline.group": "attr_str[sentry.ai_pipeline_group]", "trace.status": "attr_str[sentry.trace.status]", @@ -222,6 +225,7 @@ def log_snuba_info(content): "ai.total_tokens.used": "attr_num[ai_total_tokens_used]", "ai.total_cost": "attr_num[ai_total_cost]", "sdk.name": "attr_str[sentry.sdk.name]", + "sdk.version": "attr_str[sentry.sdk.version]", "release": "attr_str[sentry.release]", "environment": "attr_str[sentry.environment]", "user": "attr_str[sentry.user]", @@ -233,22 +237,6 @@ def log_snuba_info(content): "user.geo.country_code": "attr_str[sentry.user.geo.country_code]", } -METRICS_SUMMARIES_COLUMN_MAP = { - "project": "project_id", - "project.id": "project_id", - "id": "span_id", - "trace": "trace_id", - "metric": "metric_mri", - "timestamp": "end_timestamp", - "segment.id": "segment_id", - "span.duration": "duration_ms", - "span.group": "group", - "min_metric": "min", - "max_metric": "max", - "sum_metric": "sum", - "count_metric": "count", -} - SPAN_COLUMN_MAP.update( {col.value.alias: col.value.spans_name for col in Columns if col.value.spans_name is not None} ) @@ -299,7 +287,6 @@ def log_snuba_info(content): Dataset.Discover: DISCOVER_COLUMN_MAP, Dataset.Sessions: SESSIONS_SNUBA_MAP, Dataset.Metrics: METRICS_COLUMN_MAP, - Dataset.MetricsSummaries: METRICS_SUMMARIES_COLUMN_MAP, Dataset.PerformanceMetrics: METRICS_COLUMN_MAP, Dataset.SpansIndexed: SPAN_COLUMN_MAP, Dataset.EventsAnalyticsPlatform: SPAN_EAP_COLUMN_MAP, @@ -318,7 +305,6 @@ def log_snuba_info(content): Dataset.IssuePlatform: list(ISSUE_PLATFORM_MAP.values()), Dataset.SpansIndexed: list(SPAN_COLUMN_MAP.values()), Dataset.EventsAnalyticsPlatform: list(SPAN_EAP_COLUMN_MAP.values()), - Dataset.MetricsSummaries: list(METRICS_SUMMARIES_COLUMN_MAP.values()), } SNUBA_OR = "or" @@ -498,7 +484,13 @@ class RetrySkipTimeout(urllib3.Retry): """ def increment( - self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None + self, + method=None, + url=None, + response=None, + error=None, + _pool=None, + _stacktrace=None, ): """ Just rely on the parent class unless we have a read timeout. In that case @@ -637,7 +629,9 @@ def get_organization_id_from_project_ids(project_ids: Sequence[int]) -> int: return organization_id -def infer_project_ids_from_related_models(filter_keys: Mapping[str, Sequence[int]]) -> list[int]: +def infer_project_ids_from_related_models( + filter_keys: Mapping[str, Sequence[int]], +) -> list[int]: ids = [set(get_related_project_ids(k, filter_keys[k])) for k in filter_keys] return list(set.union(*ids)) @@ -957,7 +951,10 @@ def raw_snql_query( # other functions do here. It does not add any automatic conditions, format # results, nothing. Use at your own risk. 
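# A hedged usage sketch of the single-request helper above (illustrative only, not
# part of this diff): the referrer string is a placeholder and the snuba_sdk Request
# is assumed to be built elsewhere. Internally the helper now simply batches the one
# request through bulk_snuba_queries(...)[0], as the body below shows.
from snuba_sdk import Request

from sentry.utils.snuba import raw_snql_query


def run_one_snql_query(request: Request):
    # Thin wrapper: no automatic conditions or result formatting are added.
    return raw_snql_query(request, referrer="api.example.caller", use_cache=True)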
return bulk_snuba_queries( - requests=[request], referrer=referrer, use_cache=use_cache, query_source=query_source + requests=[request], + referrer=referrer, + use_cache=use_cache, + query_source=query_source, )[0] @@ -1096,7 +1093,9 @@ def _apply_cache_and_build_results( for result, (query_pos, _, opt_cache_key) in zip(query_results, to_query): if opt_cache_key: cache.set( - opt_cache_key, json.dumps(result), settings.SENTRY_SNUBA_CACHE_TTL_SECONDS + opt_cache_key, + json.dumps(result), + settings.SENTRY_SNUBA_CACHE_TTL_SECONDS, ) results.append((query_pos, result)) @@ -1165,7 +1164,8 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: except ValueError: if response.status != 200: logger.exception( - "snuba.query.invalid-json", extra={"response.data": response.data} + "snuba.query.invalid-json", + extra={"response.data": response.data}, ) raise SnubaError("Failed to parse snuba error response") raise UnexpectedResponseError(f"Could not decode JSON response: {response.data!r}") @@ -1442,7 +1442,6 @@ def _resolve_column(col): # Some dataset specific logic: if dataset == Dataset.Discover: - if isinstance(col, (list, tuple)) or col in ("project_id", "group_id"): return col elif dataset == Dataset.EventsAnalyticsPlatform: @@ -1825,7 +1824,11 @@ def replace(d, key, val): reverse = compose( reverse, lambda row: ( - replace(row, "bucketed_end", int(parse_datetime(row["bucketed_end"]).timestamp())) + replace( + row, + "bucketed_end", + int(parse_datetime(row["bucketed_end"]).timestamp()), + ) if "bucketed_end" in row else row ), diff --git a/src/sentry/web/frontend/oauth_authorize.py b/src/sentry/web/frontend/oauth_authorize.py index 8617bdbd5a465f..e9ed3e3543fc99 100644 --- a/src/sentry/web/frontend/oauth_authorize.py +++ b/src/sentry/web/frontend/oauth_authorize.py @@ -127,17 +127,25 @@ def get(self, request: HttpRequest, **kwargs) -> HttpResponseBase: err_response="client_id", ) - # TODO (athena): Clean up this so scopes are always coming from the model - # This change is temporarily needed before we migrate existing applications - # to have the correct scopes - if application.requires_org_level_access: - scopes = application.scopes + scopes = request.GET.get("scope") + if scopes: + scopes = scopes.split(" ") else: - scopes = request.GET.get("scope") - if scopes: - scopes = scopes.split(" ") - else: - scopes = [] + scopes = [] + if application.requires_org_level_access: + # Applications that require org level access have a maximum scope limit set + # in admin that should not pass + max_scopes = application.scopes + for scope in scopes: + if scope not in max_scopes: + return self.error( + request=request, + client_id=client_id, + response_type=response_type, + redirect_uri=redirect_uri, + name="invalid_scope", + state=state, + ) for scope in scopes: if scope not in settings.SENTRY_SCOPES: @@ -163,7 +171,9 @@ def get(self, request: HttpRequest, **kwargs) -> HttpResponseBase: if not request.user.is_authenticated: return super().get(request, application=application) - if not force_prompt: + # If the application expects org level access, we need to prompt the user to choose which + # organization they want to give access to every time. 
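# Restating the scope handling added earlier in this view as a standalone sketch
# (the helper name and scope strings are illustrative, not part of this diff):
# requested scopes come from the query string, and for applications that require
# org-level access each one must fall inside the application's configured maximum,
# otherwise the view responds with an `invalid_scope` error.
def narrow_scopes(requested: str | None, max_scopes: list[str]) -> list[str] | None:
    scopes = requested.split(" ") if requested else []
    for scope in scopes:
        if scope not in max_scopes:
            return None  # -> self.error(..., name="invalid_scope", ...)
    return scopes


assert narrow_scopes("org:read project:read", ["org:read", "project:read"]) == ["org:read", "project:read"]
assert narrow_scopes("org:admin", ["org:read"]) is None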
We should not presume the user intention + if not (force_prompt or application.requires_org_level_access): try: existing_auth = ApiAuthorization.objects.get( user_id=request.user.id, application=application diff --git a/src/sentry/workflow_engine/endpoints/project_detector_index.py b/src/sentry/workflow_engine/endpoints/project_detector_index.py new file mode 100644 index 00000000000000..5e4ca73e560438 --- /dev/null +++ b/src/sentry/workflow_engine/endpoints/project_detector_index.py @@ -0,0 +1,126 @@ +from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema +from rest_framework import status +from rest_framework.exceptions import ValidationError +from rest_framework.response import Response + +from sentry.api.api_owners import ApiOwner +from sentry.api.api_publish_status import ApiPublishStatus +from sentry.api.base import region_silo_endpoint +from sentry.api.bases import ProjectAlertRulePermission, ProjectEndpoint +from sentry.api.serializers import serialize +from sentry.apidocs.constants import ( + RESPONSE_BAD_REQUEST, + RESPONSE_FORBIDDEN, + RESPONSE_NOT_FOUND, + RESPONSE_UNAUTHORIZED, +) +from sentry.apidocs.parameters import GlobalParams +from sentry.issues import grouptype +from sentry.workflow_engine.endpoints.serializers import DetectorSerializer +from sentry.workflow_engine.models import Detector + + +@region_silo_endpoint +@extend_schema(tags=["Workflows"]) +class ProjectDetectorIndexEndpoint(ProjectEndpoint): + publish_status = { + "POST": ApiPublishStatus.EXPERIMENTAL, + "GET": ApiPublishStatus.EXPERIMENTAL, + } + owner = ApiOwner.ISSUES + + # TODO: We probably need a specific permission for detectors. Possibly specific detectors have different perms + # too? + permission_classes = (ProjectAlertRulePermission,) + + def _get_validator(self, request, project, group_type_slug): + detector_type = grouptype.registry.get_by_slug(group_type_slug) + if detector_type is None: + raise ValidationError({"groupType": ["Unknown group type"]}) + + if detector_type.detector_validator is None: + raise ValidationError({"groupType": ["Group type not compatible with detectors"]}) + + return detector_type.detector_validator( + context={ + "project": project, + "organization": project.organization, + "request": request, + "access": request.access, + }, + data=request.data, + ) + + @extend_schema( + operation_id="Fetch a Detector", + parameters=[ + GlobalParams.ORG_ID_OR_SLUG, + GlobalParams.PROJECT_ID_OR_SLUG, + ], + responses={ + 201: DetectorSerializer, + 400: RESPONSE_BAD_REQUEST, + 401: RESPONSE_UNAUTHORIZED, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + ) + def get(self, request, project): + """ + List a Project's Detectors + ````````````````````````` + Return a list of detectors for a given project. 
+ """ + queryset = Detector.objects.filter( + organization_id=project.organization_id, + ).order_by("id") + + return self.paginate( + request=request, + queryset=queryset, + order_by="id", + on_results=lambda x: serialize(x, request.user), + ) + + @extend_schema( + operation_id="Create a Detector", + parameters=[ + GlobalParams.ORG_ID_OR_SLUG, + GlobalParams.PROJECT_ID_OR_SLUG, + ], + request=PolymorphicProxySerializer( + "GenericDetectorSerializer", + serializers=[ + gt.detector_validator for gt in grouptype.registry.all() if gt.detector_validator + ], + resource_type_field_name=None, + ), + responses={ + 201: DetectorSerializer, + 400: RESPONSE_BAD_REQUEST, + 401: RESPONSE_UNAUTHORIZED, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + ) + def post(self, request, project): + """ + Create a Detector + ```````````````` + Create a new detector for a project. + + :param string name: The name of the detector + :param string group_type: The type of detector to create + :param object data_source: Configuration for the data source + :param array data_conditions: List of conditions to trigger the detector + """ + group_type = request.data.get("group_type") + if not group_type: + raise ValidationError({"groupType": ["This field is required."]}) + + validator = self._get_validator(request, project, group_type) + if not validator.is_valid(): + return Response(validator.errors, status=status.HTTP_400_BAD_REQUEST) + + detector = validator.save() + return Response(serialize(detector, request.user), status=status.HTTP_201_CREATED) diff --git a/src/sentry/workflow_engine/endpoints/urls.py b/src/sentry/workflow_engine/endpoints/urls.py new file mode 100644 index 00000000000000..be4d46ce9083b1 --- /dev/null +++ b/src/sentry/workflow_engine/endpoints/urls.py @@ -0,0 +1,11 @@ +from django.urls import re_path + +from .project_detector_index import ProjectDetectorIndexEndpoint + +urlpatterns = [ + re_path( + r"^(?P[^\/]+)/(?P[^\/]+)/detectors/$", + ProjectDetectorIndexEndpoint.as_view(), + name="sentry-api-0-project-detector-index", + ), +] diff --git a/src/sentry/workflow_engine/migrations/0010_detector_state_unique_group.py b/src/sentry/workflow_engine/migrations/0010_detector_state_unique_group.py index 51288a9ed8f74e..8d5a41873bbe77 100644 --- a/src/sentry/workflow_engine/migrations/0010_detector_state_unique_group.py +++ b/src/sentry/workflow_engine/migrations/0010_detector_state_unique_group.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("workflow_engine", "0009_detector_type"), ] diff --git a/src/sentry/workflow_engine/migrations/0014_model_additions_for_milestones.py b/src/sentry/workflow_engine/migrations/0014_model_additions_for_milestones.py new file mode 100644 index 00000000000000..00ab4ba74e3098 --- /dev/null +++ b/src/sentry/workflow_engine/migrations/0014_model_additions_for_milestones.py @@ -0,0 +1,120 @@ +# Generated by Django 5.1.1 on 2024-11-21 21:05 + +import django.db.models.deletion +from django.db import migrations, models + +import sentry.db.models.fields.foreignkey +import sentry.db.models.fields.hybrid_cloud_foreign_key +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. 
So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0792_add_unique_index_apiauthorization"), + ("workflow_engine", "0013_related_name_conditions_on_dcg"), + ] + + operations = [ + migrations.AddField( + model_name="detector", + name="config", + field=models.JSONField(db_default={}), + ), + migrations.AddField( + model_name="detector", + name="created_by_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=True, null=True, on_delete="SET_NULL" + ), + ), + migrations.AddField( + model_name="detector", + name="description", + field=models.TextField(null=True), + ), + migrations.AddField( + model_name="detector", + name="enabled", + field=models.BooleanField(db_default=True), + ), + migrations.AddField( + model_name="detector", + name="project", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.project" + ), + ), + migrations.AddField( + model_name="workflow", + name="config", + field=models.JSONField(db_default={}), + ), + migrations.AddField( + model_name="workflow", + name="created_by_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=True, null=True, on_delete="SET_NULL" + ), + ), + migrations.AddField( + model_name="workflow", + name="enabled", + field=models.BooleanField(db_default=True), + ), + migrations.AddField( + model_name="workflow", + name="environment", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.environment" + ), + ), + migrations.AddField( + model_name="workflow", + name="owner_team", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, on_delete=django.db.models.deletion.SET_NULL, to="sentry.team" + ), + ), + migrations.AddField( + model_name="workflow", + name="owner_user_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=True, null=True, on_delete="SET_NULL" + ), + ), + migrations.AlterField( + model_name="action", + name="required", + field=models.BooleanField(default=False, null=True), + ), + migrations.AlterField( + model_name="detector", + name="organization", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.organization" + ), + ), + migrations.AlterField( + model_name="workflow", + name="when_condition_group", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="workflow_engine.dataconditiongroup", + ), + ), + ] diff --git a/src/sentry/workflow_engine/models/action.py b/src/sentry/workflow_engine/models/action.py index 
c291e0b0aea7d6..6dd13d6c491870 100644 --- a/src/sentry/workflow_engine/models/action.py +++ b/src/sentry/workflow_engine/models/action.py @@ -24,17 +24,13 @@ class Type(models.TextChoices): Notification = "SendNotificationAction" TriggerWorkflow = "TriggerWorkflowAction" - """ - Required actions cannot be disabled by the user, and will not be displayed in the UI. - These actions will be used internally, to trigger other aspects of the system. - For example, creating a new issue in the Issue Platform or a detector emitting an event. - """ - required = models.BooleanField(default=False) - # The type field is used to denote the type of action we want to trigger type = models.TextField(choices=Type.choices) data = models.JSONField(default=dict) + # TODO - finish removing this field + required = models.BooleanField(default=False, null=True) + # LEGACY: The integration_id is used to map the integration_id found in the AlertRuleTriggerAction # This allows us to map the way we're saving the notification channels to the action. integration_id = HybridCloudForeignKey( diff --git a/src/sentry/workflow_engine/models/detector.py b/src/sentry/workflow_engine/models/detector.py index 8a17ee38e8ea56..6a0ffeadc5f8f6 100644 --- a/src/sentry/workflow_engine/models/detector.py +++ b/src/sentry/workflow_engine/models/detector.py @@ -4,15 +4,19 @@ import logging from typing import TYPE_CHECKING, Any +from django.conf import settings from django.db import models from django.db.models import UniqueConstraint from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model +from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.issues import grouptype from sentry.issues.grouptype import GroupType from sentry.models.owner_base import OwnerModel +from .json_config import JSONConfigBase + if TYPE_CHECKING: from sentry.workflow_engine.processors.detector import DetectorHandler @@ -20,10 +24,13 @@ @region_silo_model -class Detector(DefaultFieldsModel, OwnerModel): +class Detector(DefaultFieldsModel, OwnerModel, JSONConfigBase): __relocation_scope__ = RelocationScope.Organization - organization = FlexibleForeignKey("sentry.Organization") + # TODO - Finish removing this field + organization = FlexibleForeignKey("sentry.Organization", on_delete=models.CASCADE, null=True) + + project = FlexibleForeignKey("sentry.Project", on_delete=models.CASCADE, null=True) name = models.CharField(max_length=200) # The data sources that the detector is watching @@ -31,7 +38,12 @@ class Detector(DefaultFieldsModel, OwnerModel): "workflow_engine.DataSource", through="workflow_engine.DataSourceDetector" ) - # The conditions that must be met for the detector to be considered 'active' + # If the detector is not enabled, it will not be evaluated. 
This is how we "snooze" a detector + enabled = models.BooleanField(db_default=True) + + # Optionally set a description of the detector, this will be used in notifications + description = models.TextField(null=True) + # This will emit an event for the workflow to process workflow_condition_group = FlexibleForeignKey( "workflow_engine.DataConditionGroup", @@ -40,8 +52,18 @@ class Detector(DefaultFieldsModel, OwnerModel): unique=True, on_delete=models.SET_NULL, ) + + # The type of detector that is being used, this is used to determine the class + # to load for the detector type = models.CharField(max_length=200) + # The user that created the detector + created_by_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL") + + @property + def CONFIG_SCHEMA(self) -> dict[str, Any]: + raise NotImplementedError('Subclasses must define a "CONFIG_SCHEMA" attribute') + class Meta(OwnerModel.Meta): constraints = OwnerModel.Meta.constraints + [ UniqueConstraint( @@ -50,11 +72,6 @@ class Meta(OwnerModel.Meta): ) ] - @property - def project_id(self): - # XXX: Temporary property until we add `project_id` to the model. - return 1 - @property def group_type(self) -> builtins.type[GroupType] | None: return grouptype.registry.get_by_slug(self.type) diff --git a/src/sentry/workflow_engine/models/json_config.py b/src/sentry/workflow_engine/models/json_config.py new file mode 100644 index 00000000000000..1b353ccf18cbcb --- /dev/null +++ b/src/sentry/workflow_engine/models/json_config.py @@ -0,0 +1,22 @@ +from abc import abstractproperty +from typing import Any + +from django.db import models +from jsonschema import ValidationError, validate + + +class JSONConfigBase(models.Model): + config = models.JSONField(db_default={}) + + @abstractproperty + def CONFIG_SCHEMA(self) -> dict[str, Any]: + pass + + def validate_config(self) -> None: + try: + validate(self.config, self.CONFIG_SCHEMA) + except ValidationError as e: + raise ValidationError(f"Invalid config: {e.message}") + + class Meta: + abstract = True diff --git a/src/sentry/workflow_engine/models/workflow.py b/src/sentry/workflow_engine/models/workflow.py index 4cb8fde6721cf4..d2f6e5fd5f0e01 100644 --- a/src/sentry/workflow_engine/models/workflow.py +++ b/src/sentry/workflow_engine/models/workflow.py @@ -1,13 +1,18 @@ +from typing import Any + +from django.conf import settings from django.db import models from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model, sane_repr +from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey +from sentry.models.owner_base import OwnerModel -from .data_condition_group import DataConditionGroup +from .json_config import JSONConfigBase @region_silo_model -class Workflow(DefaultFieldsModel): +class Workflow(DefaultFieldsModel, OwnerModel, JSONConfigBase): """ A workflow is a way to execute actions in a specified order. Workflows are initiated after detectors have been processed, driven by changes to their state. @@ -17,8 +22,19 @@ class Workflow(DefaultFieldsModel): name = models.CharField(max_length=200) organization = FlexibleForeignKey("sentry.Organization") + # If the workflow is not enabled, it will not be evaluated / invoke actions. 
This is how we "snooze" a workflow + enabled = models.BooleanField(db_default=True) + # Required as the 'when' condition for the workflow, this evalutes states emitted from the detectors - when_condition_group = FlexibleForeignKey(DataConditionGroup, blank=True, null=True) + when_condition_group = FlexibleForeignKey("workflow_engine.DataConditionGroup", null=True) + + environment = FlexibleForeignKey("sentry.Environment", null=True) + + created_by_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL") + + @property + def CONFIG_SCHEMA(self) -> dict[str, Any]: + raise NotImplementedError('Subclasses must define a "CONFIG_SCHEMA" attribute') __repr__ = sane_repr("name", "organization_id") diff --git a/static/app/actionCreators/dashboards.tsx b/static/app/actionCreators/dashboards.tsx index 091381e7ac1b12..79fd4f6630e3f7 100644 --- a/static/app/actionCreators/dashboards.tsx +++ b/static/app/actionCreators/dashboards.tsx @@ -220,7 +220,7 @@ export function validateWidgetRequest( export function updateDashboardPermissions( api: Client, orgId: string, - dashboard: DashboardDetails + dashboard: DashboardDetails | DashboardListItem ): Promise { const {permissions} = dashboard; const data = { diff --git a/static/app/actionCreators/events.tsx b/static/app/actionCreators/events.tsx index 07c6961908909f..c9c53dbec35230 100644 --- a/static/app/actionCreators/events.tsx +++ b/static/app/actionCreators/events.tsx @@ -175,6 +175,7 @@ export type EventQuery = { referrer?: string; sort?: string | string[]; team?: string | string[]; + useRpc?: '1'; }; export type TagSegment = { diff --git a/static/app/actionCreators/prompts.tsx b/static/app/actionCreators/prompts.tsx index 2bd8afb7960044..51a16f12459a65 100644 --- a/static/app/actionCreators/prompts.tsx +++ b/static/app/actionCreators/prompts.tsx @@ -152,16 +152,15 @@ export function usePrompt({ const prompt = usePromptsCheck({feature, organization, projectId}, options); const queryClient = useQueryClient(); - const isPromptDismissed = - prompt.isSuccess && prompt.data.data - ? promptIsDismissed( - { - dismissedTime: prompt.data.data.dismissed_ts, - snoozedTime: prompt.data.data.snoozed_ts, - }, - daysToSnooze - ) - : undefined; + const isPromptDismissed = prompt.isSuccess + ? 
promptIsDismissed( + { + dismissedTime: prompt.data?.data?.dismissed_ts, + snoozedTime: prompt.data?.data?.snoozed_ts, + }, + daysToSnooze + ) + : undefined; const dismissPrompt = useCallback(() => { if (!organization) { diff --git a/static/app/components/activity/note/inputWithStorage.tsx b/static/app/components/activity/note/inputWithStorage.tsx index b828e14b03ec8e..0cc58f82c18293 100644 --- a/static/app/components/activity/note/inputWithStorage.tsx +++ b/static/app/components/activity/note/inputWithStorage.tsx @@ -6,7 +6,7 @@ import {NoteInput} from 'sentry/components/activity/note/input'; import type {MentionChangeEvent} from 'sentry/components/activity/note/types'; import type {NoteType} from 'sentry/types/alerts'; import localStorage from 'sentry/utils/localStorage'; -import {StreamlinedNoteInput} from 'sentry/views/issueDetails/streamline/note'; +import {StreamlinedNoteInput} from 'sentry/views/issueDetails/streamline/sidebar/note'; import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; type InputProps = React.ComponentProps; diff --git a/static/app/components/assistant/getGuidesContent.tsx b/static/app/components/assistant/getGuidesContent.tsx index 63d10fb873058c..a49fadbc6f25d0 100644 --- a/static/app/components/assistant/getGuidesContent.tsx +++ b/static/app/components/assistant/getGuidesContent.tsx @@ -359,7 +359,7 @@ function getDemoModeGuides(): GuidesContent { requiredTargets: ['release_version'], steps: [ { - title: t('Release-specfic trends'), + title: t('Release-specific trends'), target: 'release_version', description: t( `Select the latest release to review new and regressed issues, and business critical metrics like crash rate, and user adoption.` diff --git a/static/app/components/avatar/avatarList.tsx b/static/app/components/avatar/avatarList.tsx index 18f121231154fb..df9932016668b3 100644 --- a/static/app/components/avatar/avatarList.tsx +++ b/static/app/components/avatar/avatarList.tsx @@ -1,11 +1,12 @@ import {forwardRef} from 'react'; -import {css} from '@emotion/react'; +import {css, type Theme} from '@emotion/react'; import styled from '@emotion/styled'; import TeamAvatar from 'sentry/components/avatar/teamAvatar'; import UserAvatar from 'sentry/components/avatar/userAvatar'; import {Tooltip} from 'sentry/components/tooltip'; import {space} from 'sentry/styles/space'; +import type {Actor} from 'sentry/types/core'; import type {Team} from 'sentry/types/organization'; import type {AvatarUser} from 'sentry/types/user'; import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; @@ -21,7 +22,7 @@ type Props = { teams?: Team[]; tooltipOptions?: UserAvatarProps['tooltipOptions']; typeAvatars?: string; - users?: AvatarUser[]; + users?: Array; }; const CollapsedAvatars = forwardRef(function CollapsedAvatars( @@ -144,7 +145,7 @@ export const AvatarListWrapper = styled('div')` flex-direction: row-reverse; `; -const AvatarStyle = p => css` +const AvatarStyle = (p: {theme: Theme}) => css` border: 2px solid ${p.theme.background}; margin-left: -8px; cursor: default; diff --git a/static/app/components/avatar/baseAvatar.tsx b/static/app/components/avatar/baseAvatar.tsx index 0b73327b1a4fd0..22d015700cb4b0 100644 --- a/static/app/components/avatar/baseAvatar.tsx +++ b/static/app/components/avatar/baseAvatar.tsx @@ -130,6 +130,7 @@ function BaseAvatar({ suggested={!!suggested} style={{...sizeStyle, ...style}} title={title} + hasTooltip={hasTooltip} {...props} > {hasError ? 
backup : imageAvatar} @@ -150,6 +151,7 @@ export {BaseAvatar, type BaseAvatarProps}; // Note: Avatar will not always be a child of a flex layout, but this seems like a // sensible default. const StyledBaseAvatar = styled('span')<{ + hasTooltip: boolean; round: boolean; suggested: boolean; }>` @@ -157,6 +159,9 @@ const StyledBaseAvatar = styled('span')<{ border-radius: ${p => (p.round ? '50%' : '3px')}; border: ${p => (p.suggested ? `1px dashed ${p.theme.subText}` : 'none')}; background-color: ${p => (p.suggested ? p.theme.background : 'none')}; + :hover { + pointer-events: ${p => (p.hasTooltip ? 'none' : 'auto')}; + } `; const ImageAvatar = styled('img')` diff --git a/static/app/components/charts/eventsRequest.tsx b/static/app/components/charts/eventsRequest.tsx index 581123181cad71..cf68fce0dfd752 100644 --- a/static/app/components/charts/eventsRequest.tsx +++ b/static/app/components/charts/eventsRequest.tsx @@ -458,6 +458,9 @@ class EventsRequest extends PureComponent ({ name: timestamp * 1000, value: countsForTimestamp.reduce((acc, {count}) => acc + count, 0) * scale, + ...(countsForTimestamp[0]?.confidence + ? {confidence: countsForTimestamp[0].confidence} + : {}), })), }, ]; diff --git a/static/app/components/events/aiSuggestedSolution/banner.tsx b/static/app/components/events/aiSuggestedSolution/banner.tsx deleted file mode 100644 index c1b2818e29ea46..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/banner.tsx +++ /dev/null @@ -1,157 +0,0 @@ -import styled from '@emotion/styled'; - -import bannerBackground from 'sentry-images/spot/ai-suggestion-banner-background.svg'; -import bannerSentaur from 'sentry-images/spot/ai-suggestion-banner-sentaur.svg'; -import bannerStars from 'sentry-images/spot/ai-suggestion-banner-stars.svg'; - -import {Button} from 'sentry/components/button'; -import ExternalLink from 'sentry/components/links/externalLink'; -import Panel from 'sentry/components/panels/panel'; -import PanelBody from 'sentry/components/panels/panelBody'; -import QuestionTooltip from 'sentry/components/questionTooltip'; -import {t, tct} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; -import TextBlock from 'sentry/views/settings/components/text/textBlock'; - -import {ExperimentalFeatureBadge} from './experimentalFeatureBadge'; - -type Props = { - onViewSuggestion: () => void; -}; - -export function Banner({onViewSuggestion}: Props) { - return ( - - -
- - {t('AI Solutions')} - <MoreInfoTooltip - isHoverable - size="sm" - title={tct( - 'This is an OpenAI generated solution that suggests a fix for this issue. Be aware that this may not be accurate. [learnMore:Learn more]', - { - learnMore: ( - <ExternalLink href="https://docs.sentry.io/product/issues/issue-details/ai-suggested-solution/" /> - ), - } - )} - /> - <ExperimentalFeatureBadge /> - - - {t('You might get lucky, but again, maybe not\u2026')} - -
- - - - - - {t('View Suggestion')} - - - -
- ); -} - -const Wrapper = styled(Panel)` - margin-bottom: 0; - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - height: 80px; - } -`; - -const Body = styled(PanelBody)` - display: flex; - align-items: center; - flex-wrap: wrap; - gap: ${space(1)}; - - > *:first-child { - flex: 1; - } - - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - display: grid; - grid-template-columns: 42% 1fr; - } -`; - -const Title = styled('div')` - font-size: ${p => p.theme.fontSizeSmall}; - text-transform: uppercase; - color: ${p => p.theme.gray300}; - display: flex; - align-items: center; - /* to be consistent with the feature badge size */ - height: ${space(2)}; - line-height: ${space(2)}; - white-space: nowrap; -`; - -const Description = styled(TextBlock)` - margin: ${space(1)} 0 0 0; -`; - -const Action = styled('div')` - display: flex; - justify-content: flex-end; - align-items: center; -`; - -const Sentaur = styled('img')` - display: none; - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - display: block; - height: 8.563rem; - position: absolute; - bottom: 0; - right: 6.608rem; - object-fit: cover; - z-index: 1; - pointer-events: none; - } -`; - -const Background = styled('img')` - display: none; - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - display: block; - position: absolute; - top: 0; - right: 0; - object-fit: cover; - max-width: 100%; - height: 100%; - border-radius: ${p => p.theme.panelBorderRadius}; - } -`; - -const Stars = styled('img')` - display: none; - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - display: block; - height: 8.563rem; - position: absolute; - right: -1rem; - bottom: -0.125rem; - object-fit: cover; - /* workaround to remove a extra svg on the bottom right */ - border-radius: ${p => p.theme.panelBorderRadius}; - } -`; - -const ViewSuggestionButton = styled(Button)` - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - position: absolute; - right: 1rem; - top: 1.5rem; - } -`; - -const MoreInfoTooltip = styled(QuestionTooltip)` - margin-left: ${space(0.5)}; -`; diff --git a/static/app/components/events/aiSuggestedSolution/experimentalFeatureBadge.tsx b/static/app/components/events/aiSuggestedSolution/experimentalFeatureBadge.tsx deleted file mode 100644 index 02833aeef3d376..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/experimentalFeatureBadge.tsx +++ /dev/null @@ -1,12 +0,0 @@ -import styled from '@emotion/styled'; - -import FeatureBadge from 'sentry/components/badge/featureBadge'; -import {space} from 'sentry/styles/space'; - -export function ExperimentalFeatureBadge() { - return ; -} - -const CenteredFeatureBadge = styled(FeatureBadge)` - height: ${space(2)}; -`; diff --git a/static/app/components/events/aiSuggestedSolution/index.tsx b/static/app/components/events/aiSuggestedSolution/index.tsx deleted file mode 100644 index a50c084eae7794..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/index.tsx +++ /dev/null @@ -1,53 +0,0 @@ -import {useState} from 'react'; - -import type {Event} from 'sentry/types/event'; -import type {Project} from 'sentry/types/project'; -import {trackAnalytics} from 'sentry/utils/analytics'; -import {getAnalyticsDataForEvent} from 'sentry/utils/events'; -import useOrganization from 'sentry/utils/useOrganization'; - -import {Banner} from './banner'; -import {Suggestion} from './suggestion'; - -type Props = { - event: Event; - projectSlug: Project['slug']; -}; - -export function AiSuggestedSolution({projectSlug, event}: Props) { - const organization 
= useOrganization(); - - const [openSuggestion, setOpenSuggestion] = useState(false); - - return ( -
- {!openSuggestion ? ( - { - trackAnalytics('ai_suggested_solution.view_suggestion_button_clicked', { - organization, - project_id: event.projectID, - group_id: event.groupID, - ...getAnalyticsDataForEvent(event), - }); - setOpenSuggestion(true); - }} - /> - ) : ( - { - trackAnalytics('ai_suggested_solution.hide_suggestion_button_clicked', { - organization, - project_id: event.projectID, - group_id: event.groupID, - ...getAnalyticsDataForEvent(event), - }); - setOpenSuggestion(false); - }} - /> - )} -
- ); -} diff --git a/static/app/components/events/aiSuggestedSolution/suggestion.tsx b/static/app/components/events/aiSuggestedSolution/suggestion.tsx deleted file mode 100644 index 66d8bf583cdd07..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/suggestion.tsx +++ /dev/null @@ -1,320 +0,0 @@ -import {useCallback, useState} from 'react'; -import styled from '@emotion/styled'; - -import {addSuccessMessage} from 'sentry/actionCreators/indicator'; -import {Button, LinkButton} from 'sentry/components/button'; -import ButtonBar from 'sentry/components/buttonBar'; -import EmptyMessage from 'sentry/components/emptyMessage'; -import LoadingError from 'sentry/components/loadingError'; -import Panel from 'sentry/components/panels/panel'; -import PanelBody from 'sentry/components/panels/panelBody'; -import PanelFooter from 'sentry/components/panels/panelFooter'; -import PanelHeader from 'sentry/components/panels/panelHeader'; -import {IconFile, IconFlag, IconHappy, IconMeh, IconSad} from 'sentry/icons'; -import {t} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; -import type {Event} from 'sentry/types/event'; -import type {Project} from 'sentry/types/project'; -import {trackAnalytics} from 'sentry/utils/analytics'; -import {getAnalyticsDataForEvent} from 'sentry/utils/events'; -import {isActiveSuperuser} from 'sentry/utils/isActiveSuperuser'; -import {limitedMarked} from 'sentry/utils/marked'; -import {useApiQuery} from 'sentry/utils/queryClient'; -import {useIsSentryEmployee} from 'sentry/utils/useIsSentryEmployee'; -import useOrganization from 'sentry/utils/useOrganization'; - -import {ExperimentalFeatureBadge} from './experimentalFeatureBadge'; -import {SuggestionLoaderMessage} from './suggestionLoaderMessage'; -import {useOpenAISuggestionLocalStorage} from './useOpenAISuggestionLocalStorage'; - -type Props = { - event: Event; - onHideSuggestion: () => void; - projectSlug: Project['slug']; -}; - -function ErrorDescription({ - restriction, - organizationSlug, - onRefetch, - onSetIndividualConsent, - onHideSuggestion, -}: { - onHideSuggestion: () => void; - onRefetch: () => void; - onSetIndividualConsent: (consent: boolean) => void; - organizationSlug: string; - restriction?: 'subprocessor' | 'individual_consent'; -}) { - if (restriction === 'subprocessor') { - return ( - } - title={t('OpenAI Subprocessor Acknowledgment')} - description={t( - 'In order to use this feature, your organization needs to accept the OpenAI Subprocessor Acknowledgment.' - )} - action={ - - - - {t('Accept in Settings')} - - - } - /> - ); - } - - if (restriction === 'individual_consent') { - const activeSuperUser = isActiveSuperuser(); - return ( - } - title={t('We need your consent')} - description={t( - 'By using this feature, you agree that OpenAI is a subprocessor and may process the data that you’ve chosen to submit. Sentry makes no guarantees as to the accuracy of the feature’s AI-generated recommendations.' 
- )} - action={ - - - - - } - /> - ); - } - - return ; -} - -export function Suggestion({onHideSuggestion, projectSlug, event}: Props) { - const organization = useOrganization(); - const [suggestedSolutionLocalConfig, setSuggestedSolutionLocalConfig] = - useOpenAISuggestionLocalStorage(); - const [piiCertified, setPiiCertified] = useState(false); - const [feedbackProvided, setFeedbackProvided] = useState(false); - const isSentryEmployee = useIsSentryEmployee(); - - const { - data, - isPending: dataIsLoading, - isError: dataIsError, - refetch: dataRefetch, - error, - } = useApiQuery<{suggestion: string}>( - [ - `/projects/${organization.slug}/${projectSlug}/events/${event.eventID}/ai-fix-suggest/`, - { - query: { - consent: suggestedSolutionLocalConfig.individualConsent ? 'yes' : undefined, - pii_certified: isSentryEmployee ? (piiCertified ? 'yes' : 'no') : undefined, - }, - }, - ], - { - enabled: isSentryEmployee ? (piiCertified ? true : false) : true, - staleTime: Infinity, - retry: false, - } - ); - - const handleFeedbackClick = useCallback(() => { - addSuccessMessage('Thank you for your feedback!'); - setFeedbackProvided(true); - }, []); - - if (isSentryEmployee && !piiCertified) { - return ( - } - title={t('PII Certification Required')} - description={t( - 'Before using this feature, please confirm that there is no personally identifiable information in this event.' - )} - action={ - - - - } - /> - ); - } - - return ( - -
- - {t('AI Solution')} - <ExperimentalFeatureBadge /> - - -
- - {dataIsLoading ? ( - -
- - - ) : dataIsError ? ( - - setSuggestedSolutionLocalConfig({individualConsent: true}) - } - restriction={error?.responseJSON?.restriction as any} - onHideSuggestion={onHideSuggestion} - /> - ) : ( - - )} - - {!dataIsLoading && !dataIsError && !feedbackProvided && ( - - - {t('Was this helpful?')} - - - - - - - - )} - - ); -} - -const Header = styled(PanelHeader)` - background: transparent; - padding: ${space(1)} ${space(2)}; - align-items: center; - color: ${p => p.theme.gray300}; -`; - -const Feedback = styled('div')` - padding: ${space(1)} ${space(2)}; - display: grid; - grid-template-columns: 1fr; - align-items: center; - text-align: left; - gap: ${space(1)}; - font-size: ${p => p.theme.fontSizeSmall}; - @media (min-width: ${p => p.theme.breakpoints.small}) { - grid-template-columns: 1fr max-content; - text-align: right; - gap: ${space(2)}; - } -`; - -const SuggestionLoadingError = styled(LoadingError)` - margin-bottom: 0; - border: none; - /* This is just to be consitent with other */ - /* padding-top and padding-bottom we are using in the empty state component */ - padding-top: ${space(4)}; - padding-bottom: ${space(4)}; -`; - -const LoaderWrapper = styled('div')` - padding: ${space(4)} 0; - text-align: center; - gap: ${space(2)}; - display: flex; - flex-direction: column; -`; - -const Content = styled('div')` - padding: ${space(2)}; - /* hack until we update backend to send us other heading */ - h4 { - font-size: ${p => p.theme.fontSizeExtraLarge}; - margin-bottom: ${space(1)}; - } -`; - -const Title = styled('div')` - /* to be consistent with the feature badge size */ - height: ${space(2)}; - line-height: ${space(2)}; - display: flex; - align-items: center; -`; diff --git a/static/app/components/events/aiSuggestedSolution/suggestionLoaderMessage.tsx b/static/app/components/events/aiSuggestedSolution/suggestionLoaderMessage.tsx deleted file mode 100644 index aa63118f819bb7..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/suggestionLoaderMessage.tsx +++ /dev/null @@ -1,55 +0,0 @@ -import {useEffect, useState} from 'react'; -import styled from '@emotion/styled'; -import shuffle from 'lodash/shuffle'; - -import {t} from 'sentry/locale'; - -const LOADING_MESSAGES = [ - t('Heating up them GPUs'), - t('Engineering a prompt'), - t('Demonstrating value'), - t('Moving the needle'), - t('Preventing prompt injection attacks'), - t('Remove traces of depression from answers'), - t('Reticulating splines or whatever'), - t('Loading marketing material'), - t('Wiping node_modules'), - t('Installing dependencies'), - t('Searching StackOverflow'), - t('Googling for solutions'), - t('Running spell checker'), - t('Searching for the perfect emoji'), - t('Adding trace amounts of human touch'), - t("Don't be like Sydney, don't be like Sydney"), - t('Initiating quantum leap'), - t('Charging flux capacitors'), - t('Summoning a demon'), -]; - -export function SuggestionLoaderMessage() { - const [messages] = useState(() => shuffle(LOADING_MESSAGES)); - const [messageIndex, setMessageIndex] = useState(0); - - useEffect(() => { - const id = setInterval( - () => { - if (messageIndex < messages.length - 1) { - setMessageIndex(messageIndex + 1); - } - }, - Math.random() * 700 + 800 - ); - return () => clearInterval(id); - }); - - return ( -
- {`${messages[messageIndex]}\u2026`} -
- ); -} - -const Message = styled('div')` - color: ${p => p.theme.gray300}; - font-size: ${p => p.theme.fontSizeLarge}; -`; diff --git a/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx b/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx deleted file mode 100644 index 96b7bc66486db7..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx +++ /dev/null @@ -1,32 +0,0 @@ -import {useCallback} from 'react'; - -import {useLocalStorageState} from 'sentry/utils/useLocalStorageState'; -import {useUser} from 'sentry/utils/useUser'; - -type LocalState = { - individualConsent: boolean; -}; - -export function useOpenAISuggestionLocalStorage(): [ - LocalState, - (newState: Partial) => void, -] { - const user = useUser(); - - const [localStorageState, setLocalStorageState] = useLocalStorageState( - `open-ai-suggestion:${user.id}`, - { - // agree forward data to OpenAI - individualConsent: false, - } - ); - - const setSuggestedSolutionLocalConfig = useCallback( - (newState: Partial) => { - setLocalStorageState({...localStorageState, ...newState}); - }, - [localStorageState, setLocalStorageState] - ); - - return [localStorageState, setSuggestedSolutionLocalConfig]; -} diff --git a/static/app/components/events/autofix/autofixMessageBox.spec.tsx b/static/app/components/events/autofix/autofixMessageBox.spec.tsx index a03d7921b430aa..1954cd41683551 100644 --- a/static/app/components/events/autofix/autofixMessageBox.spec.tsx +++ b/static/app/components/events/autofix/autofixMessageBox.spec.tsx @@ -12,7 +12,7 @@ import { import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator'; import AutofixMessageBox from 'sentry/components/events/autofix/autofixMessageBox'; -import {AutofixStepType} from 'sentry/components/events/autofix/types'; +import {AutofixStatus, AutofixStepType} from 'sentry/components/events/autofix/types'; jest.mock('sentry/actionCreators/indicator'); @@ -41,7 +41,7 @@ describe('AutofixMessageBox', () => { ...changesStepProps, step: AutofixStepFixture({ type: AutofixStepType.CHANGES, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, changes: [AutofixCodebaseChangeData()], }), }; @@ -50,7 +50,7 @@ describe('AutofixMessageBox', () => { ...changesStepProps, step: AutofixStepFixture({ type: AutofixStepType.CHANGES, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, changes: [ AutofixCodebaseChangeData({ repo_name: 'example/repo1', @@ -206,7 +206,7 @@ describe('AutofixMessageBox', () => { it('shows "Create PR" button when "Approve changes" is selected', async () => { MockApiClient.addMockResponse({ - url: '/issues/123/autofix/setup/', + url: '/issues/123/autofix/setup/?check_write_access=true', method: 'GET', body: { genAIConsent: {ok: true}, @@ -229,7 +229,7 @@ describe('AutofixMessageBox', () => { it('shows "Create PRs" button with correct text for multiple changes', async () => { MockApiClient.addMockResponse({ - url: '/issues/123/autofix/setup/', + url: '/issues/123/autofix/setup/?check_write_access=true', method: 'GET', body: { genAIConsent: {ok: true}, @@ -285,7 +285,7 @@ describe('AutofixMessageBox', () => { it('shows "Create PRs" button that opens setup modal when setup is incomplete', async () => { MockApiClient.addMockResponse({ - url: '/issues/123/autofix/setup/', + url: '/issues/123/autofix/setup/?check_write_access=true', method: 'GET', body: { genAIConsent: {ok: true}, @@ -297,6 +297,14 @@ describe('AutofixMessageBox', () => { }, 
}, }); + MockApiClient.addMockResponse({ + url: '/issues/123/autofix/setup/', + method: 'GET', + body: { + genAIConsent: {ok: true}, + integration: {ok: true}, + }, + }); render(); diff --git a/static/app/components/events/autofix/autofixMessageBox.tsx b/static/app/components/events/autofix/autofixMessageBox.tsx index 5754566451d2e3..ba4a55a2190ad9 100644 --- a/static/app/components/events/autofix/autofixMessageBox.tsx +++ b/static/app/components/events/autofix/autofixMessageBox.tsx @@ -8,6 +8,7 @@ import {Button, LinkButton} from 'sentry/components/button'; import {AutofixSetupWriteAccessModal} from 'sentry/components/events/autofix/autofixSetupWriteAccessModal'; import { type AutofixCodebaseChange, + AutofixStatus, type AutofixStep, AutofixStepType, } from 'sentry/components/events/autofix/types'; @@ -125,7 +126,7 @@ function CreatePRsButton({ ...data, autofix: { ...data.autofix, - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, }, }; } @@ -161,7 +162,7 @@ function SetupAndCreatePRsButton({ changes: AutofixCodebaseChange[]; groupId: string; }) { - const {data: setupData} = useAutofixSetup({groupId}); + const {data: setupData} = useAutofixSetup({groupId, checkWriteAccess: true}); if ( !changes.every( @@ -213,15 +214,15 @@ function StepIcon({step}: {step: AutofixStep}) { } switch (step.status) { - case 'WAITING_FOR_USER_RESPONSE': + case AutofixStatus.WAITING_FOR_USER_RESPONSE: return ; - case 'PROCESSING': + case AutofixStatus.PROCESSING: return ; - case 'CANCELLED': + case AutofixStatus.CANCELLED: return ; - case 'ERROR': + case AutofixStatus.ERROR: return ; - case 'COMPLETED': + case AutofixStatus.COMPLETED: return ; default: return null; @@ -266,12 +267,12 @@ function AutofixMessageBox({ const changes = isChangesStep && step?.type === AutofixStepType.CHANGES ? 
step.changes : []; const prsMade = - step?.status === 'COMPLETED' && + step?.status === AutofixStatus.COMPLETED && changes.length >= 1 && changes.every(change => change.pull_request); const isDisabled = - step?.status === 'ERROR' || + step?.status === AutofixStatus.ERROR || (step?.type === AutofixStepType.ROOT_CAUSE_ANALYSIS && step.causes?.length === 0); useEffect(() => { diff --git a/static/app/components/events/autofix/autofixRootCause.tsx b/static/app/components/events/autofix/autofixRootCause.tsx index 637ff995070ec4..dc33825bce9c70 100644 --- a/static/app/components/events/autofix/autofixRootCause.tsx +++ b/static/app/components/events/autofix/autofixRootCause.tsx @@ -15,6 +15,7 @@ import { type AutofixRootCauseCodeContext, type AutofixRootCauseData, type AutofixRootCauseSelection, + AutofixStatus, AutofixStepType, type CodeSnippetContext, } from 'sentry/components/events/autofix/types'; @@ -101,7 +102,7 @@ export function useSelectCause({groupId, runId}: {groupId: string; runId: string ...data, autofix: { ...data.autofix, - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, steps: data.autofix.steps?.map(step => { if (step.type !== AutofixStepType.ROOT_CAUSE_ANALYSIS) { return step; diff --git a/static/app/components/events/autofix/autofixSetupModal.tsx b/static/app/components/events/autofix/autofixSetupModal.tsx index eec56087c94106..50bfcd8719ee90 100644 --- a/static/app/components/events/autofix/autofixSetupModal.tsx +++ b/static/app/components/events/autofix/autofixSetupModal.tsx @@ -199,7 +199,7 @@ export function AutofixSetupContent({ organization, setup_gen_ai_consent: data.genAIConsent.ok, setup_integration: data.integration.ok, - setup_write_integration: data.githubWriteIntegration.ok, + setup_write_integration: data.githubWriteIntegration?.ok, }); }, [data, groupId, organization, projectId]); diff --git a/static/app/components/events/autofix/autofixSetupWriteAccessModal.spec.tsx b/static/app/components/events/autofix/autofixSetupWriteAccessModal.spec.tsx index 81d33952ab557f..f4833a8e39212f 100644 --- a/static/app/components/events/autofix/autofixSetupWriteAccessModal.spec.tsx +++ b/static/app/components/events/autofix/autofixSetupWriteAccessModal.spec.tsx @@ -6,7 +6,7 @@ import {AutofixSetupWriteAccessModal} from 'sentry/components/events/autofix/aut describe('AutofixSetupWriteAccessModal', function () { it('displays help text when repos are not all installed', async function () { MockApiClient.addMockResponse({ - url: '/issues/1/autofix/setup/', + url: '/issues/1/autofix/setup/?check_write_access=true', body: { genAIConsent: {ok: false}, integration: {ok: true}, @@ -56,7 +56,7 @@ describe('AutofixSetupWriteAccessModal', function () { it('displays success text when installed repos for github app text', async function () { MockApiClient.addMockResponse({ - url: '/issues/1/autofix/setup/', + url: '/issues/1/autofix/setup/?check_write_access=true', body: { genAIConsent: {ok: false}, integration: {ok: true}, diff --git a/static/app/components/events/autofix/autofixSetupWriteAccessModal.tsx b/static/app/components/events/autofix/autofixSetupWriteAccessModal.tsx index 697475c35e8704..e6138096a5a090 100644 --- a/static/app/components/events/autofix/autofixSetupWriteAccessModal.tsx +++ b/static/app/components/events/autofix/autofixSetupWriteAccessModal.tsx @@ -17,13 +17,13 @@ interface AutofixSetupWriteAccessModalProps extends ModalRenderProps { function Content({groupId, closeModal}: {closeModal: () => void; groupId: string}) { const {canCreatePullRequests, data} = 
useAutofixSetup( - {groupId}, + {groupId, checkWriteAccess: true}, {refetchOnWindowFocus: true} // We want to check each time the user comes back to the tab ); const sortedRepos = useMemo( () => - data?.githubWriteIntegration.repos.toSorted((a, b) => { + data?.githubWriteIntegration?.repos.toSorted((a, b) => { if (a.ok === b.ok) { return `${a.owner}/${a.name}`.localeCompare(`${b.owner}/${b.name}`); } @@ -93,7 +93,7 @@ export function AutofixSetupWriteAccessModal({ groupId, closeModal, }: AutofixSetupWriteAccessModalProps) { - const {canCreatePullRequests} = useAutofixSetup({groupId}); + const {canCreatePullRequests} = useAutofixSetup({groupId, checkWriteAccess: true}); return ( diff --git a/static/app/components/events/autofix/autofixSteps.spec.tsx b/static/app/components/events/autofix/autofixSteps.spec.tsx index 0d168fbf7fbb5f..18b0067e8007a5 100644 --- a/static/app/components/events/autofix/autofixSteps.spec.tsx +++ b/static/app/components/events/autofix/autofixSteps.spec.tsx @@ -7,7 +7,11 @@ import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrar import {addSuccessMessage} from 'sentry/actionCreators/indicator'; import {AutofixSteps} from 'sentry/components/events/autofix/autofixSteps'; -import {type AutofixStep, AutofixStepType} from 'sentry/components/events/autofix/types'; +import { + AutofixStatus, + type AutofixStep, + AutofixStepType, +} from 'sentry/components/events/autofix/types'; jest.mock('sentry/actionCreators/indicator'); @@ -24,14 +28,14 @@ describe('AutofixSteps', () => { AutofixStepFixture({ id: '1', type: AutofixStepType.DEFAULT, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, insights: [], progress: [], }), AutofixStepFixture({ id: '2', type: AutofixStepType.ROOT_CAUSE_ANALYSIS, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, causes: [ { id: 'cause1', @@ -47,7 +51,7 @@ describe('AutofixSteps', () => { repositories: [], created_at: '2023-01-01T00:00:00Z', run_id: '1', - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, }), groupId: 'group1', runId: 'run1', @@ -126,7 +130,7 @@ describe('AutofixSteps', () => { AutofixStepFixture({ id: '3', type: AutofixStepType.DEFAULT, - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, progress: [ AutofixProgressItemFixture({ message: 'Log message', @@ -172,7 +176,7 @@ describe('AutofixSteps', () => { AutofixStepFixture({ id: '1', type: AutofixStepType.DEFAULT, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, insights: [], progress: [], index: 0, @@ -180,7 +184,7 @@ describe('AutofixSteps', () => { AutofixStepFixture({ id: '2', type: AutofixStepType.CHANGES, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, progress: [], changes: [changeData], }), diff --git a/static/app/components/events/autofix/types.ts b/static/app/components/events/autofix/types.ts index 0191485b1177a0..cb55d823059cdb 100644 --- a/static/app/components/events/autofix/types.ts +++ b/static/app/components/events/autofix/types.ts @@ -28,6 +28,15 @@ export enum AutofixCodebaseIndexingStatus { ERRORED = 'errored', } +export enum AutofixStatus { + COMPLETED = 'COMPLETED', + ERROR = 'ERROR', + PROCESSING = 'PROCESSING', + NEED_MORE_INFORMATION = 'NEED_MORE_INFORMATION', + CANCELLED = 'CANCELLED', + WAITING_FOR_USER_RESPONSE = 'WAITING_FOR_USER_RESPONSE', +} + export type AutofixPullRequestDetails = { pr_number: number; pr_url: string; @@ -49,13 +58,7 @@ export type AutofixData = { created_at: string; repositories: AutofixRepository[]; run_id: string; - status: - | 'PENDING' - | 'PROCESSING' - | 
'COMPLETED' - | 'NOFIX' - | 'ERROR' - | 'NEED_MORE_INFORMATION'; + status: AutofixStatus; actor_ids?: number[]; codebase_indexing?: { status: 'COMPLETED'; @@ -80,13 +83,7 @@ interface BaseStep { id: string; index: number; progress: AutofixProgressItem[]; - status: - | 'PENDING' - | 'PROCESSING' - | 'COMPLETED' - | 'ERROR' - | 'CANCELLED' - | 'WAITING_FOR_USER_RESPONSE'; + status: AutofixStatus; title: string; type: AutofixStepType; completedMessage?: string; diff --git a/static/app/components/events/autofix/useAutofix.tsx b/static/app/components/events/autofix/useAutofix.tsx index fd4c793a8b8c1c..1af55d87943a48 100644 --- a/static/app/components/events/autofix/useAutofix.tsx +++ b/static/app/components/events/autofix/useAutofix.tsx @@ -2,6 +2,7 @@ import {useCallback, useState} from 'react'; import { type AutofixData, + AutofixStatus, AutofixStepType, type GroupWithAutofix, } from 'sentry/components/events/autofix/types'; @@ -26,14 +27,14 @@ export const makeAutofixQueryKey = (groupId: string): ApiQueryKey => [ const makeInitialAutofixData = (): AutofixResponse => ({ autofix: { - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, run_id: '', steps: [ { type: AutofixStepType.DEFAULT, id: '1', index: 0, - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, title: 'Starting Autofix...', insights: [], progress: [], @@ -48,13 +49,13 @@ const makeErrorAutofixData = (errorMessage: string): AutofixResponse => { const data = makeInitialAutofixData(); if (data.autofix) { - data.autofix.status = 'ERROR'; + data.autofix.status = AutofixStatus.ERROR; data.autofix.steps = [ { type: AutofixStepType.DEFAULT, id: '1', index: 0, - status: 'ERROR', + status: AutofixStatus.ERROR, title: 'Something went wrong', completedMessage: errorMessage, insights: [], @@ -66,7 +67,12 @@ const makeErrorAutofixData = (errorMessage: string): AutofixResponse => { return data; }; -const isPolling = (autofixData?: AutofixData | null) => autofixData?.status !== 'PENDING'; +/** Will not poll when the autofix is in an error state or has completed */ +const isPolling = (autofixData?: AutofixData | null) => + !autofixData || + ![AutofixStatus.ERROR, AutofixStatus.COMPLETED, AutofixStatus.CANCELLED].includes( + autofixData.status + ); export const useAutofixData = ({groupId}: {groupId: string}) => { const {data} = useApiQuery(makeAutofixQueryKey(groupId), { diff --git a/static/app/components/events/autofix/useAutofixSetup.tsx b/static/app/components/events/autofix/useAutofixSetup.tsx index 23dc3204677301..7abf06860111b8 100644 --- a/static/app/components/events/autofix/useAutofixSetup.tsx +++ b/static/app/components/events/autofix/useAutofixSetup.tsx @@ -14,39 +14,44 @@ export type AutofixSetupResponse = { genAIConsent: { ok: boolean; }; - githubWriteIntegration: { - ok: boolean; - repos: AutofixSetupRepoDefinition[]; - }; integration: { ok: boolean; reason: string | null; }; - subprocessorConsent: { + githubWriteIntegration?: { ok: boolean; - }; + repos: AutofixSetupRepoDefinition[]; + } | null; }; -export function makeAutofixSetupQueryKey(groupId: string): ApiQueryKey { - return [`/issues/${groupId}/autofix/setup/`]; +export function makeAutofixSetupQueryKey( + groupId: string, + checkWriteAccess?: boolean +): ApiQueryKey { + return [ + `/issues/${groupId}/autofix/setup/${checkWriteAccess ? 
'?check_write_access=true' : ''}`, + ]; } export function useAutofixSetup( - {groupId}: {groupId: string}, + {groupId, checkWriteAccess}: {groupId: string; checkWriteAccess?: boolean}, options: Omit, 'staleTime'> = {} ) { - const queryData = useApiQuery(makeAutofixSetupQueryKey(groupId), { - enabled: Boolean(groupId), - staleTime: 0, - retry: false, - ...options, - }); + const queryData = useApiQuery( + makeAutofixSetupQueryKey(groupId, checkWriteAccess), + { + enabled: Boolean(groupId), + staleTime: 0, + retry: false, + ...options, + } + ); return { ...queryData, canStartAutofix: Boolean( queryData.data?.integration.ok && queryData.data?.genAIConsent.ok ), - canCreatePullRequests: Boolean(queryData.data?.githubWriteIntegration.ok), + canCreatePullRequests: Boolean(queryData.data?.githubWriteIntegration?.ok), }; } diff --git a/static/app/components/events/eventTags/eventTagsTree.spec.tsx b/static/app/components/events/eventTags/eventTagsTree.spec.tsx index 40f24bd15f60d6..65c65347f5b26d 100644 --- a/static/app/components/events/eventTags/eventTagsTree.spec.tsx +++ b/static/app/components/events/eventTags/eventTagsTree.spec.tsx @@ -22,9 +22,9 @@ describe('EventTagsTree', function () { {key: 'tree', value: 'maple'}, {key: 'tree.branch', value: 'jagged'}, {key: 'tree.branch.leaf', value: 'red'}, - {key: 'favourite.colour', value: 'teal'}, - {key: 'favourite.animal', value: 'dog'}, - {key: 'favourite.game', value: 'everdell'}, + {key: 'favorite.color', value: 'teal'}, + {key: 'favorite.animal', value: 'dog'}, + {key: 'favorite.game', value: 'everdell'}, {key: 'magic.is', value: 'real'}, {key: 'magic.is.probably.not', value: 'spells'}, {key: 'double..dot', value: 'works'}, @@ -36,13 +36,13 @@ describe('EventTagsTree', function () { 'app.version', 'tree.branch', 'tree.branch.leaf', - 'favourite.colour', - 'favourite.animal', - 'favourite.game', + 'favorite.color', + 'favorite.animal', + 'favorite.game', 'magic.is', 'magic.is.probably.not', ]; - const emptyBranchTags = ['favourite', 'magic', 'probably']; + const emptyBranchTags = ['favorite', 'magic', 'probably']; const treeBranchTags = [ 'app_start_time', 'app_name', @@ -50,7 +50,7 @@ describe('EventTagsTree', function () { 'tree', 'branch', 'leaf', - 'colour', + 'color', 'animal', 'game', 'is', diff --git a/static/app/components/events/featureFlags/eventFeatureFlagList.tsx b/static/app/components/events/featureFlags/eventFeatureFlagList.tsx index a3de99c68903f2..0584780f69bd72 100644 --- a/static/app/components/events/featureFlags/eventFeatureFlagList.tsx +++ b/static/app/components/events/featureFlags/eventFeatureFlagList.tsx @@ -28,10 +28,10 @@ import {trackAnalytics} from 'sentry/utils/analytics'; import {useFeedbackForm} from 'sentry/utils/useFeedbackForm'; import useOrganization from 'sentry/utils/useOrganization'; import {SectionKey} from 'sentry/views/issueDetails/streamline/context'; +import {useIssueDetailsEventView} from 'sentry/views/issueDetails/streamline/hooks/useIssueDetailsDiscoverQuery'; +import {useOrganizationFlagLog} from 'sentry/views/issueDetails/streamline/hooks/useOrganizationFlagLog'; +import useSuspectFlags from 'sentry/views/issueDetails/streamline/hooks/useSuspectFlags'; import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection'; -import {useIssueDetailsEventView} from 'sentry/views/issueDetails/streamline/useIssueDetailsDiscoverQuery'; -import {useOrganizationFlagLog} from 'sentry/views/issueDetails/streamline/useOrganizationFlagLog'; -import useSuspectFlags from 
'sentry/views/issueDetails/streamline/useSuspectFlags'; export function EventFeatureFlagList({ event, @@ -76,7 +76,7 @@ export function EventFeatureFlagList({ statsPeriod: eventView.statsPeriod, }, }); - const {activateSidebar} = useFeatureFlagOnboarding(); + const {activateSidebarSkipConfigure} = useFeatureFlagOnboarding(); const { suspectFlags, @@ -185,7 +185,9 @@ export function EventFeatureFlagList({ diff --git a/static/app/components/events/featureFlags/featureFlagOnboardingLayout.tsx b/static/app/components/events/featureFlags/featureFlagOnboardingLayout.tsx index 9cf39210d9a430..2ec92f0865c5e7 100644 --- a/static/app/components/events/featureFlags/featureFlagOnboardingLayout.tsx +++ b/static/app/components/events/featureFlags/featureFlagOnboardingLayout.tsx @@ -1,6 +1,9 @@ -import {useMemo} from 'react'; +import {useMemo, useState} from 'react'; import styled from '@emotion/styled'; +import Alert from 'sentry/components/alert'; +import {Button} from 'sentry/components/button'; +import {Flex} from 'sentry/components/container/flex'; import OnboardingIntegrationSection from 'sentry/components/events/featureFlags/onboardingIntegrationSection'; import {AuthTokenGeneratorProvider} from 'sentry/components/onboarding/gettingStartedDoc/authTokenGenerator'; import type {OnboardingLayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/onboardingLayout'; @@ -8,14 +11,17 @@ import {Step} from 'sentry/components/onboarding/gettingStartedDoc/step'; import type {DocsParams} from 'sentry/components/onboarding/gettingStartedDoc/types'; import {useSourcePackageRegistries} from 'sentry/components/onboarding/gettingStartedDoc/useSourcePackageRegistries'; import {useUrlPlatformOptions} from 'sentry/components/onboarding/platformOptionsControl'; +import {t} from 'sentry/locale'; import ConfigStore from 'sentry/stores/configStore'; import {useLegacyStore} from 'sentry/stores/useLegacyStore'; +import {space} from 'sentry/styles/space'; import useApi from 'sentry/utils/useApi'; import useOrganization from 'sentry/utils/useOrganization'; interface FeatureFlagOnboardingLayoutProps extends OnboardingLayoutProps { integration?: string; provider?: string; + skipConfig?: boolean; } export function FeatureFlagOnboardingLayout({ @@ -28,6 +34,7 @@ export function FeatureFlagOnboardingLayout({ configType = 'onboarding', integration = '', provider = '', + skipConfig, }: FeatureFlagOnboardingLayoutProps) { const api = useApi(); const organization = useOrganization(); @@ -35,6 +42,7 @@ export function FeatureFlagOnboardingLayout({ useSourcePackageRegistries(organization); const selectedOptions = useUrlPlatformOptions(docsConfig.platformOptions); const {isSelfHosted, urlPrefix} = useLegacyStore(ConfigStore); + const [skipSteps, setSkipSteps] = useState(skipConfig); const {steps} = useMemo(() => { const doc = docsConfig[configType] ?? docsConfig.onboarding; @@ -87,11 +95,23 @@ export function FeatureFlagOnboardingLayout({ return ( - - {steps.map(step => ( - - ))} - + {!skipConfig ? null : ( + + + {t('Feature flag integration detected. 
Please follow the remaining steps.')} + + + + )} + {!skipSteps && ( + + {steps.map(step => ( + + ))} + + )} diff --git a/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx b/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx index df447c8b35055d..f2fd0c5b3ce92d 100644 --- a/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx +++ b/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx @@ -7,7 +7,11 @@ import HighlightTopRightPattern from 'sentry-images/pattern/highlight-top-right. import {LinkButton} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import {FeatureFlagOnboardingLayout} from 'sentry/components/events/featureFlags/featureFlagOnboardingLayout'; -import {ProviderOptions} from 'sentry/components/events/featureFlags/utils'; +import {FLAG_HASH_SKIP_CONFIG} from 'sentry/components/events/featureFlags/useFeatureFlagOnboarding'; +import { + IntegrationOptions, + ProviderOptions, +} from 'sentry/components/events/featureFlags/utils'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; @@ -146,6 +150,13 @@ function OnboardingContent({ hasDocs: boolean; }) { const organization = useOrganization(); + + // useMemo is needed to remember the original hash + // in case window.location.hash disappears + const ORIGINAL_HASH = useMemo(() => { + return window.location.hash; + }, []); + const skipConfig = ORIGINAL_HASH === FLAG_HASH_SKIP_CONFIG; const openFeatureProviders = [ProviderOptions.LAUNCHDARKLY]; const sdkProviders = [ProviderOptions.LAUNCHDARKLY]; @@ -244,7 +255,10 @@ function OnboardingContent({ ], ]} value={setupMode()} - onChange={setSetupMode} + onChange={value => { + setSetupMode(value); + window.location.hash = ORIGINAL_HASH; + }} /> ); @@ -311,6 +325,7 @@ function OnboardingContent({ {radioButtons} {t('Integrate Feature Flag Service')} - {t('Signing Secret')} +
+ {tct( + "Create a webhook integration with your [link:feature flag service]. When you do so, you'll need to enter a URL, which you can find below.", + {link: } + )} +
+ {t('Webhook URL')} + + {`https://sentry.io/api/0/organizations/${organization.slug}/flags/hooks/provider/${provider.toLowerCase()}/`} + +
+ +
+ {t( + "During the process of creating a webhook integration, you'll be given the option to sign the webhook. This is an auto-generated secret code that Sentry requires to verify requests from your feature flag service. Paste the secret below." + )} +
+ {t('Secret')} setSecret(e.target.value)} /> {tokenSaved ? ( @@ -85,19 +108,6 @@ export default function OnboardingIntegrationSection({ ) : null}
- - {t( - 'Once the token is saved, go back to your feature flag service and create a webhook integration using the URL provided below.' - )} - {t('Webhook URL')} - - {`https://sentry.io/api/0/organizations/${organization.slug}/flags/hooks/provider/${provider.toLowerCase()}/`} - -
); diff --git a/static/app/components/events/featureFlags/useFeatureFlagOnboarding.tsx b/static/app/components/events/featureFlags/useFeatureFlagOnboarding.tsx index 5140f4bec3cd7b..abfe3af57313bf 100644 --- a/static/app/components/events/featureFlags/useFeatureFlagOnboarding.tsx +++ b/static/app/components/events/featureFlags/useFeatureFlagOnboarding.tsx @@ -4,14 +4,19 @@ import {SidebarPanelKey} from 'sentry/components/sidebar/types'; import SidebarPanelStore from 'sentry/stores/sidebarPanelStore'; import {trackAnalytics} from 'sentry/utils/analytics'; import {useLocation} from 'sentry/utils/useLocation'; +import {useNavigate} from 'sentry/utils/useNavigate'; import useOrganization from 'sentry/utils/useOrganization'; +const FLAG_HASH = '#flag-sidequest'; +export const FLAG_HASH_SKIP_CONFIG = '#flag-sidequest-skip'; + export function useFeatureFlagOnboarding() { const location = useLocation(); const organization = useOrganization(); + const navigate = useNavigate(); useEffect(() => { - if (location.hash === '#flag-sidequest') { + if (location.hash === FLAG_HASH || location.hash === FLAG_HASH_SKIP_CONFIG) { SidebarPanelStore.activatePanel(SidebarPanelKey.FEATURE_FLAG_ONBOARDING); trackAnalytics('flags.view-setup-sidebar', { organization, @@ -19,11 +24,30 @@ export function useFeatureFlagOnboarding() { } }, [location.hash, organization]); - const activateSidebar = useCallback((event: {preventDefault: () => void}) => { + const activateSidebar = useCallback((event: React.MouseEvent) => { event.preventDefault(); - window.location.hash = 'flag-sidequest'; + window.location.hash = FLAG_HASH; SidebarPanelStore.activatePanel(SidebarPanelKey.FEATURE_FLAG_ONBOARDING); }, []); - return {activateSidebar}; + // if we detect that event.contexts.flags is set, use this hook instead + // to skip the configure step + const activateSidebarSkipConfigure = useCallback( + (event: React.MouseEvent, projectId: string) => { + event.preventDefault(); + navigate( + { + pathname: location.pathname, + // Adding the projectId will help pick the correct project in onboarding + query: {...location.query, project: projectId}, + hash: FLAG_HASH_SKIP_CONFIG, + }, + {replace: true} + ); + SidebarPanelStore.activatePanel(SidebarPanelKey.FEATURE_FLAG_ONBOARDING); + }, + [navigate, location.pathname, location.query] + ); + + return {activateSidebar, activateSidebarSkipConfigure}; } diff --git a/static/app/components/events/featureFlags/utils.tsx b/static/app/components/events/featureFlags/utils.tsx index 1e3e4de8b344b7..59b1814ecc91fc 100644 --- a/static/app/components/events/featureFlags/utils.tsx +++ b/static/app/components/events/featureFlags/utils.tsx @@ -115,31 +115,14 @@ export const sortedFlags = ({ export enum ProviderOptions { LAUNCHDARKLY = 'LaunchDarkly', - OPENFEATURE = 'OpenFeature', } -type Labels = { - pythonIntegration: string; // what's in the integrations array - pythonModule: string; // what's imported from sentry_sdk.integrations -}; +export enum IntegrationOptions { + LAUNCHDARKLY = 'LaunchDarkly', + OPENFEATURE = 'OpenFeature', +} -// to organize this better, we could do something like -// [ProviderOptions.LAUNCHDARKLY]: { -// python: { -// module: 'launchdarkly', -// integration 'LaunchDarklyIntegration', -// }, -// javascript: { -// ... 
-// } -// } -export const PROVIDER_OPTION_TO_LABELS: Record = { - [ProviderOptions.LAUNCHDARKLY]: { - pythonModule: 'launchdarkly', - pythonIntegration: 'LaunchDarklyIntegration', - }, - [ProviderOptions.OPENFEATURE]: { - pythonModule: 'OpenFeature', - pythonIntegration: 'OpenFeatureIntegration', - }, +export const PROVIDER_OPTION_TO_URLS: Record = { + [ProviderOptions.LAUNCHDARKLY]: + 'https://app.launchdarkly.com/settings/integrations/webhooks/new?q=Webhooks', }; diff --git a/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx b/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx index b11d431282e3da..366b87c6933a73 100644 --- a/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx +++ b/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx @@ -6,6 +6,7 @@ import {render, screen} from 'sentry-test/reactTestingLibrary'; import {EntryType} from 'sentry/types/event'; import type {TraceEventResponse} from 'sentry/views/issueDetails/traceTimeline/useTraceTimelineEvents'; +import {makeTraceError} from 'sentry/views/performance/newTraceDetails/traceModels/traceTreeTestUtils'; import {EventTraceView} from './eventTraceView'; @@ -29,6 +30,7 @@ describe('EventTraceView', () => { trace_id: traceId, }, }, + eventID: 'issue-5', }); const issuePlatformBody: TraceEventResponse = { data: [], @@ -51,16 +53,18 @@ describe('EventTraceView', () => { performance_issues: 1, projects: 1, transactions: 1, - transaction_child_count_map: [{'transaction.id': '1', count: 1}], + transaction_child_count_map: new Array(20) + .fill(0) + .map((_, i) => [{'transaction.id': i.toString(), count: 1}]), }, }); MockApiClient.addMockResponse({ url: `/organizations/${organization.slug}/events-trace/${traceId}/`, body: { - transactions: [ + transactions: new Array(20).fill(0).map((_, i) => [ { project_slug: project.slug, - event_id: '1', + event_id: i.toString(), children: [], sdk_name: '', start_timestamp: 0, @@ -69,10 +73,10 @@ describe('EventTraceView', () => { 'transaction.op': '', 'transaction.status': '', performance_issues: [], - errors: [], + errors: i === 5 ? 
[makeTraceError({event_id: 'issue-5'})] : [], }, - ], - orphan_errors: [], + ]), + orphan_errors: [makeTraceError()], }, }); MockApiClient.addMockResponse({ @@ -92,7 +96,9 @@ describe('EventTraceView', () => { render(); expect(await screen.findByText('Trace')).toBeInTheDocument(); - expect(await screen.findByText('transaction')).toBeInTheDocument(); + expect( + await screen.findByText('MaybeEncodingError: Error sending result') + ).toBeInTheDocument(); }); it('does not render the trace preview if it has no transactions', async () => { diff --git a/static/app/components/events/interfaces/performance/eventTraceView.tsx b/static/app/components/events/interfaces/performance/eventTraceView.tsx index 6b8266734716d6..c7833d2920e533 100644 --- a/static/app/components/events/interfaces/performance/eventTraceView.tsx +++ b/static/app/components/events/interfaces/performance/eventTraceView.tsx @@ -1,28 +1,33 @@ -import {Fragment, useMemo} from 'react'; +import {useMemo} from 'react'; import styled from '@emotion/styled'; +import {LinkButton} from 'sentry/components/button'; import ErrorBoundary from 'sentry/components/errorBoundary'; -import {ALL_ACCESS_PROJECTS} from 'sentry/constants/pageFilters'; +import {generateTraceTarget} from 'sentry/components/quickTrace/utils'; +import {IconOpen} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {Event} from 'sentry/types/event'; import {type Group, IssueCategory} from 'sentry/types/group'; import type {Organization} from 'sentry/types/organization'; -import EventView from 'sentry/utils/discover/eventView'; import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; import {SectionKey} from 'sentry/views/issueDetails/streamline/context'; import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection'; import {TraceDataSection} from 'sentry/views/issueDetails/traceDataSection'; -import {TraceViewWaterfall} from 'sentry/views/performance/newTraceDetails'; +import {IssuesTraceWaterfall} from 'sentry/views/performance/newTraceDetails/issuesTraceWaterfall'; +import {useIssuesTraceTree} from 'sentry/views/performance/newTraceDetails/traceApi/useIssuesTraceTree'; import {useTrace} from 'sentry/views/performance/newTraceDetails/traceApi/useTrace'; import {useTraceMeta} from 'sentry/views/performance/newTraceDetails/traceApi/useTraceMeta'; import {useTraceRootEvent} from 'sentry/views/performance/newTraceDetails/traceApi/useTraceRootEvent'; -import {useTraceTree} from 'sentry/views/performance/newTraceDetails/traceApi/useTraceTree'; +import {TraceViewSources} from 'sentry/views/performance/newTraceDetails/traceHeader/breadcrumbs'; import { loadTraceViewPreferences, type TracePreferencesState, } from 'sentry/views/performance/newTraceDetails/traceState/tracePreferences'; import {TraceStateProvider} from 'sentry/views/performance/newTraceDetails/traceState/traceStateProvider'; +import {useTraceEventView} from 'sentry/views/performance/newTraceDetails/useTraceEventView'; +import {useTraceQueryParams} from 'sentry/views/performance/newTraceDetails/useTraceQueryParams'; const DEFAULT_ISSUE_DETAILS_TRACE_VIEW_PREFERENCES: TracePreferencesState = { drawer: { @@ -48,25 +53,26 @@ const DEFAULT_ISSUE_DETAILS_TRACE_VIEW_PREFERENCES: TracePreferencesState = { interface EventTraceViewInnerProps { event: Event; organization: Organization; + traceId: string; } -function EventTraceViewInner({event, organization}: EventTraceViewInnerProps) { - // 
Assuming profile exists, should be checked in the parent component - const traceId = event.contexts.trace!.trace_id!; - const location = useLocation(); +function EventTraceViewInner({event, organization, traceId}: EventTraceViewInnerProps) { + const timestamp = new Date(event.dateReceived).getTime() / 1e3; const trace = useTrace({ - traceSlug: traceId ? traceId : undefined, + timestamp, + traceSlug: traceId, limit: 10000, }); - const meta = useTraceMeta([{traceSlug: traceId, timestamp: undefined}]); - const tree = useTraceTree({trace, meta, replay: null}); + const params = useTraceQueryParams({ + timestamp, + }); + const meta = useTraceMeta([{traceSlug: traceId, timestamp}]); + const tree = useIssuesTraceTree({trace, meta, replay: null}); - const hasNoTransactions = meta.data?.transactions === 0; - const shouldLoadTraceRoot = !trace.isPending && trace.data && !hasNoTransactions; + const shouldLoadTraceRoot = !trace.isPending && trace.data; const rootEvent = useTraceRootEvent(shouldLoadTraceRoot ? trace.data! : null); - const preferences = useMemo( () => loadTraceViewPreferences('issue-details-trace-view-preferences') || @@ -74,64 +80,88 @@ function EventTraceViewInner({event, organization}: EventTraceViewInnerProps) { [] ); - const traceEventView = useMemo(() => { - const statsPeriod = location.query.statsPeriod as string | undefined; - // Not currently expecting start/end timestamps to be applied to this view - - return EventView.fromSavedQuery({ - id: undefined, - name: `Events with Trace ID ${traceId}`, - fields: ['title', 'event.type', 'project', 'timestamp'], - orderby: '-timestamp', - query: `trace:${traceId}`, - projects: [ALL_ACCESS_PROJECTS], - version: 2, - range: statsPeriod, - }); - }, [location.query.statsPeriod, traceId]); - - const scrollToNode = useMemo(() => { - const firstTransactionEventId = trace.data?.transactions[0]?.event_id; - return {eventId: firstTransactionEventId}; - }, [trace.data]); - - if (trace.isPending || rootEvent.isPending || !rootEvent.data || hasNoTransactions) { + const traceEventView = useTraceEventView(traceId, params); + + if (!traceId) { return null; } return ( - - - - - - - + + + + + + + ); +} + +function IssuesTraceOverlay({event}: {event: Event}) { + const location = useLocation(); + const organization = useOrganization(); + + const traceTarget = generateTraceTarget( + event, + organization, + { + ...location, + query: { + ...location.query, + groupId: event.groupID, + }, + }, + TraceViewSources.ISSUE_DETAILS + ); + + return ( + + } + aria-label={t('Open Trace')} + to={traceTarget} + /> + ); } -interface EventTraceViewProps extends EventTraceViewInnerProps { +const IssuesTraceContainer = styled('div')` + position: relative; +`; + +const IssuesTraceOverlayContainer = styled('div')` + position: absolute; + inset: 0; + z-index: 10; + + a { + position: absolute; + top: ${space(1)}; + right: ${space(1)}; + } +`; + +interface EventTraceViewProps extends Omit { group: Group; } export function EventTraceView({group, event, organization}: EventTraceViewProps) { - // Check trace id exists - if (!event || !event.contexts.trace?.trace_id) { + const traceId = event.contexts.trace?.trace_id; + if (!traceId) { return null; } @@ -149,27 +179,15 @@ export function EventTraceView({group, event, organization}: EventTraceViewProps return ( - -
- -
- {hasTracePreviewFeature && ( - - )} -
+ + {hasTracePreviewFeature && ( + + )}
); } - -const TraceContentWrapper = styled('div')` - display: flex; - flex-direction: column; - gap: ${space(1)}; -`; - -const TraceViewWaterfallWrapper = styled('div')` - display: flex; - flex-direction: column; - height: 500px; -`; diff --git a/static/app/components/events/interfaces/request/index.spec.tsx b/static/app/components/events/interfaces/request/index.spec.tsx index df27c7b78df44f..2ee61131f256e3 100644 --- a/static/app/components/events/interfaces/request/index.spec.tsx +++ b/static/app/components/events/interfaces/request/index.spec.tsx @@ -1,16 +1,22 @@ import {DataScrubbingRelayPiiConfigFixture} from 'sentry-fixture/dataScrubbingRelayPiiConfig'; import {EventFixture} from 'sentry-fixture/event'; +import {UserFixture} from 'sentry-fixture/user'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; import {textWithMarkupMatcher} from 'sentry-test/utils'; import {Request} from 'sentry/components/events/interfaces/request'; +import ConfigStore from 'sentry/stores/configStore'; import type {EntryRequest} from 'sentry/types/event'; import {EntryType} from 'sentry/types/event'; jest.unmock('prismjs'); describe('Request entry', function () { + beforeEach(() => { + ConfigStore.set('user', UserFixture()); + }); + it('display redacted data', async function () { const event = EventFixture({ entries: [ @@ -327,6 +333,38 @@ describe('Request entry', function () { ).not.toThrow(); }); + it('should remove any non-tuple values from array', function () { + const user = UserFixture(); + user.options.prefersIssueDetailsStreamlinedUI = true; + ConfigStore.set('user', user); + + const data: EntryRequest['data'] = { + apiTarget: null, + query: 'a%AFc', + data: '', + headers: [['foo', 'bar'], null], + cookies: [], + env: {}, + method: 'POST', + url: '/Home/PostIndex', + }; + const event = EventFixture({ + entries: [ + { + type: EntryType.REQUEST, + data, + }, + ], + }); + expect(() => + render(, { + organization: { + relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()), + }, + }) + ).not.toThrow(); + }); + it("should not cause an invariant violation if data.data isn't a string", function () { const data: EntryRequest['data'] = { apiTarget: null, diff --git a/static/app/components/events/interfaces/request/index.tsx b/static/app/components/events/interfaces/request/index.tsx index 16be046eaa16aa..c1417f34507a58 100644 --- a/static/app/components/events/interfaces/request/index.tsx +++ b/static/app/components/events/interfaces/request/index.tsx @@ -251,10 +251,13 @@ function RequestDataCard({ const contentItems: KeyValueDataContentProps[] = []; if (Array.isArray(data) && data.length > 0) { - data.forEach(([key, value], i: number) => { - const valueMeta = meta?.[i] ? meta[i]?.[1] : undefined; - contentItems.push({item: {key, subject: key, value}, meta: valueMeta}); - }); + data + // Remove any non-tuple values + .filter(x => Array.isArray(x)) + .forEach(([key, value], i: number) => { + const valueMeta = meta?.[i] ? 
meta[i]?.[1] : undefined; + contentItems.push({item: {key, subject: key, value}, meta: valueMeta}); + }); } else if (typeof data === 'object') { // Spread to flatten if it's a proxy Object.entries({...data}).forEach(([key, value]) => { diff --git a/static/app/components/events/interfaces/spans/spanDescendantGroupBar.tsx b/static/app/components/events/interfaces/spans/spanDescendantGroupBar.tsx index 92f7cec076a058..b4c560a86af6be 100644 --- a/static/app/components/events/interfaces/spans/spanDescendantGroupBar.tsx +++ b/static/app/components/events/interfaces/spans/spanDescendantGroupBar.tsx @@ -3,7 +3,7 @@ import countBy from 'lodash/countBy'; import type {SpanBarType} from 'sentry/components/performance/waterfall/constants'; import { - getSpanBarColours, + getSpanBarColors, ROW_HEIGHT, } from 'sentry/components/performance/waterfall/constants'; import {DurationPill, RowRectangle} from 'sentry/components/performance/waterfall/rowBar'; @@ -150,7 +150,7 @@ export function SpanDescendantGroupBar(props: SpanDescendantGroupBarProps) { return ( {props.renderGroupSpansTitle()} diff --git a/static/app/components/events/interfaces/spans/spanRectangle.tsx b/static/app/components/events/interfaces/spans/spanRectangle.tsx index a482f4b7a8de1f..b38aac9dd36e1a 100644 --- a/static/app/components/events/interfaces/spans/spanRectangle.tsx +++ b/static/app/components/events/interfaces/spans/spanRectangle.tsx @@ -1,7 +1,7 @@ import {useTheme} from '@emotion/react'; import type {SpanBarType} from 'sentry/components/performance/waterfall/constants'; -import {getSpanBarColours} from 'sentry/components/performance/waterfall/constants'; +import {getSpanBarColors} from 'sentry/components/performance/waterfall/constants'; import {RowRectangle} from 'sentry/components/performance/waterfall/rowBar'; import toPercent from 'sentry/utils/number/toPercent'; @@ -20,7 +20,7 @@ export default function SpanRectangle({ return ( h[0] === 'Accept-Encoding' && h[1].includes('gzip') ); if (compressed) { result += ' \\\n --compressed'; } - // sort headers - const headers = - data.headers?.sort(function (a, b) { - return a[0] === b[0] ? 0 : a[0] < b[0] ? -1 : 1; - }) ?? 
[]; - for (const header of headers) { result += ' \\\n -H "' + header[0] + ': ' + escapeBashString(header[1] + '') + '"'; } @@ -172,7 +172,9 @@ export function getCurlCommand(data: EntryRequest['data']) { return result; } -export function stringifyQueryList(query: string | [key: string, value: string][]) { +export function stringifyQueryList( + query: string | Array<[key: string, value: string] | null> +) { if (typeof query === 'string') { return query; } diff --git a/static/app/components/forms/fieldGroup/index.tsx b/static/app/components/forms/fieldGroup/index.tsx index 23cdfa471d1a7c..566f21f4fd8a14 100644 --- a/static/app/components/forms/fieldGroup/index.tsx +++ b/static/app/components/forms/fieldGroup/index.tsx @@ -96,7 +96,12 @@ function FieldGroup({ {helpElement && showHelpInTooltip && ( - + )} diff --git a/static/app/components/forms/fieldGroup/types.tsx b/static/app/components/forms/fieldGroup/types.tsx index dd15c4f454dba9..bb5078ec281d23 100644 --- a/static/app/components/forms/fieldGroup/types.tsx +++ b/static/app/components/forms/fieldGroup/types.tsx @@ -1,3 +1,5 @@ +import type {TooltipProps} from 'sentry/components/tooltip'; + /** * Props that control UI elements that are part of a Form Group */ @@ -91,9 +93,10 @@ export interface FieldGroupProps { */ required?: boolean; /** - * Displays the help element in the tooltip + * Displays the help element in the tooltip. Tooltip props may be passed to + * customize the help tooltip. */ - showHelpInTooltip?: boolean; + showHelpInTooltip?: boolean | Omit; /** * When stacking forms the bottom border is hidden and padding is adjusted * for form elements to be stacked on each other. diff --git a/static/app/components/gridEditable/index.tsx b/static/app/components/gridEditable/index.tsx index e301366dd19a62..e28ef18e1e8f55 100644 --- a/static/app/components/gridEditable/index.tsx +++ b/static/app/components/gridEditable/index.tsx @@ -94,6 +94,7 @@ type GridEditableProps = { ) => React.ReactNode[]; }; 'aria-label'?: string; + bodyStyle?: React.CSSProperties; emptyMessage?: React.ReactNode; error?: unknown | null; /** @@ -450,6 +451,7 @@ class GridEditable< scrollable, height, 'aria-label': ariaLabel, + bodyStyle, } = this.props; const showHeader = title || headerButtons; return ( @@ -463,7 +465,7 @@ class GridEditable< )} )} - + i.serviceType === component.sentryApp.slug ); - const displayName = component.sentryApp.name; + const appDisplayName = component.sentryApp.name; const displayIcon = ( ); if (externalIssue) { result.linkedIssues.push({ key: externalIssue.id, - displayName: `${displayName} Issue`, + displayName: externalIssue.displayName, url: externalIssue.webUrl, - title: externalIssue.displayName, + // Some display names look like PROJ#1234 + // Others look like ClickUp: Title + // Add the integration name if it's not already included + title: externalIssue.displayName.includes(appDisplayName) + ? 
externalIssue.displayName + : `${appDisplayName}: ${externalIssue.displayName}`, displayIcon, onUnlink: () => { deleteExternalIssue(api, group.id, externalIssue.id) @@ -76,10 +81,10 @@ export function useSentryAppExternalIssues({ } else { result.integrations.push({ key: component.sentryApp.slug, - displayName, + displayName: appDisplayName, displayIcon, disabled: Boolean(component.error), - disabledText: t('Unable to connect to %s', displayName), + disabledText: t('Unable to connect to %s', appDisplayName), actions: [ { id: component.sentryApp.slug, diff --git a/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.spec.tsx b/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.spec.tsx index 0c3a0755c188eb..78f8bc2774ebd8 100644 --- a/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.spec.tsx +++ b/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.spec.tsx @@ -126,14 +126,14 @@ describe('StreamlinedExternalIssueList', () => { ); expect( - await screen.findByRole('button', {name: 'Clickup Issue'}) + await screen.findByRole('button', {name: 'ClickUp: hello#1'}) ).toBeInTheDocument(); - await userEvent.hover(screen.getByRole('button', {name: 'Clickup Issue'})); + await userEvent.hover(screen.getByRole('button', {name: 'ClickUp: hello#1'})); await userEvent.click(await screen.findByRole('button', {name: 'Unlink issue'})); await waitFor(() => { expect( - screen.queryByRole('button', {name: 'Clickup Issue'}) + screen.queryByRole('button', {name: 'ClickUp: hello#1'}) ).not.toBeInTheDocument(); }); expect(unlinkMock).toHaveBeenCalledTimes(1); diff --git a/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx b/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx index 058d0d9d73059a..c06c6eefb3d379 100644 --- a/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx +++ b/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx @@ -16,7 +16,7 @@ import type {Group} from 'sentry/types/group'; import type {Project} from 'sentry/types/project'; import useOrganization from 'sentry/utils/useOrganization'; import {Divider} from 'sentry/views/issueDetails/divider'; -import {SidebarSectionTitle} from 'sentry/views/issueDetails/streamline/sidebar'; +import {SidebarSectionTitle} from 'sentry/views/issueDetails/streamline/sidebar/sidebar'; import useStreamLinedExternalIssueData from './hooks/useGroupExternalIssues'; diff --git a/static/app/components/group/groupSummary.tsx b/static/app/components/group/groupSummary.tsx index 18fb4949256ef7..6bb582c0bc77d3 100644 --- a/static/app/components/group/groupSummary.tsx +++ b/static/app/components/group/groupSummary.tsx @@ -1,14 +1,16 @@ import styled from '@emotion/styled'; -import {useAutofixSetup} from 'sentry/components/events/autofix/useAutofixSetup'; import Placeholder from 'sentry/components/placeholder'; import {IconFatal, IconFocus, IconSpan} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import {IssueCategory} from 'sentry/types/group'; +import type {Event} from 'sentry/types/event'; +import type {Group} from 'sentry/types/group'; +import type {Project} from 'sentry/types/project'; import marked from 'sentry/utils/marked'; import {type ApiQueryKey, useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; +import {useAiConfig} from 
'sentry/views/issueDetails/streamline/hooks/useAiConfig'; interface GroupSummaryData { groupId: string; @@ -18,14 +20,6 @@ interface GroupSummaryData { whatsWrong?: string | null; } -const isSummaryEnabled = ( - hasGenAIConsent: boolean, - hideAiFeatures: boolean, - groupCategory: IssueCategory -) => { - return hasGenAIConsent && !hideAiFeatures && groupCategory === IssueCategory.ERROR; -}; - export const makeGroupSummaryQueryKey = ( organizationSlug: string, groupId: string @@ -34,30 +28,26 @@ export const makeGroupSummaryQueryKey = ( {method: 'POST'}, ]; -export function useGroupSummary(groupId: string, groupCategory: IssueCategory) { +export function useGroupSummary( + group: Group, + event: Event | null | undefined, + project: Project +) { const organization = useOrganization(); - // We piggyback and use autofix's genai consent check for now. - const { - data: autofixSetupData, - isPending: isAutofixSetupLoading, - isError: isAutofixSetupError, - } = useAutofixSetup({groupId}); - const hasGenAIConsent = autofixSetupData?.genAIConsent.ok ?? false; - const hideAiFeatures = organization.hideAiFeatures; + const aiConfig = useAiConfig(group, event, project); const queryData = useApiQuery( - makeGroupSummaryQueryKey(organization.slug, groupId), + makeGroupSummaryQueryKey(organization.slug, group.id), { staleTime: Infinity, // Cache the result indefinitely as it's unlikely to change if it's already computed - enabled: isSummaryEnabled(hasGenAIConsent, hideAiFeatures, groupCategory), + enabled: aiConfig.hasSummary, } ); return { ...queryData, - isPending: isAutofixSetupLoading || queryData.isPending, - isError: queryData.isError || isAutofixSetupError, - hasGenAIConsent, + isPending: aiConfig.isAutofixSetupLoading || queryData.isPending, + isError: queryData.isError, }; } diff --git a/static/app/components/hovercard.tsx b/static/app/components/hovercard.tsx index ef82873eae8e6c..058d1161495658 100644 --- a/static/app/components/hovercard.tsx +++ b/static/app/components/hovercard.tsx @@ -177,6 +177,7 @@ const Header = styled('div')` const Body = styled('div')` padding: ${space(2)}; min-height: 30px; + word-wrap: break-word; `; const Divider = styled('div')` diff --git a/static/app/components/issueDiff/index.spec.tsx b/static/app/components/issueDiff/index.spec.tsx index 51aacbd7939ff9..ea0d60a0d7363f 100644 --- a/static/app/components/issueDiff/index.spec.tsx +++ b/static/app/components/issueDiff/index.spec.tsx @@ -15,7 +15,7 @@ describe('IssueDiff', function () { const entries123Base = Entries123Base(); const api = new MockApiClient(); const organization = OrganizationFixture(); - const project = ProjectFixture({features: ['similarity-embeddings']}); + const project = ProjectFixture(); beforeEach(function () { MockApiClient.addMockResponse({ @@ -92,6 +92,7 @@ describe('IssueDiff', function () { action: 'PUSH', key: 'default', }} + hasSimilarityEmbeddingsProjectFeature /> ); diff --git a/static/app/components/issueDiff/index.tsx b/static/app/components/issueDiff/index.tsx index 1bbb3dd50a1e64..859368cbbe45bd 100644 --- a/static/app/components/issueDiff/index.tsx +++ b/static/app/components/issueDiff/index.tsx @@ -31,6 +31,7 @@ type Props = { targetIssueId: string; baseEventId?: string; className?: string; + hasSimilarityEmbeddingsProjectFeature?: boolean; organization?: Organization; shouldBeGrouped?: string; targetEventId?: string; @@ -67,12 +68,12 @@ class IssueDiff extends Component { baseEventId, targetEventId, organization, - project, shouldBeGrouped, location, + 
hasSimilarityEmbeddingsProjectFeature, } = this.props; const hasSimilarityEmbeddingsFeature = - project.features.includes('similarity-embeddings') || + hasSimilarityEmbeddingsProjectFeature || location.query.similarityEmbeddings === '1'; // Fetch component and event data diff --git a/static/app/components/modals/diffModal.spec.tsx b/static/app/components/modals/diffModal.spec.tsx index c763244ee561e1..26417688ea4799 100644 --- a/static/app/components/modals/diffModal.spec.tsx +++ b/static/app/components/modals/diffModal.spec.tsx @@ -28,6 +28,10 @@ describe('DiffModal', function () { url: '/projects/123/project-slug/events/789/', body: [], }); + MockApiClient.addMockResponse({ + url: `/projects/org-slug/project-slug/`, + body: {features: []}, + }); const styledWrapper = styled(c => c.children); diff --git a/static/app/components/modals/diffModal.tsx b/static/app/components/modals/diffModal.tsx index e472aea915c71d..816f033f22777e 100644 --- a/static/app/components/modals/diffModal.tsx +++ b/static/app/components/modals/diffModal.tsx @@ -2,16 +2,32 @@ import {css} from '@emotion/react'; import type {ModalRenderProps} from 'sentry/actionCreators/modal'; import IssueDiff from 'sentry/components/issueDiff'; +import {useDetailedProject} from 'sentry/utils/useDetailedProject'; import useOrganization from 'sentry/utils/useOrganization'; type Props = ModalRenderProps & React.ComponentProps; function DiffModal({className, Body, CloseButton, ...props}: Props) { const organization = useOrganization(); + const {project} = props; + const {data: projectData} = useDetailedProject({ + orgSlug: organization.slug, + projectSlug: project.slug, + }); + // similarity-embeddings feature is only available on project details + const similarityEmbeddingsProjectFeature = projectData?.features.includes( + 'similarity-embeddings' + ); + return ( - + ); } diff --git a/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx b/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx index 268919cd4399a9..94543d87695743 100644 --- a/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx +++ b/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx @@ -97,6 +97,11 @@ describe('add to dashboard modal', () => { body: testDashboard, }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/releases/stats/', + body: [], + }); + eventsStatsMock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/events-stats/', body: [], diff --git a/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx b/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx index f03b399feaf527..7da42feb17ae34 100644 --- a/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx +++ b/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx @@ -306,11 +306,11 @@ function AddToDashboardModal({ widgetLegendState={widgetLegendState} onLegendSelectChanged={() => {}} legendOptions={ - organization.features.includes('dashboards-releases-on-charts') && widgetLegendState.widgetRequiresLegendUnselection(widget) ? 
{selected: unselectedReleasesForCharts} : undefined } + disableFullscreen /> diff --git a/static/app/components/modals/widgetViewerModal.spec.tsx b/static/app/components/modals/widgetViewerModal.spec.tsx index 00ec22ba6afc23..8edf9f12b1edd4 100644 --- a/static/app/components/modals/widgetViewerModal.spec.tsx +++ b/static/app/components/modals/widgetViewerModal.spec.tsx @@ -131,6 +131,11 @@ describe('Modals -> WidgetViewerModal', function () { body: [], }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/releases/stats/', + body: [], + }); + eventsMetaMock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/events-meta/', body: {count: 33323612}, diff --git a/static/app/components/modals/widgetViewerModal.tsx b/static/app/components/modals/widgetViewerModal.tsx index 2ef63c2053c062..f343822c3a8797 100644 --- a/static/app/components/modals/widgetViewerModal.tsx +++ b/static/app/components/modals/widgetViewerModal.tsx @@ -3,7 +3,6 @@ import {components} from 'react-select'; import {css} from '@emotion/react'; import styled from '@emotion/styled'; import * as Sentry from '@sentry/react'; -import type {User} from '@sentry/types'; import {truncate} from '@sentry/utils'; import type {DataZoomComponentOption} from 'echarts'; import type {Location} from 'history'; @@ -32,6 +31,7 @@ import {space} from 'sentry/styles/space'; import type {PageFilters, SelectValue} from 'sentry/types/core'; import type {Series} from 'sentry/types/echarts'; import type {Organization} from 'sentry/types/organization'; +import type {User} from 'sentry/types/user'; import {defined} from 'sentry/utils'; import {trackAnalytics} from 'sentry/utils/analytics'; import {getUtcDateString} from 'sentry/utils/dates'; @@ -1084,6 +1084,10 @@ function WidgetViewerModal(props: Props) { }); }} disabled={!hasEditAccess} + title={ + !hasEditAccess && + t('You do not have permission to edit this widget') + } > {t('Edit Widget')} diff --git a/static/app/components/nav/config.tsx b/static/app/components/nav/config.tsx index b0b058936d6881..0c128bff586ea0 100644 --- a/static/app/components/nav/config.tsx +++ b/static/app/components/nav/config.tsx @@ -1,5 +1,5 @@ import {openHelpSearchModal} from 'sentry/actionCreators/modal'; -import type {NavConfig, NavSidebarItem} from 'sentry/components/nav/utils'; +import type {NavConfig} from 'sentry/components/nav/utils'; import { IconDashboard, IconGraph, @@ -15,26 +15,23 @@ import {t} from 'sentry/locale'; import ConfigStore from 'sentry/stores/configStore'; import type {Organization} from 'sentry/types/organization'; import {getDiscoverLandingUrl} from 'sentry/utils/discover/urls'; -import {MODULE_BASE_URLS} from 'sentry/views/insights/common/utils/useModuleURL'; -import {MODULE_SIDEBAR_TITLE as MODULE_TITLE_HTTP} from 'sentry/views/insights/http/settings'; import { AI_LANDING_SUB_PATH, - AI_LANDING_TITLE, + AI_SIDEBAR_LABEL, } from 'sentry/views/insights/pages/ai/settings'; import { BACKEND_LANDING_SUB_PATH, - BACKEND_LANDING_TITLE, + BACKEND_SIDEBAR_LABEL, } from 'sentry/views/insights/pages/backend/settings'; import { FRONTEND_LANDING_SUB_PATH, - FRONTEND_LANDING_TITLE, + FRONTEND_SIDEBAR_LABEL, } from 'sentry/views/insights/pages/frontend/settings'; import { MOBILE_LANDING_SUB_PATH, - MOBILE_LANDING_TITLE, + MOBILE_SIDEBAR_LABEL, } from 'sentry/views/insights/pages/mobile/settings'; import {DOMAIN_VIEW_BASE_URL} from 'sentry/views/insights/pages/settings'; -import {INSIGHTS_BASE_URL, MODULE_TITLES} from 'sentry/views/insights/settings'; import 
{getSearchForIssueGroup, IssueGroup} from 'sentry/views/issueList/utils'; /** @@ -45,91 +42,13 @@ import {getSearchForIssueGroup, IssueGroup} from 'sentry/views/issueList/utils'; */ export function createNavConfig({organization}: {organization: Organization}): NavConfig { const prefix = `organizations/${organization.slug}`; - const insightsPrefix = `${prefix}/${INSIGHTS_BASE_URL}`; - const hasPerfDomainViews = organization.features.includes('insights-domain-view'); - - const insights: NavSidebarItem = { - label: t('Insights'), - icon: , - feature: {features: 'insights-entry-points'}, - submenu: [ - { - label: MODULE_TITLE_HTTP, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.http}/`, - }, - {label: MODULE_TITLES.db, to: `/${insightsPrefix}/${MODULE_BASE_URLS.db}/`}, - { - label: MODULE_TITLES.resource, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.resource}/`, - }, - { - label: MODULE_TITLES.app_start, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.app_start}/`, - }, - { - label: MODULE_TITLES['mobile-screens'], - to: `/${insightsPrefix}/${MODULE_BASE_URLS['mobile-screens']}/`, - feature: {features: 'insights-mobile-screens-module'}, - }, - { - label: MODULE_TITLES.vital, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.vital}/`, - }, - { - label: MODULE_TITLES.cache, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.cache}/`, - }, - { - label: MODULE_TITLES.queue, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.queue}/`, - }, - { - label: MODULE_TITLES.ai, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.ai}/`, - feature: {features: 'insights-entry-points'}, - }, - ], - }; - - const perf: NavSidebarItem = { - label: t('Perf.'), - to: '/performance/', - icon: , - feature: { - features: 'performance-view', - hookName: 'feature-disabled:performance-sidebar-item', - }, - }; - - const perfDomainViews: NavSidebarItem = { - label: t('Perf.'), - icon: , - feature: {features: ['insights-domain-view', 'performance-view']}, - submenu: [ - { - label: FRONTEND_LANDING_TITLE, - to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${FRONTEND_LANDING_SUB_PATH}/`, - }, - { - label: BACKEND_LANDING_TITLE, - to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${BACKEND_LANDING_SUB_PATH}/`, - }, - { - label: AI_LANDING_TITLE, - to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${AI_LANDING_SUB_PATH}/`, - }, - { - label: MOBILE_LANDING_TITLE, - to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${MOBILE_LANDING_SUB_PATH}/`, - }, - ], - }; return { main: [ { label: t('Issues'), icon: , + analyticsKey: 'issues', submenu: [ { label: t('All'), @@ -154,10 +73,16 @@ export function createNavConfig({organization}: {organization: Organization}): N {label: t('Feedback'), to: `/${prefix}/feedback/`}, ], }, - {label: t('Projects'), to: `/${prefix}/projects/`, icon: }, + { + label: t('Projects'), + analyticsKey: 'projects', + to: `/${prefix}/projects/`, + icon: , + }, { label: t('Explore'), icon: , + analyticsKey: 'explore', submenu: [ { label: t('Traces'), @@ -198,9 +123,43 @@ export function createNavConfig({organization}: {organization: Organization}): N {label: t('Crons'), to: `/${prefix}/crons/`}, ], }, - ...(hasPerfDomainViews ? 
[perfDomainViews, perf] : [insights, perf]), + { + label: t('Insights'), + icon: , + analyticsKey: 'insights-domains', + feature: {features: ['performance-view']}, + submenu: [ + { + label: FRONTEND_SIDEBAR_LABEL, + to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${FRONTEND_LANDING_SUB_PATH}/`, + }, + { + label: BACKEND_SIDEBAR_LABEL, + to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${BACKEND_LANDING_SUB_PATH}/`, + }, + { + label: MOBILE_SIDEBAR_LABEL, + to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${MOBILE_LANDING_SUB_PATH}/`, + }, + { + label: AI_SIDEBAR_LABEL, + to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${AI_LANDING_SUB_PATH}/`, + }, + ], + }, + { + label: t('Perf.'), + to: '/performance/', + analyticsKey: 'performance', + icon: , + feature: { + features: 'performance-view', + hookName: 'feature-disabled:performance-sidebar-item', + }, + }, { label: t('Boards'), + analyticsKey: 'customizable-dashboards', to: '/dashboards/', icon: , feature: { @@ -209,12 +168,18 @@ export function createNavConfig({organization}: {organization: Organization}): N requireAll: false, }, }, - {label: t('Alerts'), to: `/${prefix}/alerts/rules/`, icon: }, + { + label: t('Alerts'), + analyticsKey: 'alerts', + to: `/${prefix}/alerts/rules/`, + icon: , + }, ], footer: [ { label: t('Help'), icon: , + analyticsKey: 'help', dropdown: [ { key: 'search', @@ -242,6 +207,7 @@ export function createNavConfig({organization}: {organization: Organization}): N }, { label: t('Settings'), + analyticsKey: 'settings', to: `/settings/${organization.slug}/`, icon: , }, diff --git a/static/app/components/nav/index.spec.tsx b/static/app/components/nav/index.spec.tsx index 0ca7e1efb43bde..2572f9e5d50cbb 100644 --- a/static/app/components/nav/index.spec.tsx +++ b/static/app/components/nav/index.spec.tsx @@ -2,7 +2,13 @@ import {LocationFixture} from 'sentry-fixture/locationFixture'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {RouterFixture} from 'sentry-fixture/routerFixture'; -import {getAllByRole, render, screen} from 'sentry-test/reactTestingLibrary'; +import {trackAnalytics} from 'sentry/utils/analytics'; + +jest.mock('sentry/utils/analytics', () => ({ + trackAnalytics: jest.fn(), +})); + +import {getAllByRole, render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; import Nav from 'sentry/components/nav'; @@ -101,7 +107,9 @@ describe('Nav', function () { beforeEach(() => { render(